diff --git a/src/bootstrap/bin/llvm-config-wrapper.rs b/src/bootstrap/bin/llvm-config-wrapper.rs index 5e3625eb22e52..cf77af44ff606 100644 --- a/src/bootstrap/bin/llvm-config-wrapper.rs +++ b/src/bootstrap/bin/llvm-config-wrapper.rs @@ -2,8 +2,8 @@ // `src/bootstrap/native.rs` for why this is needed when compiling LLD. use std::env; -use std::process::{self, Stdio, Command}; use std::io::{self, Write}; +use std::process::{self, Command, Stdio}; fn main() { let real_llvm_config = env::var_os("LLVM_CONFIG_REAL").unwrap(); diff --git a/src/bootstrap/bin/main.rs b/src/bootstrap/bin/main.rs index 8ddce5c247db6..123899e6bbad6 100644 --- a/src/bootstrap/bin/main.rs +++ b/src/bootstrap/bin/main.rs @@ -11,7 +11,7 @@ extern crate bootstrap; use std::env; -use bootstrap::{Config, Build}; +use bootstrap::{Build, Config}; fn main() { let args = env::args().skip(1).collect::>(); diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs index a0c75cd9e9476..4f0b1420f0ac0 100644 --- a/src/bootstrap/bin/rustc.rs +++ b/src/bootstrap/bin/rustc.rs @@ -37,12 +37,15 @@ fn main() { // Dirty code for borrowing issues let mut new = None; if let Some(current_as_str) = args[i].to_str() { - if (&*args[i - 1] == "-C" && current_as_str.starts_with("metadata")) || - current_as_str.starts_with("-Cmetadata") { + if (&*args[i - 1] == "-C" && current_as_str.starts_with("metadata")) + || current_as_str.starts_with("-Cmetadata") + { new = Some(format!("{}-{}", current_as_str, s)); } } - if let Some(new) = new { args[i] = new.into(); } + if let Some(new) = new { + args[i] = new.into(); + } } } @@ -60,7 +63,8 @@ fn main() { // Detect whether or not we're a build script depending on whether --target // is passed (a bit janky...) - let target = args.windows(2) + let target = args + .windows(2) .find(|w| &*w[0] == "--target") .and_then(|w| w[1].to_str()); let version = args.iter().find(|w| &**w == "-vV"); @@ -93,8 +97,10 @@ fn main() { cmd.args(&args) .arg("--cfg") .arg(format!("stage{}", stage)) - .env(bootstrap::util::dylib_path_var(), - env::join_paths(&dylib_path).unwrap()); + .env( + bootstrap::util::dylib_path_var(), + env::join_paths(&dylib_path).unwrap(), + ); let mut maybe_crate = None; // Print backtrace in case of ICE @@ -132,9 +138,7 @@ fn main() { cmd.arg(format!("-Clinker={}", target_linker)); } - let crate_name = args.windows(2) - .find(|a| &*a[0] == "--crate-name") - .unwrap(); + let crate_name = args.windows(2).find(|a| &*a[0] == "--crate-name").unwrap(); let crate_name = &*crate_name[1]; maybe_crate = Some(crate_name); @@ -150,8 +154,7 @@ fn main() { // `compiler_builtins` are unconditionally compiled with panic=abort to // workaround undefined references to `rust_eh_unwind_resume` generated // otherwise, see issue https://github.com/rust-lang/rust/issues/43095. - if crate_name == "panic_abort" || - crate_name == "compiler_builtins" && stage != "0" { + if crate_name == "panic_abort" || crate_name == "compiler_builtins" && stage != "0" { cmd.arg("-C").arg("panic=abort"); } @@ -163,7 +166,13 @@ fn main() { cmd.arg("-Cdebuginfo=1"); } let debug_assertions = match env::var("RUSTC_DEBUG_ASSERTIONS") { - Ok(s) => if s == "true" { "y" } else { "n" }, + Ok(s) => { + if s == "true" { + "y" + } else { + "n" + } + } Err(..) 
=> "n", }; @@ -172,7 +181,8 @@ fn main() { if crate_name == "compiler_builtins" { cmd.arg("-C").arg("debug-assertions=no"); } else { - cmd.arg("-C").arg(format!("debug-assertions={}", debug_assertions)); + cmd.arg("-C") + .arg(format!("debug-assertions={}", debug_assertions)); } if let Ok(s) = env::var("RUSTC_CODEGEN_UNITS") { @@ -182,10 +192,12 @@ fn main() { // Emit save-analysis info. if env::var("RUSTC_SAVE_ANALYSIS") == Ok("api".to_string()) { cmd.arg("-Zsave-analysis"); - cmd.env("RUST_SAVE_ANALYSIS_CONFIG", - "{\"output_file\": null,\"full_docs\": false,\ - \"pub_only\": true,\"reachable_only\": false,\ - \"distro_crate\": true,\"signatures\": false,\"borrow_data\": false}"); + cmd.env( + "RUST_SAVE_ANALYSIS_CONFIG", + "{\"output_file\": null,\"full_docs\": false,\ + \"pub_only\": true,\"reachable_only\": false,\ + \"distro_crate\": true,\"signatures\": false,\"borrow_data\": false}", + ); } // Dealing with rpath here is a little special, so let's go into some @@ -216,7 +228,6 @@ fn main() { // to change a flag in a binary? if env::var("RUSTC_RPATH") == Ok("true".to_string()) { let rpath = if target.contains("apple") { - // Note that we need to take one extra step on macOS to also pass // `-Wl,-instal_name,@rpath/...` to get things to work right. To // do that we pass a weird flag to the compiler to get it to do @@ -224,9 +235,10 @@ fn main() { // flesh out rpath support more fully in the future. cmd.arg("-Z").arg("osx-rpath-install-name"); Some("-Wl,-rpath,@loader_path/../lib") - } else if !target.contains("windows") && - !target.contains("wasm32") && - !target.contains("fuchsia") { + } else if !target.contains("windows") + && !target.contains("wasm32") + && !target.contains("fuchsia") + { Some("-Wl,-rpath,$ORIGIN/../lib") } else { None @@ -246,7 +258,10 @@ fn main() { } // When running miri tests, we need to generate MIR for all libraries - if env::var("TEST_MIRI").ok().map_or(false, |val| val == "true") { + if env::var("TEST_MIRI") + .ok() + .map_or(false, |val| val == "true") + { // The flags here should be kept in sync with `add_miri_default_args` // in miri's `src/lib.rs`. 
cmd.arg("-Zalways-encode-mir"); @@ -310,7 +325,10 @@ fn main() { Ok(s) if s.success() => std::process::exit(0), e => e, }; - println!("\nDid not run successfully: {:?}\n{:?}\n-------------", e, cmd); + println!( + "\nDid not run successfully: {:?}\n{:?}\n-------------", + e, cmd + ); exec_cmd(&mut on_fail).expect("could not run the backup command"); std::process::exit(1); } @@ -324,11 +342,13 @@ fn main() { let dur = start.elapsed(); let is_test = args.iter().any(|a| a == "--test"); - eprintln!("[RUSTC-TIMING] {} test:{} {}.{:03}", - krate.to_string_lossy(), - is_test, - dur.as_secs(), - dur.subsec_nanos() / 1_000_000); + eprintln!( + "[RUSTC-TIMING] {} test:{} {}.{:03}", + krate.to_string_lossy(), + is_test, + dur.as_secs(), + dur.subsec_nanos() / 1_000_000 + ); match status.code() { Some(i) => std::process::exit(i), diff --git a/src/bootstrap/bin/rustdoc.rs b/src/bootstrap/bin/rustdoc.rs index dec74e60c71f3..a89438d98e5cb 100644 --- a/src/bootstrap/bin/rustdoc.rs +++ b/src/bootstrap/bin/rustdoc.rs @@ -7,8 +7,8 @@ extern crate bootstrap; use std::env; -use std::process::Command; use std::path::PathBuf; +use std::process::Command; fn main() { let args = env::args_os().skip(1).collect::>(); @@ -37,8 +37,10 @@ fn main() { .arg("dox") .arg("--sysroot") .arg(sysroot) - .env(bootstrap::util::dylib_path_var(), - env::join_paths(&dylib_path).unwrap()); + .env( + bootstrap::util::dylib_path_var(), + env::join_paths(&dylib_path).unwrap(), + ); // Force all crates compiled by this compiler to (a) be unstable and (b) // allow the `rustc_private` feature to link to other unstable crates @@ -47,7 +49,10 @@ fn main() { cmd.arg("-Z").arg("force-unstable-if-unmarked"); } if let Some(linker) = env::var_os("RUSTC_TARGET_LINKER") { - cmd.arg("--linker").arg(linker).arg("-Z").arg("unstable-options"); + cmd.arg("--linker") + .arg(linker) + .arg("-Z") + .arg("unstable-options"); } // Bootstrap's Cargo-command builder sets this variable to the current Rust version; let's pick @@ -55,8 +60,9 @@ fn main() { if let Some(version) = env::var_os("RUSTDOC_CRATE_VERSION") { // This "unstable-options" can be removed when `--crate-version` is stabilized cmd.arg("-Z") - .arg("unstable-options") - .arg("--crate-version").arg(version); + .arg("unstable-options") + .arg("--crate-version") + .arg(version); } if verbose > 1 { diff --git a/src/bootstrap/bin/sccache-plus-cl.rs b/src/bootstrap/bin/sccache-plus-cl.rs index f9e14d1ff6d30..2d860f8000aaf 100644 --- a/src/bootstrap/bin/sccache-plus-cl.rs +++ b/src/bootstrap/bin/sccache-plus-cl.rs @@ -10,12 +10,12 @@ fn main() { env::set_var("CXX", env::var_os("SCCACHE_CXX").unwrap()); let mut cfg = cc::Build::new(); cfg.cargo_metadata(false) - .out_dir("/") - .target(&target) - .host(&target) - .opt_level(0) - .warnings(false) - .debug(false); + .out_dir("/") + .target(&target) + .host(&target) + .opt_level(0) + .warnings(false) + .debug(false); let compiler = cfg.get_compiler(); // Invoke sccache with said compiler diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index 117ff0e721474..e641cb5b5a6ed 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -22,7 +22,7 @@ use crate::native; use crate::test; use crate::tool; use crate::util::{add_lib_path, exe, libdir}; -use crate::{Build, DocTests, Mode, GitRepo}; +use crate::{Build, DocTests, GitRepo, Mode}; pub use crate::Compiler; @@ -708,7 +708,7 @@ impl<'a> Builder<'a> { "build" => self.cargo_out(compiler, mode, target), // This is the intended out directory for crate documentation. 
- "doc" | "rustdoc" => self.crate_doc_out(target), + "doc" | "rustdoc" => self.crate_doc_out(target), _ => self.stage_out(compiler, mode), }; @@ -748,41 +748,38 @@ impl<'a> Builder<'a> { match mode { Mode::Std => { self.clear_if_dirty(&my_out, &self.rustc(compiler)); - }, + } Mode::Test => { self.clear_if_dirty(&my_out, &libstd_stamp); - }, + } Mode::Rustc => { self.clear_if_dirty(&my_out, &self.rustc(compiler)); self.clear_if_dirty(&my_out, &libstd_stamp); self.clear_if_dirty(&my_out, &libtest_stamp); - }, + } Mode::Codegen => { self.clear_if_dirty(&my_out, &librustc_stamp); - }, - Mode::ToolBootstrap => { }, + } + Mode::ToolBootstrap => {} Mode::ToolStd => { self.clear_if_dirty(&my_out, &libstd_stamp); - }, + } Mode::ToolTest => { self.clear_if_dirty(&my_out, &libstd_stamp); self.clear_if_dirty(&my_out, &libtest_stamp); - }, + } Mode::ToolRustc => { self.clear_if_dirty(&my_out, &libstd_stamp); self.clear_if_dirty(&my_out, &libtest_stamp); self.clear_if_dirty(&my_out, &librustc_stamp); - }, + } } } - cargo - .env("CARGO_TARGET_DIR", out_dir) - .arg(cmd); + cargo.env("CARGO_TARGET_DIR", out_dir).arg(cmd); if cmd != "install" { - cargo.arg("--target") - .arg(target); + cargo.arg("--target").arg(target); } else { assert_eq!(target, compiler.host); } @@ -896,7 +893,10 @@ impl<'a> Builder<'a> { cargo.env("RUSTC_ERROR_FORMAT", error_format); } if cmd != "build" && cmd != "check" && cmd != "rustc" && want_rustdoc { - cargo.env("RUSTDOC_LIBDIR", self.sysroot_libdir(compiler, self.config.build)); + cargo.env( + "RUSTDOC_LIBDIR", + self.sysroot_libdir(compiler, self.config.build), + ); } if mode.is_tool() { @@ -1114,10 +1114,12 @@ impl<'a> Builder<'a> { cargo.arg("-v"); } - match (mode, self.config.rust_codegen_units_std, self.config.rust_codegen_units) { - (Mode::Std, Some(n), _) | - (Mode::Test, Some(n), _) | - (_, _, Some(n)) => { + match ( + mode, + self.config.rust_codegen_units_std, + self.config.rust_codegen_units, + ) { + (Mode::Std, Some(n), _) | (Mode::Test, Some(n), _) | (_, _, Some(n)) => { cargo.env("RUSTC_CODEGEN_UNITS", n.to_string()); } _ => { @@ -1860,10 +1862,7 @@ mod __test { #[test] fn test_exclude() { let mut config = configure(&[], &[]); - config.exclude = vec![ - "src/test/run-pass".into(), - "src/tools/tidy".into(), - ]; + config.exclude = vec!["src/test/run-pass".into(), "src/tools/tidy".into()]; config.cmd = Subcommand::Test { paths: Vec::new(), test_args: Vec::new(), diff --git a/src/bootstrap/cache.rs b/src/bootstrap/cache.rs index ea8bc657a57aa..31cd6f9527d79 100644 --- a/src/bootstrap/cache.rs +++ b/src/bootstrap/cache.rs @@ -1,6 +1,7 @@ use std::any::{Any, TypeId}; use std::borrow::Borrow; use std::cell::RefCell; +use std::cmp::{Ord, Ordering, PartialOrd}; use std::collections::HashMap; use std::convert::AsRef; use std::ffi::OsStr; @@ -11,7 +12,6 @@ use std::mem; use std::ops::Deref; use std::path::{Path, PathBuf}; use std::sync::Mutex; -use std::cmp::{PartialOrd, Ord, Ordering}; use crate::builder::Step; @@ -45,7 +45,7 @@ impl Eq for Interned {} impl PartialEq for Interned { fn eq(&self, other: &str) -> bool { - *self == other + *self == other } } impl<'a> PartialEq<&'a str> for Interned { @@ -176,14 +176,14 @@ impl Default for TyIntern { impl TyIntern { fn intern_borrow(&mut self, item: &B) -> Interned where - B: Eq + Hash + ToOwned + ?Sized, + B: Eq + Hash + ToOwned + ?Sized, T: Borrow, { if let Some(i) = self.set.get(&item) { return *i; } let item = item.to_owned(); - let interned = Interned(self.items.len(), PhantomData::<*const T>); + let interned = 
Interned(self.items.len(), PhantomData::<*const T>); self.set.insert(item.clone(), interned); self.items.push(item); interned @@ -193,7 +193,7 @@ impl TyIntern { if let Some(i) = self.set.get(&item) { return *i; } - let interned = Interned(self.items.len(), PhantomData::<*const T>); + let interned = Interned(self.items.len(), PhantomData::<*const T>); self.set.insert(item.clone(), interned); self.items.push(item); interned @@ -233,10 +233,12 @@ lazy_static! { /// get() method. #[derive(Debug)] pub struct Cache( - RefCell, // actually a HashMap> - >> + RefCell< + HashMap< + TypeId, + Box, // actually a HashMap> + >, + >, ); impl Cache { @@ -247,21 +249,27 @@ impl Cache { pub fn put(&self, step: S, value: S::Output) { let mut cache = self.0.borrow_mut(); let type_id = TypeId::of::(); - let stepcache = cache.entry(type_id) - .or_insert_with(|| Box::new(HashMap::::new())) - .downcast_mut::>() - .expect("invalid type mapped"); - assert!(!stepcache.contains_key(&step), "processing {:?} a second time", step); + let stepcache = cache + .entry(type_id) + .or_insert_with(|| Box::new(HashMap::::new())) + .downcast_mut::>() + .expect("invalid type mapped"); + assert!( + !stepcache.contains_key(&step), + "processing {:?} a second time", + step + ); stepcache.insert(step, value); } pub fn get(&self, step: &S) -> Option { let mut cache = self.0.borrow_mut(); let type_id = TypeId::of::(); - let stepcache = cache.entry(type_id) - .or_insert_with(|| Box::new(HashMap::::new())) - .downcast_mut::>() - .expect("invalid type mapped"); + let stepcache = cache + .entry(type_id) + .or_insert_with(|| Box::new(HashMap::::new())) + .downcast_mut::>() + .expect("invalid type mapped"); stepcache.get(step).cloned() } @@ -269,7 +277,8 @@ impl Cache { pub fn all(&mut self) -> Vec<(S, S::Output)> { let cache = self.0.get_mut(); let type_id = TypeId::of::(); - let mut v = cache.remove(&type_id) + let mut v = cache + .remove(&type_id) .map(|b| b.downcast::>().expect("correct type")) .map(|m| m.into_iter().collect::>()) .unwrap_or_default(); diff --git a/src/bootstrap/cc_detect.rs b/src/bootstrap/cc_detect.rs index 37844759c7b9a..015d58d42512a 100644 --- a/src/bootstrap/cc_detect.rs +++ b/src/bootstrap/cc_detect.rs @@ -22,16 +22,16 @@ //! everything. use std::collections::HashSet; -use std::{env, iter}; use std::path::{Path, PathBuf}; use std::process::Command; +use std::{env, iter}; use build_helper::output; use cc; -use crate::{Build, GitRepo}; -use crate::config::Target; use crate::cache::Interned; +use crate::config::Target; +use crate::{Build, GitRepo}; // The `cc` crate doesn't provide a way to obtain a path to the detected archiver, // so use some simplified logic here. First we respect the environment variable `AR`, then @@ -63,14 +63,25 @@ fn cc2ar(cc: &Path, target: &str) -> Option { pub fn find(build: &mut Build) { // For all targets we're going to need a C compiler for building some shims // and such as well as for being a linker for Rust code. 
- let targets = build.targets.iter().chain(&build.hosts).cloned().chain(iter::once(build.build)) - .collect::>(); + let targets = build + .targets + .iter() + .chain(&build.hosts) + .cloned() + .chain(iter::once(build.build)) + .collect::>(); for target in targets.into_iter() { let mut cfg = cc::Build::new(); - cfg.cargo_metadata(false).opt_level(2).warnings(false).debug(false) - .target(&target).host(&build.build); + cfg.cargo_metadata(false) + .opt_level(2) + .warnings(false) + .debug(false) + .target(&target) + .host(&build.build); match build.crt_static(target) { - Some(a) => { cfg.static_crt(a); } + Some(a) => { + cfg.static_crt(a); + } None => { if target.contains("msvc") { cfg.static_crt(true); @@ -97,7 +108,11 @@ pub fn find(build: &mut Build) { build.cc.insert(target, compiler); build.verbose(&format!("CC_{} = {:?}", &target, build.cc(target))); - build.verbose(&format!("CFLAGS_{} = {:?}", &target, build.cflags(target, GitRepo::Rustc))); + build.verbose(&format!( + "CFLAGS_{} = {:?}", + &target, + build.cflags(target, GitRepo::Rustc) + )); if let Some(ar) = ar { build.verbose(&format!("AR_{} = {:?}", &target, ar)); build.ar.insert(target, ar); @@ -105,11 +120,21 @@ pub fn find(build: &mut Build) { } // For all host triples we need to find a C++ compiler as well - let hosts = build.hosts.iter().cloned().chain(iter::once(build.build)).collect::>(); + let hosts = build + .hosts + .iter() + .cloned() + .chain(iter::once(build.build)) + .collect::>(); for host in hosts.into_iter() { let mut cfg = cc::Build::new(); - cfg.cargo_metadata(false).opt_level(2).warnings(false).debug(false).cpp(true) - .target(&host).host(&build.build); + cfg.cargo_metadata(false) + .opt_level(2) + .warnings(false) + .debug(false) + .cpp(true) + .target(&host) + .host(&build.build); let config = build.config.target_config.get(&host); if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) { cfg.compiler(cxx); @@ -122,21 +147,24 @@ pub fn find(build: &mut Build) { } } -fn set_compiler(cfg: &mut cc::Build, - compiler: Language, - target: Interned, - config: Option<&Target>, - build: &Build) { +fn set_compiler( + cfg: &mut cc::Build, + compiler: Language, + target: Interned, + config: Option<&Target>, + build: &Build, +) { match &*target { // When compiling for android we may have the NDK configured in the // config.toml in which case we look there. Otherwise the default // compiler already takes into account the triple in question. t if t.contains("android") => { if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) { - let target = target.replace("armv7neon", "arm") - .replace("armv7", "arm") - .replace("thumbv7neon", "arm") - .replace("thumbv7", "arm"); + let target = target + .replace("armv7neon", "arm") + .replace("armv7", "arm") + .replace("thumbv7neon", "arm") + .replace("thumbv7", "arm"); let compiler = format!("{}-{}", target, compiler.clang()); cfg.compiler(ndk.join("bin").join(compiler)); } @@ -148,7 +176,7 @@ fn set_compiler(cfg: &mut cc::Build, let c = cfg.get_compiler(); let gnu_compiler = compiler.gcc(); if !c.path().ends_with(gnu_compiler) { - return + return; } let output = output(c.to_command().arg("--version")); @@ -157,7 +185,7 @@ fn set_compiler(cfg: &mut cc::Build, None => return, }; match output[i + 3..].chars().next().unwrap() { - '0' ... 
'6' => {} + '0'...'6' => {} _ => return, } let alternative = format!("e{}", gnu_compiler); diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs index 63741b9b677b4..47102ed616b20 100644 --- a/src/bootstrap/channel.rs +++ b/src/bootstrap/channel.rs @@ -10,8 +10,8 @@ use std::process::Command; use build_helper::output; -use crate::Build; use crate::config::Config; +use crate::Build; // The version number pub const CFG_RELEASE_NUM: &str = "1.33.0"; @@ -30,31 +30,41 @@ impl GitInfo { pub fn new(config: &Config, dir: &Path) -> GitInfo { // See if this even begins to look like a git dir if config.ignore_git || !dir.join(".git").exists() { - return GitInfo { inner: None } + return GitInfo { inner: None }; } // Make sure git commands work let out = Command::new("git") - .arg("rev-parse") - .current_dir(dir) - .output() - .expect("failed to spawn git"); + .arg("rev-parse") + .current_dir(dir) + .output() + .expect("failed to spawn git"); if !out.status.success() { - return GitInfo { inner: None } + return GitInfo { inner: None }; } // Ok, let's scrape some info - let ver_date = output(Command::new("git").current_dir(dir) - .arg("log").arg("-1") - .arg("--date=short") - .arg("--pretty=format:%cd")); - let ver_hash = output(Command::new("git").current_dir(dir) - .arg("rev-parse").arg("HEAD")); - let short_ver_hash = output(Command::new("git") - .current_dir(dir) - .arg("rev-parse") - .arg("--short=9") - .arg("HEAD")); + let ver_date = output( + Command::new("git") + .current_dir(dir) + .arg("log") + .arg("-1") + .arg("--date=short") + .arg("--pretty=format:%cd"), + ); + let ver_hash = output( + Command::new("git") + .current_dir(dir) + .arg("rev-parse") + .arg("HEAD"), + ); + let short_ver_hash = output( + Command::new("git") + .current_dir(dir) + .arg("rev-parse") + .arg("--short=9") + .arg("HEAD"), + ); GitInfo { inner: Some(Info { commit_date: ver_date.trim().to_string(), diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index 050db936daab1..853b89da77020 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -1,11 +1,12 @@ //! Implementation of compiling the compiler and standard library, in "check" mode. 
-use crate::compile::{run_cargo, std_cargo, test_cargo, rustc_cargo, rustc_cargo_env, - add_to_sysroot}; -use crate::builder::{RunConfig, Builder, ShouldRun, Step}; +use crate::builder::{Builder, RunConfig, ShouldRun, Step}; +use crate::cache::{Interned, INTERNER}; +use crate::compile::{ + add_to_sysroot, run_cargo, rustc_cargo, rustc_cargo_env, std_cargo, test_cargo, +}; use crate::tool::{prepare_tool_cargo, SourceType}; use crate::{Compiler, Mode}; -use crate::cache::{INTERNER, Interned}; use std::path::PathBuf; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] @@ -22,9 +23,7 @@ impl Step for Std { } fn make_run(run: RunConfig) { - run.builder.ensure(Std { - target: run.target, - }); + run.builder.ensure(Std { target: run.target }); } fn run(self, builder: &Builder) { @@ -35,12 +34,17 @@ impl Step for Std { std_cargo(builder, &compiler, target, &mut cargo); let _folder = builder.fold_output(|| format!("stage{}-std", compiler.stage)); - builder.info(&format!("Checking std artifacts ({} -> {})", &compiler.host, target)); - run_cargo(builder, - &mut cargo, - vec![], - &libstd_stamp(builder, compiler, target), - true); + builder.info(&format!( + "Checking std artifacts ({} -> {})", + &compiler.host, target + )); + run_cargo( + builder, + &mut cargo, + vec![], + &libstd_stamp(builder, compiler, target), + true, + ); let libdir = builder.sysroot_libdir(compiler, target); add_to_sysroot(&builder, &libdir, &libstd_stamp(builder, compiler, target)); @@ -62,9 +66,7 @@ impl Step for Rustc { } fn make_run(run: RunConfig) { - run.builder.ensure(Rustc { - target: run.target, - }); + run.builder.ensure(Rustc { target: run.target }); } /// Build the compiler. @@ -82,15 +84,24 @@ impl Step for Rustc { rustc_cargo(builder, &mut cargo); let _folder = builder.fold_output(|| format!("stage{}-rustc", compiler.stage)); - builder.info(&format!("Checking compiler artifacts ({} -> {})", &compiler.host, target)); - run_cargo(builder, - &mut cargo, - vec![], - &librustc_stamp(builder, compiler, target), - true); + builder.info(&format!( + "Checking compiler artifacts ({} -> {})", + &compiler.host, target + )); + run_cargo( + builder, + &mut cargo, + vec![], + &librustc_stamp(builder, compiler, target), + true, + ); let libdir = builder.sysroot_libdir(compiler, target); - add_to_sysroot(&builder, &libdir, &librustc_stamp(builder, compiler, target)); + add_to_sysroot( + &builder, + &libdir, + &librustc_stamp(builder, compiler, target), + ); } } @@ -111,9 +122,9 @@ impl Step for CodegenBackend { fn make_run(run: RunConfig) { let backend = run.builder.config.rust_codegen_backends.get(0); - let backend = backend.cloned().unwrap_or_else(|| { - INTERNER.intern_str("llvm") - }); + let backend = backend + .cloned() + .unwrap_or_else(|| INTERNER.intern_str("llvm")); run.builder.ensure(CodegenBackend { target: run.target, backend, @@ -128,17 +139,21 @@ impl Step for CodegenBackend { builder.ensure(Rustc { target }); let mut cargo = builder.cargo(compiler, Mode::Codegen, target, "check"); - cargo.arg("--manifest-path").arg(builder.src.join("src/librustc_codegen_llvm/Cargo.toml")); + cargo + .arg("--manifest-path") + .arg(builder.src.join("src/librustc_codegen_llvm/Cargo.toml")); rustc_cargo_env(builder, &mut cargo); // We won't build LLVM if it's not available, as it shouldn't affect `check`. 
let _folder = builder.fold_output(|| format!("stage{}-rustc_codegen_llvm", compiler.stage)); - run_cargo(builder, - &mut cargo, - vec![], - &codegen_backend_stamp(builder, compiler, target, backend), - true); + run_cargo( + builder, + &mut cargo, + vec![], + &codegen_backend_stamp(builder, compiler, target, backend), + true, + ); } } @@ -156,9 +171,7 @@ impl Step for Test { } fn make_run(run: RunConfig) { - run.builder.ensure(Test { - target: run.target, - }); + run.builder.ensure(Test { target: run.target }); } fn run(self, builder: &Builder) { @@ -171,12 +184,17 @@ impl Step for Test { test_cargo(builder, &compiler, target, &mut cargo); let _folder = builder.fold_output(|| format!("stage{}-test", compiler.stage)); - builder.info(&format!("Checking test artifacts ({} -> {})", &compiler.host, target)); - run_cargo(builder, - &mut cargo, - vec![], - &libtest_stamp(builder, compiler, target), - true); + builder.info(&format!( + "Checking test artifacts ({} -> {})", + &compiler.host, target + )); + run_cargo( + builder, + &mut cargo, + vec![], + &libtest_stamp(builder, compiler, target), + true, + ); let libdir = builder.sysroot_libdir(compiler, target); add_to_sysroot(builder, &libdir, &libtest_stamp(builder, compiler, target)); @@ -198,9 +216,7 @@ impl Step for Rustdoc { } fn make_run(run: RunConfig) { - run.builder.ensure(Rustdoc { - target: run.target, - }); + run.builder.ensure(Rustdoc { target: run.target }); } fn run(self, builder: &Builder) { @@ -209,22 +225,29 @@ impl Step for Rustdoc { builder.ensure(Rustc { target }); - let mut cargo = prepare_tool_cargo(builder, - compiler, - Mode::ToolRustc, - target, - "check", - "src/tools/rustdoc", - SourceType::InTree, - &[]); + let mut cargo = prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + target, + "check", + "src/tools/rustdoc", + SourceType::InTree, + &[], + ); let _folder = builder.fold_output(|| format!("stage{}-rustdoc", compiler.stage)); - println!("Checking rustdoc artifacts ({} -> {})", &compiler.host, target); - run_cargo(builder, - &mut cargo, - vec![], - &rustdoc_stamp(builder, compiler, target), - true); + println!( + "Checking rustdoc artifacts ({} -> {})", + &compiler.host, target + ); + run_cargo( + builder, + &mut cargo, + vec![], + &rustdoc_stamp(builder, compiler, target), + true, + ); let libdir = builder.sysroot_libdir(compiler, target); add_to_sysroot(&builder, &libdir, &rustdoc_stamp(builder, compiler, target)); @@ -235,34 +258,44 @@ impl Step for Rustdoc { /// Cargo's output path for the standard library in a given stage, compiled /// by a particular compiler for the specified target. pub fn libstd_stamp(builder: &Builder, compiler: Compiler, target: Interned) -> PathBuf { - builder.cargo_out(compiler, Mode::Std, target).join(".libstd-check.stamp") + builder + .cargo_out(compiler, Mode::Std, target) + .join(".libstd-check.stamp") } /// Cargo's output path for libtest in a given stage, compiled by a particular /// compiler for the specified target. pub fn libtest_stamp(builder: &Builder, compiler: Compiler, target: Interned) -> PathBuf { - builder.cargo_out(compiler, Mode::Test, target).join(".libtest-check.stamp") + builder + .cargo_out(compiler, Mode::Test, target) + .join(".libtest-check.stamp") } /// Cargo's output path for librustc in a given stage, compiled by a particular /// compiler for the specified target. 
pub fn librustc_stamp(builder: &Builder, compiler: Compiler, target: Interned) -> PathBuf { - builder.cargo_out(compiler, Mode::Rustc, target).join(".librustc-check.stamp") + builder + .cargo_out(compiler, Mode::Rustc, target) + .join(".librustc-check.stamp") } /// Cargo's output path for librustc_codegen_llvm in a given stage, compiled by a particular /// compiler for the specified target and backend. -fn codegen_backend_stamp(builder: &Builder, - compiler: Compiler, - target: Interned, - backend: Interned) -> PathBuf { - builder.cargo_out(compiler, Mode::Codegen, target) - .join(format!(".librustc_codegen_llvm-{}-check.stamp", backend)) +fn codegen_backend_stamp( + builder: &Builder, + compiler: Compiler, + target: Interned, + backend: Interned, +) -> PathBuf { + builder + .cargo_out(compiler, Mode::Codegen, target) + .join(format!(".librustc_codegen_llvm-{}-check.stamp", backend)) } /// Cargo's output path for rustdoc in a given stage, compiled by a particular /// compiler for the specified target. pub fn rustdoc_stamp(builder: &Builder, compiler: Compiler, target: Interned) -> PathBuf { - builder.cargo_out(compiler, Mode::ToolRustc, target) + builder + .cargo_out(compiler, Mode::ToolRustc, target) .join(".rustdoc-check.stamp") } diff --git a/src/bootstrap/clean.rs b/src/bootstrap/clean.rs index 74a2b7e4aa98b..7e20defe633f8 100644 --- a/src/bootstrap/clean.rs +++ b/src/bootstrap/clean.rs @@ -29,7 +29,7 @@ pub fn clean(build: &Build, all: bool) { for entry in entries { let entry = t!(entry); if entry.file_name().to_str() == Some("llvm") { - continue + continue; } let path = t!(entry.path().canonicalize()); rm_rf(&path); @@ -45,7 +45,7 @@ fn rm_rf(path: &Path) { return; } panic!("failed to get metadata for file {}: {}", path.display(), e); - }, + } Ok(metadata) => { if metadata.file_type().is_file() || metadata.file_type().is_symlink() { do_op(path, "remove file", |p| fs::remove_file(p)); @@ -56,20 +56,20 @@ fn rm_rf(path: &Path) { rm_rf(&t!(file).path()); } do_op(path, "remove dir", |p| fs::remove_dir(p)); - }, + } }; } fn do_op(path: &Path, desc: &str, mut f: F) - where F: FnMut(&Path) -> io::Result<()> +where + F: FnMut(&Path) -> io::Result<()>, { match f(path) { Ok(()) => {} // On windows we can't remove a readonly file, and git will often clone files as readonly. // As a result, we have some special logic to remove readonly files on windows. // This is also the reason that we can't use things like fs::remove_dir_all(). 
- Err(ref e) if cfg!(windows) && - e.kind() == ErrorKind::PermissionDenied => { + Err(ref e) if cfg!(windows) && e.kind() == ErrorKind::PermissionDenied => { let mut p = t!(path.symlink_metadata()).permissions(); p.set_readonly(false); t!(fs::set_permissions(path, p)); diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs index 821bd002e95b3..c0561721be3b5 100644 --- a/src/bootstrap/compile.rs +++ b/src/bootstrap/compile.rs @@ -9,22 +9,22 @@ use std::borrow::Cow; use std::env; use std::fs; -use std::io::BufReader; use std::io::prelude::*; +use std::io::BufReader; use std::path::{Path, PathBuf}; -use std::process::{Command, Stdio, exit}; +use std::process::{exit, Command, Stdio}; use std::str; -use build_helper::{output, mtime, up_to_date}; +use build_helper::{mtime, output, up_to_date}; use filetime::FileTime; use serde_json; -use crate::util::{exe, libdir, is_dylib}; -use crate::{Compiler, Mode, GitRepo}; use crate::native; +use crate::util::{exe, is_dylib, libdir}; +use crate::{Compiler, GitRepo, Mode}; -use crate::cache::{INTERNER, Interned}; -use crate::builder::{Step, RunConfig, ShouldRun, Builder}; +use crate::builder::{Builder, RunConfig, ShouldRun, Step}; +use crate::cache::{Interned, INTERNER}; #[derive(Debug, PartialOrd, Ord, Copy, Clone, PartialEq, Eq, Hash)] pub struct Std { @@ -74,7 +74,10 @@ impl Step for Std { compiler: from, target, }); - builder.info(&format!("Uplifting stage1 std ({} -> {})", from.host, target)); + builder.info(&format!( + "Uplifting stage1 std ({} -> {})", + from.host, target + )); // Even if we're not building std this stage, the new sysroot must // still contain the musl startup objects. @@ -100,13 +103,17 @@ impl Step for Std { std_cargo(builder, &compiler, target, &mut cargo); let _folder = builder.fold_output(|| format!("stage{}-std", compiler.stage)); - builder.info(&format!("Building stage{} std artifacts ({} -> {})", compiler.stage, - &compiler.host, target)); - run_cargo(builder, - &mut cargo, - vec![], - &libstd_stamp(builder, compiler, target), - false); + builder.info(&format!( + "Building stage{} std artifacts ({} -> {})", + compiler.stage, &compiler.host, target + )); + run_cargo( + builder, + &mut cargo, + vec![], + &libstd_stamp(builder, compiler, target), + false, + ); builder.ensure(StdLink { compiler: builder.compiler(compiler.stage, builder.config.build), @@ -122,20 +129,23 @@ impl Step for Std { /// with a glibc-targeting toolchain, given we have the appropriate startup /// files. As those shipped with glibc won't work, copy the ones provided by /// musl so we have them on linux-gnu hosts. -fn copy_musl_third_party_objects(builder: &Builder, - target: Interned, - into: &Path) { +fn copy_musl_third_party_objects(builder: &Builder, target: Interned, into: &Path) { for &obj in &["crt1.o", "crti.o", "crtn.o"] { - builder.copy(&builder.musl_root(target).unwrap().join("lib").join(obj), &into.join(obj)); + builder.copy( + &builder.musl_root(target).unwrap().join("lib").join(obj), + &into.join(obj), + ); } } /// Configure cargo to compile the standard library, adding appropriate env vars /// and such. 
-pub fn std_cargo(builder: &Builder, - compiler: &Compiler, - target: Interned, - cargo: &mut Command) { +pub fn std_cargo( + builder: &Builder, + compiler: &Compiler, + target: Interned, + cargo: &mut Command, +) { if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") { cargo.env("MACOSX_DEPLOYMENT_TARGET", target); } @@ -166,7 +176,9 @@ pub fn std_cargo(builder: &Builder, cargo.env("LLVM_CONFIG", llvm_config); } - cargo.arg("--features").arg(features) + cargo + .arg("--features") + .arg(features) .arg("--manifest-path") .arg(builder.src.join("src/libstd/Cargo.toml")); @@ -204,12 +216,10 @@ impl Step for StdLink { let compiler = self.compiler; let target_compiler = self.target_compiler; let target = self.target; - builder.info(&format!("Copying stage{} std from stage{} ({} -> {} / {})", - target_compiler.stage, - compiler.stage, - &compiler.host, - target_compiler.host, - target)); + builder.info(&format!( + "Copying stage{} std from stage{} ({} -> {} / {})", + target_compiler.stage, compiler.stage, &compiler.host, target_compiler.host, target + )); let libdir = builder.sysroot_libdir(target_compiler, target); add_to_sysroot(builder, &libdir, &libstd_stamp(builder, compiler, target)); @@ -226,7 +236,10 @@ impl Step for StdLink { fn copy_apple_sanitizer_dylibs(builder: &Builder, native_dir: &Path, platform: &str, into: &Path) { for &sanitizer in &["asan", "tsan"] { - let filename = format!("lib__rustc__clang_rt.{}_{}_dynamic.dylib", sanitizer, platform); + let filename = format!( + "lib__rustc__clang_rt.{}_{}_dynamic.dylib", + sanitizer, platform + ); let mut src_path = native_dir.join(sanitizer); src_path.push("build"); src_path.push("lib"); @@ -266,7 +279,7 @@ impl Step for StartupObjects { let for_compiler = self.compiler; let target = self.target; if !target.contains("pc-windows-gnu") { - return + return; } let src_dir = &builder.src.join("src/rtstartup"); @@ -279,22 +292,24 @@ impl Step for StartupObjects { let dst_file = &dst_dir.join(file.to_string() + ".o"); if !up_to_date(src_file, dst_file) { let mut cmd = Command::new(&builder.initial_rustc); - builder.run(cmd.env("RUSTC_BOOTSTRAP", "1") - .arg("--cfg").arg("stage0") - .arg("--target").arg(target) - .arg("--emit=obj") - .arg("-o").arg(dst_file) - .arg(src_file)); + builder.run( + cmd.env("RUSTC_BOOTSTRAP", "1") + .arg("--cfg") + .arg("stage0") + .arg("--target") + .arg(target) + .arg("--emit=obj") + .arg("-o") + .arg(dst_file) + .arg(src_file), + ); } builder.copy(dst_file, &sysroot_dir.join(file.to_string() + ".o")); } for obj in ["crt2.o", "dllcrt2.o"].iter() { - let src = compiler_file(builder, - builder.cc(target), - target, - obj); + let src = compiler_file(builder, builder.cc(target), target, obj); builder.copy(&src, &sysroot_dir.join(obj)); } } @@ -347,8 +362,10 @@ impl Step for Test { compiler: builder.compiler(1, builder.config.build), target, }); - builder.info( - &format!("Uplifting stage1 test ({} -> {})", builder.config.build, target)); + builder.info(&format!( + "Uplifting stage1 test ({} -> {})", + builder.config.build, target + )); builder.ensure(TestLink { compiler: builder.compiler(1, builder.config.build), target_compiler: compiler, @@ -361,13 +378,17 @@ impl Step for Test { test_cargo(builder, &compiler, target, &mut cargo); let _folder = builder.fold_output(|| format!("stage{}-test", compiler.stage)); - builder.info(&format!("Building stage{} test artifacts ({} -> {})", compiler.stage, - &compiler.host, target)); - run_cargo(builder, - &mut cargo, - vec![], - &libtest_stamp(builder, compiler, 
target), - false); + builder.info(&format!( + "Building stage{} test artifacts ({} -> {})", + compiler.stage, &compiler.host, target + )); + run_cargo( + builder, + &mut cargo, + vec![], + &libtest_stamp(builder, compiler, target), + false, + ); builder.ensure(TestLink { compiler: builder.compiler(compiler.stage, builder.config.build), @@ -378,14 +399,17 @@ impl Step for Test { } /// Same as `std_cargo`, but for libtest -pub fn test_cargo(builder: &Builder, - _compiler: &Compiler, - _target: Interned, - cargo: &mut Command) { +pub fn test_cargo( + builder: &Builder, + _compiler: &Compiler, + _target: Interned, + cargo: &mut Command, +) { if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") { cargo.env("MACOSX_DEPLOYMENT_TARGET", target); } - cargo.arg("--manifest-path") + cargo + .arg("--manifest-path") .arg(builder.src.join("src/libtest/Cargo.toml")); } @@ -408,14 +432,15 @@ impl Step for TestLink { let compiler = self.compiler; let target_compiler = self.target_compiler; let target = self.target; - builder.info(&format!("Copying stage{} test from stage{} ({} -> {} / {})", - target_compiler.stage, - compiler.stage, - &compiler.host, - target_compiler.host, - target)); - add_to_sysroot(builder, &builder.sysroot_libdir(target_compiler, target), - &libtest_stamp(builder, compiler, target)); + builder.info(&format!( + "Copying stage{} test from stage{} ({} -> {} / {})", + target_compiler.stage, compiler.stage, &compiler.host, target_compiler.host, target + )); + add_to_sysroot( + builder, + &builder.sysroot_libdir(target_compiler, target), + &libtest_stamp(builder, compiler, target), + ); builder.cargo(target_compiler, Mode::ToolTest, target, "clean"); } @@ -469,8 +494,10 @@ impl Step for Rustc { compiler: builder.compiler(1, builder.config.build), target, }); - builder.info(&format!("Uplifting stage1 rustc ({} -> {})", - builder.config.build, target)); + builder.info(&format!( + "Uplifting stage1 rustc ({} -> {})", + builder.config.build, target + )); builder.ensure(RustcLink { compiler: builder.compiler(1, builder.config.build), target_compiler: compiler, @@ -489,13 +516,17 @@ impl Step for Rustc { rustc_cargo(builder, &mut cargo); let _folder = builder.fold_output(|| format!("stage{}-rustc", compiler.stage)); - builder.info(&format!("Building stage{} compiler artifacts ({} -> {})", - compiler.stage, &compiler.host, target)); - run_cargo(builder, - &mut cargo, - vec![], - &librustc_stamp(builder, compiler, target), - false); + builder.info(&format!( + "Building stage{} compiler artifacts ({} -> {})", + compiler.stage, &compiler.host, target + )); + run_cargo( + builder, + &mut cargo, + vec![], + &librustc_stamp(builder, compiler, target), + false, + ); builder.ensure(RustcLink { compiler: builder.compiler(compiler.stage, builder.config.build), @@ -506,20 +537,29 @@ impl Step for Rustc { } pub fn rustc_cargo(builder: &Builder, cargo: &mut Command) { - cargo.arg("--features").arg(builder.rustc_features()) - .arg("--manifest-path") - .arg(builder.src.join("src/rustc/Cargo.toml")); + cargo + .arg("--features") + .arg(builder.rustc_features()) + .arg("--manifest-path") + .arg(builder.src.join("src/rustc/Cargo.toml")); rustc_cargo_env(builder, cargo); } pub fn rustc_cargo_env(builder: &Builder, cargo: &mut Command) { // Set some configuration variables picked up by build scripts and // the compiler alike - cargo.env("CFG_RELEASE", builder.rust_release()) - .env("CFG_RELEASE_CHANNEL", &builder.config.channel) - .env("CFG_VERSION", builder.rust_version()) - .env("CFG_PREFIX", 
builder.config.prefix.clone().unwrap_or_default()) - .env("CFG_CODEGEN_BACKENDS_DIR", &builder.config.rust_codegen_backends_dir); + cargo + .env("CFG_RELEASE", builder.rust_release()) + .env("CFG_RELEASE_CHANNEL", &builder.config.channel) + .env("CFG_VERSION", builder.rust_version()) + .env( + "CFG_PREFIX", + builder.config.prefix.clone().unwrap_or_default(), + ) + .env( + "CFG_CODEGEN_BACKENDS_DIR", + &builder.config.rust_codegen_backends_dir, + ); let libdir_relative = builder.config.libdir_relative().unwrap_or(Path::new("lib")); cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative); @@ -570,14 +610,15 @@ impl Step for RustcLink { let compiler = self.compiler; let target_compiler = self.target_compiler; let target = self.target; - builder.info(&format!("Copying stage{} rustc from stage{} ({} -> {} / {})", - target_compiler.stage, - compiler.stage, - &compiler.host, - target_compiler.host, - target)); - add_to_sysroot(builder, &builder.sysroot_libdir(target_compiler, target), - &librustc_stamp(builder, compiler, target)); + builder.info(&format!( + "Copying stage{} rustc from stage{} ({} -> {} / {})", + target_compiler.stage, compiler.stage, &compiler.host, target_compiler.host, target + )); + add_to_sysroot( + builder, + &builder.sysroot_libdir(target_compiler, target), + &librustc_stamp(builder, compiler, target), + ); builder.cargo(target_compiler, Mode::ToolRustc, target, "clean"); } } @@ -600,9 +641,9 @@ impl Step for CodegenBackend { fn make_run(run: RunConfig) { let backend = run.builder.config.rust_codegen_backends.get(0); - let backend = backend.cloned().unwrap_or_else(|| { - INTERNER.intern_str("llvm") - }); + let backend = backend + .cloned() + .unwrap_or_else(|| INTERNER.intern_str("llvm")); run.builder.ensure(CodegenBackend { compiler: run.builder.compiler(run.builder.top_stage, run.host), target: run.target, @@ -618,8 +659,10 @@ impl Step for CodegenBackend { builder.ensure(Rustc { compiler, target }); if builder.config.keep_stage.contains(&compiler.stage) { - builder.info("Warning: Using a potentially old codegen backend. \ - This may not behave well."); + builder.info( + "Warning: Using a potentially old codegen backend. \ + This may not behave well.", + ); // Codegen backends are linked separately from this step today, so we don't do // anything here. 
return; @@ -637,7 +680,8 @@ impl Step for CodegenBackend { let out_dir = builder.cargo_out(compiler, Mode::Codegen, target); let mut cargo = builder.cargo(compiler, Mode::Codegen, target, "rustc"); - cargo.arg("--manifest-path") + cargo + .arg("--manifest-path") .arg(builder.src.join("src/librustc_codegen_llvm/Cargo.toml")); rustc_cargo_env(builder, &mut cargo); @@ -674,27 +718,30 @@ impl Step for CodegenBackend { let tmp_stamp = out_dir.join(".tmp.stamp"); let _folder = builder.fold_output(|| format!("stage{}-rustc_codegen_llvm", compiler.stage)); - let files = run_cargo(builder, - cargo.arg("--features").arg(features), - cargo_tails_args, - &tmp_stamp, - false); + let files = run_cargo( + builder, + cargo.arg("--features").arg(features), + cargo_tails_args, + &tmp_stamp, + false, + ); if builder.config.dry_run { return; } - let mut files = files.into_iter() - .filter(|f| { - let filename = f.file_name().unwrap().to_str().unwrap(); - is_dylib(filename) && filename.contains("rustc_codegen_llvm-") - }); + let mut files = files.into_iter().filter(|f| { + let filename = f.file_name().unwrap().to_str().unwrap(); + is_dylib(filename) && filename.contains("rustc_codegen_llvm-") + }); let codegen_backend = match files.next() { Some(f) => f, None => panic!("no dylibs built for codegen backend?"), }; if let Some(f) = files.next() { - panic!("codegen backend built two dylibs:\n{}\n{}", - codegen_backend.display(), - f.display()); + panic!( + "codegen backend built two dylibs:\n{}\n{}", + codegen_backend.display(), + f.display() + ); } let stamp = codegen_backend_stamp(builder, compiler, target, backend); let codegen_backend = codegen_backend.to_str().unwrap(); @@ -702,11 +749,13 @@ impl Step for CodegenBackend { } } -pub fn build_codegen_backend(builder: &Builder, - cargo: &mut Command, - compiler: &Compiler, - target: Interned, - backend: Interned) -> String { +pub fn build_codegen_backend( + builder: &Builder, + cargo: &mut Command, + compiler: &Compiler, + target: Interned, + backend: Interned, +) -> String { let mut features = String::new(); match &*backend { @@ -722,8 +771,10 @@ pub fn build_codegen_backend(builder: &Builder, features.push_str(" emscripten"); } - builder.info(&format!("Building stage{} codegen artifacts ({} -> {}, {})", - compiler.stage, &compiler.host, target, backend)); + builder.info(&format!( + "Building stage{} codegen artifacts ({} -> {}, {})", + compiler.stage, &compiler.host, target, backend + )); // Pass down configuration from the LLVM build into the build of // librustc_llvm and librustc_codegen_llvm. @@ -739,14 +790,13 @@ pub fn build_codegen_backend(builder: &Builder, } // Building with a static libstdc++ is only supported on linux right now, // not for MSVC or macOS - if builder.config.llvm_static_stdcpp && - !target.contains("freebsd") && - !target.contains("windows") && - !target.contains("apple") { - let file = compiler_file(builder, - builder.cxx(target).unwrap(), - target, - "libstdc++.a"); + if builder.config.llvm_static_stdcpp + && !target.contains("freebsd") + && !target.contains("windows") + && !target.contains("apple") + { + let file = + compiler_file(builder, builder.cxx(target).unwrap(), target, "libstdc++.a"); cargo.env("LLVM_STATIC_STDCPP", file); } if builder.config.llvm_link_shared { @@ -765,9 +815,11 @@ pub fn build_codegen_backend(builder: &Builder, /// This will take the codegen artifacts produced by `compiler` and link them /// into an appropriate location for `target_compiler` to be a functional /// compiler. 
-fn copy_codegen_backends_to_sysroot(builder: &Builder, - compiler: Compiler, - target_compiler: Compiler) { +fn copy_codegen_backends_to_sysroot( + builder: &Builder, + compiler: Compiler, + target_compiler: Compiler, +) { let target = target_compiler.host; // Note that this step is different than all the other `*Link` steps in @@ -794,21 +846,17 @@ fn copy_codegen_backends_to_sysroot(builder: &Builder, let target_filename = { let dash = filename.find('-').unwrap(); let dot = filename.find('.').unwrap(); - format!("{}-{}{}", - &filename[..dash], - backend, - &filename[dot..]) + format!("{}-{}{}", &filename[..dash], backend, &filename[dot..]) }; builder.copy(&file, &dst.join(target_filename)); } } -fn copy_lld_to_sysroot(builder: &Builder, - target_compiler: Compiler, - lld_install_root: &Path) { +fn copy_lld_to_sysroot(builder: &Builder, target_compiler: Compiler, lld_install_root: &Path) { let target = target_compiler.host; - let dst = builder.sysroot_libdir(target_compiler, target) + let dst = builder + .sysroot_libdir(target_compiler, target) .parent() .unwrap() .join("bin"); @@ -818,41 +866,55 @@ fn copy_lld_to_sysroot(builder: &Builder, let dst_exe = exe("rust-lld", &target); // we prepend this bin directory to the user PATH when linking Rust binaries. To // avoid shadowing the system LLD we rename the LLD we provide to `rust-lld`. - builder.copy(&lld_install_root.join("bin").join(&src_exe), &dst.join(&dst_exe)); + builder.copy( + &lld_install_root.join("bin").join(&src_exe), + &dst.join(&dst_exe), + ); } /// Cargo's output path for the standard library in a given stage, compiled /// by a particular compiler for the specified target. pub fn libstd_stamp(builder: &Builder, compiler: Compiler, target: Interned) -> PathBuf { - builder.cargo_out(compiler, Mode::Std, target).join(".libstd.stamp") + builder + .cargo_out(compiler, Mode::Std, target) + .join(".libstd.stamp") } /// Cargo's output path for libtest in a given stage, compiled by a particular /// compiler for the specified target. pub fn libtest_stamp(builder: &Builder, compiler: Compiler, target: Interned) -> PathBuf { - builder.cargo_out(compiler, Mode::Test, target).join(".libtest.stamp") + builder + .cargo_out(compiler, Mode::Test, target) + .join(".libtest.stamp") } /// Cargo's output path for librustc in a given stage, compiled by a particular /// compiler for the specified target. pub fn librustc_stamp(builder: &Builder, compiler: Compiler, target: Interned) -> PathBuf { - builder.cargo_out(compiler, Mode::Rustc, target).join(".librustc.stamp") + builder + .cargo_out(compiler, Mode::Rustc, target) + .join(".librustc.stamp") } /// Cargo's output path for librustc_codegen_llvm in a given stage, compiled by a particular /// compiler for the specified target and backend. 
-fn codegen_backend_stamp(builder: &Builder, - compiler: Compiler, - target: Interned, - backend: Interned) -> PathBuf { - builder.cargo_out(compiler, Mode::Codegen, target) +fn codegen_backend_stamp( + builder: &Builder, + compiler: Compiler, + target: Interned, + backend: Interned, +) -> PathBuf { + builder + .cargo_out(compiler, Mode::Codegen, target) .join(format!(".librustc_codegen_llvm-{}.stamp", backend)) } -pub fn compiler_file(builder: &Builder, - compiler: &Path, - target: Interned, - file: &str) -> PathBuf { +pub fn compiler_file( + builder: &Builder, + compiler: &Path, + target: Interned, + file: &str, +) -> PathBuf { let mut cmd = Command::new(compiler); cmd.args(builder.cflags(target, GitRepo::Rustc)); cmd.arg(format!("-print-file-name={}", file)); @@ -883,7 +945,10 @@ impl Step for Sysroot { let sysroot = if compiler.stage == 0 { builder.out.join(&compiler.host).join("stage0-sysroot") } else { - builder.out.join(&compiler.host).join(format!("stage{}", compiler.stage)) + builder + .out + .join(&compiler.host) + .join(format!("stage{}", compiler.stage)) }; let _ = fs::remove_dir_all(&sysroot); t!(fs::create_dir_all(&sysroot)); @@ -916,8 +981,10 @@ impl Step for Assemble { let target_compiler = self.target_compiler; if target_compiler.stage == 0 { - assert_eq!(builder.config.build, target_compiler.host, - "Cannot obtain compiler for non-native build triple at stage 0"); + assert_eq!( + builder.config.build, target_compiler.host, + "Cannot obtain compiler for non-native build triple at stage 0" + ); // The stage 0 compiler for the build triple is always pre-built. return target_compiler; } @@ -938,8 +1005,7 @@ impl Step for Assemble { // // FIXME: It may be faster if we build just a stage 1 compiler and then // use that to bootstrap this compiler forward. - let build_compiler = - builder.compiler(target_compiler.stage - 1, builder.config.build); + let build_compiler = builder.compiler(target_compiler.stage - 1, builder.config.build); // Build the libraries for this compiler to link to (i.e., the libraries // it uses at runtime). NOTE: Crates the target compiler compiles don't @@ -982,9 +1048,7 @@ impl Step for Assemble { } } - copy_codegen_backends_to_sysroot(builder, - build_compiler, - target_compiler); + copy_codegen_backends_to_sysroot(builder, build_compiler, target_compiler); if let Some(lld_install) = lld_install { copy_lld_to_sysroot(builder, target_compiler, &lld_install); } @@ -1013,13 +1077,13 @@ pub fn add_to_sysroot(builder: &Builder, sysroot_dst: &Path, stamp: &Path) { } } -pub fn run_cargo(builder: &Builder, - cargo: &mut Command, - tail_args: Vec, - stamp: &Path, - is_check: bool) - -> Vec -{ +pub fn run_cargo( + builder: &Builder, + cargo: &mut Command, + tail_args: Vec, + stamp: &Path, + is_check: bool, +) -> Vec { if builder.config.dry_run { return Vec::new(); } @@ -1029,9 +1093,12 @@ pub fn run_cargo(builder: &Builder, // `target_deps_dir` looks like $dir/$target/release/deps let target_deps_dir = target_root_dir.join("deps"); // `host_root_dir` looks like $dir/release - let host_root_dir = target_root_dir.parent().unwrap() // chop off `release` - .parent().unwrap() // chop off `$target` - .join(target_root_dir.file_name().unwrap()); + let host_root_dir = target_root_dir + .parent() + .unwrap() // chop off `release` + .parent() + .unwrap() // chop off `$target` + .join(target_root_dir.file_name().unwrap()); // Spawn Cargo slurping up its JSON output. 
We'll start building up the // `deps` array of all files it generated along with a `toplevel` array of @@ -1045,10 +1112,11 @@ pub fn run_cargo(builder: &Builder, }; for filename in filenames { // Skip files like executables - if !filename.ends_with(".rlib") && - !filename.ends_with(".lib") && - !is_dylib(&filename) && - !(is_check && filename.ends_with(".rmeta")) { + if !filename.ends_with(".rlib") + && !filename.ends_with(".lib") + && !is_dylib(&filename) + && !(is_check && filename.ends_with(".rmeta")) + { continue; } @@ -1096,18 +1164,23 @@ pub fn run_cargo(builder: &Builder, // most recent file in the `deps` folder corresponding to each one. let contents = t!(target_deps_dir.read_dir()) .map(|e| t!(e)) - .map(|e| (e.path(), e.file_name().into_string().unwrap(), t!(e.metadata()))) + .map(|e| { + ( + e.path(), + e.file_name().into_string().unwrap(), + t!(e.metadata()), + ) + }) .collect::>(); for (prefix, extension, expected_len) in toplevel { let candidates = contents.iter().filter(|&&(_, ref filename, ref meta)| { - filename.starts_with(&prefix[..]) && - filename[prefix.len()..].starts_with("-") && - filename.ends_with(&extension[..]) && - meta.len() == expected_len - }); - let max = candidates.max_by_key(|&&(_, _, ref metadata)| { - FileTime::from_last_modification_time(metadata) + filename.starts_with(&prefix[..]) + && filename[prefix.len()..].starts_with("-") + && filename.ends_with(&extension[..]) + && meta.len() == expected_len }); + let max = candidates + .max_by_key(|&&(_, _, ref metadata)| FileTime::from_last_modification_time(metadata)); let path_to_add = match max { Some(triple) => triple.0.to_str().unwrap(), None => panic!("no output generated for {:?} {:?}", prefix, extension), @@ -1147,9 +1220,11 @@ pub fn run_cargo(builder: &Builder, .map(|contents| contents == new_contents) .unwrap_or_default(); if contents_equal && max <= stamp_mtime { - builder.verbose(&format!("not updating {:?}; contents equal and {:?} <= {:?}", - stamp, max, stamp_mtime)); - return deps + builder.verbose(&format!( + "not updating {:?}; contents equal and {:?} <= {:?}", + stamp, max, stamp_mtime + )); + return deps; } if max > stamp_mtime { builder.verbose(&format!("updating {:?} as {:?} changed", stamp, max_path)); @@ -1171,8 +1246,10 @@ pub fn stream_cargo( } // Instruct Cargo to give us json messages on stdout, critically leaving // stderr as piped so we can get those pretty colors. - cargo.arg("--message-format").arg("json") - .stdout(Stdio::piped()); + cargo + .arg("--message-format") + .arg("json") + .stdout(Stdio::piped()); for arg in tail_args { cargo.arg(arg); @@ -1193,17 +1270,18 @@ pub fn stream_cargo( match serde_json::from_str::(&line) { Ok(msg) => cb(msg), // If this was informational, just print it out and continue - Err(_) => println!("{}", line) + Err(_) => println!("{}", line), } } // Make sure Cargo actually succeeded after we read all of its stdout. let status = t!(child.wait()); if !status.success() { - eprintln!("command did not execute successfully: {:?}\n\ - expected success, got: {}", - cargo, - status); + eprintln!( + "command did not execute successfully: {:?}\n\ + expected success, got: {}", + cargo, status + ); } status.success() } @@ -1218,5 +1296,5 @@ pub enum CargoMessage<'a> { }, BuildScriptExecuted { package_id: Cow<'a, str>, - } + }, } diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index 8655cf0eb3053..da1ab3d152cff 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -3,18 +3,18 @@ //! 
This module implements parsing `config.toml` configuration files to tweak //! how the build runs. +use std::cmp; use std::collections::{HashMap, HashSet}; use std::env; use std::fs; use std::path::{Path, PathBuf}; use std::process; -use std::cmp; -use num_cpus; -use toml; -use crate::cache::{INTERNER, Interned}; +use crate::cache::{Interned, INTERNER}; use crate::flags::Flags; pub use crate::flags::Subcommand; +use num_cpus; +use toml; /// Global configuration for the entire build and/or bootstrap. /// @@ -404,17 +404,22 @@ impl Config { // If --target was specified but --host wasn't specified, don't run any host-only tests. config.run_host_only = !(flags.host.is_empty() && !flags.target.is_empty()); - let toml = file.map(|file| { - let contents = t!(fs::read_to_string(&file)); - match toml::from_str(&contents) { - Ok(table) => table, - Err(err) => { - println!("failed to parse TOML configuration '{}': {}", - file.display(), err); - process::exit(2); + let toml = file + .map(|file| { + let contents = t!(fs::read_to_string(&file)); + match toml::from_str(&contents) { + Ok(table) => table, + Err(err) => { + println!( + "failed to parse TOML configuration '{}': {}", + file.display(), + err + ); + process::exit(2); + } } - } - }).unwrap_or_else(|| TomlConfig::default()); + }) + .unwrap_or_else(|| TomlConfig::default()); let build = toml.build.clone().unwrap_or_default(); // set by bootstrap.py @@ -425,7 +430,10 @@ impl Config { config.hosts.push(host); } } - for target in config.hosts.iter().cloned() + for target in config + .hosts + .iter() + .cloned() .chain(build.target.iter().map(|s| INTERNER.intern_str(s))) { if !config.targets.contains(&target) { @@ -443,7 +451,6 @@ impl Config { config.targets }; - config.nodejs = build.nodejs.map(PathBuf::from); config.gdb = build.gdb.map(PathBuf::from); config.python = build.python.map(PathBuf::from); @@ -490,9 +497,7 @@ impl Config { if let Some(ref llvm) = toml.llvm { match llvm.ccache { - Some(StringOrBool::String(ref s)) => { - config.ccache = Some(s.to_string()) - } + Some(StringOrBool::String(ref s)) => config.ccache = Some(s.to_string()), Some(StringOrBool::Bool(true)) => { config.ccache = Some("ccache".to_string()); } @@ -508,7 +513,9 @@ impl Config { set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp); set(&mut config.llvm_link_shared, llvm.link_shared); config.llvm_targets = llvm.targets.clone(); - config.llvm_experimental_targets = llvm.experimental_targets.clone() + config.llvm_experimental_targets = llvm + .experimental_targets + .clone() .unwrap_or_else(|| "WebAssembly;RISCV".to_string()); config.llvm_link_jobs = llvm.link_jobs; config.llvm_version_suffix = llvm.version_suffix.clone(); @@ -547,18 +554,23 @@ impl Config { config.rustc_default_linker = rust.default_linker.clone(); config.musl_root = rust.musl_root.clone().map(PathBuf::from); config.save_toolstates = rust.save_toolstates.clone().map(PathBuf::from); - set(&mut config.deny_warnings, rust.deny_warnings.or(flags.warnings)); + set( + &mut config.deny_warnings, + rust.deny_warnings.or(flags.warnings), + ); set(&mut config.backtrace_on_ice, rust.backtrace_on_ice); set(&mut config.rust_verify_llvm_ir, rust.verify_llvm_ir); set(&mut config.rust_remap_debuginfo, rust.remap_debuginfo); if let Some(ref backends) = rust.codegen_backends { - config.rust_codegen_backends = backends.iter() - .map(|s| INTERNER.intern_str(s)) - .collect(); + config.rust_codegen_backends = + backends.iter().map(|s| INTERNER.intern_str(s)).collect(); } - set(&mut config.rust_codegen_backends_dir, 
rust.codegen_backends_dir.clone()); + set( + &mut config.rust_codegen_backends_dir, + rust.codegen_backends_dir.clone(), + ); match rust.codegen_units { Some(0) => config.rust_codegen_units = Some(num_cpus::get() as u32), @@ -591,7 +603,9 @@ impl Config { target.musl_root = cfg.musl_root.clone().map(PathBuf::from); target.qemu_rootfs = cfg.qemu_rootfs.clone().map(PathBuf::from); - config.target_config.insert(INTERNER.intern_string(triple.clone()), target); + config + .target_config + .insert(INTERNER.intern_string(triple.clone()), target); } } diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index 9f97e57b4562e..90de7c61e2606 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -11,18 +11,18 @@ use std::env; use std::fs; use std::io::Write; -use std::path::{PathBuf, Path}; +use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use build_helper::output; -use crate::{Compiler, Mode, LLVM_TOOLS}; -use crate::channel; -use crate::util::{libdir, is_dylib, exe}; use crate::builder::{Builder, RunConfig, ShouldRun, Step}; +use crate::cache::{Interned, INTERNER}; +use crate::channel; use crate::compile; use crate::tool::{self, Tool}; -use crate::cache::{INTERNER, Interned}; +use crate::util::{exe, is_dylib, libdir}; +use crate::{Compiler, Mode, LLVM_TOOLS}; use time; pub fn pkgname(builder: &Builder, component: &str) -> String { @@ -109,16 +109,19 @@ impl Step for Docs { let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust-Documentation") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rust-documentation-is-installed.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(builder)) - .arg("--output-dir").arg(&distdir(builder)) - .arg(format!("--package-name={}-{}", name, host)) - .arg("--component-name=rust-docs") - .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg("--bulk-dirs=share/doc/rust/html"); + .arg("--product-name=Rust-Documentation") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-documentation-is-installed.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg(format!("--package-name={}-{}", name, host)) + .arg("--component-name=rust-docs") + .arg("--legacy-manifest-dirs=rustlib,cargo") + .arg("--bulk-dirs=share/doc/rust/html"); builder.run(&mut cmd); builder.remove_dir(&image); @@ -171,16 +174,19 @@ impl Step for RustcDocs { let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rustc-Documentation") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rustc-documentation-is-installed.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(builder)) - .arg("--output-dir").arg(&distdir(builder)) - .arg(format!("--package-name={}-{}", name, host)) - .arg("--component-name=rustc-docs") - .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg("--bulk-dirs=share/doc/rust/html"); + .arg("--product-name=Rustc-Documentation") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rustc-documentation-is-installed.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg(format!("--package-name={}-{}", name, host)) + .arg("--component-name=rustc-docs") + .arg("--legacy-manifest-dirs=rustlib,cargo") + .arg("--bulk-dirs=share/doc/rust/html"); builder.run(&mut cmd); builder.remove_dir(&image); @@ -192,10 +198,7 @@ fn find_files(files: &[&str], path: 
&[PathBuf]) -> Vec { let mut found = Vec::with_capacity(files.len()); for file in files { - let file_path = - path.iter() - .map(|dir| dir.join(file)) - .find(|p| p.exists()); + let file_path = path.iter().map(|dir| dir.join(file)).find(|p| p.exists()); if let Some(file_path) = file_path { found.push(file_path); @@ -208,7 +211,10 @@ fn find_files(files: &[&str], path: &[PathBuf]) -> Vec { } fn make_win_dist( - rust_root: &Path, plat_root: &Path, target_triple: Interned, builder: &Builder + rust_root: &Path, + plat_root: &Path, + target_triple: Interned, + builder: &Builder, ) { //Ask gcc where it keeps its stuff let mut cmd = Command::new(builder.cc(target_triple)); @@ -222,11 +228,10 @@ fn make_win_dist( let idx = line.find(':').unwrap(); let key = &line[..idx]; let trim_chars: &[_] = &[' ', '=']; - let value = - line[(idx + 1)..] - .trim_left_matches(trim_chars) - .split(';') - .map(PathBuf::from); + let value = line[(idx + 1)..] + .trim_left_matches(trim_chars) + .split(';') + .map(PathBuf::from); if key == "programs" { bin_path.extend(value); @@ -243,7 +248,8 @@ fn make_win_dist( rustc_dlls.push("libgcc_s_seh-1.dll"); } - let target_libs = [ //MinGW libs + let target_libs = [ + //MinGW libs "libgcc.a", "libgcc_eh.a", "libgcc_s.a", @@ -301,7 +307,11 @@ fn make_win_dist( } //Copy platform tools to platform-specific bin directory - let target_bin_dir = plat_root.join("lib").join("rustlib").join(target_triple).join("bin"); + let target_bin_dir = plat_root + .join("lib") + .join("rustlib") + .join(target_triple) + .join("bin"); fs::create_dir_all(&target_bin_dir).expect("creating target_bin_dir failed"); for src in target_tools { builder.copy_to_folder(&src, &target_bin_dir); @@ -312,11 +322,15 @@ fn make_win_dist( &target_bin_dir.join("GCC-WARNING.txt"), "gcc.exe contained in this folder cannot be used for compiling C files - it is only\ used as a linker. In order to be able to compile projects containing C code use\ - the GCC provided by MinGW or Cygwin." + the GCC provided by MinGW or Cygwin.", ); //Copy platform libs to platform-specific lib directory - let target_lib_dir = plat_root.join("lib").join("rustlib").join(target_triple).join("lib"); + let target_lib_dir = plat_root + .join("lib") + .join("rustlib") + .join(target_triple) + .join("lib"); fs::create_dir_all(&target_lib_dir).expect("creating target_lib_dir failed"); for src in target_libs { builder.copy_to_folder(&src, &target_lib_dir); @@ -365,15 +379,18 @@ impl Step for Mingw { let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust-MinGW") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rust-MinGW-is-installed.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(builder)) - .arg("--output-dir").arg(&distdir(builder)) - .arg(format!("--package-name={}-{}", name, host)) - .arg("--component-name=rust-mingw") - .arg("--legacy-manifest-dirs=rustlib,cargo"); + .arg("--product-name=Rust-MinGW") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-MinGW-is-installed.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg(format!("--package-name={}-{}", name, host)) + .arg("--component-name=rust-mingw") + .arg("--legacy-manifest-dirs=rustlib,cargo"); builder.run(&mut cmd); t!(fs::remove_dir_all(&image)); Some(distdir(builder).join(format!("{}-{}.tar.gz", name, host))) @@ -452,16 +469,20 @@ impl Step for Rustc { // Finally, wrap everything up in a nice tarball! 
let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rust-is-ready-to-roll.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(builder)) - .arg("--output-dir").arg(&distdir(builder)) - .arg("--non-installed-overlay").arg(&overlay) - .arg(format!("--package-name={}-{}", name, host)) - .arg("--component-name=rustc") - .arg("--legacy-manifest-dirs=rustlib,cargo"); + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-is-ready-to-roll.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg("--non-installed-overlay") + .arg(&overlay) + .arg(format!("--package-name={}-{}", name, host)) + .arg("--component-name=rustc") + .arg("--legacy-manifest-dirs=rustlib,cargo"); builder.run(&mut cmd); builder.remove_dir(&image); builder.remove_dir(&overlay); @@ -508,13 +529,15 @@ impl Step for Rustc { // Copy over lld if it's there if builder.config.lld_enabled { let exe = exe("rust-lld", &compiler.host); - let src = builder.sysroot_libdir(compiler, host) + let src = builder + .sysroot_libdir(compiler, host) .parent() .unwrap() .join("bin") .join(&exe); // for the rationale about this rename check `compile::copy_lld_to_sysroot` - let dst = image.join("lib/rustlib") + let dst = image + .join("lib/rustlib") .join(&*host) .join("bin") .join(&exe); @@ -534,9 +557,13 @@ impl Step for Rustc { let page_dst = man_dst.join(file_entry.file_name()); t!(fs::copy(&page_src, &page_dst)); // template in month/year and version number - builder.replace_in_file(&page_dst, - &[("", &month_year), - ("", channel::CFG_RELEASE_NUM)]); + builder.replace_in_file( + &page_dst, + &[ + ("", &month_year), + ("", channel::CFG_RELEASE_NUM), + ], + ); } // Debugger scripts @@ -547,7 +574,11 @@ impl Step for Rustc { // Misc license info let cp = |file: &str| { - builder.install(&builder.src.join(file), &image.join("share/doc/rust"), 0o644); + builder.install( + &builder.src.join(file), + &image.join("share/doc/rust"), + 0o644, + ); }; cp("COPYRIGHT"); cp("LICENSE-APACHE"); @@ -572,7 +603,9 @@ impl Step for DebuggerScripts { fn make_run(run: RunConfig) { run.builder.ensure(DebuggerScripts { - sysroot: run.builder.sysroot(run.builder.compiler(run.builder.top_stage, run.host)), + sysroot: run + .builder + .sysroot(run.builder.compiler(run.builder.top_stage, run.host)), host: run.target, }); } @@ -588,8 +621,11 @@ impl Step for DebuggerScripts { }; if host.contains("windows-msvc") { // windbg debugger scripts - builder.install(&builder.src.join("src/etc/rust-windbg.cmd"), &sysroot.join("bin"), - 0o755); + builder.install( + &builder.src.join("src/etc/rust-windbg.cmd"), + &sysroot.join("bin"), + 0o755, + ); cp_debugger_script("natvis/intrinsic.natvis"); cp_debugger_script("natvis/liballoc.natvis"); @@ -598,15 +634,21 @@ impl Step for DebuggerScripts { cp_debugger_script("debugger_pretty_printers_common.py"); // gdb debugger scripts - builder.install(&builder.src.join("src/etc/rust-gdb"), &sysroot.join("bin"), - 0o755); + builder.install( + &builder.src.join("src/etc/rust-gdb"), + &sysroot.join("bin"), + 0o755, + ); cp_debugger_script("gdb_load_rust_pretty_printers.py"); cp_debugger_script("gdb_rust_pretty_printing.py"); // lldb debugger scripts - builder.install(&builder.src.join("src/etc/rust-lldb"), &sysroot.join("bin"), - 0o755); + builder.install( + &builder.src.join("src/etc/rust-lldb"), + 
&sysroot.join("bin"), + 0o755, + ); cp_debugger_script("lldb_rust_formatters.py"); } @@ -629,7 +671,9 @@ impl Step for Std { fn make_run(run: RunConfig) { run.builder.ensure(Std { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + compiler: run + .builder + .compiler(run.builder.top_stage, run.builder.config.build), target: run.target, }); } @@ -639,8 +683,10 @@ impl Step for Std { let target = self.target; let name = pkgname(builder, "rust-std"); - builder.info(&format!("Dist std stage{} ({} -> {})", - compiler.stage, &compiler.host, target)); + builder.info(&format!( + "Dist std stage{} ({} -> {})", + compiler.stage, &compiler.host, target + )); // The only true set of target libraries came from the build triple, so // let's reduce redundant work by only producing archives from that host. @@ -672,22 +718,23 @@ impl Step for Std { src.pop(); // Remove the trailing /lib folder from the sysroot_libdir builder.cp_filtered(&src, &dst, &|path| { let name = path.file_name().and_then(|s| s.to_str()); - name != Some(builder.config.rust_codegen_backends_dir.as_str()) && - name != Some("bin") - + name != Some(builder.config.rust_codegen_backends_dir.as_str()) && name != Some("bin") }); let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=std-is-standing-at-the-ready.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(builder)) - .arg("--output-dir").arg(&distdir(builder)) - .arg(format!("--package-name={}-{}", name, target)) - .arg(format!("--component-name=rust-std-{}", target)) - .arg("--legacy-manifest-dirs=rustlib,cargo"); + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=std-is-standing-at-the-ready.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg(format!("--package-name={}-{}", name, target)) + .arg(format!("--component-name=rust-std-{}", target)) + .arg("--legacy-manifest-dirs=rustlib,cargo"); builder.run(&mut cmd); builder.remove_dir(&image); distdir(builder).join(format!("{}-{}.tar.gz", name, target)) @@ -706,12 +753,15 @@ impl Step for Analysis { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("analysis").default_condition(builder.config.extended) + run.path("analysis") + .default_condition(builder.config.extended) } fn make_run(run: RunConfig) { run.builder.ensure(Analysis { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + compiler: run + .builder + .compiler(run.builder.top_stage, run.builder.config.build), target: run.target, }); } @@ -741,8 +791,11 @@ impl Step for Analysis { let image = tmpdir(builder).join(format!("{}-{}-image", name, target)); - let src = builder.stage_out(compiler, Mode::Std) - .join(target).join(builder.cargo_dir()).join("deps"); + let src = builder + .stage_out(compiler, Mode::Std) + .join(target) + .join(builder.cargo_dir()) + .join("deps"); let image_src = src.join("save-analysis"); let dst = image.join("lib/rustlib").join(target).join("analysis"); @@ -752,15 +805,18 @@ impl Step for Analysis { let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=save-analysis-saved.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(builder)) - .arg("--output-dir").arg(&distdir(builder)) - 
.arg(format!("--package-name={}-{}", name, target)) - .arg(format!("--component-name=rust-analysis-{}", target)) - .arg("--legacy-manifest-dirs=rustlib,cargo"); + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=save-analysis-saved.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg(format!("--package-name={}-{}", name, target)) + .arg(format!("--component-name=rust-analysis-{}", target)) + .arg("--legacy-manifest-dirs=rustlib,cargo"); builder.run(&mut cmd); builder.remove_dir(&image); distdir(builder).join(format!("{}-{}.tar.gz", name, target)) @@ -774,16 +830,15 @@ fn copy_src_dirs(builder: &Builder, src_dirs: &[&str], exclude_dirs: &[&str], ds None => return false, }; if spath.ends_with("~") || spath.ends_with(".pyc") { - return false + return false; } - if (spath.contains("llvm/test") || spath.contains("llvm\\test")) && - (spath.ends_with(".ll") || - spath.ends_with(".td") || - spath.ends_with(".s")) { - return false + if (spath.contains("llvm/test") || spath.contains("llvm\\test")) + && (spath.ends_with(".ll") || spath.ends_with(".td") || spath.ends_with(".s")) + { + return false; } if spath.contains("test/emscripten") || spath.contains("test\\emscripten") { - return false + return false; } let full_path = Path::new(dir).join(path); @@ -792,22 +847,41 @@ fn copy_src_dirs(builder: &Builder, src_dirs: &[&str], exclude_dirs: &[&str], ds } let excludes = [ - "CVS", "RCS", "SCCS", ".git", ".gitignore", ".gitmodules", - ".gitattributes", ".cvsignore", ".svn", ".arch-ids", "{arch}", - "=RELEASE-ID", "=meta-update", "=update", ".bzr", ".bzrignore", - ".bzrtags", ".hg", ".hgignore", ".hgrags", "_darcs", + "CVS", + "RCS", + "SCCS", + ".git", + ".gitignore", + ".gitmodules", + ".gitattributes", + ".cvsignore", + ".svn", + ".arch-ids", + "{arch}", + "=RELEASE-ID", + "=meta-update", + "=update", + ".bzr", + ".bzrignore", + ".bzrtags", + ".hg", + ".hgignore", + ".hgrags", + "_darcs", ]; - !path.iter() - .map(|s| s.to_str().unwrap()) - .any(|s| excludes.contains(&s)) + !path + .iter() + .map(|s| s.to_str().unwrap()) + .any(|s| excludes.contains(&s)) } // Copy the directories using our filter for item in src_dirs { let dst = &dst_dir.join(item); t!(fs::create_dir_all(dst)); - builder.cp_filtered( - &builder.src.join(item), dst, &|path| filter_fn(exclude_dirs, item, path)); + builder.cp_filtered(&builder.src.join(item), dst, &|path| { + filter_fn(exclude_dirs, item, path) + }); } } @@ -840,9 +914,7 @@ impl Step for Src { let dst_src = dst.join("rust"); t!(fs::create_dir_all(&dst_src)); - let src_files = [ - "Cargo.lock", - ]; + let src_files = ["Cargo.lock"]; // This is the reduced set of paths which will become the rust-src component // (essentially libstd and all of its path dependencies) let std_src_dirs = [ @@ -873,15 +945,18 @@ impl Step for Src { // Create source tarball in rust-installer format let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Awesome-Source.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(builder)) - .arg("--output-dir").arg(&distdir(builder)) - .arg(format!("--package-name={}", name)) - .arg("--component-name=rust-src") - .arg("--legacy-manifest-dirs=rustlib,cargo"); + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Awesome-Source.") + .arg("--image-dir") + .arg(&image) + 
.arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg(format!("--package-name={}", name)) + .arg("--component-name=rust-src") + .arg("--legacy-manifest-dirs=rustlib,cargo"); builder.run(&mut cmd); builder.remove_dir(&image); @@ -902,7 +977,8 @@ impl Step for PlainSourceTarball { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src").default_condition(builder.config.rust_dist_src) + run.path("src") + .default_condition(builder.config.rust_dist_src) } fn make_run(run: RunConfig) { @@ -933,9 +1009,7 @@ impl Step for PlainSourceTarball { "Cargo.toml", "Cargo.lock", ]; - let src_dirs = [ - "src", - ]; + let src_dirs = ["src"]; copy_src_dirs(builder, &src_dirs[..], &[], &plain_dst_src); @@ -963,19 +1037,19 @@ impl Step for PlainSourceTarball { builder.compiler(0, builder.config.build), Mode::ToolBootstrap, builder.config.build, - "install" + "install", ); cmd.arg("--force") - .arg("--debug") - .arg("--vers").arg(CARGO_VENDOR_VERSION) - .arg("cargo-vendor"); + .arg("--debug") + .arg("--vers") + .arg(CARGO_VENDOR_VERSION) + .arg("cargo-vendor"); builder.run(&mut cmd); } // Vendor all Cargo dependencies let mut cmd = Command::new(&builder.initial_cargo); - cmd.arg("vendor") - .current_dir(&plain_dst_src); + cmd.arg("vendor").current_dir(&plain_dst_src); builder.run(&mut cmd); } @@ -990,10 +1064,12 @@ impl Step for PlainSourceTarball { builder.info("running installer"); let mut cmd = rust_installer(builder); cmd.arg("tarball") - .arg("--input").arg(&plain_name) - .arg("--output").arg(&tarball) - .arg("--work-dir=.") - .current_dir(tmpdir(builder)); + .arg("--input") + .arg(&plain_name) + .arg("--output") + .arg(&tarball) + .arg("--work-dir=.") + .current_dir(tmpdir(builder)); builder.run(&mut cmd); distdir(builder).join(&format!("{}.tar.gz", plain_name)) } @@ -1009,10 +1085,10 @@ pub fn sanitize_sh(path: &Path) -> String { let mut ch = s.chars(); let drive = ch.next().unwrap_or('C'); if ch.next() != Some(':') { - return None + return None; } if ch.next() != Some('/') { - return None + return None; } Some(format!("/{}/{}", drive, &s[drive.len_utf8() + 2..])) } @@ -1060,16 +1136,22 @@ impl Step for Cargo { builder.create_dir(&image.join("etc/bash_completion.d")); let cargo = builder.ensure(tool::Cargo { compiler: builder.compiler(stage, builder.config.build), - target + target, }); builder.install(&cargo, &image.join("bin"), 0o755); for man in t!(etc.join("man").read_dir()) { let man = t!(man); builder.install(&man.path(), &image.join("share/man/man1"), 0o644); } - builder.install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644); - builder.copy(&etc.join("cargo.bashcomp.sh"), - &image.join("etc/bash_completion.d/cargo")); + builder.install( + &etc.join("_cargo"), + &image.join("share/zsh/site-functions"), + 0o644, + ); + builder.copy( + &etc.join("cargo.bashcomp.sh"), + &image.join("etc/bash_completion.d/cargo"), + ); let doc = image.join("share/doc/cargo"); builder.install(&src.join("README.md"), &doc, 0o644); builder.install(&src.join("LICENSE-MIT"), &doc, 0o644); @@ -1089,16 +1171,20 @@ impl Step for Cargo { // Generate the installer tarball let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rust-is-ready-to-roll.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(builder)) - .arg("--output-dir").arg(&distdir(builder)) - .arg("--non-installed-overlay").arg(&overlay) - 
.arg(format!("--package-name={}-{}", name, target)) - .arg("--component-name=cargo") - .arg("--legacy-manifest-dirs=rustlib,cargo"); + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-is-ready-to-roll.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg("--non-installed-overlay") + .arg(&overlay) + .arg(format!("--package-name={}-{}", name, target)) + .arg("--component-name=cargo") + .arg("--legacy-manifest-dirs=rustlib,cargo"); builder.run(&mut cmd); distdir(builder).join(format!("{}-{}.tar.gz", name, target)) } @@ -1144,10 +1230,16 @@ impl Step for Rls { // Prepare the image directory // We expect RLS to build, because we've exited this step above if tool // state for RLS isn't testing. - let rls = builder.ensure(tool::Rls { - compiler: builder.compiler(stage, builder.config.build), - target, extra_features: Vec::new() - }).or_else(|| { missing_tool("RLS", builder.build.config.missing_tools); None })?; + let rls = builder + .ensure(tool::Rls { + compiler: builder.compiler(stage, builder.config.build), + target, + extra_features: Vec::new(), + }) + .or_else(|| { + missing_tool("RLS", builder.build.config.missing_tools); + None + })?; builder.install(&rls, &image.join("bin"), 0o755); let doc = image.join("share/doc/rls"); @@ -1167,16 +1259,20 @@ impl Step for Rls { // Generate the installer tarball let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=RLS-ready-to-serve.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(builder)) - .arg("--output-dir").arg(&distdir(builder)) - .arg("--non-installed-overlay").arg(&overlay) - .arg(format!("--package-name={}-{}", name, target)) - .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg("--component-name=rls-preview"); + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=RLS-ready-to-serve.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg("--non-installed-overlay") + .arg(&overlay) + .arg(format!("--package-name={}-{}", name, target)) + .arg("--legacy-manifest-dirs=rustlib,cargo") + .arg("--component-name=rls-preview"); builder.run(&mut cmd); Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target))) @@ -1223,14 +1319,26 @@ impl Step for Clippy { // Prepare the image directory // We expect clippy to build, because we've exited this step above if tool // state for clippy isn't testing. 
- let clippy = builder.ensure(tool::Clippy { - compiler: builder.compiler(stage, builder.config.build), - target, extra_features: Vec::new() - }).or_else(|| { missing_tool("clippy", builder.build.config.missing_tools); None })?; - let cargoclippy = builder.ensure(tool::CargoClippy { - compiler: builder.compiler(stage, builder.config.build), - target, extra_features: Vec::new() - }).or_else(|| { missing_tool("cargo clippy", builder.build.config.missing_tools); None })?; + let clippy = builder + .ensure(tool::Clippy { + compiler: builder.compiler(stage, builder.config.build), + target, + extra_features: Vec::new(), + }) + .or_else(|| { + missing_tool("clippy", builder.build.config.missing_tools); + None + })?; + let cargoclippy = builder + .ensure(tool::CargoClippy { + compiler: builder.compiler(stage, builder.config.build), + target, + extra_features: Vec::new(), + }) + .or_else(|| { + missing_tool("cargo clippy", builder.build.config.missing_tools); + None + })?; builder.install(&clippy, &image.join("bin"), 0o755); builder.install(&cargoclippy, &image.join("bin"), 0o755); @@ -1251,16 +1359,20 @@ impl Step for Clippy { // Generate the installer tarball let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=clippy-ready-to-serve.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(builder)) - .arg("--output-dir").arg(&distdir(builder)) - .arg("--non-installed-overlay").arg(&overlay) - .arg(format!("--package-name={}-{}", name, target)) - .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg("--component-name=clippy-preview"); + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=clippy-ready-to-serve.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg("--non-installed-overlay") + .arg(&overlay) + .arg(format!("--package-name={}-{}", name, target)) + .arg("--legacy-manifest-dirs=rustlib,cargo") + .arg("--component-name=clippy-preview"); builder.run(&mut cmd); Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target))) @@ -1304,14 +1416,26 @@ impl Step for Rustfmt { builder.create_dir(&image); // Prepare the image directory - let rustfmt = builder.ensure(tool::Rustfmt { - compiler: builder.compiler(stage, builder.config.build), - target, extra_features: Vec::new() - }).or_else(|| { missing_tool("Rustfmt", builder.build.config.missing_tools); None })?; - let cargofmt = builder.ensure(tool::Cargofmt { - compiler: builder.compiler(stage, builder.config.build), - target, extra_features: Vec::new() - }).or_else(|| { missing_tool("Cargofmt", builder.build.config.missing_tools); None })?; + let rustfmt = builder + .ensure(tool::Rustfmt { + compiler: builder.compiler(stage, builder.config.build), + target, + extra_features: Vec::new(), + }) + .or_else(|| { + missing_tool("Rustfmt", builder.build.config.missing_tools); + None + })?; + let cargofmt = builder + .ensure(tool::Cargofmt { + compiler: builder.compiler(stage, builder.config.build), + target, + extra_features: Vec::new(), + }) + .or_else(|| { + missing_tool("Cargofmt", builder.build.config.missing_tools); + None + })?; builder.install(&rustfmt, &image.join("bin"), 0o755); builder.install(&cargofmt, &image.join("bin"), 0o755); @@ -1332,16 +1456,20 @@ impl Step for Rustfmt { // Generate the installer tarball let mut cmd = rust_installer(builder); cmd.arg("generate") - .arg("--product-name=Rust") - 
.arg("--rel-manifest-dir=rustlib") - .arg("--success-message=rustfmt-ready-to-fmt.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(builder)) - .arg("--output-dir").arg(&distdir(builder)) - .arg("--non-installed-overlay").arg(&overlay) - .arg(format!("--package-name={}-{}", name, target)) - .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg("--component-name=rustfmt-preview"); + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=rustfmt-ready-to-fmt.") + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg("--non-installed-overlay") + .arg(&overlay) + .arg(format!("--package-name={}-{}", name, target)) + .arg("--legacy-manifest-dirs=rustlib,cargo") + .arg("--component-name=rustfmt-preview"); builder.run(&mut cmd); Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target))) @@ -1362,7 +1490,8 @@ impl Step for Extended { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("extended").default_condition(builder.config.extended) + run.path("extended") + .default_condition(builder.config.extended) } fn make_run(run: RunConfig) { @@ -1392,10 +1521,13 @@ impl Step for Extended { let mingw_installer = builder.ensure(Mingw { host: target }); let analysis_installer = builder.ensure(Analysis { compiler: builder.compiler(stage, self.host), - target + target, }); - let docs_installer = builder.ensure(Docs { stage, host: target, }); + let docs_installer = builder.ensure(Docs { + stage, + host: target, + }); let std_installer = builder.ensure(Std { compiler: builder.compiler(stage, self.host), target, @@ -1448,12 +1580,20 @@ impl Step for Extended { .arg("--product-name=Rust") .arg("--rel-manifest-dir=rustlib") .arg("--success-message=Rust-is-ready-to-roll.") - .arg("--work-dir").arg(&work) - .arg("--output-dir").arg(&distdir(builder)) - .arg(format!("--package-name={}-{}", pkgname(builder, "rust"), target)) + .arg("--work-dir") + .arg(&work) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg(format!( + "--package-name={}-{}", + pkgname(builder, "rust"), + target + )) .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg("--input-tarballs").arg(input_tarballs) - .arg("--non-installed-overlay").arg(&overlay); + .arg("--input-tarballs") + .arg(input_tarballs) + .arg("--non-installed-overlay") + .arg(&overlay); builder.run(&mut cmd); let mut license = String::new(); @@ -1512,8 +1652,10 @@ impl Step for Extended { let pkgbuild = |component: &str| { let mut cmd = Command::new("pkgbuild"); - cmd.arg("--identifier").arg(format!("org.rust-lang.{}", component)) - .arg("--scripts").arg(pkg.join(component)) + cmd.arg("--identifier") + .arg(format!("org.rust-lang.{}", component)) + .arg("--scripts") + .arg(pkg.join(component)) .arg("--nopayload") .arg(pkg.join(component).with_extension("pkg")); builder.run(&mut cmd); @@ -1521,8 +1663,10 @@ impl Step for Extended { let prepare = |name: &str| { builder.create_dir(&pkg.join(name)); - builder.cp_r(&work.join(&format!("{}-{}", pkgname(builder, name), target)), - &pkg.join(name)); + builder.cp_r( + &work.join(&format!("{}-{}", pkgname(builder, name), target)), + &pkg.join(name), + ); builder.install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755); pkgbuild(name); }; @@ -1547,12 +1691,13 @@ impl Step for Extended { builder.create(&pkg.join("res/LICENSE.txt"), &license); builder.install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644); let mut cmd = Command::new("productbuild"); 
- cmd.arg("--distribution").arg(xform(&etc.join("pkg/Distribution.xml"))) - .arg("--resources").arg(pkg.join("res")) - .arg(distdir(builder).join(format!("{}-{}.pkg", - pkgname(builder, "rust"), - target))) - .arg("--package-path").arg(&pkg); + cmd.arg("--distribution") + .arg(xform(&etc.join("pkg/Distribution.xml"))) + .arg("--resources") + .arg(pkg.join("res")) + .arg(distdir(builder).join(format!("{}-{}.pkg", pkgname(builder, "rust"), target))) + .arg("--package-path") + .arg(&pkg); builder.run(&mut cmd); } @@ -1571,9 +1716,12 @@ impl Step for Extended { } else { name.to_string() }; - builder.cp_r(&work.join(&format!("{}-{}", pkgname(builder, name), target)) - .join(dir), - &exe.join(name)); + builder.cp_r( + &work + .join(&format!("{}-{}", pkgname(builder, name), target)) + .join(dir), + &exe.join(name), + ); builder.remove(&exe.join(name).join("manifest.in")); }; prepare("rustc"); @@ -1599,16 +1747,17 @@ impl Step for Extended { // Generate exe installer let mut cmd = Command::new("iscc"); - cmd.arg("rust.iss") - .current_dir(&exe); + cmd.arg("rust.iss").current_dir(&exe); if target.contains("windows-gnu") { cmd.arg("/dMINGW"); } add_env(builder, &mut cmd, target); builder.run(&mut cmd); - builder.install(&exe.join(format!("{}-{}.exe", pkgname(builder, "rust"), target)), - &distdir(builder), - 0o755); + builder.install( + &exe.join(format!("{}-{}.exe", pkgname(builder, "rust"), target)), + &distdir(builder), + 0o755, + ); // Generate msi installer let wix = PathBuf::from(env::var_os("WIX").unwrap()); @@ -1617,94 +1766,152 @@ impl Step for Extended { let light = wix.join("bin/light.exe"); let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"]; - builder.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rustc") - .args(&heat_flags) - .arg("-cg").arg("RustcGroup") - .arg("-dr").arg("Rustc") - .arg("-var").arg("var.RustcDir") - .arg("-out").arg(exe.join("RustcGroup.wxs"))); - builder.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-docs") - .args(&heat_flags) - .arg("-cg").arg("DocsGroup") - .arg("-dr").arg("Docs") - .arg("-var").arg("var.DocsDir") - .arg("-out").arg(exe.join("DocsGroup.wxs")) - .arg("-t").arg(etc.join("msi/squash-components.xsl"))); - builder.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("cargo") - .args(&heat_flags) - .arg("-cg").arg("CargoGroup") - .arg("-dr").arg("Cargo") - .arg("-var").arg("var.CargoDir") - .arg("-out").arg(exe.join("CargoGroup.wxs")) - .arg("-t").arg(etc.join("msi/remove-duplicates.xsl"))); - builder.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-std") - .args(&heat_flags) - .arg("-cg").arg("StdGroup") - .arg("-dr").arg("Std") - .arg("-var").arg("var.StdDir") - .arg("-out").arg(exe.join("StdGroup.wxs"))); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rustc") + .args(&heat_flags) + .arg("-cg") + .arg("RustcGroup") + .arg("-dr") + .arg("Rustc") + .arg("-var") + .arg("var.RustcDir") + .arg("-out") + .arg(exe.join("RustcGroup.wxs")), + ); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-docs") + .args(&heat_flags) + .arg("-cg") + .arg("DocsGroup") + .arg("-dr") + .arg("Docs") + .arg("-var") + .arg("var.DocsDir") + .arg("-out") + .arg(exe.join("DocsGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/squash-components.xsl")), + ); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("cargo") + .args(&heat_flags) + .arg("-cg") + .arg("CargoGroup") + .arg("-dr") + 
.arg("Cargo") + .arg("-var") + .arg("var.CargoDir") + .arg("-out") + .arg(exe.join("CargoGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/remove-duplicates.xsl")), + ); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-std") + .args(&heat_flags) + .arg("-cg") + .arg("StdGroup") + .arg("-dr") + .arg("Std") + .arg("-var") + .arg("var.StdDir") + .arg("-out") + .arg(exe.join("StdGroup.wxs")), + ); if rls_installer.is_some() { - builder.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rls") - .args(&heat_flags) - .arg("-cg").arg("RlsGroup") - .arg("-dr").arg("Rls") - .arg("-var").arg("var.RlsDir") - .arg("-out").arg(exe.join("RlsGroup.wxs")) - .arg("-t").arg(etc.join("msi/remove-duplicates.xsl"))); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rls") + .args(&heat_flags) + .arg("-cg") + .arg("RlsGroup") + .arg("-dr") + .arg("Rls") + .arg("-var") + .arg("var.RlsDir") + .arg("-out") + .arg(exe.join("RlsGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/remove-duplicates.xsl")), + ); } if clippy_installer.is_some() { - builder.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("clippy") - .args(&heat_flags) - .arg("-cg").arg("ClippyGroup") - .arg("-dr").arg("Clippy") - .arg("-var").arg("var.ClippyDir") - .arg("-out").arg(exe.join("ClippyGroup.wxs")) - .arg("-t").arg(etc.join("msi/remove-duplicates.xsl"))); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("clippy") + .args(&heat_flags) + .arg("-cg") + .arg("ClippyGroup") + .arg("-dr") + .arg("Clippy") + .arg("-var") + .arg("var.ClippyDir") + .arg("-out") + .arg(exe.join("ClippyGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/remove-duplicates.xsl")), + ); } - builder.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-analysis") - .args(&heat_flags) - .arg("-cg").arg("AnalysisGroup") - .arg("-dr").arg("Analysis") - .arg("-var").arg("var.AnalysisDir") - .arg("-out").arg(exe.join("AnalysisGroup.wxs")) - .arg("-t").arg(etc.join("msi/remove-duplicates.xsl"))); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-analysis") + .args(&heat_flags) + .arg("-cg") + .arg("AnalysisGroup") + .arg("-dr") + .arg("Analysis") + .arg("-var") + .arg("var.AnalysisDir") + .arg("-out") + .arg(exe.join("AnalysisGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/remove-duplicates.xsl")), + ); if target.contains("windows-gnu") { - builder.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-mingw") - .args(&heat_flags) - .arg("-cg").arg("GccGroup") - .arg("-dr").arg("Gcc") - .arg("-var").arg("var.GccDir") - .arg("-out").arg(exe.join("GccGroup.wxs"))); + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-mingw") + .args(&heat_flags) + .arg("-cg") + .arg("GccGroup") + .arg("-dr") + .arg("Gcc") + .arg("-var") + .arg("var.GccDir") + .arg("-out") + .arg(exe.join("GccGroup.wxs")), + ); } let candle = |input: &Path| { - let output = exe.join(input.file_stem().unwrap()) - .with_extension("wixobj"); - let arch = if target.contains("x86_64") {"x64"} else {"x86"}; + let output = exe + .join(input.file_stem().unwrap()) + .with_extension("wixobj"); + let arch = if target.contains("x86_64") { + "x64" + } else { + "x86" + }; let mut cmd = Command::new(&candle); cmd.current_dir(&exe) .arg("-nologo") @@ -1713,8 +1920,10 @@ impl Step for Extended { .arg("-dCargoDir=cargo") .arg("-dStdDir=rust-std") .arg("-dAnalysisDir=rust-analysis") - .arg("-arch").arg(&arch) - 
.arg("-out").arg(&output) + .arg("-arch") + .arg(&arch) + .arg("-out") + .arg(&output) .arg(&input); add_env(builder, &mut cmd, target); @@ -1755,9 +1964,12 @@ impl Step for Extended { let filename = format!("{}-{}.msi", pkgname(builder, "rust"), target); let mut cmd = Command::new(&light); cmd.arg("-nologo") - .arg("-ext").arg("WixUIExtension") - .arg("-ext").arg("WixUtilExtension") - .arg("-out").arg(exe.join(&filename)) + .arg("-ext") + .arg("WixUIExtension") + .arg("-ext") + .arg("WixUtilExtension") + .arg("-out") + .arg(exe.join(&filename)) .arg("rust.wixobj") .arg("ui.wixobj") .arg("rustwelcomedlg.wixobj") @@ -1784,7 +1996,10 @@ impl Step for Extended { builder.run(&mut cmd); if !builder.config.dry_run { - t!(fs::rename(exe.join(&filename), distdir(builder).join(&filename))); + t!(fs::rename( + exe.join(&filename), + distdir(builder).join(&filename) + )); } } } @@ -1793,29 +2008,27 @@ impl Step for Extended { fn add_env(builder: &Builder, cmd: &mut Command, target: Interned) { let mut parts = channel::CFG_RELEASE_NUM.split('.'); cmd.env("CFG_RELEASE_INFO", builder.rust_version()) - .env("CFG_RELEASE_NUM", channel::CFG_RELEASE_NUM) - .env("CFG_RELEASE", builder.rust_release()) - .env("CFG_VER_MAJOR", parts.next().unwrap()) - .env("CFG_VER_MINOR", parts.next().unwrap()) - .env("CFG_VER_PATCH", parts.next().unwrap()) - .env("CFG_VER_BUILD", "0") // just needed to build - .env("CFG_PACKAGE_VERS", builder.rust_package_vers()) - .env("CFG_PACKAGE_NAME", pkgname(builder, "rust")) - .env("CFG_BUILD", target) - .env("CFG_CHANNEL", &builder.config.channel); + .env("CFG_RELEASE_NUM", channel::CFG_RELEASE_NUM) + .env("CFG_RELEASE", builder.rust_release()) + .env("CFG_VER_MAJOR", parts.next().unwrap()) + .env("CFG_VER_MINOR", parts.next().unwrap()) + .env("CFG_VER_PATCH", parts.next().unwrap()) + .env("CFG_VER_BUILD", "0") // just needed to build + .env("CFG_PACKAGE_VERS", builder.rust_package_vers()) + .env("CFG_PACKAGE_NAME", pkgname(builder, "rust")) + .env("CFG_BUILD", target) + .env("CFG_CHANNEL", &builder.config.channel); if target.contains("windows-gnu") { - cmd.env("CFG_MINGW", "1") - .env("CFG_ABI", "GNU"); + cmd.env("CFG_MINGW", "1").env("CFG_ABI", "GNU"); } else { - cmd.env("CFG_MINGW", "0") - .env("CFG_ABI", "MSVC"); + cmd.env("CFG_MINGW", "0").env("CFG_ABI", "MSVC"); } if target.contains("x86_64") { - cmd.env("CFG_PLATFORM", "x64"); + cmd.env("CFG_PLATFORM", "x64"); } else { - cmd.env("CFG_PLATFORM", "x86"); + cmd.env("CFG_PLATFORM", "x86"); } } @@ -1845,9 +2058,13 @@ impl Step for HashSign { let addr = builder.config.dist_upload_addr.as_ref().unwrap_or_else(|| { panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n") }); - let file = builder.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| { - panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n") - }); + let file = builder + .config + .dist_gpg_password_file + .as_ref() + .unwrap_or_else(|| { + panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n") + }); let pass = t!(fs::read_to_string(&file)); let today = output(Command::new("date").arg("+%Y-%m-%d")); @@ -1877,12 +2094,8 @@ impl Step for HashSign { // LLVM tools are linked dynamically. // Note: This function does no yet support Windows but we also don't support // linking LLVM tools dynamically on Windows yet. 
-fn maybe_install_llvm_dylib(builder: &Builder, - target: Interned, - image: &Path) { - let src_libdir = builder - .llvm_out(target) - .join("lib"); +fn maybe_install_llvm_dylib(builder: &Builder, target: Interned, image: &Path) { + let src_libdir = builder.llvm_out(target).join("lib"); let dst_libdir = image.join("lib/rustlib").join(&*target).join("lib"); t!(fs::create_dir_all(&dst_libdir)); @@ -1891,7 +2104,7 @@ fn maybe_install_llvm_dylib(builder: &Builder, if llvm_dylib_path.exists() { builder.install(&llvm_dylib_path, &dst_libdir, 0o644); } - return + return; } // Usually libLLVM.so is a symlink to something like libLLVM-6.0.so. @@ -1900,11 +2113,13 @@ fn maybe_install_llvm_dylib(builder: &Builder, let llvm_dylib_path = src_libdir.join("libLLVM.so"); if llvm_dylib_path.exists() { let llvm_dylib_path = llvm_dylib_path.canonicalize().unwrap_or_else(|e| { - panic!("dist: Error calling canonicalize path `{}`: {}", - llvm_dylib_path.display(), e); + panic!( + "dist: Error calling canonicalize path `{}`: {}", + llvm_dylib_path.display(), + e + ); }); - builder.install(&llvm_dylib_path, &dst_libdir, 0o644); } } @@ -1938,8 +2153,10 @@ impl Step for LlvmTools { /* run only if llvm-config isn't used */ if let Some(config) = builder.config.target_config.get(&target) { if let Some(ref _s) = config.llvm_config { - builder.info(&format!("Skipping LlvmTools stage{} ({}): external LLVM", - stage, target)); + builder.info(&format!( + "Skipping LlvmTools stage{} ({}): external LLVM", + stage, target + )); return None; } } @@ -1953,12 +2170,8 @@ impl Step for LlvmTools { drop(fs::remove_dir_all(&image)); // Prepare the image directory - let src_bindir = builder - .llvm_out(target) - .join("bin"); - let dst_bindir = image.join("lib/rustlib") - .join(&*target) - .join("bin"); + let src_bindir = builder.llvm_out(target).join("bin"); + let dst_bindir = image.join("lib/rustlib").join(&*target).join("bin"); t!(fs::create_dir_all(&dst_bindir)); for tool in LLVM_TOOLS { let exe = src_bindir.join(exe(tool, &target)); @@ -1979,15 +2192,18 @@ impl Step for LlvmTools { .arg("--product-name=Rust") .arg("--rel-manifest-dir=rustlib") .arg("--success-message=llvm-tools-installed.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(builder)) - .arg("--output-dir").arg(&distdir(builder)) - .arg("--non-installed-overlay").arg(&overlay) + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg("--non-installed-overlay") + .arg(&overlay) .arg(format!("--package-name={}-{}", name, target)) .arg("--legacy-manifest-dirs=rustlib,cargo") .arg("--component-name=llvm-tools-preview"); - builder.run(&mut cmd); Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target))) } @@ -2008,9 +2224,7 @@ impl Step for Lldb { } fn make_run(run: RunConfig) { - run.builder.ensure(Lldb { - target: run.target, - }); + run.builder.ensure(Lldb { target: run.target }); } fn run(self, builder: &Builder) -> Option { @@ -2020,9 +2234,7 @@ impl Step for Lldb { return None; } - let bindir = builder - .llvm_out(target) - .join("bin"); + let bindir = builder.llvm_out(target).join("bin"); let lldb_exe = bindir.join(exe("lldb", &target)); if !lldb_exe.exists() { return None; @@ -2056,7 +2268,7 @@ impl Step for Lldb { if t!(entry.file_type()).is_symlink() { builder.copy_to_folder(&entry.path(), &dst); } else { - builder.install(&entry.path(), &dst, 0o755); + builder.install(&entry.path(), &dst, 0o755); } } } @@ -2075,8 +2287,7 @@ impl Step for Lldb { let 
entry = t!(entry); if let Ok(name) = entry.file_name().into_string() { if name.starts_with("python") { - let dst = root.join(libdir_name) - .join(entry.file_name()); + let dst = root.join(libdir_name).join(entry.file_name()); t!(fs::create_dir_all(&dst)); builder.cp_r(&entry.path(), &dst); break; @@ -2097,15 +2308,18 @@ impl Step for Lldb { .arg("--product-name=Rust") .arg("--rel-manifest-dir=rustlib") .arg("--success-message=lldb-installed.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(builder)) - .arg("--output-dir").arg(&distdir(builder)) - .arg("--non-installed-overlay").arg(&overlay) + .arg("--image-dir") + .arg(&image) + .arg("--work-dir") + .arg(&tmpdir(builder)) + .arg("--output-dir") + .arg(&distdir(builder)) + .arg("--non-installed-overlay") + .arg(&overlay) .arg(format!("--package-name={}-{}", name, target)) .arg("--legacy-manifest-dirs=rustlib,cargo") .arg("--component-name=lldb-preview"); - builder.run(&mut cmd); Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target))) } diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs index eec193c21f5db..ffa7d606a23b5 100644 --- a/src/bootstrap/doc.rs +++ b/src/bootstrap/doc.rs @@ -10,17 +10,17 @@ use std::collections::HashSet; use std::fs; use std::io; -use std::path::{PathBuf, Path}; +use std::path::{Path, PathBuf}; use crate::Mode; use build_helper::up_to_date; -use crate::util::symlink_dir; use crate::builder::{Builder, Compiler, RunConfig, ShouldRun, Step}; -use crate::tool::{self, prepare_tool_cargo, Tool, SourceType}; +use crate::cache::{Interned, INTERNER}; use crate::compile; -use crate::cache::{INTERNER, Interned}; use crate::config::Config; +use crate::tool::{self, prepare_tool_cargo, SourceType, Tool}; +use crate::util::symlink_dir; macro_rules! book { ($($name:ident, $path:expr, $book_name:expr;)+) => { @@ -105,13 +105,12 @@ impl Step for UnstableBook { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src/doc/unstable-book").default_condition(builder.config.docs) + run.path("src/doc/unstable-book") + .default_condition(builder.config.docs) } fn make_run(run: RunConfig) { - run.builder.ensure(UnstableBook { - target: run.target, - }); + run.builder.ensure(UnstableBook { target: run.target }); } fn run(self, builder: &Builder) { @@ -138,7 +137,8 @@ impl Step for CargoBook { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src/tools/cargo/src/doc/book").default_condition(builder.config.docs) + run.path("src/tools/cargo/src/doc/book") + .default_condition(builder.config.docs) } fn make_run(run: RunConfig) { @@ -162,11 +162,14 @@ impl Step for CargoBook { let _ = fs::remove_dir_all(&out); - builder.run(builder.tool_cmd(Tool::Rustbook) - .arg("build") - .arg(&src) - .arg("-d") - .arg(out)); + builder.run( + builder + .tool_cmd(Tool::Rustbook) + .arg("build") + .arg(&src) + .arg("-d") + .arg(out), + ); } } @@ -201,15 +204,11 @@ impl Step for RustbookSrc { let rustbook = builder.tool_exe(Tool::Rustbook); let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook); if up_to_date(&src, &index) && up_to_date(&rustbook, &index) { - return + return; } builder.info(&format!("Rustbook ({}) - {}", target, name)); let _ = fs::remove_dir_all(&out); - builder.run(rustbook_cmd - .arg("build") - .arg(&src) - .arg("-d") - .arg(out)); + builder.run(rustbook_cmd.arg("build").arg(&src).arg("-d").arg(out)); } } @@ -226,12 +225,15 @@ impl Step for TheBook { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - 
run.path("src/doc/book").default_condition(builder.config.docs) + run.path("src/doc/book") + .default_condition(builder.config.docs) } fn make_run(run: RunConfig) { run.builder.ensure(TheBook { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + compiler: run + .builder + .compiler(run.builder.top_stage, run.builder.config.build), target: run.target, name: "book", }); @@ -278,10 +280,7 @@ impl Step for TheBook { }); // build the version info page and CSS - builder.ensure(Standalone { - compiler, - target, - }); + builder.ensure(Standalone { compiler, target }); // build the redirect pages builder.info(&format!("Documenting book redirect pages ({})", target)); @@ -308,13 +307,17 @@ fn invoke_rustdoc(builder: &Builder, compiler: Compiler, target: Interned ShouldRun { let builder = run.builder; - run.krate("rustc-main").default_condition(builder.config.docs) + run.krate("rustc-main") + .default_condition(builder.config.docs) } fn make_run(run: RunConfig) { @@ -597,7 +625,10 @@ impl Step for WhitelistedRustc { fn run(self, builder: &Builder) { let stage = self.stage; let target = self.target; - builder.info(&format!("Documenting stage{} whitelisted compiler ({})", stage, target)); + builder.info(&format!( + "Documenting stage{} whitelisted compiler ({})", + stage, target + )); let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); let compiler = builder.compiler(stage, builder.config.build); @@ -611,8 +642,10 @@ impl Step for WhitelistedRustc { builder.ensure(Std { stage, target }); builder.ensure(compile::Rustc { compiler, target }); - let out_dir = builder.stage_out(compiler, Mode::Rustc) - .join(target).join("doc"); + let out_dir = builder + .stage_out(compiler, Mode::Rustc) + .join(target) + .join("doc"); // See docs in std above for why we symlink let my_out = builder.crate_doc_out(target); @@ -647,7 +680,8 @@ impl Step for Rustc { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.krate("rustc-main").default_condition(builder.config.docs) + run.krate("rustc-main") + .default_condition(builder.config.docs) } fn make_run(run: RunConfig) { @@ -690,7 +724,10 @@ impl Step for Rustc { // We do not symlink to the same shared folder that already contains std library // documentation from previous steps as we do not want to include that. - let out_dir = builder.stage_out(compiler, Mode::Rustc).join(target).join("doc"); + let out_dir = builder + .stage_out(compiler, Mode::Rustc) + .join(target) + .join("doc"); t!(symlink_dir_force(&builder.config, &out, &out_dir)); // Build cargo command. @@ -723,7 +760,7 @@ impl Step for Rustc { fn find_compiler_crates( builder: &Builder, name: &Interned, - crates: &mut HashSet> + crates: &mut HashSet>, ) { // Add current crate. crates.insert(*name); @@ -790,10 +827,13 @@ impl Step for Rustdoc { builder.ensure(Rustc { stage, target }); // Build rustdoc. - builder.ensure(tool::Rustdoc { host: compiler.host }); + builder.ensure(tool::Rustdoc { + host: compiler.host, + }); // Symlink compiler docs to the output directory of rustdoc documentation. - let out_dir = builder.stage_out(compiler, Mode::ToolRustc) + let out_dir = builder + .stage_out(compiler, Mode::ToolRustc) .join(target) .join("doc"); t!(fs::create_dir_all(&out_dir)); @@ -808,7 +848,7 @@ impl Step for Rustdoc { "doc", "src/tools/rustdoc", SourceType::InTree, - &[] + &[], ); // Only include compiler crates, no dependencies of those, such as `libc`. 
@@ -832,13 +872,12 @@ impl Step for ErrorIndex { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src/tools/error_index_generator").default_condition(builder.config.docs) + run.path("src/tools/error_index_generator") + .default_condition(builder.config.docs) } fn make_run(run: RunConfig) { - run.builder.ensure(ErrorIndex { - target: run.target, - }); + run.builder.ensure(ErrorIndex { target: run.target }); } /// Generates the HTML rendered error-index by running the @@ -854,8 +893,9 @@ impl Step for ErrorIndex { index.arg(out.join("error-index.html")); // FIXME: shouldn't have to pass this env var - index.env("CFG_BUILD", &builder.config.build) - .env("RUSTC_ERROR_METADATA_DST", builder.extended_error_dir()); + index + .env("CFG_BUILD", &builder.config.build) + .env("RUSTC_ERROR_METADATA_DST", builder.extended_error_dir()); builder.run(&mut index); } @@ -873,13 +913,12 @@ impl Step for UnstableBookGen { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src/tools/unstable-book-gen").default_condition(builder.config.docs) + run.path("src/tools/unstable-book-gen") + .default_condition(builder.config.docs) } fn make_run(run: RunConfig) { - run.builder.ensure(UnstableBookGen { - target: run.target, - }); + run.builder.ensure(UnstableBookGen { target: run.target }); } fn run(self, builder: &Builder) { @@ -912,9 +951,7 @@ fn symlink_dir_force(config: &Config, src: &Path, dst: &Path) -> io::Result<()> } else { // handle directory junctions on windows by falling back to // `remove_dir`. - fs::remove_file(dst).or_else(|_| { - fs::remove_dir(dst) - })?; + fs::remove_file(dst).or_else(|_| fs::remove_dir(dst))?; } } diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index 0f9a4271ac062..957b45a29b7df 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -83,7 +83,8 @@ impl Default for Subcommand { impl Flags { pub fn parse(args: &[String]) -> Flags { let mut extra_help = String::new(); - let mut subcommand_help = String::from("\ + let mut subcommand_help = String::from( + "\ Usage: x.py [options] [...] 
Subcommands: @@ -96,7 +97,7 @@ Subcommands: dist Build distribution artifacts install Install distribution artifacts -To learn more about a subcommand, run `./x.py -h`" +To learn more about a subcommand, run `./x.py -h`", ); let mut opts = Options::new(); @@ -110,12 +111,20 @@ To learn more about a subcommand, run `./x.py -h`" opts.optmulti("", "exclude", "build paths to exclude", "PATH"); opts.optopt("", "on-fail", "command to run on failure", "CMD"); opts.optflag("", "dry-run", "dry run; don't build anything"); - opts.optopt("", "stage", + opts.optopt( + "", + "stage", "stage to build (indicates compiler to use/test, e.g., stage 0 uses the \ bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)", - "N"); - opts.optmulti("", "keep-stage", "stage(s) to keep without recompiling \ - (pass multiple times to keep e.g., both stages 0 and 1)", "N"); + "N", + ); + opts.optmulti( + "", + "keep-stage", + "stage(s) to keep without recompiling \ + (pass multiple times to keep e.g., both stages 0 and 1)", + "N", + ); opts.optopt("", "src", "path to the root of the rust checkout", "DIR"); opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS"); opts.optflag("h", "help", "print this help message"); @@ -344,7 +353,8 @@ Arguments: format!( "Run `./x.py {} -h -v` to see a list of available paths.", subcommand - ).as_str(), + ) + .as_str(), ); } @@ -399,8 +409,10 @@ Arguments: dry_run: matches.opt_present("dry-run"), on_fail: matches.opt_str("on-fail"), rustc_error_format: matches.opt_str("error-format"), - keep_stage: matches.opt_strs("keep-stage") - .into_iter().map(|j| j.parse().unwrap()) + keep_stage: matches + .opt_strs("keep-stage") + .into_iter() + .map(|j| j.parse().unwrap()) .collect(), host: split(&matches.opt_strs("host")) .into_iter() diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs index 669aae68c635b..c60b16c8225af 100644 --- a/src/bootstrap/install.rs +++ b/src/bootstrap/install.rs @@ -5,7 +5,7 @@ use std::env; use std::fs; -use std::path::{Path, PathBuf, Component}; +use std::path::{Component, Path, PathBuf}; use std::process::Command; use crate::dist::{self, pkgname, sanitize_sh, tmpdir}; @@ -53,7 +53,7 @@ fn install_sh( package: &str, name: &str, stage: u32, - host: Option> + host: Option>, ) { builder.info(&format!("Install {} stage{} ({:?})", package, stage, host)); @@ -67,7 +67,11 @@ fn install_sh( let prefix = builder.config.prefix.as_ref().map_or(prefix_default, |p| { fs::canonicalize(p).unwrap_or_else(|_| panic!("could not canonicalize {}", p.display())) }); - let sysconfdir = builder.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default); + let sysconfdir = builder + .config + .sysconfdir + .as_ref() + .unwrap_or(&sysconfdir_default); let datadir = builder.config.datadir.as_ref().unwrap_or(&datadir_default); let docdir = builder.config.docdir.as_ref().unwrap_or(&docdir_default); let bindir = builder.config.bindir.as_ref().unwrap_or(&bindir_default); @@ -102,7 +106,9 @@ fn install_sh( let mut cmd = Command::new("sh"); cmd.current_dir(&empty_dir) - .arg(sanitize_sh(&tmpdir(builder).join(&package_name).join("install.sh"))) + .arg(sanitize_sh( + &tmpdir(builder).join(&package_name).join("install.sh"), + )) .arg(format!("--prefix={}", sanitize_sh(&prefix))) .arg(format!("--sysconfdir={}", sanitize_sh(&sysconfdir))) .arg(format!("--datadir={}", sanitize_sh(&datadir))) @@ -253,8 +259,7 @@ impl Step for Src { fn should_run(run: ShouldRun) -> ShouldRun { let config = &run.builder.config; - let cond = config.extended && - 
config.tools.as_ref().map_or(true, |t| t.contains("src")); + let cond = config.extended && config.tools.as_ref().map_or(true, |t| t.contains("src")); run.path("src").default_condition(cond) } diff --git a/src/bootstrap/job.rs b/src/bootstrap/job.rs index df492e0fdfd1c..0ffc37a767b72 100644 --- a/src/bootstrap/job.rs +++ b/src/bootstrap/job.rs @@ -29,10 +29,10 @@ #![allow(nonstandard_style, dead_code)] +use crate::Build; use std::env; use std::io; use std::mem; -use crate::Build; type HANDLE = *mut u8; type BOOL = i32; @@ -60,21 +60,23 @@ extern "system" { fn CreateJobObjectW(lpJobAttributes: *mut u8, lpName: *const u8) -> HANDLE; fn CloseHandle(hObject: HANDLE) -> BOOL; fn GetCurrentProcess() -> HANDLE; - fn OpenProcess(dwDesiredAccess: DWORD, - bInheritHandle: BOOL, - dwProcessId: DWORD) -> HANDLE; - fn DuplicateHandle(hSourceProcessHandle: HANDLE, - hSourceHandle: HANDLE, - hTargetProcessHandle: HANDLE, - lpTargetHandle: LPHANDLE, - dwDesiredAccess: DWORD, - bInheritHandle: BOOL, - dwOptions: DWORD) -> BOOL; + fn OpenProcess(dwDesiredAccess: DWORD, bInheritHandle: BOOL, dwProcessId: DWORD) -> HANDLE; + fn DuplicateHandle( + hSourceProcessHandle: HANDLE, + hSourceHandle: HANDLE, + hTargetProcessHandle: HANDLE, + lpTargetHandle: LPHANDLE, + dwDesiredAccess: DWORD, + bInheritHandle: BOOL, + dwOptions: DWORD, + ) -> BOOL; fn AssignProcessToJobObject(hJob: HANDLE, hProcess: HANDLE) -> BOOL; - fn SetInformationJobObject(hJob: HANDLE, - JobObjectInformationClass: JOBOBJECTINFOCLASS, - lpJobObjectInformation: LPVOID, - cbJobObjectInformationLength: DWORD) -> BOOL; + fn SetInformationJobObject( + hJob: HANDLE, + JobObjectInformationClass: JOBOBJECTINFOCLASS, + lpJobObjectInformation: LPVOID, + cbJobObjectInformationLength: DWORD, + ) -> BOOL; fn SetErrorMode(mode: UINT) -> UINT; } @@ -131,10 +133,12 @@ pub unsafe fn setup(build: &mut Build) { info.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_PRIORITY_CLASS; info.BasicLimitInformation.PriorityClass = BELOW_NORMAL_PRIORITY_CLASS; } - let r = SetInformationJobObject(job, - JobObjectExtendedLimitInformation, - &mut info as *mut _ as LPVOID, - mem::size_of_val(&info) as DWORD); + let r = SetInformationJobObject( + job, + JobObjectExtendedLimitInformation, + &mut info as *mut _ as LPVOID, + mem::size_of_val(&info) as DWORD, + ); assert!(r != 0, "{}", io::Error::last_os_error()); // Assign our process to this job object. Note that if this fails, one very @@ -149,7 +153,7 @@ pub unsafe fn setup(build: &mut Build) { let r = AssignProcessToJobObject(job, GetCurrentProcess()); if r == 0 { CloseHandle(job); - return + return; } // If we've got a parent process (e.g., the python script that called us) @@ -168,9 +172,15 @@ pub unsafe fn setup(build: &mut Build) { let parent = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid.parse().unwrap()); assert!(parent != 0 as *mut _, "{}", io::Error::last_os_error()); let mut parent_handle = 0 as *mut _; - let r = DuplicateHandle(GetCurrentProcess(), job, - parent, &mut parent_handle, - 0, FALSE, DUPLICATE_SAME_ACCESS); + let r = DuplicateHandle( + GetCurrentProcess(), + job, + parent, + &mut parent_handle, + 0, + FALSE, + DUPLICATE_SAME_ACCESS, + ); // If this failed, well at least we tried! 
An example of DuplicateHandle // failing in the past has been when the wrong python2 package spawned this diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index e460ef5a44e92..8001fab7d3584 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -114,15 +114,15 @@ extern crate build_helper; extern crate serde_derive; #[macro_use] extern crate lazy_static; -extern crate serde_json; +extern crate cc; extern crate cmake; extern crate filetime; -extern crate cc; extern crate getopts; extern crate num_cpus; -extern crate toml; -extern crate time; extern crate petgraph; +extern crate serde_json; +extern crate time; +extern crate toml; #[cfg(test)] #[macro_use] @@ -131,12 +131,12 @@ extern crate pretty_assertions; #[cfg(unix)] extern crate libc; -use std::cell::{RefCell, Cell}; -use std::collections::{HashSet, HashMap}; +use std::cell::{Cell, RefCell}; +use std::collections::{HashMap, HashSet}; use std::env; -use std::fs::{self, OpenOptions, File}; -use std::io::{self, Seek, SeekFrom, Write, Read}; -use std::path::{PathBuf, Path}; +use std::fs::{self, File, OpenOptions}; +use std::io::{self, Read, Seek, SeekFrom, Write}; +use std::path::{Path, PathBuf}; use std::process::{self, Command}; use std::slice; use std::str; @@ -146,30 +146,30 @@ use std::os::unix::fs::symlink as symlink_file; #[cfg(windows)] use std::os::windows::fs::symlink_file; -use build_helper::{run_silent, run_suppressed, try_run_silent, try_run_suppressed, output, mtime}; +use build_helper::{mtime, output, run_silent, run_suppressed, try_run_silent, try_run_suppressed}; use filetime::FileTime; -use crate::util::{exe, libdir, OutputFolder, CiEnv}; +use crate::util::{exe, libdir, CiEnv, OutputFolder}; +mod builder; +mod cache; mod cc_detect; mod channel; mod check; -mod test; mod clean; mod compile; -mod metadata; mod config; mod dist; mod doc; mod flags; mod install; +mod metadata; mod native; mod sanity; -pub mod util; -mod builder; -mod cache; +mod test; mod tool; mod toolstate; +pub mod util; #[cfg(windows)] mod job; @@ -187,13 +187,12 @@ mod job { #[cfg(any(target_os = "haiku", not(any(unix, windows))))] mod job { - pub unsafe fn setup(_build: &mut crate::Build) { - } + pub unsafe fn setup(_build: &mut crate::Build) {} } +use crate::cache::{Interned, INTERNER}; pub use crate::config::Config; use crate::flags::Subcommand; -use crate::cache::{Interned, INTERNER}; use crate::toolstate::ToolState; const LLVM_TOOLS: &[&str] = &[ @@ -280,10 +279,8 @@ pub struct Build { ci_env: CiEnv, delayed_failures: RefCell>, prerelease_version: Cell>, - tool_artifacts: RefCell, - HashMap)> - >>, + tool_artifacts: + RefCell, HashMap)>>>, } #[derive(Debug)] @@ -301,8 +298,7 @@ struct Crate { impl Crate { fn is_local(&self, build: &Build) -> bool { - self.path.starts_with(&build.config.src) && - !self.path.to_string_lossy().ends_with("_shim") + self.path.starts_with(&build.config.src) && !self.path.to_string_lossy().ends_with("_shim") } fn local_path(&self, build: &Build) -> PathBuf { @@ -347,7 +343,7 @@ impl Mode { pub fn is_tool(&self) -> bool { match self { Mode::ToolBootstrap | Mode::ToolRustc | Mode::ToolStd => true, - _ => false + _ => false, } } } @@ -362,12 +358,10 @@ impl Build { let out = config.out.clone(); let is_sudo = match env::var_os("SUDO_USER") { - Some(sudo_user) => { - match env::var_os("USER") { - Some(user) => user != sudo_user, - None => false, - } - } + Some(sudo_user) => match env::var_os("USER") { + Some(user) => user != sudo_user, + None => false, + }, None => false, }; let rust_info = 
channel::GitInfo::new(&config, &src); @@ -417,12 +411,23 @@ impl Build { // If local-rust is the same major.minor as the current version, then force a // local-rebuild let local_version_verbose = output( - Command::new(&build.initial_rustc).arg("--version").arg("--verbose")); + Command::new(&build.initial_rustc) + .arg("--version") + .arg("--verbose"), + ); let local_release = local_version_verbose - .lines().filter(|x| x.starts_with("release:")) - .next().unwrap().trim_left_matches("release:").trim(); + .lines() + .filter(|x| x.starts_with("release:")) + .next() + .unwrap() + .trim_left_matches("release:") + .trim(); let my_version = channel::CFG_RELEASE_NUM; - if local_release.split('.').take(2).eq(my_version.split('.').take(2)) { + if local_release + .split('.') + .take(2) + .eq(my_version.split('.').take(2)) + { build.verbose(&format!("auto-detected local-rebuild {}", local_release)); build.local_rebuild = true; } @@ -434,9 +439,7 @@ impl Build { } pub fn build_triple(&self) -> &[Interned] { - unsafe { - slice::from_raw_parts(&self.build, 1) - } + unsafe { slice::from_raw_parts(&self.build, 1) } } /// Executes the entire build, as configured by the flags and configuration. @@ -475,7 +478,10 @@ impl Build { // Check for postponed failures from `test --no-fail-fast`. let failures = self.delayed_failures.borrow(); if failures.len() > 0 { - println!("\n{} command(s) did not execute successfully:\n", failures.len()); + println!( + "\n{} command(s) did not execute successfully:\n", + failures.len() + ); for failure in failures.iter() { println!(" - {}\n", failure); } @@ -530,11 +536,18 @@ impl Build { /// Component directory that Cargo will produce output into (e.g. /// release/debug) fn cargo_dir(&self) -> &'static str { - if self.config.rust_optimize {"release"} else {"debug"} + if self.config.rust_optimize { + "release" + } else { + "debug" + } } fn tools_dir(&self, compiler: Compiler) -> PathBuf { - let out = self.out.join(&*compiler.host).join(format!("stage{}-tools-bin", compiler.stage)); + let out = self + .out + .join(&*compiler.host) + .join(format!("stage{}-tools-bin", compiler.stage)); t!(fs::create_dir_all(&out)); out } @@ -554,18 +567,18 @@ impl Build { Mode::ToolTest => "-tools", Mode::ToolRustc => "-tools", }; - self.out.join(&*compiler.host) - .join(format!("stage{}{}", compiler.stage, suffix)) + self.out + .join(&*compiler.host) + .join(format!("stage{}{}", compiler.stage, suffix)) } /// Returns the root output directory for all Cargo output in a given stage, /// running a particular compiler, whether or not we're building the /// standard library, and targeting the specified architecture. - fn cargo_out(&self, - compiler: Compiler, - mode: Mode, - target: Interned) -> PathBuf { - self.stage_out(compiler, mode).join(&*target).join(self.cargo_dir()) + fn cargo_out(&self, compiler: Compiler, mode: Mode, target: Interned) -> PathBuf { + self.stage_out(compiler, mode) + .join(&*target) + .join(self.cargo_dir()) } /// Root output directory for LLVM compiled for `target` @@ -612,7 +625,7 @@ impl Build { fn is_rust_llvm(&self, target: Interned) -> bool { match self.config.target_config.get(&target) { Some(ref c) => c.llvm_config.is_none(), - None => true + None => true, } } @@ -631,7 +644,8 @@ impl Build { // llvm subdirectory of the libdir. 
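[editor's note] A brief aside on the local-rebuild auto-detection in `Build::new` above: it takes the `release:` line of the bootstrap compiler's `rustc --version --verbose` output and compares only the `major.minor` components against `CFG_RELEASE_NUM`. A standalone sketch of that parsing and comparison, with illustrative version strings (not taken from any real build):

fn release_line(verbose_version: &str) -> Option<&str> {
    // Grab the `release:` line from `rustc --version --verbose` output,
    // mirroring the `lines().filter(..).next()` chain in the hunk above.
    verbose_version
        .lines()
        .find(|l| l.starts_with("release:"))
        .map(|l| l.trim_start_matches("release:").trim())
}

fn same_major_minor(local: &str, current: &str) -> bool {
    // Compare only the `major.minor` components, as the local-rebuild check does.
    local.split('.').take(2).eq(current.split('.').take(2))
}

fn main() {
    let sample = "rustc 1.32.0\nbinary: rustc\nrelease: 1.32.0\n"; // illustrative output
    let local = release_line(sample).unwrap();
    assert!(same_major_minor(local, "1.32.0"));  // would trigger local_rebuild
    assert!(!same_major_minor(local, "1.33.0")); // different minor: no rebuild
}
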
let llvm_libdir = output(Command::new(s).arg("--libdir")); let lib_filecheck = Path::new(llvm_libdir.trim()) - .join("llvm").join(exe("FileCheck", &*target)); + .join("llvm") + .join(exe("FileCheck", &*target)); if lib_filecheck.exists() { lib_filecheck } else { @@ -680,7 +694,8 @@ impl Build { /// Returns the libdir of the snapshot compiler. fn rustc_snapshot_libdir(&self) -> PathBuf { - self.rustc_snapshot_sysroot().join(libdir(&self.config.build)) + self.rustc_snapshot_sysroot() + .join(libdir(&self.config.build)) } /// Returns the sysroot of the snapshot compiler. @@ -690,14 +705,18 @@ impl Build { /// Runs a command, printing out nice contextual information if it fails. fn run(&self, cmd: &mut Command) { - if self.config.dry_run { return; } + if self.config.dry_run { + return; + } self.verbose(&format!("running: {:?}", cmd)); run_silent(cmd) } /// Runs a command, printing out nice contextual information if it fails. fn run_quiet(&self, cmd: &mut Command) { - if self.config.dry_run { return; } + if self.config.dry_run { + return; + } self.verbose(&format!("running: {:?}", cmd)); run_suppressed(cmd) } @@ -706,7 +725,9 @@ impl Build { /// Exits if the command failed to execute at all, otherwise returns its /// `status.success()`. fn try_run(&self, cmd: &mut Command) -> bool { - if self.config.dry_run { return true; } + if self.config.dry_run { + return true; + } self.verbose(&format!("running: {:?}", cmd)); try_run_silent(cmd) } @@ -715,7 +736,9 @@ impl Build { /// Exits if the command failed to execute at all, otherwise returns its /// `status.success()`. fn try_run_quiet(&self, cmd: &mut Command) -> bool { - if self.config.dry_run { return true; } + if self.config.dry_run { + return true; + } self.verbose(&format!("running: {:?}", cmd)); try_run_suppressed(cmd) } @@ -732,7 +755,9 @@ impl Build { } fn info(&self, msg: &str) { - if self.config.dry_run { return; } + if self.config.dry_run { + return; + } println!("{}", msg); } @@ -744,7 +769,7 @@ impl Build { fn debuginfo_map(&self, which: GitRepo) -> Option { if !self.config.rust_remap_debuginfo { - return None + return None; } let path = match which { @@ -767,10 +792,12 @@ impl Build { fn cflags(&self, target: Interned, which: GitRepo) -> Vec { // Filter out -O and /O (the optimization flags) that we picked up from // cc-rs because the build scripts will determine that for themselves. - let mut base = self.cc[&target].args().iter() - .map(|s| s.to_string_lossy().into_owned()) - .filter(|s| !s.starts_with("-O") && !s.starts_with("/O")) - .collect::>(); + let mut base = self.cc[&target] + .args() + .iter() + .map(|s| s.to_string_lossy().into_owned()) + .filter(|s| !s.starts_with("-O") && !s.starts_with("/O")) + .collect::>(); // If we're compiling on macOS then we add a few unconditional flags // indicating that we want libc++ (more filled out than libstdc++) and @@ -788,7 +815,7 @@ impl Build { } if let Some(map) = self.debuginfo_map(which) { - let cc = self.cc(target); + let cc = self.cc(target); if cc.ends_with("clang") || cc.ends_with("gcc") { base.push(format!("-fdebug-prefix-map={}", map)); } else if cc.ends_with("clang-cl.exe") { @@ -814,21 +841,27 @@ impl Build { match self.cxx.get(&target) { Some(p) => Ok(p.path()), None => Err(format!( - "target `{}` is not configured as a host, only as a target", - target)) + "target `{}` is not configured as a host, only as a target", + target + )), } } /// Returns the path to the linker for the given target if it needs to be overridden. 
fn linker(&self, target: Interned) -> Option<&Path> { - if let Some(linker) = self.config.target_config.get(&target) - .and_then(|c| c.linker.as_ref()) { + if let Some(linker) = self + .config + .target_config + .get(&target) + .and_then(|c| c.linker.as_ref()) + { Some(linker) - } else if target != self.config.build && - !target.contains("msvc") && - !target.contains("emscripten") && - !target.contains("wasm32") && - !target.contains("fuchsia") { + } else if target != self.config.build + && !target.contains("msvc") + && !target.contains("emscripten") + && !target.contains("wasm32") + && !target.contains("fuchsia") + { Some(self.cc(target)) } else { None @@ -840,14 +873,18 @@ impl Build { if target.contains("pc-windows-msvc") { Some(true) } else { - self.config.target_config.get(&target) + self.config + .target_config + .get(&target) .and_then(|t| t.crt_static) } } /// Returns the "musl root" for this `target`, if defined fn musl_root(&self, target: Interned) -> Option<&Path> { - self.config.target_config.get(&target) + self.config + .target_config + .get(&target) .and_then(|t| t.musl_root.as_ref()) .or(self.config.musl_root.as_ref()) .map(|p| &**p) @@ -855,15 +892,15 @@ impl Build { /// Returns true if this is a no-std `target`, if defined fn no_std(&self, target: Interned) -> Option { - self.config.target_config.get(&target) - .map(|t| t.no_std) + self.config.target_config.get(&target).map(|t| t.no_std) } /// Returns whether the target will be tested using the `remote-test-client` /// and `remote-test-server` binaries. fn remote_tested(&self, target: Interned) -> bool { - self.qemu_rootfs(target).is_some() || target.contains("android") || - env::var_os("TEST_DEVICE_ADDR").is_some() + self.qemu_rootfs(target).is_some() + || target.contains("android") + || env::var_os("TEST_DEVICE_ADDR").is_some() } /// Returns the root of the "rootfs" image that this target will be using, @@ -872,7 +909,9 @@ impl Build { /// If `Some` is returned then that means that tests for this target are /// emulated with QEMU and binaries will need to be shipped to the emulator. fn qemu_rootfs(&self, target: Interned) -> Option<&Path> { - self.config.target_config.get(&target) + self.config + .target_config + .get(&target) .and_then(|t| t.qemu_rootfs.as_ref()) .map(|p| &**p) } @@ -906,9 +945,9 @@ impl Build { /// When all of these conditions are met the build will lift artifacts from /// the previous stage forward. fn force_use_stage1(&self, compiler: Compiler, target: Interned) -> bool { - !self.config.full_bootstrap && - compiler.stage >= 2 && - (self.hosts.iter().any(|h| *h == target) || target == self.build) + !self.config.full_bootstrap + && compiler.stage >= 2 + && (self.hosts.iter().any(|h| *h == target) || target == self.build) } /// Given `num` in the form "a.b.c" return a "release string" which @@ -919,11 +958,13 @@ impl Build { fn release(&self, num: &str) -> String { match &self.config.channel[..] 
{ "stable" => num.to_string(), - "beta" => if self.rust_info.is_git() { - format!("{}-beta.{}", num, self.beta_prerelease_version()) - } else { - format!("{}-beta", num) - }, + "beta" => { + if self.rust_info.is_git() { + format!("{}-beta.{}", num, self.beta_prerelease_version()) + } else { + format!("{}-beta", num) + } + } "nightly" => format!("{}-nightly", num), _ => format!("{}-dev", num), } @@ -931,7 +972,7 @@ impl Build { fn beta_prerelease_version(&self) -> u32 { if let Some(s) = self.prerelease_version.get() { - return s + return s; } let beta = output( @@ -939,7 +980,7 @@ impl Build { .arg("ls-remote") .arg("origin") .arg("beta") - .current_dir(&self.src) + .current_dir(&self.src), ); let beta = beta.trim().split_whitespace().next().unwrap(); let master = output( @@ -947,7 +988,7 @@ impl Build { .arg("ls-remote") .arg("origin") .arg("master") - .current_dir(&self.src) + .current_dir(&self.src), ); let master = master.trim().split_whitespace().next().unwrap(); @@ -1063,7 +1104,7 @@ impl Build { let prefix = "version = \""; let suffix = "\""; if line.starts_with(prefix) && line.ends_with(suffix) { - return line[prefix.len()..line.len() - suffix.len()].to_string() + return line[prefix.len()..line.len() - suffix.len()].to_string(); } } @@ -1083,7 +1124,9 @@ impl Build { /// ends when the returned object is dropped. Folding can only be used in /// the Travis CI environment. pub fn fold_output(&self, name: F) -> Option - where D: Into, F: FnOnce() -> D + where + D: Into, + F: FnOnce() -> D, { if !self.config.dry_run && self.ci_env == CiEnv::Travis { Some(OutputFolder::new(name().into())) @@ -1145,7 +1188,7 @@ impl Build { // run_cargo for more information (in compile.rs). for part in contents.split(|b| *b == 0) { if part.is_empty() { - continue + continue; } let path = PathBuf::from(t!(str::from_utf8(part))); paths.push(path); @@ -1155,7 +1198,9 @@ impl Build { /// Copies a file from `src` to `dst` pub fn copy(&self, src: &Path, dst: &Path) { - if self.config.dry_run { return; } + if self.config.dry_run { + return; + } let _ = fs::remove_file(&dst); let metadata = t!(src.symlink_metadata()); if metadata.file_type().is_symlink() { @@ -1167,8 +1212,12 @@ impl Build { // just fall back to a slow `copy` operation. } else { if let Err(e) = fs::copy(src, dst) { - panic!("failed to copy `{}` to `{}`: {}", src.display(), - dst.display(), e) + panic!( + "failed to copy `{}` to `{}`: {}", + src.display(), + dst.display(), + e + ) } t!(fs::set_permissions(dst, metadata.permissions())); let atime = FileTime::from_last_access_time(&metadata); @@ -1180,7 +1229,9 @@ impl Build { /// Search-and-replaces within a file. (Not maximally efficiently: allocates a /// new string for each replacement.) pub fn replace_in_file(&self, path: &Path, replacements: &[(&str, &str)]) { - if self.config.dry_run { return; } + if self.config.dry_run { + return; + } let mut contents = String::new(); let mut file = t!(OpenOptions::new().read(true).write(true).open(path)); t!(file.read_to_string(&mut contents)); @@ -1195,7 +1246,9 @@ impl Build { /// Copies the `src` directory recursively to `dst`. Both are assumed to exist /// when this function is called. 
pub fn cp_r(&self, src: &Path, dst: &Path) { - if self.config.dry_run { return; } + if self.config.dry_run { + return; + } for f in t!(fs::read_dir(src)) { let f = t!(f); let path = f.path(); @@ -1247,7 +1300,9 @@ impl Build { } fn install(&self, src: &Path, dstdir: &Path, perms: u32) { - if self.config.dry_run { return; } + if self.config.dry_run { + return; + } let dst = dstdir.join(src.file_name().unwrap()); t!(fs::create_dir_all(dstdir)); drop(fs::remove_file(&dst)); @@ -1263,26 +1318,34 @@ impl Build { } fn create(&self, path: &Path, s: &str) { - if self.config.dry_run { return; } + if self.config.dry_run { + return; + } t!(fs::write(path, s)); } fn read(&self, path: &Path) -> String { - if self.config.dry_run { return String::new(); } + if self.config.dry_run { + return String::new(); + } t!(fs::read_to_string(path)) } fn create_dir(&self, dir: &Path) { - if self.config.dry_run { return; } + if self.config.dry_run { + return; + } t!(fs::create_dir_all(dir)) } fn remove_dir(&self, dir: &Path) { - if self.config.dry_run { return; } + if self.config.dry_run { + return; + } t!(fs::remove_dir_all(dir)) } - fn read_dir(&self, dir: &Path) -> impl Iterator { + fn read_dir(&self, dir: &Path) -> impl Iterator { let iter = match fs::read_dir(dir) { Ok(v) => v, Err(_) if self.config.dry_run => return vec![].into_iter(), @@ -1292,7 +1355,9 @@ impl Build { } fn remove(&self, f: &Path) { - if self.config.dry_run { return; } + if self.config.dry_run { + return; + } fs::remove_file(f).unwrap_or_else(|_| panic!("failed to remove {:?}", f)); } } @@ -1305,7 +1370,6 @@ fn chmod(path: &Path, perms: u32) { #[cfg(windows)] fn chmod(_path: &Path, _perms: u32) {} - impl<'a> Compiler { pub fn with_stage(mut self, stage: u32) -> Compiler { self.stage = stage; diff --git a/src/bootstrap/metadata.rs b/src/bootstrap/metadata.rs index 7fa377f310b4f..f1c78f5e4f836 100644 --- a/src/bootstrap/metadata.rs +++ b/src/bootstrap/metadata.rs @@ -1,13 +1,13 @@ use std::collections::HashMap; -use std::process::Command; -use std::path::PathBuf; use std::collections::HashSet; +use std::path::PathBuf; +use std::process::Command; use build_helper::output; use serde_json; -use crate::{Build, Crate}; use crate::cache::INTERNER; +use crate::{Build, Crate}; #[derive(Deserialize)] struct Output { @@ -71,10 +71,14 @@ fn build_krate(features: &str, build: &mut Build, resolves: &mut Vec "Debug", (true, false) => "Release", (true, true) => "RelWithDebInfo", @@ -123,28 +130,32 @@ impl Step for Llvm { &builder.config.llvm_experimental_targets[..] 
}; - let assertions = if builder.config.llvm_assertions {"ON"} else {"OFF"}; + let assertions = if builder.config.llvm_assertions { + "ON" + } else { + "OFF" + }; cfg.out_dir(&out_dir) - .profile(profile) - .define("LLVM_ENABLE_ASSERTIONS", assertions) - .define("LLVM_TARGETS_TO_BUILD", llvm_targets) - .define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets) - .define("LLVM_INCLUDE_EXAMPLES", "OFF") - .define("LLVM_INCLUDE_TESTS", "OFF") - .define("LLVM_INCLUDE_DOCS", "OFF") - .define("LLVM_INCLUDE_BENCHMARKS", "OFF") - .define("LLVM_ENABLE_ZLIB", "OFF") - .define("WITH_POLLY", "OFF") - .define("LLVM_ENABLE_TERMINFO", "OFF") - .define("LLVM_ENABLE_LIBEDIT", "OFF") - .define("LLVM_PARALLEL_COMPILE_JOBS", builder.jobs().to_string()) - .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap()) - .define("LLVM_DEFAULT_TARGET_TRIPLE", target); + .profile(profile) + .define("LLVM_ENABLE_ASSERTIONS", assertions) + .define("LLVM_TARGETS_TO_BUILD", llvm_targets) + .define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets) + .define("LLVM_INCLUDE_EXAMPLES", "OFF") + .define("LLVM_INCLUDE_TESTS", "OFF") + .define("LLVM_INCLUDE_DOCS", "OFF") + .define("LLVM_INCLUDE_BENCHMARKS", "OFF") + .define("LLVM_ENABLE_ZLIB", "OFF") + .define("WITH_POLLY", "OFF") + .define("LLVM_ENABLE_TERMINFO", "OFF") + .define("LLVM_ENABLE_LIBEDIT", "OFF") + .define("LLVM_PARALLEL_COMPILE_JOBS", builder.jobs().to_string()) + .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap()) + .define("LLVM_DEFAULT_TARGET_TRIPLE", target); if builder.config.llvm_thin_lto && !emscripten { cfg.define("LLVM_ENABLE_LTO", "Thin") - .define("LLVM_ENABLE_LLD", "ON"); + .define("LLVM_ENABLE_LLD", "ON"); } // By default, LLVM will automatically find OCaml and, if it finds it, @@ -153,8 +164,10 @@ impl Step for Llvm { // This causes problem for non-root builds of Rust. Side-step the issue // by setting LLVM_OCAML_INSTALL_PATH to a relative path, so it installs // in the prefix. - cfg.define("LLVM_OCAML_INSTALL_PATH", - env::var_os("LLVM_OCAML_INSTALL_PATH").unwrap_or_else(|| "usr/lib/ocaml".into())); + cfg.define( + "LLVM_OCAML_INSTALL_PATH", + env::var_os("LLVM_OCAML_INSTALL_PATH").unwrap_or_else(|| "usr/lib/ocaml".into()), + ); let want_lldb = builder.config.lldb_enabled && !self.emscripten; @@ -189,8 +202,14 @@ impl Step for Llvm { } if want_lldb { - cfg.define("LLVM_EXTERNAL_CLANG_SOURCE_DIR", builder.src.join("src/tools/clang")); - cfg.define("LLVM_EXTERNAL_LLDB_SOURCE_DIR", builder.src.join("src/tools/lldb")); + cfg.define( + "LLVM_EXTERNAL_CLANG_SOURCE_DIR", + builder.src.join("src/tools/clang"), + ); + cfg.define( + "LLVM_EXTERNAL_LLDB_SOURCE_DIR", + builder.src.join("src/tools/lldb"), + ); // For the time being, disable code signing. cfg.define("LLDB_CODESIGN_IDENTITY", ""); } else { @@ -214,17 +233,22 @@ impl Step for Llvm { // FIXME: if the llvm root for the build triple is overridden then we // should use llvm-tblgen from there, also should verify that it // actually exists most of the time in normal installs of LLVM. 
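[editor's note] Before the cross-compiling branch below, one aside: the `cfg` threaded through this hunk is a `cmake::Config` builder, and each `.define()` ends up as a `-D<KEY>=<VALUE>` argument on the generated cmake invocation. A minimal, hypothetical standalone sketch of the same pattern; the source path and define values here are placeholders, not bootstrap's real configuration:

// Hypothetical standalone use of the `cmake` crate's builder, mirroring the
// `.define()` calls above; "path/to/llvm" and the values are placeholders.
fn configure_llvm_sketch() -> cmake::Config {
    let mut cfg = cmake::Config::new("path/to/llvm");
    cfg.profile("Release")
        .define("LLVM_ENABLE_ASSERTIONS", "OFF")
        .define("LLVM_TARGETS_TO_BUILD", "X86")
        .define("LLVM_INCLUDE_TESTS", "OFF")
        .define("LLVM_TARGET_ARCH", "x86_64");
    // Calling `cfg.build()` would actually run cmake plus the native build tool.
    cfg
}
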
- let host = builder.llvm_out(builder.config.build).join("bin/llvm-tblgen"); + let host = builder + .llvm_out(builder.config.build) + .join("bin/llvm-tblgen"); cfg.define("CMAKE_CROSSCOMPILING", "True") - .define("LLVM_TABLEGEN", &host); + .define("LLVM_TABLEGEN", &host); if target.contains("netbsd") { - cfg.define("CMAKE_SYSTEM_NAME", "NetBSD"); + cfg.define("CMAKE_SYSTEM_NAME", "NetBSD"); } else if target.contains("freebsd") { - cfg.define("CMAKE_SYSTEM_NAME", "FreeBSD"); + cfg.define("CMAKE_SYSTEM_NAME", "FreeBSD"); } - cfg.define("LLVM_NATIVE_BUILD", builder.llvm_out(builder.config.build).join("build")); + cfg.define( + "LLVM_NATIVE_BUILD", + builder.llvm_out(builder.config.build).join("build"), + ); } if let Some(ref suffix) = builder.config.llvm_version_suffix { @@ -256,7 +280,7 @@ impl Step for Llvm { fn check_llvm_version(builder: &Builder, llvm_config: &Path) { if !builder.config.llvm_version_check { - return + return; } if builder.config.dry_run { @@ -265,25 +289,28 @@ fn check_llvm_version(builder: &Builder, llvm_config: &Path) { let mut cmd = Command::new(llvm_config); let version = output(cmd.arg("--version")); - let mut parts = version.split('.').take(2) + let mut parts = version + .split('.') + .take(2) .filter_map(|s| s.parse::().ok()); if let (Some(major), Some(_minor)) = (parts.next(), parts.next()) { if major >= 6 { - return + return; } } panic!("\n\nbad LLVM version: {}, need >=6.0\n\n", version) } -fn configure_cmake(builder: &Builder, - target: Interned, - cfg: &mut cmake::Config, - building_dist_binaries: bool) { +fn configure_cmake( + builder: &Builder, + target: Interned, + cfg: &mut cmake::Config, + building_dist_binaries: bool, +) { if builder.config.ninja { cfg.generator("Ninja"); } - cfg.target(&target) - .host(&builder.config.build); + cfg.target(&target).host(&builder.config.build); let sanitize_cc = |cc: &Path| { if target.contains("msvc") { @@ -297,7 +324,7 @@ fn configure_cmake(builder: &Builder, // vars that we'd otherwise configure. In that case we just skip this // entirely. if target.contains("msvc") && !builder.config.ninja { - return + return; } let (cc, cxx) = match builder.config.llvm_clang_cl { @@ -306,62 +333,61 @@ fn configure_cmake(builder: &Builder, }; // Handle msvc + ninja + ccache specially (this is what the bots use) - if target.contains("msvc") && - builder.config.ninja && - builder.config.ccache.is_some() - { - let mut wrap_cc = env::current_exe().expect("failed to get cwd"); - wrap_cc.set_file_name("sccache-plus-cl.exe"); - - cfg.define("CMAKE_C_COMPILER", sanitize_cc(&wrap_cc)) - .define("CMAKE_CXX_COMPILER", sanitize_cc(&wrap_cc)); - cfg.env("SCCACHE_PATH", - builder.config.ccache.as_ref().unwrap()) - .env("SCCACHE_TARGET", target) - .env("SCCACHE_CC", &cc) - .env("SCCACHE_CXX", &cxx); - - // Building LLVM on MSVC can be a little ludicrous at times. We're so far - // off the beaten path here that I'm not really sure this is even half - // supported any more. Here we're trying to: - // - // * Build LLVM on MSVC - // * Build LLVM with `clang-cl` instead of `cl.exe` - // * Build a project with `sccache` - // * Build for 32-bit as well - // * Build with Ninja - // - // For `cl.exe` there are different binaries to compile 32/64 bit which - // we use but for `clang-cl` there's only one which internally - // multiplexes via flags. As a result it appears that CMake's detection - // of a compiler's architecture and such on MSVC **doesn't** pass any - // custom flags we pass in CMAKE_CXX_FLAGS below. 
This means that if we - // use `clang-cl.exe` it's always diagnosed as a 64-bit compiler which - // definitely causes problems since all the env vars are pointing to - // 32-bit libraries. - // - // To hack around this... again... we pass an argument that's - // unconditionally passed in the sccache shim. This'll get CMake to - // correctly diagnose it's doing a 32-bit compilation and LLVM will - // internally configure itself appropriately. - if builder.config.llvm_clang_cl.is_some() && target.contains("i686") { - cfg.env("SCCACHE_EXTRA_ARGS", "-m32"); - } + if target.contains("msvc") && builder.config.ninja && builder.config.ccache.is_some() { + let mut wrap_cc = env::current_exe().expect("failed to get cwd"); + wrap_cc.set_file_name("sccache-plus-cl.exe"); + + cfg.define("CMAKE_C_COMPILER", sanitize_cc(&wrap_cc)) + .define("CMAKE_CXX_COMPILER", sanitize_cc(&wrap_cc)); + cfg.env("SCCACHE_PATH", builder.config.ccache.as_ref().unwrap()) + .env("SCCACHE_TARGET", target) + .env("SCCACHE_CC", &cc) + .env("SCCACHE_CXX", &cxx); + + // Building LLVM on MSVC can be a little ludicrous at times. We're so far + // off the beaten path here that I'm not really sure this is even half + // supported any more. Here we're trying to: + // + // * Build LLVM on MSVC + // * Build LLVM with `clang-cl` instead of `cl.exe` + // * Build a project with `sccache` + // * Build for 32-bit as well + // * Build with Ninja + // + // For `cl.exe` there are different binaries to compile 32/64 bit which + // we use but for `clang-cl` there's only one which internally + // multiplexes via flags. As a result it appears that CMake's detection + // of a compiler's architecture and such on MSVC **doesn't** pass any + // custom flags we pass in CMAKE_CXX_FLAGS below. This means that if we + // use `clang-cl.exe` it's always diagnosed as a 64-bit compiler which + // definitely causes problems since all the env vars are pointing to + // 32-bit libraries. + // + // To hack around this... again... we pass an argument that's + // unconditionally passed in the sccache shim. This'll get CMake to + // correctly diagnose it's doing a 32-bit compilation and LLVM will + // internally configure itself appropriately. + if builder.config.llvm_clang_cl.is_some() && target.contains("i686") { + cfg.env("SCCACHE_EXTRA_ARGS", "-m32"); + } // If ccache is configured we inform the build a little differently how // to invoke ccache while also invoking our compilers. 
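[editor's note] For context on the branch that follows: setting `CMAKE_C_COMPILER` to the ccache binary and `CMAKE_C_COMPILER_ARG1` to the real compiler is the usual "ccache as launcher" trick, roughly equivalent to prefixing every compile with `ccache cc ...`. A small hypothetical sketch of doing the same wrapping by hand (binary names and flags are placeholders):

use std::process::Command;

// Hypothetical helper: wrap the real compiler in ccache manually, the same
// effect the CMAKE_C_COMPILER / CMAKE_C_COMPILER_ARG1 pair achieves below.
fn ccache_wrapped(ccache: &str, real_cc: &str, args: &[&str]) -> Command {
    let mut cmd = Command::new(ccache);
    cmd.arg(real_cc).args(args);
    cmd
}

fn main() {
    let cmd = ccache_wrapped("ccache", "cc", &["-c", "foo.c", "-o", "foo.o"]);
    println!("{:?}", cmd); // ccache cc -c foo.c -o foo.o
}
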
} else if let Some(ref ccache) = builder.config.ccache { - cfg.define("CMAKE_C_COMPILER", ccache) - .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc)) - .define("CMAKE_CXX_COMPILER", ccache) - .define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx)); + cfg.define("CMAKE_C_COMPILER", ccache) + .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc)) + .define("CMAKE_CXX_COMPILER", ccache) + .define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx)); } else { - cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc)) - .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx)); + cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc)) + .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx)); } cfg.build_arg("-j").build_arg(builder.jobs().to_string()); - cfg.define("CMAKE_C_FLAGS", builder.cflags(target, GitRepo::Llvm).join(" ")); + cfg.define( + "CMAKE_C_FLAGS", + builder.cflags(target, GitRepo::Llvm).join(" "), + ); let mut cxxflags = builder.cflags(target, GitRepo::Llvm).join(" "); if building_dist_binaries { if builder.config.llvm_static_stdcpp && !target.contains("windows") { @@ -422,7 +448,7 @@ impl Step for Lld { let out_dir = builder.lld_out(target); let done_stamp = out_dir.join("lld-finished-building"); if done_stamp.exists() { - return out_dir + return out_dir; } let _folder = builder.fold_output(|| "lld"); @@ -451,10 +477,10 @@ impl Step for Lld { .unwrap() .with_file_name("llvm-config-wrapper"); cfg.out_dir(&out_dir) - .profile("Release") - .env("LLVM_CONFIG_REAL", llvm_config) - .define("LLVM_CONFIG_PATH", llvm_config_shim) - .define("LLVM_INCLUDE_TESTS", "OFF"); + .profile("Release") + .env("LLVM_CONFIG_REAL", llvm_config) + .define("LLVM_CONFIG_PATH", llvm_config_shim) + .define("LLVM_INCLUDE_TESTS", "OFF"); cfg.build(); @@ -489,7 +515,7 @@ impl Step for TestHelpers { let dst = builder.test_helpers_out(target); let src = builder.src.join("src/test/auxiliary/rust_test_helpers.c"); if up_to_date(&src, &dst.join("librust_test_helpers.a")) { - return + return; } let _folder = builder.fold_output(|| "build_test_helpers"); @@ -508,13 +534,13 @@ impl Step for TestHelpers { } cfg.cargo_metadata(false) - .out_dir(&dst) - .target(&target) - .host(&builder.config.build) - .opt_level(0) - .warnings(false) - .debug(false) - .file(builder.src.join("src/test/auxiliary/rust_test_helpers.c")) - .compile("rust_test_helpers"); + .out_dir(&dst) + .target(&target) + .host(&builder.config.build) + .opt_level(0) + .warnings(false) + .debug(false) + .file(builder.src.join("src/test/auxiliary/rust_test_helpers.c")) + .compile("rust_test_helpers"); } } diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs index fe547a6b151c2..31690744b5877 100644 --- a/src/bootstrap/sanity.rs +++ b/src/bootstrap/sanity.rs @@ -10,7 +10,7 @@ use std::collections::HashMap; use std::env; -use std::ffi::{OsString, OsStr}; +use std::ffi::{OsStr, OsString}; use std::fs; use std::path::PathBuf; use std::process::Command; @@ -28,26 +28,31 @@ impl Finder { fn new() -> Self { Self { cache: HashMap::new(), - path: env::var_os("PATH").unwrap_or_default() + path: env::var_os("PATH").unwrap_or_default(), } } fn maybe_have>(&mut self, cmd: S) -> Option { let cmd: OsString = cmd.as_ref().into(); let path = self.path.clone(); - self.cache.entry(cmd.clone()).or_insert_with(|| { - for path in env::split_paths(&path) { - let target = path.join(&cmd); - let mut cmd_alt = cmd.clone(); - cmd_alt.push(".exe"); - if target.is_file() || // some/path/git + self.cache + .entry(cmd.clone()) + .or_insert_with(|| { + for path in env::split_paths(&path) { + let target = path.join(&cmd); + let 
mut cmd_alt = cmd.clone(); + cmd_alt.push(".exe"); + if target.is_file() || // some/path/git target.with_extension("exe").exists() || // some/path/git.exe - target.join(&cmd_alt).exists() { // some/path/git/git.exe - return Some(target); + target.join(&cmd_alt).exists() + { + // some/path/git/git.exe + return Some(target); + } } - } - None - }).clone() + None + }) + .clone() } fn must_have>(&mut self, cmd: S) -> PathBuf { @@ -75,7 +80,9 @@ pub fn check(build: &mut Build) { } // We need cmake, but only if we're actually building LLVM or sanitizers. - let building_llvm = build.hosts.iter() + let building_llvm = build + .hosts + .iter() .filter_map(|host| build.config.target_config.get(host)) .any(|config| config.llvm_config.is_none()); if building_llvm || build.config.sanitizers { @@ -106,17 +113,29 @@ pub fn check(build: &mut Build) { } } - build.config.python = build.config.python.take().map(|p| cmd_finder.must_have(p)) + build.config.python = build + .config + .python + .take() + .map(|p| cmd_finder.must_have(p)) .or_else(|| env::var_os("BOOTSTRAP_PYTHON").map(PathBuf::from)) // set by bootstrap.py .or_else(|| cmd_finder.maybe_have("python2.7")) .or_else(|| cmd_finder.maybe_have("python2")) .or_else(|| Some(cmd_finder.must_have("python"))); - build.config.nodejs = build.config.nodejs.take().map(|p| cmd_finder.must_have(p)) + build.config.nodejs = build + .config + .nodejs + .take() + .map(|p| cmd_finder.must_have(p)) .or_else(|| cmd_finder.maybe_have("node")) .or_else(|| cmd_finder.maybe_have("nodejs")); - build.config.gdb = build.config.gdb.take().map(|p| cmd_finder.must_have(p)) + build.config.gdb = build + .config + .gdb + .take() + .map(|p| cmd_finder.must_have(p)) .or_else(|| cmd_finder.maybe_have("gdb")); // We're gonna build some custom C code here and there, host triples @@ -151,14 +170,16 @@ pub fn check(build: &mut Build) { for target in &build.targets { // Can't compile for iOS unless we're on macOS - if target.contains("apple-ios") && - !build.build.contains("apple-darwin") { + if target.contains("apple-ios") && !build.build.contains("apple-darwin") { panic!("the iOS target is only supported on macOS"); } if target.contains("-none-") { if build.no_std(*target).is_none() { - let target = build.config.target_config.entry(target.clone()) + let target = build + .config + .target_config + .entry(target.clone()) .or_default(); target.no_std = true; @@ -174,26 +195,33 @@ pub fn check(build: &mut Build) { // If this is a native target (host is also musl) and no musl-root is given, // fall back to the system toolchain in /usr before giving up if build.musl_root(*target).is_none() && build.config.build == *target { - let target = build.config.target_config.entry(target.clone()) + let target = build + .config + .target_config + .entry(target.clone()) .or_default(); target.musl_root = Some("/usr".into()); } match build.musl_root(*target) { Some(root) => { if fs::metadata(root.join("lib/libc.a")).is_err() { - panic!("couldn't find libc.a in musl dir: {}", - root.join("lib").display()); + panic!( + "couldn't find libc.a in musl dir: {}", + root.join("lib").display() + ); } if fs::metadata(root.join("lib/libunwind.a")).is_err() { - panic!("couldn't find libunwind.a in musl dir: {}", - root.join("lib").display()); + panic!( + "couldn't find libunwind.a in musl dir: {}", + root.join("lib").display() + ); } } - None => { - panic!("when targeting MUSL either the rust.musl-root \ - option or the target.$TARGET.musl-root option must \ - be specified in config.toml") - } + None => panic!( + "when 
targeting MUSL either the rust.musl-root \ + option or the target.$TARGET.musl-root option must \ + be specified in config.toml" + ), } } @@ -203,7 +231,8 @@ pub fn check(build: &mut Build) { // Studio, so detect that here and error. let out = output(Command::new("cmake").arg("--help")); if !out.contains("Visual Studio") { - panic!(" + panic!( + " cmake does not support Visual Studio generators. This is likely due to it being an msys/cygwin build of cmake, @@ -214,7 +243,8 @@ If you are building under msys2 try installing the mingw-w64-x86_64-cmake package instead of cmake: $ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake -"); +" + ); } } } @@ -226,8 +256,10 @@ $ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake if build.config.channel == "stable" { let stage0 = t!(fs::read_to_string(build.src.join("src/stage0.txt"))); if stage0.contains("\ndev:") { - panic!("bootstrapping from a dev compiler in a stable release, but \ - should only be bootstrapping from a released compiler!"); + panic!( + "bootstrapping from a dev compiler in a stable release, but \ + should only be bootstrapping from a released compiler!" + ); } } } diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index 2edc78ebaa94f..869971b300bcd 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -19,11 +19,11 @@ use crate::compile; use crate::dist; use crate::flags::Subcommand; use crate::native; -use crate::tool::{self, Tool, SourceType}; +use crate::tool::{self, SourceType, Tool}; use crate::toolstate::ToolState; use crate::util::{self, dylib_path, dylib_path_var}; use crate::Crate as CargoCrate; -use crate::{DocTests, Mode, GitRepo}; +use crate::{DocTests, GitRepo, Mode}; const ADB_TEST_DIR: &str = "/data/tmp/work"; @@ -211,14 +211,16 @@ impl Step for Cargo { compiler, target: self.host, }); - let mut cargo = tool::prepare_tool_cargo(builder, - compiler, - Mode::ToolRustc, - self.host, - "test", - "src/tools/cargo", - SourceType::Submodule, - &[]); + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + self.host, + "test", + "src/tools/cargo", + SourceType::Submodule, + &[], + ); if !builder.fail_fast { cargo.arg("--no-fail-fast"); @@ -274,18 +276,19 @@ impl Step for Rls { return; } - let mut cargo = tool::prepare_tool_cargo(builder, - compiler, - Mode::ToolRustc, - host, - "test", - "src/tools/rls", - SourceType::Submodule, - &[]); + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + host, + "test", + "src/tools/rls", + SourceType::Submodule, + &[], + ); builder.add_rustc_lib_path(compiler, &mut cargo); - cargo.arg("--") - .args(builder.config.cmd.test_args()); + cargo.arg("--").args(builder.config.cmd.test_args()); if try_run(builder, &mut cargo) { builder.save_toolstate("rls", ToolState::TestPass); @@ -330,14 +333,16 @@ impl Step for Rustfmt { return; } - let mut cargo = tool::prepare_tool_cargo(builder, - compiler, - Mode::ToolRustc, - host, - "test", - "src/tools/rustfmt", - SourceType::Submodule, - &[]); + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + host, + "test", + "src/tools/rustfmt", + SourceType::Submodule, + &[], + ); let dir = testdir(builder, compiler.host); t!(fs::create_dir_all(&dir)); @@ -386,14 +391,16 @@ impl Step for Miri { extra_features: Vec::new(), }); if let Some(miri) = miri { - let mut cargo = tool::prepare_tool_cargo(builder, - compiler, - Mode::ToolRustc, - host, - "test", - "src/tools/miri", - SourceType::Submodule, - &[]); + let mut cargo = 
tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + host, + "test", + "src/tools/miri", + SourceType::Submodule, + &[], + ); // miri tests need to know about the stage sysroot cargo.env("MIRI_SYSROOT", builder.sysroot(compiler)); @@ -438,14 +445,16 @@ impl Step for CompiletestTest { let host = self.host; let compiler = builder.compiler(stage, host); - let mut cargo = tool::prepare_tool_cargo(builder, - compiler, - Mode::ToolBootstrap, - host, - "test", - "src/tools/compiletest", - SourceType::InTree, - &[]); + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolBootstrap, + host, + "test", + "src/tools/compiletest", + SourceType::InTree, + &[], + ); try_run(builder, &mut cargo); } @@ -485,14 +494,16 @@ impl Step for Clippy { extra_features: Vec::new(), }); if let Some(clippy) = clippy { - let mut cargo = tool::prepare_tool_cargo(builder, - compiler, - Mode::ToolRustc, - host, - "test", - "src/tools/clippy", - SourceType::Submodule, - &[]); + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + host, + "test", + "src/tools/clippy", + SourceType::Submodule, + &[], + ); // clippy tests need to know about the stage sysroot cargo.env("SYSROOT", builder.sysroot(compiler)); @@ -605,9 +616,7 @@ impl Step for RustdocJS { }); builder.run(&mut command); } else { - builder.info( - "No nodejs found, skipping \"src/test/rustdoc-js\" tests" - ); + builder.info("No nodejs found, skipping \"src/test/rustdoc-js\" tests"); } } } @@ -693,38 +702,68 @@ fn testdir(builder: &Builder, host: Interned) -> PathBuf { macro_rules! default_test { ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr }) => { - test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: false }); - } + test!($name { + path: $path, + mode: $mode, + suite: $suite, + default: true, + host: false + }); + }; } macro_rules! default_test_with_compare_mode { ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, compare_mode: $compare_mode:expr }) => { - test_with_compare_mode!($name { path: $path, mode: $mode, suite: $suite, default: true, - host: false, compare_mode: $compare_mode }); - } + test_with_compare_mode!($name { + path: $path, + mode: $mode, + suite: $suite, + default: true, + host: false, + compare_mode: $compare_mode + }); + }; } macro_rules! host_test { ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr }) => { - test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: true }); - } + test!($name { + path: $path, + mode: $mode, + suite: $suite, + default: true, + host: true + }); + }; } macro_rules! test { ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, default: $default:expr, host: $host:expr }) => { - test_definitions!($name { path: $path, mode: $mode, suite: $suite, default: $default, - host: $host, compare_mode: None }); - } + test_definitions!($name { + path: $path, + mode: $mode, + suite: $suite, + default: $default, + host: $host, + compare_mode: None + }); + }; } macro_rules! 
test_with_compare_mode { ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, default: $default:expr, host: $host:expr, compare_mode: $compare_mode:expr }) => { - test_definitions!($name { path: $path, mode: $mode, suite: $suite, default: $default, - host: $host, compare_mode: Some($compare_mode) }); - } + test_definitions!($name { + path: $path, + mode: $mode, + suite: $suite, + default: $default, + host: $host, + compare_mode: Some($compare_mode) + }); + }; } macro_rules! test_definitions { @@ -771,7 +810,7 @@ macro_rules! test_definitions { }) } } - } + }; } default_test_with_compare_mode!(Ui { @@ -988,13 +1027,21 @@ impl Step for Compiletest { if builder.no_std(target) == Some(true) { // for no_std run-make (e.g., thumb*), // we need a host compiler which is called by cargo. - builder.ensure(compile::Std { compiler, target: compiler.host }); + builder.ensure(compile::Std { + compiler, + target: compiler.host, + }); } // HACK(eddyb) ensure that `libproc_macro` is available on the host. - builder.ensure(compile::Test { compiler, target: compiler.host }); + builder.ensure(compile::Test { + compiler, + target: compiler.host, + }); // Also provide `rust_test_helpers` for the host. - builder.ensure(native::TestHelpers { target: compiler.host }); + builder.ensure(native::TestHelpers { + target: compiler.host, + }); builder.ensure(native::TestHelpers { target }); builder.ensure(RemoteCopyLibs { compiler, target }); @@ -1101,23 +1148,22 @@ impl Step for Compiletest { let run = |cmd: &mut Command| { cmd.output().map(|output| { String::from_utf8_lossy(&output.stdout) - .lines().next().unwrap_or_else(|| { - panic!("{:?} failed {:?}", cmd, output) - }).to_string() + .lines() + .next() + .unwrap_or_else(|| panic!("{:?} failed {:?}", cmd, output)) + .to_string() }) }; let lldb_exe = if builder.config.lldb_enabled && !target.contains("emscripten") { // Test against the lldb that was just built. - builder.llvm_out(target) - .join("bin") - .join("lldb") + builder.llvm_out(target).join("bin").join("lldb") } else { PathBuf::from("lldb") }; let lldb_version = Command::new(&lldb_exe) .arg("--version") .output() - .map(|output| { String::from_utf8_lossy(&output.stdout).to_string() }) + .map(|output| String::from_utf8_lossy(&output.stdout).to_string()) .ok(); if let Some(ref vers) = lldb_version { cmd.arg("--lldb-version").arg(vers); @@ -1136,11 +1182,9 @@ impl Step for Compiletest { // Get test-args by striping suite path let mut test_args: Vec<&str> = paths .iter() - .map(|p| { - match p.strip_prefix(".") { - Ok(path) => path, - Err(_) => p, - } + .map(|p| match p.strip_prefix(".") { + Ok(path) => path, + Err(_) => p, }) .filter(|p| p.starts_with(suite_path) && p.is_file()) .map(|p| p.strip_prefix(suite_path).unwrap().to_str().unwrap()) @@ -1192,9 +1236,7 @@ impl Step for Compiletest { } } if suite == "run-make-fulldeps" && !builder.config.llvm_enabled { - builder.info( - "Ignoring run-make test suite as they generally don't work without LLVM" - ); + builder.info("Ignoring run-make test suite as they generally don't work without LLVM"); return; } @@ -1710,7 +1752,7 @@ impl Step for Crate { if !builder.config.wasm_syscall { builder.info( "Libstd was built without `wasm_syscall` feature enabled: \ - test output may not be visible." 
+ test output may not be visible.", ); } @@ -1786,14 +1828,16 @@ impl Step for CrateRustdoc { let target = compiler.host; builder.ensure(compile::Rustc { compiler, target }); - let mut cargo = tool::prepare_tool_cargo(builder, - compiler, - Mode::ToolRustc, - target, - test_kind.subcommand(), - "src/tools/rustdoc", - SourceType::InTree, - &[]); + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + target, + test_kind.subcommand(), + "src/tools/rustdoc", + SourceType::InTree, + &[], + ); if test_kind.subcommand() == "test" && !builder.fail_fast { cargo.arg("--no-fail-fast"); } diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs index 7782351a552d4..e2f46e4402855 100644 --- a/src/bootstrap/tool.rs +++ b/src/bootstrap/tool.rs @@ -1,20 +1,20 @@ -use std::fs; +use std::collections::HashSet; use std::env; +use std::fs; use std::iter; use std::path::PathBuf; -use std::process::{Command, exit}; -use std::collections::HashSet; +use std::process::{exit, Command}; -use crate::Mode; -use crate::Compiler; -use crate::builder::{Step, RunConfig, ShouldRun, Builder}; -use crate::util::{exe, add_lib_path}; +use crate::builder::{Builder, RunConfig, ShouldRun, Step}; +use crate::cache::Interned; +use crate::channel; +use crate::channel::GitInfo; use crate::compile; use crate::native; -use crate::channel::GitInfo; -use crate::channel; -use crate::cache::Interned; use crate::toolstate::ToolState; +use crate::util::{add_lib_path, exe}; +use crate::Compiler; +use crate::Mode; #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub enum SourceType { @@ -53,14 +53,10 @@ impl Step for ToolBuild { let is_optional_tool = self.is_optional_tool; match self.mode { - Mode::ToolRustc => { - builder.ensure(compile::Rustc { compiler, target }) - } - Mode::ToolStd => { - builder.ensure(compile::Std { compiler, target }) - } + Mode::ToolRustc => builder.ensure(compile::Rustc { compiler, target }), + Mode::ToolStd => builder.ensure(compile::Std { compiler, target }), Mode::ToolBootstrap => {} // uses downloaded stage0 compiler libs - _ => panic!("unexpected Mode for tool build") + _ => panic!("unexpected Mode for tool build"), } let mut cargo = prepare_tool_cargo( @@ -75,15 +71,15 @@ impl Step for ToolBuild { ); let _folder = builder.fold_output(|| format!("stage{}-{}", compiler.stage, tool)); - builder.info(&format!("Building stage{} tool {} ({})", compiler.stage, tool, target)); + builder.info(&format!( + "Building stage{} tool {} ({})", + compiler.stage, tool, target + )); let mut duplicates = Vec::new(); let is_expected = compile::stream_cargo(builder, &mut cargo, vec![], &mut |msg| { // Only care about big things like the RLS/Cargo for now match tool { - | "rls" - | "cargo" - | "clippy-driver" - => {} + "rls" | "cargo" | "clippy-driver" => {} _ => return, } @@ -91,10 +87,8 @@ impl Step for ToolBuild { compile::CargoMessage::CompilerArtifact { package_id, features, - filenames - } => { - (package_id, features, filenames) - } + filenames, + } => (package_id, features, filenames), _ => return, }; let features = features.iter().map(|s| s.to_string()).collect::>(); @@ -103,77 +97,92 @@ impl Step for ToolBuild { let val = (tool, PathBuf::from(&*path), features.clone()); // we're only interested in deduplicating rlibs for now if val.1.extension().and_then(|s| s.to_str()) != Some("rlib") { - continue + continue; } // Don't worry about libs that turn out to be host dependencies // or build scripts, we only care about target dependencies that // are in `deps`. 
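[editor's note] The check that follows walks three levels up from each artifact path (file name, `deps`, then the profile directory) to recover the target-triple directory and compare it against the current target. A standalone sketch of that path surgery; the example path is made up:

use std::path::Path;

// Mirrors the `.parent()` chain below: .../<target>/<profile>/deps/libfoo.rlib
// yields Some("<target>"); anything shallower yields None.
fn target_dir_of(artifact: &Path) -> Option<&str> {
    artifact
        .parent()?   // chop off file name
        .parent()?   // chop off `deps`
        .parent()?   // chop off `release`/`debug`
        .file_name()?
        .to_str()
}

fn main() {
    let p = Path::new("build/x86_64-unknown-linux-gnu/release/deps/libfoo.rlib");
    assert_eq!(target_dir_of(p), Some("x86_64-unknown-linux-gnu"));
}
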
- if let Some(maybe_target) = val.1 - .parent() // chop off file name - .and_then(|p| p.parent()) // chop off `deps` - .and_then(|p| p.parent()) // chop off `release` + if let Some(maybe_target) = val + .1 + .parent() // chop off file name + .and_then(|p| p.parent()) // chop off `deps` + .and_then(|p| p.parent()) // chop off `release` .and_then(|p| p.file_name()) .and_then(|p| p.to_str()) { if maybe_target != &*target { - continue + continue; } } let mut artifacts = builder.tool_artifacts.borrow_mut(); - let prev_artifacts = artifacts - .entry(target) - .or_default(); + let prev_artifacts = artifacts.entry(target).or_default(); if let Some(prev) = prev_artifacts.get(&*id) { if prev.1 != val.1 { - duplicates.push(( - id.to_string(), - val, - prev.clone(), - )); + duplicates.push((id.to_string(), val, prev.clone())); } - return + return; } prev_artifacts.insert(id.to_string(), val); } }); if is_expected && !duplicates.is_empty() { - println!("duplicate artifacts found when compiling a tool, this \ - typically means that something was recompiled because \ - a transitive dependency has different features activated \ - than in a previous build:\n"); - println!("the following dependencies are duplicated although they \ - have the same features enabled:"); + println!( + "duplicate artifacts found when compiling a tool, this \ + typically means that something was recompiled because \ + a transitive dependency has different features activated \ + than in a previous build:\n" + ); + println!( + "the following dependencies are duplicated although they \ + have the same features enabled:" + ); for (id, cur, prev) in duplicates.drain_filter(|(_, cur, prev)| cur.2 == prev.2) { println!(" {}", id); // same features - println!(" `{}` ({:?})\n `{}` ({:?})", cur.0, cur.1, prev.0, prev.1); + println!( + " `{}` ({:?})\n `{}` ({:?})", + cur.0, cur.1, prev.0, prev.1 + ); } println!("the following dependencies have different features:"); for (id, cur, prev) in duplicates { println!(" {}", id); let cur_features: HashSet<_> = cur.2.into_iter().collect(); let prev_features: HashSet<_> = prev.2.into_iter().collect(); - println!(" `{}` additionally enabled features {:?} at {:?}", - cur.0, &cur_features - &prev_features, cur.1); - println!(" `{}` additionally enabled features {:?} at {:?}", - prev.0, &prev_features - &cur_features, prev.1); + println!( + " `{}` additionally enabled features {:?} at {:?}", + cur.0, + &cur_features - &prev_features, + cur.1 + ); + println!( + " `{}` additionally enabled features {:?} at {:?}", + prev.0, + &prev_features - &cur_features, + prev.1 + ); } println!(); - println!("to fix this you will probably want to edit the local \ - src/tools/rustc-workspace-hack/Cargo.toml crate, as \ - that will update the dependency graph to ensure that \ - these crates all share the same feature set"); + println!( + "to fix this you will probably want to edit the local \ + src/tools/rustc-workspace-hack/Cargo.toml crate, as \ + that will update the dependency graph to ensure that \ + these crates all share the same feature set" + ); panic!("tools should not compile multiple copies of the same crate"); } - builder.save_toolstate(tool, if is_expected { - ToolState::TestFail - } else { - ToolState::BuildFail - }); + builder.save_toolstate( + tool, + if is_expected { + ToolState::TestFail + } else { + ToolState::BuildFail + }, + ); if !is_expected { if !is_optional_tool { @@ -182,7 +191,8 @@ impl Step for ToolBuild { None } } else { - let cargo_out = builder.cargo_out(compiler, self.mode, target) + let 
cargo_out = builder + .cargo_out(compiler, self.mode, target) .join(exe(tool, &compiler.host)); let bin = builder.tools_dir(compiler).join(exe(tool, &compiler.host)); builder.copy(&cargo_out, &bin); @@ -215,10 +225,10 @@ pub fn prepare_tool_cargo( let mut features = extra_features.iter().cloned().collect::>(); if builder.build.config.cargo_native_static { - if path.ends_with("cargo") || - path.ends_with("rls") || - path.ends_with("clippy") || - path.ends_with("rustfmt") + if path.ends_with("cargo") + || path.ends_with("rls") + || path.ends_with("clippy") + || path.ends_with("rustfmt") { cargo.env("LIBZ_SYS_STATIC", "1"); features.push("rustc-workspace-hack/all-static".to_string()); @@ -376,22 +386,26 @@ impl Step for RemoteTestServer { fn make_run(run: RunConfig) { run.builder.ensure(RemoteTestServer { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + compiler: run + .builder + .compiler(run.builder.top_stage, run.builder.config.build), target: run.target, }); } fn run(self, builder: &Builder) -> PathBuf { - builder.ensure(ToolBuild { - compiler: self.compiler, - target: self.target, - tool: "remote-test-server", - mode: Mode::ToolStd, - path: "src/tools/remote-test-server", - is_optional_tool: false, - source_type: SourceType::InTree, - extra_features: Vec::new(), - }).expect("expected to build -- essential tool") + builder + .ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "remote-test-server", + mode: Mode::ToolStd, + path: "src/tools/remote-test-server", + is_optional_tool: false, + source_type: SourceType::InTree, + extra_features: Vec::new(), + }) + .expect("expected to build -- essential tool") } } @@ -410,9 +424,7 @@ impl Step for Rustdoc { } fn make_run(run: RunConfig) { - run.builder.ensure(Rustdoc { - host: run.host, - }); + run.builder.ensure(Rustdoc { host: run.host }); } fn run(self, builder: &Builder) -> PathBuf { @@ -431,7 +443,10 @@ impl Step for Rustdoc { builder.compiler(target_compiler.stage - 1, builder.config.build) }; - builder.ensure(compile::Rustc { compiler: build_compiler, target }); + builder.ensure(compile::Rustc { + compiler: build_compiler, + target, + }); builder.ensure(compile::Rustc { compiler: build_compiler, target: builder.config.build, @@ -449,18 +464,25 @@ impl Step for Rustdoc { ); // Most tools don't get debuginfo, but rustdoc should. - cargo.env("RUSTC_DEBUGINFO", builder.config.rust_debuginfo.to_string()) - .env("RUSTC_DEBUGINFO_LINES", builder.config.rust_debuginfo_lines.to_string()); + cargo + .env("RUSTC_DEBUGINFO", builder.config.rust_debuginfo.to_string()) + .env( + "RUSTC_DEBUGINFO_LINES", + builder.config.rust_debuginfo_lines.to_string(), + ); let _folder = builder.fold_output(|| format!("stage{}-rustdoc", target_compiler.stage)); - builder.info(&format!("Building rustdoc for stage{} ({})", - target_compiler.stage, target_compiler.host)); + builder.info(&format!( + "Building rustdoc for stage{} ({})", + target_compiler.stage, target_compiler.host + )); builder.run(&mut cargo); // Cargo adds a number of paths to the dylib search path on windows, which results in // the wrong rustdoc being executed. To avoid the conflicting rustdocs, we name the "tool" // rustdoc a different name. - let tool_rustdoc = builder.cargo_out(build_compiler, Mode::ToolRustc, target) + let tool_rustdoc = builder + .cargo_out(build_compiler, Mode::ToolRustc, target) .join(exe("rustdoc_tool_binary", &target_compiler.host)); // don't create a stage0-sysroot/bin directory. 
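[editor's note] Several of the tool hunks above compute binary paths through the `exe(name, target)` helper from `util.rs`, which appends `.exe` only for Windows targets. A minimal sketch of that observable contract (not the exact bootstrap implementation):

// Sketch of the `exe` helper's contract: append `.exe` for Windows triples.
fn exe(name: &str, target: &str) -> String {
    if target.contains("windows") {
        format!("{}.exe", name)
    } else {
        name.to_string()
    }
}

fn main() {
    assert_eq!(
        exe("rustdoc_tool_binary", "x86_64-pc-windows-msvc"),
        "rustdoc_tool_binary.exe"
    );
    assert_eq!(exe("cargo", "x86_64-unknown-linux-gnu"), "cargo");
}
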
@@ -491,12 +513,15 @@ impl Step for Cargo { fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; - run.path("src/tools/cargo").default_condition(builder.config.extended) + run.path("src/tools/cargo") + .default_condition(builder.config.extended) } fn make_run(run: RunConfig) { run.builder.ensure(Cargo { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + compiler: run + .builder + .compiler(run.builder.top_stage, run.builder.config.build), target: run.target, }); } @@ -508,16 +533,18 @@ impl Step for Cargo { compiler: self.compiler, target: builder.config.build, }); - builder.ensure(ToolBuild { - compiler: self.compiler, - target: self.target, - tool: "cargo", - mode: Mode::ToolRustc, - path: "src/tools/cargo", - is_optional_tool: false, - source_type: SourceType::Submodule, - extra_features: Vec::new(), - }).expect("expected to build -- essential tool") + builder + .ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "cargo", + mode: Mode::ToolRustc, + path: "src/tools/cargo", + is_optional_tool: false, + source_type: SourceType::Submodule, + extra_features: Vec::new(), + }) + .expect("expected to build -- essential tool") } } @@ -633,7 +660,8 @@ impl<'a> Builder<'a> { } else { PathBuf::from(&self.sysroot_libdir(compiler, compiler.host)) }, - self.cargo_out(compiler, tool.get_mode(), *host).join("deps"), + self.cargo_out(compiler, tool.get_mode(), *host) + .join("deps"), ]; // On MSVC a tool may invoke a C compiler (e.g., compiletest in run-make @@ -644,7 +672,7 @@ impl<'a> Builder<'a> { let curpaths = env::split_paths(&curpaths).collect::>(); for &(ref k, ref v) in self.cc[&compiler.host].env() { if k != "PATH" { - continue + continue; } for path in env::split_paths(v) { if !curpaths.contains(&path) { @@ -664,9 +692,10 @@ impl<'a> Builder<'a> { lib_paths.push(llvm_bin_path); } else { let old_path = env::var_os("PATH").unwrap_or_default(); - let new_path = env::join_paths(iter::once(llvm_bin_path) - .chain(env::split_paths(&old_path))) - .expect("Could not add LLVM bin path to PATH"); + let new_path = env::join_paths( + iter::once(llvm_bin_path).chain(env::split_paths(&old_path)), + ) + .expect("Could not add LLVM bin path to PATH"); cmd.env("PATH", new_path); } } @@ -684,7 +713,8 @@ impl<'a> Builder<'a> { // Add the llvm/bin directory to PATH since it contains lots of // useful, platform-independent tools - let llvm_bin_path = llvm_config.parent() + let llvm_bin_path = llvm_config + .parent() .expect("Expected llvm-config to be contained in directory"); assert!(llvm_bin_path.is_dir()); Some(llvm_bin_path.to_path_buf()) diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs index 2880f1a084be0..dc0b8aa45b0ab 100644 --- a/src/bootstrap/util.rs +++ b/src/bootstrap/util.rs @@ -4,15 +4,15 @@ //! not a lot of interesting happenings here unfortunately. use std::env; -use std::str; use std::fs; use std::io::{self, Write}; use std::path::{Path, PathBuf}; use std::process::Command; -use std::time::{SystemTime, Instant}; +use std::str; +use std::time::{Instant, SystemTime}; -use crate::config::Config; use crate::builder::Builder; +use crate::config::Config; /// Returns the `name` as the filename of a static library for `target`. pub fn staticlib(name: &str, target: &str) -> String { @@ -41,7 +41,11 @@ pub fn is_dylib(name: &str) -> bool { /// Returns the corresponding relative library directory that the compiler's /// dylibs will be found in. 
pub fn libdir(target: &str) -> &'static str { - if target.contains("windows") {"bin"} else {"lib"} + if target.contains("windows") { + "bin" + } else { + "lib" + } } /// Adds a list of lookup paths to `cmd`'s dynamic library lookup path. @@ -75,7 +79,9 @@ pub fn dylib_path() -> Vec { /// `push` all components to `buf`. On windows, append `.exe` to the last component. pub fn push_exe_path(mut buf: PathBuf, components: &[&str]) -> PathBuf { - let (&file, components) = components.split_last().expect("at least one component required"); + let (&file, components) = components + .split_last() + .expect("at least one component required"); let mut file = file.to_owned(); if cfg!(windows) { @@ -99,9 +105,11 @@ impl Drop for TimeIt { fn drop(&mut self) { let time = self.1.elapsed(); if !self.0 { - println!("\tfinished in {}.{:03}", - time.as_secs(), - time.subsec_nanos() / 1_000_000); + println!( + "\tfinished in {}.{:03}", + time.as_secs(), + time.subsec_nanos() / 1_000_000 + ); } } } @@ -109,7 +117,9 @@ impl Drop for TimeIt { /// Symlinks two directories, using junctions on Windows and normal symlinks on /// Unix. pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> { - if config.dry_run { return Ok(()); } + if config.dry_run { + return Ok(()); + } let _ = fs::remove_dir(dest); return symlink_dir_inner(src, dest); @@ -129,9 +139,9 @@ pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> { #[cfg(windows)] #[allow(nonstandard_style)] fn symlink_dir_inner(target: &Path, junction: &Path) -> io::Result<()> { - use std::ptr; use std::ffi::OsStr; use std::os::windows::ffi::OsStrExt; + use std::ptr; const MAXIMUM_REPARSE_DATA_BUFFER_SIZE: usize = 16 * 1024; const GENERIC_WRITE: DWORD = 0x40000000; @@ -167,22 +177,25 @@ pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> { } extern "system" { - fn CreateFileW(lpFileName: LPCWSTR, - dwDesiredAccess: DWORD, - dwShareMode: DWORD, - lpSecurityAttributes: LPSECURITY_ATTRIBUTES, - dwCreationDisposition: DWORD, - dwFlagsAndAttributes: DWORD, - hTemplateFile: HANDLE) - -> HANDLE; - fn DeviceIoControl(hDevice: HANDLE, - dwIoControlCode: DWORD, - lpInBuffer: LPVOID, - nInBufferSize: DWORD, - lpOutBuffer: LPVOID, - nOutBufferSize: DWORD, - lpBytesReturned: LPDWORD, - lpOverlapped: LPOVERLAPPED) -> BOOL; + fn CreateFileW( + lpFileName: LPCWSTR, + dwDesiredAccess: DWORD, + dwShareMode: DWORD, + lpSecurityAttributes: LPSECURITY_ATTRIBUTES, + dwCreationDisposition: DWORD, + dwFlagsAndAttributes: DWORD, + hTemplateFile: HANDLE, + ) -> HANDLE; + fn DeviceIoControl( + hDevice: HANDLE, + dwIoControlCode: DWORD, + lpInBuffer: LPVOID, + nInBufferSize: DWORD, + lpOutBuffer: LPVOID, + nOutBufferSize: DWORD, + lpBytesReturned: LPDWORD, + lpOverlapped: LPOVERLAPPED, + ) -> BOOL; fn CloseHandle(hObject: HANDLE) -> BOOL; } @@ -200,17 +213,18 @@ pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> { let path = to_u16s(junction)?; unsafe { - let h = CreateFileW(path.as_ptr(), - GENERIC_WRITE, - FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE, - 0 as *mut _, - OPEN_EXISTING, - FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, - ptr::null_mut()); + let h = CreateFileW( + path.as_ptr(), + GENERIC_WRITE, + FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE, + 0 as *mut _, + OPEN_EXISTING, + FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, + ptr::null_mut(), + ); let mut data = [0u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE]; - let db = data.as_mut_ptr() - as *mut 
REPARSE_MOUNTPOINT_DATA_BUFFER; + let db = data.as_mut_ptr() as *mut REPARSE_MOUNTPOINT_DATA_BUFFER; let buf = &mut (*db).ReparseTarget as *mut u16; let mut i = 0; // FIXME: this conversion is very hacky @@ -225,17 +239,19 @@ pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> { (*db).ReparseTag = IO_REPARSE_TAG_MOUNT_POINT; (*db).ReparseTargetMaximumLength = (i * 2) as WORD; (*db).ReparseTargetLength = ((i - 1) * 2) as WORD; - (*db).ReparseDataLength = - (*db).ReparseTargetLength as DWORD + 12; + (*db).ReparseDataLength = (*db).ReparseTargetLength as DWORD + 12; let mut ret = 0; - let res = DeviceIoControl(h as *mut _, - FSCTL_SET_REPARSE_POINT, - data.as_ptr() as *mut _, - (*db).ReparseDataLength + 8, - ptr::null_mut(), 0, - &mut ret, - ptr::null_mut()); + let res = DeviceIoControl( + h as *mut _, + FSCTL_SET_REPARSE_POINT, + data.as_ptr() as *mut _, + (*db).ReparseDataLength + 8, + ptr::null_mut(), + 0, + &mut ret, + ptr::null_mut(), + ); let out = if res == 0 { Err(io::Error::last_os_error()) @@ -274,7 +290,10 @@ impl OutputFolder { // the ANSI escape code to clear from the cursor to end of line. // Travis seems to have trouble when _not_ using "\r\x1b[0K", that will // randomly put lines to the top of the webpage. - print!("travis_fold:start:{0}\r\x1b[0Ktravis_time:start:{0}\r\x1b[0K", name); + print!( + "travis_fold:start:{0}\r\x1b[0Ktravis_time:start:{0}\r\x1b[0K", + name + ); OutputFolder { name, start_time: SystemTime::now(), @@ -300,7 +319,7 @@ impl Drop for OutputFolder { let finish = end_time.duration_since(UNIX_EPOCH); println!( "travis_fold:end:{0}\r\x1b[0K\n\ - travis_time:end:{0}:start={1},finish={2},duration={3}\r\x1b[0K", + travis_time:end:{0}:start={1},finish={2},duration={3}\r\x1b[0K", self.name, to_nanos(start), to_nanos(finish), diff --git a/src/build_helper/lib.rs b/src/build_helper/lib.rs index 5a704e557751d..46e05da28cd22 100644 --- a/src/build_helper/lib.rs +++ b/src/build_helper/lib.rs @@ -1,9 +1,9 @@ use std::fs::File; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; +use std::thread; use std::time::{SystemTime, UNIX_EPOCH}; use std::{env, fs}; -use std::thread; /// A helper macro to `unwrap` a result except also print out details like: /// @@ -92,8 +92,11 @@ pub fn gnu_target(target: &str) -> &str { } pub fn make(host: &str) -> PathBuf { - if host.contains("bitrig") || host.contains("dragonfly") || host.contains("freebsd") - || host.contains("netbsd") || host.contains("openbsd") + if host.contains("bitrig") + || host.contains("dragonfly") + || host.contains("freebsd") + || host.contains("netbsd") + || host.contains("openbsd") { PathBuf::from("gmake") } else { @@ -120,7 +123,8 @@ pub fn output(cmd: &mut Command) -> String { } pub fn rerun_if_changed_anything_in_dir(dir: &Path) { - let mut stack = dir.read_dir() + let mut stack = dir + .read_dir() .unwrap() .map(|e| e.unwrap()) .filter(|e| &*e.file_name() != ".git") @@ -178,7 +182,7 @@ impl NativeLibBoilerplate { /// ensure it's linked against correctly. 
pub fn fixup_sanitizer_lib_name(&self, sanitizer_name: &str) { if env::var("TARGET").unwrap() != "x86_64-apple-darwin" { - return + return; } let dir = self.out_dir.join("build/lib/darwin"); @@ -221,8 +225,8 @@ pub fn native_lib_boilerplate( ) -> Result { rerun_if_changed_anything_in_dir(src_dir); - let out_dir = env::var_os("RUSTBUILD_NATIVE_DIR").unwrap_or_else(|| - env::var_os("OUT_DIR").unwrap()); + let out_dir = + env::var_os("RUSTBUILD_NATIVE_DIR").unwrap_or_else(|| env::var_os("OUT_DIR").unwrap()); let out_dir = PathBuf::from(out_dir).join(out_name); t!(fs::create_dir_all(&out_dir)); if link_name.contains('=') { @@ -246,9 +250,9 @@ pub fn native_lib_boilerplate( } } -pub fn sanitizer_lib_boilerplate(sanitizer_name: &str) - -> Result<(NativeLibBoilerplate, String), ()> -{ +pub fn sanitizer_lib_boilerplate( + sanitizer_name: &str, +) -> Result<(NativeLibBoilerplate, String), ()> { let (link_name, search_path, apple) = match &*env::var("TARGET").unwrap() { "x86_64-unknown-linux-gnu" => ( format!("clang_rt.{}-x86_64", sanitizer_name), @@ -270,12 +274,7 @@ pub fn sanitizer_lib_boilerplate(sanitizer_name: &str) // The source for `compiler-rt` comes from the `compiler-builtins` crate, so // load our env var set by cargo to find the source code. let dir = env::var_os("DEP_COMPILER_RT_COMPILER_RT").unwrap(); - let lib = native_lib_boilerplate( - dir.as_ref(), - sanitizer_name, - &to_link, - search_path, - )?; + let lib = native_lib_boilerplate(dir.as_ref(), sanitizer_name, &to_link, search_path)?; Ok((lib, link_name)) } diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index 096cb51e0d3ef..c32b0f0a4406c 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -21,10 +21,7 @@ extern "Rust" { #[rustc_allocator_nounwind] fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize); #[rustc_allocator_nounwind] - fn __rust_realloc(ptr: *mut u8, - old_size: usize, - align: usize, - new_size: usize) -> *mut u8; + fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8; #[rustc_allocator_nounwind] fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8; } @@ -154,12 +151,12 @@ unsafe impl Alloc for Global { } #[inline] - unsafe fn realloc(&mut self, - ptr: NonNull, - layout: Layout, - new_size: usize) - -> Result, AllocErr> - { + unsafe fn realloc( + &mut self, + ptr: NonNull, + layout: Layout, + new_size: usize, + ) -> Result, AllocErr> { NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr) } @@ -228,14 +225,15 @@ pub fn handle_alloc_error(layout: Layout) -> ! { mod tests { extern crate test; use self::test::Bencher; + use alloc::{handle_alloc_error, Alloc, Global, Layout}; use boxed::Box; - use alloc::{Global, Alloc, Layout, handle_alloc_error}; #[test] fn allocate_zeroed() { unsafe { let layout = Layout::from_size_align(1024, 1).unwrap(); - let ptr = Global.alloc_zeroed(layout.clone()) + let ptr = Global + .alloc_zeroed(layout.clone()) .unwrap_or_else(|_| handle_alloc_error(layout)); let mut i = ptr.cast::().as_ptr(); diff --git a/src/liballoc/benches/btree/map.rs b/src/liballoc/benches/btree/map.rs index a6f584534d174..9749e2578164c 100644 --- a/src/liballoc/benches/btree/map.rs +++ b/src/liballoc/benches/btree/map.rs @@ -1,11 +1,11 @@ +use rand::{seq::SliceRandom, thread_rng, Rng}; +use std::collections::BTreeMap; use std::iter::Iterator; use std::vec::Vec; -use std::collections::BTreeMap; -use rand::{Rng, seq::SliceRandom, thread_rng}; -use test::{Bencher, black_box}; +use test::{black_box, Bencher}; macro_rules! 
map_insert_rand_bench { - ($name: ident, $n: expr, $map: ident) => ( + ($name: ident, $n: expr, $map: ident) => { #[bench] pub fn $name(b: &mut Bencher) { let n: usize = $n; @@ -26,11 +26,11 @@ macro_rules! map_insert_rand_bench { }); black_box(map); } - ) + }; } macro_rules! map_insert_seq_bench { - ($name: ident, $n: expr, $map: ident) => ( + ($name: ident, $n: expr, $map: ident) => { #[bench] pub fn $name(b: &mut Bencher) { let mut map = $map::new(); @@ -49,11 +49,11 @@ macro_rules! map_insert_seq_bench { }); black_box(map); } - ) + }; } macro_rules! map_find_rand_bench { - ($name: ident, $n: expr, $map: ident) => ( + ($name: ident, $n: expr, $map: ident) => { #[bench] pub fn $name(b: &mut Bencher) { let mut map = $map::new(); @@ -77,11 +77,11 @@ macro_rules! map_find_rand_bench { black_box(t); }) } - ) + }; } macro_rules! map_find_seq_bench { - ($name: ident, $n: expr, $map: ident) => ( + ($name: ident, $n: expr, $map: ident) => { #[bench] pub fn $name(b: &mut Bencher) { let mut map = $map::new(); @@ -100,20 +100,20 @@ macro_rules! map_find_seq_bench { black_box(x); }) } - ) + }; } -map_insert_rand_bench!{insert_rand_100, 100, BTreeMap} -map_insert_rand_bench!{insert_rand_10_000, 10_000, BTreeMap} +map_insert_rand_bench! {insert_rand_100, 100, BTreeMap} +map_insert_rand_bench! {insert_rand_10_000, 10_000, BTreeMap} -map_insert_seq_bench!{insert_seq_100, 100, BTreeMap} -map_insert_seq_bench!{insert_seq_10_000, 10_000, BTreeMap} +map_insert_seq_bench! {insert_seq_100, 100, BTreeMap} +map_insert_seq_bench! {insert_seq_10_000, 10_000, BTreeMap} -map_find_rand_bench!{find_rand_100, 100, BTreeMap} -map_find_rand_bench!{find_rand_10_000, 10_000, BTreeMap} +map_find_rand_bench! {find_rand_100, 100, BTreeMap} +map_find_rand_bench! {find_rand_10_000, 10_000, BTreeMap} -map_find_seq_bench!{find_seq_100, 100, BTreeMap} -map_find_seq_bench!{find_seq_10_000, 10_000, BTreeMap} +map_find_seq_bench! {find_seq_100, 100, BTreeMap} +map_find_seq_bench! {find_seq_10_000, 10_000, BTreeMap} fn bench_iter(b: &mut Bencher, size: i32) { let mut map = BTreeMap::::new(); diff --git a/src/liballoc/benches/lib.rs b/src/liballoc/benches/lib.rs index 08c69ee6e8507..b32ef3d78d8ca 100644 --- a/src/liballoc/benches/lib.rs +++ b/src/liballoc/benches/lib.rs @@ -8,8 +8,8 @@ extern crate test; mod btree; mod linked_list; -mod string; -mod str; mod slice; +mod str; +mod string; mod vec; mod vec_deque; diff --git a/src/liballoc/benches/slice.rs b/src/liballoc/benches/slice.rs index b9ebd74f7999a..4e941d2efb52d 100644 --- a/src/liballoc/benches/slice.rs +++ b/src/liballoc/benches/slice.rs @@ -1,11 +1,11 @@ -use rand::{thread_rng}; +use rand::thread_rng; use std::mem; use std::ptr; +use rand::distributions::{Alphanumeric, Standard}; use rand::{Rng, SeedableRng}; -use rand::distributions::{Standard, Alphanumeric}; use rand_xorshift::XorShiftRng; -use test::{Bencher, black_box}; +use test::{black_box, Bencher}; #[bench] fn iterator(b: &mut Bencher) { @@ -230,7 +230,10 @@ fn gen_strings(len: usize) -> Vec { fn gen_big_random(len: usize) -> Vec<[u64; 16]> { let mut rng = XorShiftRng::from_seed(SEED); - rng.sample_iter(&Standard).map(|x| [x; 16]).take(len).collect() + rng.sample_iter(&Standard) + .map(|x| [x; 16]) + .take(len) + .collect() } macro_rules! sort { @@ -241,7 +244,7 @@ macro_rules! sort { b.iter(|| v.clone().$f()); b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64; } - } + }; } macro_rules! sort_strings { @@ -253,7 +256,7 @@ macro_rules! 
sort_strings { b.iter(|| v.clone().$f()); b.bytes = $len * mem::size_of::<&str>() as u64; } - } + }; } macro_rules! sort_expensive { @@ -275,7 +278,7 @@ macro_rules! sort_expensive { }); b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64; } - } + }; } macro_rules! sort_lexicographic { @@ -286,7 +289,7 @@ macro_rules! sort_lexicographic { b.iter(|| v.clone().$f(|x| x.to_string())); b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64; } - } + }; } sort!(sort, sort_small_ascending, gen_ascending, 10); @@ -296,30 +299,95 @@ sort!(sort, sort_small_big, gen_big_random, 10); sort!(sort, sort_medium_random, gen_random, 100); sort!(sort, sort_large_ascending, gen_ascending, 10000); sort!(sort, sort_large_descending, gen_descending, 10000); -sort!(sort, sort_large_mostly_ascending, gen_mostly_ascending, 10000); -sort!(sort, sort_large_mostly_descending, gen_mostly_descending, 10000); +sort!( + sort, + sort_large_mostly_ascending, + gen_mostly_ascending, + 10000 +); +sort!( + sort, + sort_large_mostly_descending, + gen_mostly_descending, + 10000 +); sort!(sort, sort_large_random, gen_random, 10000); sort!(sort, sort_large_big, gen_big_random, 10000); sort_strings!(sort, sort_large_strings, gen_strings, 10000); sort_expensive!(sort_by, sort_large_expensive, gen_random, 10000); -sort!(sort_unstable, sort_unstable_small_ascending, gen_ascending, 10); -sort!(sort_unstable, sort_unstable_small_descending, gen_descending, 10); +sort!( + sort_unstable, + sort_unstable_small_ascending, + gen_ascending, + 10 +); +sort!( + sort_unstable, + sort_unstable_small_descending, + gen_descending, + 10 +); sort!(sort_unstable, sort_unstable_small_random, gen_random, 10); sort!(sort_unstable, sort_unstable_small_big, gen_big_random, 10); sort!(sort_unstable, sort_unstable_medium_random, gen_random, 100); -sort!(sort_unstable, sort_unstable_large_ascending, gen_ascending, 10000); -sort!(sort_unstable, sort_unstable_large_descending, gen_descending, 10000); -sort!(sort_unstable, sort_unstable_large_mostly_ascending, gen_mostly_ascending, 10000); -sort!(sort_unstable, sort_unstable_large_mostly_descending, gen_mostly_descending, 10000); +sort!( + sort_unstable, + sort_unstable_large_ascending, + gen_ascending, + 10000 +); +sort!( + sort_unstable, + sort_unstable_large_descending, + gen_descending, + 10000 +); +sort!( + sort_unstable, + sort_unstable_large_mostly_ascending, + gen_mostly_ascending, + 10000 +); +sort!( + sort_unstable, + sort_unstable_large_mostly_descending, + gen_mostly_descending, + 10000 +); sort!(sort_unstable, sort_unstable_large_random, gen_random, 10000); -sort!(sort_unstable, sort_unstable_large_big, gen_big_random, 10000); -sort_strings!(sort_unstable, sort_unstable_large_strings, gen_strings, 10000); -sort_expensive!(sort_unstable_by, sort_unstable_large_expensive, gen_random, 10000); +sort!( + sort_unstable, + sort_unstable_large_big, + gen_big_random, + 10000 +); +sort_strings!( + sort_unstable, + sort_unstable_large_strings, + gen_strings, + 10000 +); +sort_expensive!( + sort_unstable_by, + sort_unstable_large_expensive, + gen_random, + 10000 +); sort_lexicographic!(sort_by_key, sort_by_key_lexicographic, gen_random, 10000); -sort_lexicographic!(sort_unstable_by_key, sort_unstable_by_key_lexicographic, gen_random, 10000); -sort_lexicographic!(sort_by_cached_key, sort_by_cached_key_lexicographic, gen_random, 10000); +sort_lexicographic!( + sort_unstable_by_key, + sort_unstable_by_key_lexicographic, + gen_random, + 10000 +); +sort_lexicographic!( + sort_by_cached_key, + 
sort_by_cached_key_lexicographic, + gen_random, + 10000 +); macro_rules! reverse { ($name:ident, $ty:ty, $f:expr) => { @@ -327,24 +395,31 @@ macro_rules! reverse { fn $name(b: &mut Bencher) { // odd length and offset by 1 to be as unaligned as possible let n = 0xFFFFF; - let mut v: Vec<_> = - (0..1+(n / mem::size_of::<$ty>() as u64)) + let mut v: Vec<_> = (0..1 + (n / mem::size_of::<$ty>() as u64)) .map($f) .collect(); b.iter(|| black_box(&mut v[1..]).reverse()); b.bytes = n; } - } + }; } reverse!(reverse_u8, u8, |x| x as u8); reverse!(reverse_u16, u16, |x| x as u16); -reverse!(reverse_u8x3, [u8;3], |x| [x as u8, (x>>8) as u8, (x>>16) as u8]); +reverse!(reverse_u8x3, [u8; 3], |x| [ + x as u8, + (x >> 8) as u8, + (x >> 16) as u8 +]); reverse!(reverse_u32, u32, |x| x as u32); reverse!(reverse_u64, u64, |x| x as u64); reverse!(reverse_u128, u128, |x| x as u128); -#[repr(simd)] struct F64x4(f64, f64, f64, f64); -reverse!(reverse_simd_f64x4, F64x4, |x| { let x = x as f64; F64x4(x,x,x,x) }); +#[repr(simd)] +struct F64x4(f64, f64, f64, f64); +reverse!(reverse_simd_f64x4, F64x4, |x| { + let x = x as f64; + F64x4(x, x, x, x) +}); macro_rules! rotate { ($name:ident, $gen:expr, $len:expr, $mid:expr) => { @@ -352,32 +427,77 @@ macro_rules! rotate { fn $name(b: &mut Bencher) { let size = mem::size_of_val(&$gen(1)[0]); let mut v = $gen($len * 8 / size); - b.iter(|| black_box(&mut v).rotate_left(($mid*8+size-1)/size)); + b.iter(|| black_box(&mut v).rotate_left(($mid * 8 + size - 1) / size)); b.bytes = (v.len() * size) as u64; } - } + }; } rotate!(rotate_tiny_by1, gen_random, 16, 1); -rotate!(rotate_tiny_half, gen_random, 16, 16/2); -rotate!(rotate_tiny_half_plus_one, gen_random, 16, 16/2+1); +rotate!(rotate_tiny_half, gen_random, 16, 16 / 2); +rotate!(rotate_tiny_half_plus_one, gen_random, 16, 16 / 2 + 1); rotate!(rotate_medium_by1, gen_random, 9158, 1); rotate!(rotate_medium_by727_u64, gen_random, 9158, 727); rotate!(rotate_medium_by727_bytes, gen_random_bytes, 9158, 727); rotate!(rotate_medium_by727_strings, gen_strings, 9158, 727); -rotate!(rotate_medium_half, gen_random, 9158, 9158/2); -rotate!(rotate_medium_half_plus_one, gen_random, 9158, 9158/2+1); +rotate!(rotate_medium_half, gen_random, 9158, 9158 / 2); +rotate!(rotate_medium_half_plus_one, gen_random, 9158, 9158 / 2 + 1); // Intended to use more RAM than the machine has cache -rotate!(rotate_huge_by1, gen_random, 5*1024*1024, 1); -rotate!(rotate_huge_by9199_u64, gen_random, 5*1024*1024, 9199); -rotate!(rotate_huge_by9199_bytes, gen_random_bytes, 5*1024*1024, 9199); -rotate!(rotate_huge_by9199_strings, gen_strings, 5*1024*1024, 9199); -rotate!(rotate_huge_by9199_big, gen_big_random, 5*1024*1024, 9199); -rotate!(rotate_huge_by1234577_u64, gen_random, 5*1024*1024, 1234577); -rotate!(rotate_huge_by1234577_bytes, gen_random_bytes, 5*1024*1024, 1234577); -rotate!(rotate_huge_by1234577_strings, gen_strings, 5*1024*1024, 1234577); -rotate!(rotate_huge_by1234577_big, gen_big_random, 5*1024*1024, 1234577); -rotate!(rotate_huge_half, gen_random, 5*1024*1024, 5*1024*1024/2); -rotate!(rotate_huge_half_plus_one, gen_random, 5*1024*1024, 5*1024*1024/2+1); +rotate!(rotate_huge_by1, gen_random, 5 * 1024 * 1024, 1); +rotate!(rotate_huge_by9199_u64, gen_random, 5 * 1024 * 1024, 9199); +rotate!( + rotate_huge_by9199_bytes, + gen_random_bytes, + 5 * 1024 * 1024, + 9199 +); +rotate!( + rotate_huge_by9199_strings, + gen_strings, + 5 * 1024 * 1024, + 9199 +); +rotate!( + rotate_huge_by9199_big, + gen_big_random, + 5 * 1024 * 1024, + 9199 +); +rotate!( + 
rotate_huge_by1234577_u64, + gen_random, + 5 * 1024 * 1024, + 1234577 +); +rotate!( + rotate_huge_by1234577_bytes, + gen_random_bytes, + 5 * 1024 * 1024, + 1234577 +); +rotate!( + rotate_huge_by1234577_strings, + gen_strings, + 5 * 1024 * 1024, + 1234577 +); +rotate!( + rotate_huge_by1234577_big, + gen_big_random, + 5 * 1024 * 1024, + 1234577 +); +rotate!( + rotate_huge_half, + gen_random, + 5 * 1024 * 1024, + 5 * 1024 * 1024 / 2 +); +rotate!( + rotate_huge_half_plus_one, + gen_random, + 5 * 1024 * 1024, + 5 * 1024 * 1024 / 2 + 1 +); diff --git a/src/liballoc/benches/str.rs b/src/liballoc/benches/str.rs index 7f8661bd96888..0060b2ddb9608 100644 --- a/src/liballoc/benches/str.rs +++ b/src/liballoc/benches/str.rs @@ -1,4 +1,4 @@ -use test::{Bencher, black_box}; +use test::{black_box, Bencher}; #[bench] fn char_iterator(b: &mut Bencher) { @@ -12,7 +12,9 @@ fn char_iterator_for(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; b.iter(|| { - for ch in s.chars() { black_box(ch); } + for ch in s.chars() { + black_box(ch); + } }); } @@ -40,7 +42,9 @@ fn char_iterator_rev_for(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; b.iter(|| { - for ch in s.chars().rev() { black_box(ch); } + for ch in s.chars().rev() { + black_box(ch); + } }); } @@ -62,7 +66,8 @@ fn char_indicesator_rev(b: &mut Bencher) { #[bench] fn split_unicode_ascii(b: &mut Bencher) { - let s = "ประเทศไทย中华Việt Namประเทศไทย中华Việt Nam"; + let s = + "ประเทศไทย中华Việt Namประเทศไทย中华Việt Nam"; b.iter(|| assert_eq!(s.split('V').count(), 3)); } @@ -79,7 +84,9 @@ fn split_ascii(b: &mut Bencher) { fn split_extern_fn(b: &mut Bencher) { let s = "Mary had a little lamb, Little lamb, little-lamb."; let len = s.split(' ').count(); - fn pred(c: char) -> bool { c == ' ' } + fn pred(c: char) -> bool { + c == ' ' + } b.iter(|| assert_eq!(s.split(pred).count(), len)); } @@ -185,16 +192,19 @@ fn bench_contains_equal(b: &mut Bencher) { }) } - macro_rules! make_test_inner { ($s:ident, $code:expr, $name:ident, $str:expr, $iters:expr) => { #[bench] fn $name(bencher: &mut Bencher) { let mut $s = $str; black_box(&mut $s); - bencher.iter(|| for _ in 0..$iters { black_box($code); }); + bencher.iter(|| { + for _ in 0..$iters { + black_box($code); + } + }); } - } + }; } macro_rules! 
make_test { @@ -283,11 +293,25 @@ make_test!(starts_with_ascii_char, s, s.starts_with('/'), 1024); make_test!(ends_with_ascii_char, s, s.ends_with('/'), 1024); make_test!(starts_with_unichar, s, s.starts_with('\u{1F4A4}'), 1024); make_test!(ends_with_unichar, s, s.ends_with('\u{1F4A4}'), 1024); -make_test!(starts_with_str, s, s.starts_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"), 1024); -make_test!(ends_with_str, s, s.ends_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"), 1024); +make_test!( + starts_with_str, + s, + s.starts_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"), + 1024 +); +make_test!( + ends_with_str, + s, + s.ends_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"), + 1024 +); make_test!(split_space_char, s, s.split(' ').count()); -make_test!(split_terminator_space_char, s, s.split_terminator(' ').count()); +make_test!( + split_terminator_space_char, + s, + s.split_terminator(' ').count() +); make_test!(splitn_space_char, s, s.splitn(10, ' ').count()); make_test!(rsplitn_space_char, s, s.rsplitn(10, ' ').count()); diff --git a/src/liballoc/benches/vec.rs b/src/liballoc/benches/vec.rs index 590c49f4ef500..a3da9e80cd0fc 100644 --- a/src/liballoc/benches/vec.rs +++ b/src/liballoc/benches/vec.rs @@ -1,5 +1,5 @@ +use std::iter::{repeat, FromIterator}; use test::Bencher; -use std::iter::{FromIterator, repeat}; #[bench] fn bench_new(b: &mut Bencher) { diff --git a/src/liballoc/benches/vec_deque.rs b/src/liballoc/benches/vec_deque.rs index f7aadbdbd8266..5d9fd31397198 100644 --- a/src/liballoc/benches/vec_deque.rs +++ b/src/liballoc/benches/vec_deque.rs @@ -1,5 +1,5 @@ use std::collections::VecDeque; -use test::{Bencher, black_box}; +use test::{black_box, Bencher}; #[bench] fn bench_new(b: &mut Bencher) { diff --git a/src/liballoc/borrow.rs b/src/liballoc/borrow.rs index 603d73100a8b4..a11c492f0e9e8 100644 --- a/src/liballoc/borrow.rs +++ b/src/liballoc/borrow.rs @@ -16,8 +16,9 @@ pub use core::borrow::{Borrow, BorrowMut}; #[stable(feature = "rust1", since = "1.0.0")] impl<'a, B: ?Sized> Borrow for Cow<'a, B> - where B: ToOwned, - ::Owned: 'a +where + B: ToOwned, + ::Owned: 'a, { fn borrow(&self) -> &B { &**self @@ -68,9 +69,11 @@ pub trait ToOwned { /// let mut v: Vec = Vec::new(); /// [1, 2][..].clone_into(&mut v); /// ``` - #[unstable(feature = "toowned_clone_into", - reason = "recently added", - issue = "41263")] + #[unstable( + feature = "toowned_clone_into", + reason = "recently added", + issue = "41263" + )] fn clone_into(&self, target: &mut Self::Owned) { *target = self.to_owned(); } @@ -78,7 +81,8 @@ pub trait ToOwned { #[stable(feature = "rust1", since = "1.0.0")] impl ToOwned for T - where T: Clone +where + T: Clone, { type Owned = T; fn to_owned(&self) -> T { @@ -168,22 +172,22 @@ impl ToOwned for T /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub enum Cow<'a, B: ?Sized + 'a> - where B: ToOwned +where + B: ToOwned, { /// Borrowed data. #[stable(feature = "rust1", since = "1.0.0")] - Borrowed(#[stable(feature = "rust1", since = "1.0.0")] - &'a B), + Borrowed(#[stable(feature = "rust1", since = "1.0.0")] &'a B), /// Owned data. 
#[stable(feature = "rust1", since = "1.0.0")] - Owned(#[stable(feature = "rust1", since = "1.0.0")] - ::Owned), + Owned(#[stable(feature = "rust1", since = "1.0.0")] ::Owned), } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, B: ?Sized> Clone for Cow<'a, B> - where B: ToOwned +where + B: ToOwned, { fn clone(&self) -> Cow<'a, B> { match *self { @@ -208,7 +212,8 @@ impl<'a, B: ?Sized> Clone for Cow<'a, B> } impl<'a, B: ?Sized> Cow<'a, B> - where B: ToOwned +where + B: ToOwned, { /// Acquires a mutable reference to the owned form of the data. /// @@ -286,7 +291,8 @@ impl<'a, B: ?Sized> Cow<'a, B> #[stable(feature = "rust1", since = "1.0.0")] impl<'a, B: ?Sized> Deref for Cow<'a, B> - where B: ToOwned +where + B: ToOwned, { type Target = B; @@ -303,7 +309,8 @@ impl<'a, B: ?Sized> Eq for Cow<'a, B> where B: Eq + ToOwned {} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, B: ?Sized> Ord for Cow<'a, B> - where B: Ord + ToOwned +where + B: Ord + ToOwned, { #[inline] fn cmp(&self, other: &Cow<'a, B>) -> Ordering { @@ -313,8 +320,9 @@ impl<'a, B: ?Sized> Ord for Cow<'a, B> #[stable(feature = "rust1", since = "1.0.0")] impl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq> for Cow<'a, B> - where B: PartialEq + ToOwned, - C: ToOwned +where + B: PartialEq + ToOwned, + C: ToOwned, { #[inline] fn eq(&self, other: &Cow<'b, C>) -> bool { @@ -324,7 +332,8 @@ impl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq> for Cow<'a, B> #[stable(feature = "rust1", since = "1.0.0")] impl<'a, B: ?Sized> PartialOrd for Cow<'a, B> - where B: PartialOrd + ToOwned +where + B: PartialOrd + ToOwned, { #[inline] fn partial_cmp(&self, other: &Cow<'a, B>) -> Option { @@ -334,8 +343,9 @@ impl<'a, B: ?Sized> PartialOrd for Cow<'a, B> #[stable(feature = "rust1", since = "1.0.0")] impl<'a, B: ?Sized> fmt::Debug for Cow<'a, B> - where B: fmt::Debug + ToOwned, - ::Owned: fmt::Debug +where + B: fmt::Debug + ToOwned, + ::Owned: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { @@ -347,8 +357,9 @@ impl<'a, B: ?Sized> fmt::Debug for Cow<'a, B> #[stable(feature = "rust1", since = "1.0.0")] impl<'a, B: ?Sized> fmt::Display for Cow<'a, B> - where B: fmt::Display + ToOwned, - ::Owned: fmt::Display +where + B: fmt::Display + ToOwned, + ::Owned: fmt::Display, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { @@ -360,8 +371,9 @@ impl<'a, B: ?Sized> fmt::Display for Cow<'a, B> #[stable(feature = "default", since = "1.11.0")] impl<'a, B: ?Sized> Default for Cow<'a, B> - where B: ToOwned, - ::Owned: Default +where + B: ToOwned, + ::Owned: Default, { /// Creates an owned Cow<'a, B> with the default value for the contained owned value. 
fn default() -> Cow<'a, B> { @@ -371,7 +383,8 @@ impl<'a, B: ?Sized> Default for Cow<'a, B> #[stable(feature = "rust1", since = "1.0.0")] impl<'a, B: ?Sized> Hash for Cow<'a, B> - where B: Hash + ToOwned +where + B: Hash + ToOwned, { #[inline] fn hash(&self, state: &mut H) { diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index fbd0b948b82a0..fed9c56e8a8b1 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -63,19 +63,19 @@ use core::convert::From; use core::fmt; use core::future::Future; use core::hash::{Hash, Hasher}; -use core::iter::{Iterator, FromIterator, FusedIterator}; +use core::iter::{FromIterator, FusedIterator, Iterator}; use core::marker::{Unpin, Unsize}; use core::mem; -use core::pin::Pin; use core::ops::{ - CoerceUnsized, DispatchFromDyn, Deref, DerefMut, Receiver, Generator, GeneratorState + CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Generator, GeneratorState, Receiver, }; +use core::pin::Pin; use core::ptr::{self, NonNull, Unique}; use core::task::{LocalWaker, Poll}; -use vec::Vec; use raw_vec::RawVec; use str::from_boxed_utf8_unchecked; +use vec::Vec; /// A pointer type for heap allocation. /// @@ -199,7 +199,11 @@ impl Box { Box::into_unique(b).into() } - #[unstable(feature = "ptr_internals", issue = "0", reason = "use into_raw_non_null instead")] + #[unstable( + feature = "ptr_internals", + issue = "0", + reason = "use into_raw_non_null instead" + )] #[inline] #[doc(hidden)] pub fn into_unique(b: Box) -> Unique { @@ -253,7 +257,7 @@ impl Box { #[inline] pub fn leak<'a>(b: Box) -> &'a mut T where - T: 'a // Technically not needed, but kept to be explicit. + T: 'a, // Technically not needed, but kept to be explicit. { unsafe { &mut *Box::into_raw(b) } } @@ -321,7 +325,6 @@ impl Clone for Box { } } - #[stable(feature = "box_slice_clone", since = "1.3.0")] impl Clone for Box { fn clone(&self) -> Self { @@ -666,7 +669,6 @@ impl ExactSizeIterator for Box { #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Box {} - /// `FnBox` is a version of the `FnOnce` intended for use with boxed /// closure objects. 
The idea is that where one would normally store a /// `Box` in a data structure, you should use @@ -705,18 +707,25 @@ impl FusedIterator for Box {} /// } /// ``` #[rustc_paren_sugar] -#[unstable(feature = "fnbox", - reason = "will be deprecated if and when `Box` becomes usable", issue = "28796")] +#[unstable( + feature = "fnbox", + reason = "will be deprecated if and when `Box` becomes usable", + issue = "28796" +)] pub trait FnBox { type Output; fn call_box(self: Box, args: A) -> Self::Output; } -#[unstable(feature = "fnbox", - reason = "will be deprecated if and when `Box` becomes usable", issue = "28796")] +#[unstable( + feature = "fnbox", + reason = "will be deprecated if and when `Box` becomes usable", + issue = "28796" +)] impl FnBox for F - where F: FnOnce +where + F: FnOnce, { type Output = F::Output; @@ -725,8 +734,11 @@ impl FnBox for F } } -#[unstable(feature = "fnbox", - reason = "will be deprecated if and when `Box` becomes usable", issue = "28796")] +#[unstable( + feature = "fnbox", + reason = "will be deprecated if and when `Box` becomes usable", + issue = "28796" +)] impl<'a, A, R> FnOnce for Box + 'a> { type Output = R; @@ -735,8 +747,11 @@ impl<'a, A, R> FnOnce for Box + 'a> { } } -#[unstable(feature = "fnbox", - reason = "will be deprecated if and when `Box` becomes usable", issue = "28796")] +#[unstable( + feature = "fnbox", + reason = "will be deprecated if and when `Box` becomes usable", + issue = "28796" +)] impl<'a, A, R> FnOnce for Box + Send + 'a> { type Output = R; @@ -860,11 +875,12 @@ impl AsMut for Box { * could have a method to project a Pin from it. */ #[stable(feature = "pin", since = "1.33.0")] -impl Unpin for Box { } +impl Unpin for Box {} #[unstable(feature = "generator_trait", issue = "43122")] impl Generator for Box - where T: Generator + ?Sized +where + T: Generator + ?Sized, { type Yield = T::Yield; type Return = T::Return; diff --git a/src/liballoc/boxed_test.rs b/src/liballoc/boxed_test.rs index 654eabd070326..8ae6ec5a993e5 100644 --- a/src/liballoc/boxed_test.rs +++ b/src/liballoc/boxed_test.rs @@ -1,11 +1,11 @@ //! Test for `boxed` mod. 
use core::any::Any; -use core::ops::Deref; -use core::result::Result::{Err, Ok}; use core::clone::Clone; use core::f64; use core::i64; +use core::ops::Deref; +use core::result::Result::{Err, Ok}; use std::boxed::Box; diff --git a/src/liballoc/collections/binary_heap.rs b/src/liballoc/collections/binary_heap.rs index ad544e6015e4a..b3138de297a34 100644 --- a/src/liballoc/collections/binary_heap.rs +++ b/src/liballoc/collections/binary_heap.rs @@ -145,11 +145,11 @@ #![allow(missing_docs)] #![stable(feature = "rust1", since = "1.0.0")] -use core::ops::{Deref, DerefMut}; +use core::fmt; use core::iter::{FromIterator, FusedIterator}; -use core::mem::{swap, size_of, ManuallyDrop}; +use core::mem::{size_of, swap, ManuallyDrop}; +use core::ops::{Deref, DerefMut}; use core::ptr; -use core::fmt; use slice; use vec::{self, Vec}; @@ -229,9 +229,7 @@ pub struct PeekMut<'a, T: 'a + Ord> { #[stable(feature = "collection_debug", since = "1.17.0")] impl<'a, T: Ord + fmt::Debug> fmt::Debug for PeekMut<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("PeekMut") - .field(&self.heap.data[0]) - .finish() + f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish() } } @@ -272,7 +270,9 @@ impl<'a, T: Ord> PeekMut<'a, T> { #[stable(feature = "rust1", since = "1.0.0")] impl Clone for BinaryHeap { fn clone(&self) -> Self { - BinaryHeap { data: self.data.clone() } + BinaryHeap { + data: self.data.clone(), + } } fn clone_from(&mut self, source: &Self) { @@ -329,7 +329,9 @@ impl BinaryHeap { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn with_capacity(capacity: usize) -> BinaryHeap { - BinaryHeap { data: Vec::with_capacity(capacity) } + BinaryHeap { + data: Vec::with_capacity(capacity), + } } /// Returns an iterator visiting all values in the underlying vector, in @@ -350,7 +352,9 @@ impl BinaryHeap { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn iter(&self) -> Iter { - Iter { iter: self.data.iter() } + Iter { + iter: self.data.iter(), + } } /// Returns the greatest item in the binary heap, or `None` if it is empty. @@ -519,7 +523,7 @@ impl BinaryHeap { /// assert!(heap.capacity() >= 10); /// ``` #[inline] - #[unstable(feature = "shrink_to", reason = "new API", issue="56431")] + #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")] pub fn shrink_to(&mut self, min_capacity: usize) { self.data.shrink_to(min_capacity) } @@ -762,7 +766,9 @@ impl BinaryHeap { #[inline] #[stable(feature = "drain", since = "1.6.0")] pub fn drain(&mut self) -> Drain { - Drain { iter: self.data.drain(..) } + Drain { + iter: self.data.drain(..), + } } /// Drops all items from the binary heap. 
@@ -934,9 +940,7 @@ pub struct Iter<'a, T: 'a> { #[stable(feature = "collection_debug", since = "1.17.0")] impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("Iter") - .field(&self.iter.as_slice()) - .finish() + f.debug_tuple("Iter").field(&self.iter.as_slice()).finish() } } @@ -944,7 +948,9 @@ impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Clone for Iter<'a, T> { fn clone(&self) -> Iter<'a, T> { - Iter { iter: self.iter.clone() } + Iter { + iter: self.iter.clone(), + } } } @@ -998,8 +1004,8 @@ pub struct IntoIter { impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("IntoIter") - .field(&self.iter.as_slice()) - .finish() + .field(&self.iter.as_slice()) + .finish() } } @@ -1129,13 +1135,16 @@ impl IntoIterator for BinaryHeap { /// } /// ``` fn into_iter(self) -> IntoIter { - IntoIter { iter: self.data.into_iter() } + IntoIter { + iter: self.data.into_iter(), + } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> IntoIterator for &'a BinaryHeap - where T: Ord +where + T: Ord, { type Item = &'a T; type IntoIter = Iter<'a, T>; diff --git a/src/liballoc/collections/btree/map.rs b/src/liballoc/collections/btree/map.rs index 717650aca9600..ba38910aa40a4 100644 --- a/src/liballoc/collections/btree/map.rs +++ b/src/liballoc/collections/btree/map.rs @@ -1,7 +1,7 @@ use core::cmp::Ordering; use core::fmt::Debug; use core::hash::{Hash, Hasher}; -use core::iter::{FromIterator, Peekable, FusedIterator}; +use core::iter::{FromIterator, FusedIterator, Peekable}; use core::marker::PhantomData; use core::ops::Bound::{Excluded, Included, Unbounded}; use core::ops::Index; @@ -10,14 +10,14 @@ use core::{fmt, intrinsics, mem, ptr}; use borrow::Borrow; -use super::node::{self, Handle, NodeRef, marker}; +use super::node::{self, marker, Handle, NodeRef}; use super::search; -use super::node::InsertResult::*; +use self::Entry::*; +use self::UnderflowResult::*; use super::node::ForceResult::*; +use super::node::InsertResult::*; use super::search::SearchResult::*; -use self::UnderflowResult::*; -use self::Entry::*; /// A map based on a B-Tree. /// @@ -143,9 +143,11 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap { impl Clone for BTreeMap { fn clone(&self) -> BTreeMap { fn clone_subtree<'a, K: Clone, V: Clone>( - node: node::NodeRef, K, V, marker::LeafOrInternal> + node: node::NodeRef, K, V, marker::LeafOrInternal>, ) -> BTreeMap - where K: 'a, V: 'a, + where + K: 'a, + V: 'a, { match node.force() { Leaf(leaf) => { @@ -219,8 +221,9 @@ impl Clone for BTreeMap { } impl super::Recover for BTreeMap - where K: Borrow + Ord, - Q: Ord +where + K: Borrow + Ord, + Q: Ord, { type Key = K; @@ -233,15 +236,15 @@ impl super::Recover for BTreeMap fn take(&mut self, key: &Q) -> Option { match search::search_tree(self.root.as_mut(), key) { - Found(handle) => { - Some(OccupiedEntry { - handle, - length: &mut self.length, - _marker: PhantomData, - } - .remove_kv() - .0) - } + Found(handle) => Some( + OccupiedEntry { + handle, + length: &mut self.length, + _marker: PhantomData, + } + .remove_kv() + .0, + ), GoDown(_) => None, } } @@ -431,25 +434,19 @@ impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for RangeMut<'a, K, pub enum Entry<'a, K: 'a, V: 'a> { /// A vacant entry. 
#[stable(feature = "rust1", since = "1.0.0")] - Vacant(#[stable(feature = "rust1", since = "1.0.0")] - VacantEntry<'a, K, V>), + Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V>), /// An occupied entry. #[stable(feature = "rust1", since = "1.0.0")] - Occupied(#[stable(feature = "rust1", since = "1.0.0")] - OccupiedEntry<'a, K, V>), + Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V>), } -#[stable(feature= "debug_btree_map", since = "1.12.0")] +#[stable(feature = "debug_btree_map", since = "1.12.0")] impl<'a, K: 'a + Debug + Ord, V: 'a + Debug> Debug for Entry<'a, K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { - Vacant(ref v) => f.debug_tuple("Entry") - .field(v) - .finish(), - Occupied(ref o) => f.debug_tuple("Entry") - .field(o) - .finish(), + Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(), + Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(), } } } @@ -468,12 +465,10 @@ pub struct VacantEntry<'a, K: 'a, V: 'a> { _marker: PhantomData<&'a mut (K, V)>, } -#[stable(feature= "debug_btree_map", since = "1.12.0")] +#[stable(feature = "debug_btree_map", since = "1.12.0")] impl<'a, K: 'a + Debug + Ord, V: 'a> Debug for VacantEntry<'a, K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("VacantEntry") - .field(self.key()) - .finish() + f.debug_tuple("VacantEntry").field(self.key()).finish() } } @@ -491,13 +486,13 @@ pub struct OccupiedEntry<'a, K: 'a, V: 'a> { _marker: PhantomData<&'a mut (K, V)>, } -#[stable(feature= "debug_btree_map", since = "1.12.0")] +#[stable(feature = "debug_btree_map", since = "1.12.0")] impl<'a, K: 'a + Debug + Ord, V: 'a + Debug> Debug for OccupiedEntry<'a, K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("OccupiedEntry") - .field("key", self.key()) - .field("value", self.get()) - .finish() + .field("key", self.key()) + .field("value", self.get()) + .finish() } } @@ -568,8 +563,9 @@ impl BTreeMap { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn get(&self, key: &Q) -> Option<&V> - where K: Borrow, - Q: Ord + where + K: Borrow, + Q: Ord, { match search::search_tree(self.root.as_ref(), key) { Found(handle) => Some(handle.into_kv().1), @@ -595,8 +591,9 @@ impl BTreeMap { /// ``` #[unstable(feature = "map_get_key_value", issue = "49347")] pub fn get_key_value(&self, k: &Q) -> Option<(&K, &V)> - where K: Borrow, - Q: Ord + where + K: Borrow, + Q: Ord, { match search::search_tree(self.root.as_ref(), k) { Found(handle) => Some(handle.into_kv()), @@ -623,8 +620,9 @@ impl BTreeMap { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn contains_key(&self, key: &Q) -> bool - where K: Borrow, - Q: Ord + where + K: Borrow, + Q: Ord, { self.get(key).is_some() } @@ -651,8 +649,9 @@ impl BTreeMap { // See `get` for implementation notes, this is basically a copy-paste with mut's added #[stable(feature = "rust1", since = "1.0.0")] pub fn get_mut(&mut self, key: &Q) -> Option<&mut V> - where K: Borrow, - Q: Ord + where + K: Borrow, + Q: Ord, { match search::search_tree(self.root.as_mut(), key) { Found(handle) => Some(handle.into_kv_mut().1), @@ -717,18 +716,19 @@ impl BTreeMap { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn remove(&mut self, key: &Q) -> Option - where K: Borrow, - Q: Ord + where + K: Borrow, + Q: Ord, { match search::search_tree(self.root.as_mut(), key) { - Found(handle) => { - Some(OccupiedEntry { - handle, - length: &mut self.length, - _marker: PhantomData, - } - .remove()) - } 
+ Found(handle) => Some( + OccupiedEntry { + handle, + length: &mut self.length, + _marker: PhantomData, + } + .remove(), + ), GoDown(_) => None, } } @@ -818,13 +818,16 @@ impl BTreeMap { /// ``` #[stable(feature = "btree_range", since = "1.17.0")] pub fn range(&self, range: R) -> Range - where T: Ord, K: Borrow, R: RangeBounds + where + T: Ord, + K: Borrow, + R: RangeBounds, { let root1 = self.root.as_ref(); let root2 = self.root.as_ref(); let (f, b) = range_search(root1, root2, range); - Range { front: f, back: b} + Range { front: f, back: b } } /// Constructs a mutable double-ended iterator over a sub-range of elements in the map. @@ -859,7 +862,10 @@ impl BTreeMap { /// ``` #[stable(feature = "btree_range", since = "1.17.0")] pub fn range_mut(&mut self, range: R) -> RangeMut - where T: Ord, K: Borrow, R: RangeBounds + where + T: Ord, + K: Borrow, + R: RangeBounds, { let root1 = self.root.as_mut(); let root2 = unsafe { ptr::read(&root1) }; @@ -895,21 +901,17 @@ impl BTreeMap { // FIXME(@porglezomp) Avoid allocating if we don't insert self.ensure_root_is_owned(); match search::search_tree(self.root.as_mut(), &key) { - Found(handle) => { - Occupied(OccupiedEntry { - handle, - length: &mut self.length, - _marker: PhantomData, - }) - } - GoDown(handle) => { - Vacant(VacantEntry { - key, - handle, - length: &mut self.length, - _marker: PhantomData, - }) - } + Found(handle) => Occupied(OccupiedEntry { + handle, + length: &mut self.length, + _marker: PhantomData, + }), + GoDown(handle) => Vacant(VacantEntry { + key, + handle, + length: &mut self.length, + _marker: PhantomData, + }), } } @@ -1015,7 +1017,8 @@ impl BTreeMap { /// ``` #[stable(feature = "btree_split_off", since = "1.11.0")] pub fn split_off(&mut self, key: &Q) -> Self - where K: Borrow + where + K: Borrow, { if self.is_empty() { return Self::new(); @@ -1073,10 +1076,10 @@ impl BTreeMap { /// Calculates the number of elements if it is incorrect. 
fn recalc_length(&mut self) { - fn dfs<'a, K, V>( - node: NodeRef, K, V, marker::LeafOrInternal> - ) -> usize - where K: 'a, V: 'a + fn dfs<'a, K, V>(node: NodeRef, K, V, marker::LeafOrInternal>) -> usize + where + K: 'a, + V: 'a, { let mut res = node.len(); @@ -1448,7 +1451,9 @@ impl<'a, K, V> FusedIterator for Keys<'a, K, V> {} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Clone for Keys<'a, K, V> { fn clone(&self) -> Keys<'a, K, V> { - Keys { inner: self.inner.clone() } + Keys { + inner: self.inner.clone(), + } } } @@ -1485,7 +1490,9 @@ impl<'a, K, V> FusedIterator for Values<'a, K, V> {} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Clone for Values<'a, K, V> { fn clone(&self) -> Values<'a, K, V> { - Values { inner: self.inner.clone() } + Values { + inner: self.inner.clone(), + } } } @@ -1532,7 +1539,6 @@ impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> { #[stable(feature = "fused", since = "1.26.0")] impl<'a, K, V> FusedIterator for ValuesMut<'a, K, V> {} - impl<'a, K, V> Range<'a, K, V> { unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) { let handle = self.front; @@ -1790,8 +1796,9 @@ impl Debug for BTreeMap { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: Ord, Q: ?Sized, V> Index<&'a Q> for BTreeMap - where K: Borrow, - Q: Ord +where + K: Borrow, + Q: Ord, { type Output = V; @@ -1806,9 +1813,9 @@ impl<'a, K: Ord, Q: ?Sized, V> Index<&'a Q> for BTreeMap } } -fn first_leaf_edge - (mut node: NodeRef) - -> Handle, marker::Edge> { +fn first_leaf_edge( + mut node: NodeRef, +) -> Handle, marker::Edge> { loop { match node.force() { Leaf(leaf) => return leaf.first_edge(), @@ -1819,9 +1826,9 @@ fn first_leaf_edge } } -fn last_leaf_edge - (mut node: NodeRef) - -> Handle, marker::Edge> { +fn last_leaf_edge( + mut node: NodeRef, +) -> Handle, marker::Edge> { loop { match node.force() { Leaf(leaf) => return leaf.last_edge(), @@ -1835,20 +1842,28 @@ fn last_leaf_edge fn range_search>( root1: NodeRef, root2: NodeRef, - range: R -)-> (Handle, marker::Edge>, - Handle, marker::Edge>) - where Q: Ord, K: Borrow + range: R, +) -> ( + Handle, marker::Edge>, + Handle, marker::Edge>, +) +where + Q: Ord, + K: Borrow, { match (range.start_bound(), range.end_bound()) { - (Excluded(s), Excluded(e)) if s==e => - panic!("range start and end are equal and excluded in BTreeMap"), - (Included(s), Included(e)) | - (Included(s), Excluded(e)) | - (Excluded(s), Included(e)) | - (Excluded(s), Excluded(e)) if s>e => - panic!("range start is greater than range end in BTreeMap"), - _ => {}, + (Excluded(s), Excluded(e)) if s == e => { + panic!("range start and end are equal and excluded in BTreeMap") + } + (Included(s), Included(e)) + | (Included(s), Excluded(e)) + | (Excluded(s), Included(e)) + | (Excluded(s), Excluded(e)) + if s > e => + { + panic!("range start is greater than range end in BTreeMap") + } + _ => {} }; let mut min_node = root1; @@ -1860,11 +1875,17 @@ fn range_search>( loop { let min_edge = match (min_found, range.start_bound()) { (false, Included(key)) => match search::search_linear(&min_node, key) { - (i, true) => { min_found = true; i }, + (i, true) => { + min_found = true; + i + } (i, false) => i, }, (false, Excluded(key)) => match search::search_linear(&min_node, key) { - (i, true) => { min_found = true; i+1 }, + (i, true) => { + min_found = true; + i + 1 + } (i, false) => i, }, (_, Unbounded) => 0, @@ -1874,11 +1895,17 @@ fn range_search>( let max_edge = match (max_found, range.end_bound()) { (false, Included(key)) => match 
search::search_linear(&max_node, key) { - (i, true) => { max_found = true; i+1 }, + (i, true) => { + max_found = true; + i + 1 + } (i, false) => i, }, (false, Excluded(key)) => match search::search_linear(&max_node, key) { - (i, true) => { max_found = true; i }, + (i, true) => { + max_found = true; + i + } (i, false) => i, }, (_, Unbounded) => max_node.keys().len(), @@ -1887,8 +1914,12 @@ fn range_search>( }; if !diverged { - if max_edge < min_edge { panic!("Ord is ill-defined in BTreeMap range") } - if min_edge != max_edge { diverged = true; } + if max_edge < min_edge { + panic!("Ord is ill-defined in BTreeMap range") + } + if min_edge != max_edge { + diverged = true; + } } let front = Handle::new_edge(min_node, min_edge); @@ -1896,11 +1927,11 @@ fn range_search>( match (front.force(), back.force()) { (Leaf(f), Leaf(b)) => { return (f, b); - }, + } (Internal(min_int), Internal(max_int)) => { min_node = min_int.descend(); max_node = max_int.descend(); - }, + } _ => unreachable!("BTreeMap has different depths"), }; } @@ -2050,7 +2081,9 @@ impl BTreeMap { /// ``` #[stable(feature = "map_values_mut", since = "1.10.0")] pub fn values_mut(&mut self) -> ValuesMut { - ValuesMut { inner: self.iter_mut() } + ValuesMut { + inner: self.iter_mut(), + } } /// Returns the number of elements in the map. @@ -2177,13 +2210,14 @@ impl<'a, K: Ord, V> Entry<'a, K, V> { /// ``` #[stable(feature = "entry_and_modify", since = "1.26.0")] pub fn and_modify(self, f: F) -> Self - where F: FnOnce(&mut V) + where + F: FnOnce(&mut V), { match self { Occupied(mut entry) => { f(entry.get_mut()); Occupied(entry) - }, + } Vacant(entry) => Vacant(entry), } } @@ -2212,7 +2246,6 @@ impl<'a, K: Ord, V: Default> Entry<'a, K, V> { Vacant(entry) => entry.insert(Default::default()), } } - } impl<'a, K: Ord, V> VacantEntry<'a, K, V> { @@ -2291,17 +2324,15 @@ impl<'a, K: Ord, V> VacantEntry<'a, K, V> { loop { match cur_parent { - Ok(parent) => { - match parent.insert(ins_k, ins_v, ins_edge) { - Fit(_) => return unsafe { &mut *out_ptr }, - Split(left, k, v, right) => { - ins_k = k; - ins_v = v; - ins_edge = right; - cur_parent = left.ascend().map_err(|n| n.into_root_mut()); - } + Ok(parent) => match parent.insert(ins_k, ins_v, ins_edge) { + Fit(_) => return unsafe { &mut *out_ptr }, + Split(left, k, v, right) => { + ins_k = k; + ins_v = v; + ins_edge = right; + cur_parent = left.ascend().map_err(|n| n.into_root_mut()); } - } + }, Err(root) => { root.push_level().push(ins_k, ins_v, ins_edge); return unsafe { &mut *out_ptr }; @@ -2485,7 +2516,9 @@ impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> { let key_loc = internal.kv_mut().0 as *mut K; let val_loc = internal.kv_mut().1 as *mut V; - let to_remove = first_leaf_edge(internal.right_edge().descend()).right_kv().ok(); + let to_remove = first_leaf_edge(internal.right_edge().descend()) + .right_kv() + .ok(); let to_remove = unsafe { unwrap_unchecked(to_remove) }; let (hole, key, val) = to_remove.remove(); @@ -2527,8 +2560,9 @@ enum UnderflowResult<'a, K, V> { Stole(NodeRef, K, V, marker::Internal>), } -fn handle_underfull_node<'a, K, V>(node: NodeRef, K, V, marker::LeafOrInternal>) - -> UnderflowResult<'a, K, V> { +fn handle_underfull_node<'a, K, V>( + node: NodeRef, K, V, marker::LeafOrInternal>, +) -> UnderflowResult<'a, K, V> { let parent = if let Ok(parent) = node.ascend() { parent } else { @@ -2537,14 +2571,12 @@ fn handle_underfull_node<'a, K, V>(node: NodeRef, K, V, marker:: let (is_left, mut handle) = match parent.left_kv() { Ok(left) => (true, left), - Err(parent) => { - match 
parent.right_kv() { - Ok(right) => (false, right), - Err(parent) => { - return EmptyParent(parent.into_node()); - } + Err(parent) => match parent.right_kv() { + Ok(right) => (false, right), + Err(parent) => { + return EmptyParent(parent.into_node()); } - } + }, }; if handle.can_merge() { diff --git a/src/liballoc/collections/btree/mod.rs b/src/liballoc/collections/btree/mod.rs index 8b7dc07063b62..f73a24d09916b 100644 --- a/src/liballoc/collections/btree/mod.rs +++ b/src/liballoc/collections/btree/mod.rs @@ -1,6 +1,6 @@ +pub mod map; mod node; mod search; -pub mod map; pub mod set; #[doc(hidden)] diff --git a/src/liballoc/collections/btree/node.rs b/src/liballoc/collections/btree/node.rs index f9a21aa95db71..4f14fea91afb4 100644 --- a/src/liballoc/collections/btree/node.rs +++ b/src/liballoc/collections/btree/node.rs @@ -33,10 +33,10 @@ use core::marker::PhantomData; use core::mem::{self, MaybeUninit}; -use core::ptr::{self, Unique, NonNull}; +use core::ptr::{self, NonNull, Unique}; use core::slice; -use alloc::{Global, Alloc, Layout}; +use alloc::{Alloc, Global, Layout}; use boxed::Box; const B: usize = 6; @@ -110,7 +110,7 @@ impl LeafNode { vals: MaybeUninit::uninitialized(), parent: ptr::null(), parent_idx: MaybeUninit::uninitialized(), - len: 0 + len: 0, } } } @@ -159,7 +159,7 @@ impl InternalNode { unsafe fn new() -> Self { InternalNode { data: LeafNode::new(), - edges: mem::uninitialized() + edges: mem::uninitialized(), } } } @@ -169,22 +169,28 @@ impl InternalNode { /// of nodes is actually behind the box, and, partially due to this lack of information, has no /// destructor. struct BoxedNode { - ptr: Unique> + ptr: Unique>, } impl BoxedNode { fn from_leaf(node: Box>) -> Self { - BoxedNode { ptr: Box::into_unique(node) } + BoxedNode { + ptr: Box::into_unique(node), + } } fn from_internal(node: Box>) -> Self { unsafe { - BoxedNode { ptr: Unique::new_unchecked(Box::into_raw(node) as *mut LeafNode) } + BoxedNode { + ptr: Unique::new_unchecked(Box::into_raw(node) as *mut LeafNode), + } } } unsafe fn from_ptr(ptr: NonNull>) -> Self { - BoxedNode { ptr: Unique::from(ptr) } + BoxedNode { + ptr: Unique::from(ptr), + } } fn as_ptr(&self) -> NonNull> { @@ -196,11 +202,11 @@ impl BoxedNode { /// and must be cleaned up manually. pub struct Root { node: BoxedNode, - height: usize + height: usize, } -unsafe impl Sync for Root { } -unsafe impl Send for Root { } +unsafe impl Sync for Root {} +unsafe impl Send for Root {} impl Root { pub fn is_shared_root(&self) -> bool { @@ -211,7 +217,7 @@ impl Root { Root { node: unsafe { BoxedNode::from_ptr(NonNull::new_unchecked( - &EMPTY_ROOT_NODE as *const _ as *const LeafNode as *mut _ + &EMPTY_ROOT_NODE as *const _ as *const LeafNode as *mut _, )) }, height: 0, @@ -221,12 +227,11 @@ impl Root { pub fn new_leaf() -> Self { Root { node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })), - height: 0 + height: 0, } } - pub fn as_ref(&self) - -> NodeRef { + pub fn as_ref(&self) -> NodeRef { NodeRef { height: self.height, node: self.node.as_ptr(), @@ -235,8 +240,7 @@ impl Root { } } - pub fn as_mut(&mut self) - -> NodeRef { + pub fn as_mut(&mut self) -> NodeRef { NodeRef { height: self.height, node: self.node.as_ptr(), @@ -245,8 +249,7 @@ impl Root { } } - pub fn into_ref(self) - -> NodeRef { + pub fn into_ref(self) -> NodeRef { NodeRef { height: self.height, node: self.node.as_ptr(), @@ -257,8 +260,7 @@ impl Root { /// Adds a new internal node with a single edge, pointing to the previous root, and make that /// new node the root. 
This increases the height by 1 and is the opposite of `pop_level`. - pub fn push_level(&mut self) - -> NodeRef { + pub fn push_level(&mut self) -> NodeRef { debug_assert!(!self.is_shared_root()); let mut new_node = Box::new(unsafe { InternalNode::new() }); new_node.edges[0] = unsafe { BoxedNode::from_ptr(self.node.as_ptr()) }; @@ -270,7 +272,7 @@ impl Root { height: self.height, node: self.node.as_ptr(), root: self as *mut _, - _marker: PhantomData + _marker: PhantomData, }; unsafe { @@ -290,17 +292,24 @@ impl Root { let top = self.node.ptr; self.node = unsafe { - BoxedNode::from_ptr(self.as_mut() - .cast_unchecked::() - .first_edge() - .descend() - .node) + BoxedNode::from_ptr( + self.as_mut() + .cast_unchecked::() + .first_edge() + .descend() + .node, + ) }; self.height -= 1; - unsafe { (*self.as_mut().as_leaf_mut()).parent = ptr::null(); } + unsafe { + (*self.as_mut().as_leaf_mut()).parent = ptr::null(); + } unsafe { - Global.dealloc(NonNull::from(top).cast(), Layout::new::>()); + Global.dealloc( + NonNull::from(top).cast(), + Layout::new::>(), + ); } } } @@ -332,43 +341,34 @@ pub struct NodeRef { node: NonNull>, // This is null unless the borrow type is `Mut` root: *const Root, - _marker: PhantomData<(BorrowType, Type)> + _marker: PhantomData<(BorrowType, Type)>, } -impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef, K, V, Type> { } +impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef, K, V, Type> {} impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef, K, V, Type> { fn clone(&self) -> Self { *self } } -unsafe impl Sync - for NodeRef { } +unsafe impl Sync for NodeRef {} -unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send - for NodeRef, K, V, Type> { } -unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send - for NodeRef, K, V, Type> { } -unsafe impl Send - for NodeRef { } +unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef, K, V, Type> {} +unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef, K, V, Type> {} +unsafe impl Send for NodeRef {} impl NodeRef { fn as_internal(&self) -> &InternalNode { - unsafe { - &*(self.node.as_ptr() as *mut InternalNode) - } + unsafe { &*(self.node.as_ptr() as *mut InternalNode) } } } impl<'a, K, V> NodeRef, K, V, marker::Internal> { fn as_internal_mut(&mut self) -> &mut InternalNode { - unsafe { - &mut *(self.node.as_ptr() as *mut InternalNode) - } + unsafe { &mut *(self.node.as_ptr() as *mut InternalNode) } } } - impl NodeRef { /// Finds the length of the node. This is the number of keys or values. In an /// internal node, the number of edges is `len() + 1`. @@ -389,7 +389,7 @@ impl NodeRef { height: self.height, node: self.node, root: self.root, - _marker: PhantomData + _marker: PhantomData, } } @@ -399,7 +399,7 @@ impl NodeRef { height: self.height, node: self.node, root: self.root, - _marker: PhantomData + _marker: PhantomData, } } @@ -409,9 +409,7 @@ impl NodeRef { } fn as_header(&self) -> &NodeHeader { - unsafe { - &*(self.node.as_ptr() as *const NodeHeader) - } + unsafe { &*(self.node.as_ptr() as *const NodeHeader) } } pub fn is_shared_root(&self) -> bool { @@ -433,17 +431,9 @@ impl NodeRef { /// /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should /// both, upon success, do nothing. 
- pub fn ascend(self) -> Result< - Handle< - NodeRef< - BorrowType, - K, V, - marker::Internal - >, - marker::Edge - >, - Self - > { + pub fn ascend( + self, + ) -> Result, marker::Edge>, Self> { let parent_as_leaf = self.as_header().parent as *const LeafNode; if let Some(non_zero) = NonNull::new(parent_as_leaf as *mut _) { Ok(Handle { @@ -451,10 +441,10 @@ impl NodeRef { height: self.height + 1, node: non_zero, root: self.root, - _marker: PhantomData + _marker: PhantomData, }, idx: unsafe { usize::from(*self.as_header().parent_idx.get_ref()) }, - _marker: PhantomData + _marker: PhantomData, }) } else { Err(self) @@ -488,16 +478,9 @@ impl NodeRef { /// Similar to `ascend`, gets a reference to a node's parent node, but also /// deallocate the current node in the process. This is unsafe because the /// current node will still be accessible despite being deallocated. - pub unsafe fn deallocate_and_ascend(self) -> Option< - Handle< - NodeRef< - marker::Owned, - K, V, - marker::Internal - >, - marker::Edge - > - > { + pub unsafe fn deallocate_and_ascend( + self, + ) -> Option, marker::Edge>> { debug_assert!(!self.is_shared_root()); let node = self.node; let ret = self.ascend().ok(); @@ -510,16 +493,9 @@ impl NodeRef { /// Similar to `ascend`, gets a reference to a node's parent node, but also /// deallocate the current node in the process. This is unsafe because the /// current node will still be accessible despite being deallocated. - pub unsafe fn deallocate_and_ascend(self) -> Option< - Handle< - NodeRef< - marker::Owned, - K, V, - marker::Internal - >, - marker::Edge - > - > { + pub unsafe fn deallocate_and_ascend( + self, + ) -> Option, marker::Edge>> { let node = self.node; let ret = self.ascend().ok(); Global.dealloc(node.cast(), Layout::new::>()); @@ -530,14 +506,12 @@ impl NodeRef { impl<'a, K, V, Type> NodeRef, K, V, Type> { /// Unsafely asserts to the compiler some static information about whether this /// node is a `Leaf`. - unsafe fn cast_unchecked(&mut self) - -> NodeRef { - + unsafe fn cast_unchecked(&mut self) -> NodeRef { NodeRef { height: self.height, node: self.node, root: self.root, - _marker: PhantomData + _marker: PhantomData, } } @@ -556,7 +530,7 @@ impl<'a, K, V, Type> NodeRef, K, V, Type> { height: self.height, node: self.node, root: self.root, - _marker: PhantomData + _marker: PhantomData, } } @@ -612,21 +586,14 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { assert!(mem::size_of::>() == mem::size_of::>()); let header = self.as_header() as *const _ as *const NodeHeader; let keys = unsafe { &(*header).keys_start as *const _ as *const K }; - unsafe { - slice::from_raw_parts(keys, self.len()) - } + unsafe { slice::from_raw_parts(keys, self.len()) } } } fn into_val_slice(self) -> &'a [V] { debug_assert!(!self.is_shared_root()); // We cannot be the root, so `as_leaf` is okay - unsafe { - slice::from_raw_parts( - self.as_leaf().vals.as_ptr() as *const V, - self.len() - ) - } + unsafe { slice::from_raw_parts(self.as_leaf().vals.as_ptr() as *const V, self.len()) } } fn into_slices(self) -> (&'a [K], &'a [V]) { @@ -639,9 +606,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { /// Gets a mutable reference to the root itself. This is useful primarily when the /// height of the tree needs to be adjusted. Never call this on a reborrowed pointer. 
pub fn into_root_mut(self) -> &'a mut Root { - unsafe { - &mut *(self.root as *mut Root) - } + unsafe { &mut *(self.root as *mut Root) } } fn into_key_slice_mut(mut self) -> &'a mut [K] { @@ -651,7 +616,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { unsafe { slice::from_raw_parts_mut( (*self.as_leaf_mut()).keys.as_mut_ptr() as *mut K, - self.len() + self.len(), ) } } @@ -662,7 +627,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { unsafe { slice::from_raw_parts_mut( (*self.as_leaf_mut()).vals.as_mut_ptr() as *mut V, - self.len() + self.len(), ) } } @@ -718,7 +683,10 @@ impl<'a, K, V> NodeRef, K, V, marker::Internal> { unsafe { ptr::write(self.keys_mut().get_unchecked_mut(idx), key); ptr::write(self.vals_mut().get_unchecked_mut(idx), val); - ptr::write(self.as_internal_mut().edges.get_unchecked_mut(idx + 1), edge.node); + ptr::write( + self.as_internal_mut().edges.get_unchecked_mut(idx + 1), + edge.node, + ); (*self.as_leaf_mut()).len += 1; @@ -750,10 +718,10 @@ impl<'a, K, V> NodeRef, K, V, marker::Internal> { slice_insert( slice::from_raw_parts_mut( self.as_internal_mut().edges.as_mut_ptr(), - self.len()+1 + self.len() + 1, ), 0, - edge.node + edge.node, ); (*self.as_leaf_mut()).len += 1; @@ -779,7 +747,10 @@ impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { ForceResult::Leaf(_) => None, ForceResult::Internal(internal) => { let edge = ptr::read(internal.as_internal().edges.get_unchecked(idx + 1)); - let mut new_root = Root { node: edge, height: internal.height - 1 }; + let mut new_root = Root { + node: edge, + height: internal.height - 1, + }; (*new_root.as_mut().as_leaf_mut()).parent = ptr::null(); Some(new_root) } @@ -807,12 +778,15 @@ impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { let edge = slice_remove( slice::from_raw_parts_mut( internal.as_internal_mut().edges.as_mut_ptr(), - old_len+1 + old_len + 1, ), - 0 + 0, ); - let mut new_root = Root { node: edge, height: internal.height - 1 }; + let mut new_root = Root { + node: edge, + height: internal.height - 1, + }; (*new_root.as_mut().as_leaf_mut()).parent = ptr::null(); for i in 0..old_len { @@ -830,32 +804,31 @@ impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { } fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) { - ( - self.keys_mut().as_mut_ptr(), - self.vals_mut().as_mut_ptr() - ) + (self.keys_mut().as_mut_ptr(), self.vals_mut().as_mut_ptr()) } } impl NodeRef { /// Checks whether a node is an `Internal` node or a `Leaf` node. - pub fn force(self) -> ForceResult< + pub fn force( + self, + ) -> ForceResult< NodeRef, - NodeRef + NodeRef, > { if self.height == 0 { ForceResult::Leaf(NodeRef { height: self.height, node: self.node, root: self.root, - _marker: PhantomData + _marker: PhantomData, }) } else { ForceResult::Internal(NodeRef { height: self.height, node: self.node, root: self.root, - _marker: PhantomData + _marker: PhantomData, }) } } @@ -872,10 +845,10 @@ impl NodeRef { pub struct Handle { node: Node, idx: usize, - _marker: PhantomData + _marker: PhantomData, } -impl Copy for Handle { } +impl Copy for Handle {} // We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be // `Clone`able is when it is an immutable reference and therefore `Copy`. 
impl Clone for Handle { @@ -900,7 +873,7 @@ impl Handle, mar Handle { node, idx, - _marker: PhantomData + _marker: PhantomData, } } @@ -914,32 +887,28 @@ impl Handle, mar } impl PartialEq - for Handle, HandleType> { - + for Handle, HandleType> +{ fn eq(&self, other: &Self) -> bool { self.node.node == other.node.node && self.idx == other.idx } } impl - Handle, HandleType> { - + Handle, HandleType> +{ /// Temporarily takes out another, immutable handle on the same location. - pub fn reborrow(&self) - -> Handle, HandleType> { - + pub fn reborrow(&self) -> Handle, HandleType> { // We can't use Handle::new_kv or Handle::new_edge because we don't know our type Handle { node: self.node.reborrow(), idx: self.idx, - _marker: PhantomData + _marker: PhantomData, } } } -impl<'a, K, V, NodeType, HandleType> - Handle, K, V, NodeType>, HandleType> { - +impl<'a, K, V, NodeType, HandleType> Handle, K, V, NodeType>, HandleType> { /// Temporarily takes out another, mutable handle on the same location. Beware, as /// this method is very dangerous, doubly so since it may not immediately appear /// dangerous. @@ -950,21 +919,19 @@ impl<'a, K, V, NodeType, HandleType> /// of a reborrowed handle, out of bounds. // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety. - pub unsafe fn reborrow_mut(&mut self) - -> Handle, HandleType> { - + pub unsafe fn reborrow_mut( + &mut self, + ) -> Handle, HandleType> { // We can't use Handle::new_kv or Handle::new_edge because we don't know our type Handle { node: self.node.reborrow_mut(), idx: self.idx, - _marker: PhantomData + _marker: PhantomData, } } } -impl - Handle, marker::Edge> { - +impl Handle, marker::Edge> { /// Creates a new handle to an edge in `node`. `idx` must be less than or equal to /// `node.len()`. pub fn new_edge(node: NodeRef, idx: usize) -> Self { @@ -974,13 +941,11 @@ impl Handle { node, idx, - _marker: PhantomData + _marker: PhantomData, } } - pub fn left_kv(self) - -> Result, marker::KV>, Self> { - + pub fn left_kv(self) -> Result, marker::KV>, Self> { if self.idx > 0 { Ok(Handle::new_kv(self.node, self.idx - 1)) } else { @@ -988,9 +953,7 @@ impl } } - pub fn right_kv(self) - -> Result, marker::KV>, Self> { - + pub fn right_kv(self) -> Result, marker::KV>, Self> { if self.idx < self.node.len() { Ok(Handle::new_kv(self.node, self.idx)) } else { @@ -1024,9 +987,7 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::Edge /// this edge. This method splits the node if there isn't enough room. /// /// The returned pointer points to the inserted value. 
- pub fn insert(mut self, key: K, val: V) - -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) { - + pub fn insert(mut self, key: K, val: V) -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) { if self.node.len() < CAPACITY { let ptr = self.insert_fit(key, val); (InsertResult::Fit(Handle::new_kv(self.node, self.idx)), ptr) @@ -1034,15 +995,14 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::Edge let middle = Handle::new_kv(self.node, B); let (mut left, k, v, mut right) = middle.split(); let ptr = if self.idx <= B { - unsafe { - Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val) - } + unsafe { Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val) } } else { unsafe { Handle::new_edge( right.as_mut().cast_unchecked::(), - self.idx - (B + 1) - ).insert_fit(key, val) + self.idx - (B + 1), + ) + .insert_fit(key, val) } }; (InsertResult::Split(left, k, v, right), ptr) @@ -1065,9 +1025,9 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: /// Unsafely asserts to the compiler some static information about whether the underlying /// node of this handle is a `Leaf`. - unsafe fn cast_unchecked(&mut self) - -> Handle, marker::Edge> { - + unsafe fn cast_unchecked( + &mut self, + ) -> Handle, marker::Edge> { Handle::new_edge(self.node.cast_unchecked(), self.idx) } @@ -1086,13 +1046,13 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: slice_insert( slice::from_raw_parts_mut( self.node.as_internal_mut().edges.as_mut_ptr(), - self.node.len() + self.node.len(), ), self.idx + 1, - edge.node + edge.node, ); - for i in (self.idx+1)..(self.node.len()+1) { + for i in (self.idx + 1)..(self.node.len() + 1) { Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link(); } } @@ -1101,9 +1061,12 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: /// Inserts a new key/value pair and an edge that will go to the right of that new pair /// between this edge and the key/value pair to the right of this edge. This method splits /// the node if there isn't enough room. - pub fn insert(mut self, key: K, val: V, edge: Root) - -> InsertResult<'a, K, V, marker::Internal> { - + pub fn insert( + mut self, + key: K, + val: V, + edge: Root, + ) -> InsertResult<'a, K, V, marker::Internal> { // Necessary for correctness, but this is an internal module debug_assert!(edge.height == self.node.height - 1); @@ -1121,8 +1084,9 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: unsafe { Handle::new_edge( right.as_mut().cast_unchecked::(), - self.idx - (B + 1) - ).insert_fit(key, val, edge); + self.idx - (B + 1), + ) + .insert_fit(key, val, edge); } } InsertResult::Split(left, k, v, right) @@ -1130,9 +1094,7 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: } } -impl - Handle, marker::Edge> { - +impl Handle, marker::Edge> { /// Finds the node pointed to by this edge. 
/// /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should @@ -1140,31 +1102,34 @@ impl pub fn descend(self) -> NodeRef { NodeRef { height: self.node.height - 1, - node: unsafe { self.node.as_internal().edges.get_unchecked(self.idx).as_ptr() }, + node: unsafe { + self.node + .as_internal() + .edges + .get_unchecked(self.idx) + .as_ptr() + }, root: self.node.root, - _marker: PhantomData + _marker: PhantomData, } } } -impl<'a, K: 'a, V: 'a, NodeType> - Handle, K, V, NodeType>, marker::KV> { - +impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { pub fn into_kv(self) -> (&'a K, &'a V) { let (keys, vals) = self.node.into_slices(); - unsafe { - (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx)) - } + unsafe { (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx)) } } } -impl<'a, K: 'a, V: 'a, NodeType> - Handle, K, V, NodeType>, marker::KV> { - +impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) { let (keys, vals) = self.node.into_slices_mut(); unsafe { - (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx)) + ( + keys.get_unchecked_mut(self.idx), + vals.get_unchecked_mut(self.idx), + ) } } } @@ -1173,7 +1138,10 @@ impl<'a, K, V, NodeType> Handle, K, V, NodeType>, marker pub fn kv_mut(&mut self) -> (&mut K, &mut V) { unsafe { let (keys, vals) = self.node.reborrow_mut().into_slices_mut(); - (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx)) + ( + keys.get_unchecked_mut(self.idx), + vals.get_unchecked_mut(self.idx), + ) } } } @@ -1186,8 +1154,14 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::KV> /// - The key and value pointed to by this handle and extracted. /// - All the key/value pairs to the right of this handle are put into a newly /// allocated node. - pub fn split(mut self) - -> (NodeRef, K, V, marker::Leaf>, K, V, Root) { + pub fn split( + mut self, + ) -> ( + NodeRef, K, V, marker::Leaf>, + K, + V, + Root, + ) { debug_assert!(!self.node.is_shared_root()); unsafe { let mut new_node = Box::new(LeafNode::new()); @@ -1200,12 +1174,12 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::KV> ptr::copy_nonoverlapping( self.node.keys().as_ptr().add(self.idx + 1), new_node.keys.as_mut_ptr() as *mut K, - new_len + new_len, ); ptr::copy_nonoverlapping( self.node.vals().as_ptr().add(self.idx + 1), new_node.vals.as_mut_ptr() as *mut V, - new_len + new_len, ); (*self.node.as_leaf_mut()).len = self.idx as u16; @@ -1213,19 +1187,25 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::KV> ( self.node, - k, v, + k, + v, Root { node: BoxedNode::from_leaf(new_node), - height: 0 - } + height: 0, + }, ) } } /// Removes the key/value pair pointed to by this handle, returning the edge between the /// now adjacent key/value pairs to the left and right of this handle. - pub fn remove(mut self) - -> (Handle, K, V, marker::Leaf>, marker::Edge>, K, V) { + pub fn remove( + mut self, + ) -> ( + Handle, K, V, marker::Leaf>, marker::Edge>, + K, + V, + ) { debug_assert!(!self.node.is_shared_root()); unsafe { let k = slice_remove(self.node.keys_mut(), self.idx); @@ -1244,8 +1224,14 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: /// - The key and value pointed to by this handle and extracted. /// - All the edges and key/value pairs to the right of this handle are put into /// a newly allocated node. 
- pub fn split(mut self) - -> (NodeRef, K, V, marker::Internal>, K, V, Root) { + pub fn split( + mut self, + ) -> ( + NodeRef, K, V, marker::Internal>, + K, + V, + Root, + ) { unsafe { let mut new_node = Box::new(InternalNode::new()); @@ -1258,17 +1244,17 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: ptr::copy_nonoverlapping( self.node.keys().as_ptr().add(self.idx + 1), new_node.data.keys.as_mut_ptr() as *mut K, - new_len + new_len, ); ptr::copy_nonoverlapping( self.node.vals().as_ptr().add(self.idx + 1), new_node.data.vals.as_mut_ptr() as *mut V, - new_len + new_len, ); ptr::copy_nonoverlapping( self.node.as_internal().edges.as_ptr().add(self.idx + 1), new_node.edges.as_mut_ptr(), - new_len + 1 + new_len + 1, ); (*self.node.as_leaf_mut()).len = self.idx as u16; @@ -1279,15 +1265,11 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: height, }; - for i in 0..(new_len+1) { + for i in 0..(new_len + 1) { Handle::new_edge(new_root.as_mut().cast_unchecked(), i).correct_parent_link(); } - ( - self.node, - k, v, - new_root - ) + (self.node, k, v, new_root) } } @@ -1295,17 +1277,10 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: /// a node to hold the combination of the nodes to the left and right of this handle along /// with the key/value pair at this handle. pub fn can_merge(&self) -> bool { - ( - self.reborrow() - .left_edge() - .descend() - .len() - + self.reborrow() - .right_edge() - .descend() - .len() - + 1 - ) <= CAPACITY + (self.reborrow().left_edge().descend().len() + + self.reborrow().right_edge().descend().len() + + 1) + <= CAPACITY } /// Combines the node immediately to the left of this handle, the key/value pair pointed @@ -1313,8 +1288,9 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: /// child of the underlying node, returning an edge referencing that new child. /// /// Assumes that this edge `.can_merge()`. 
- pub fn merge(mut self) - -> Handle, K, V, marker::Internal>, marker::Edge> { + pub fn merge( + mut self, + ) -> Handle, K, V, marker::Internal>, marker::Edge> { let self1 = unsafe { ptr::read(&self) }; let self2 = unsafe { ptr::read(&self) }; let mut left_node = self1.left_edge().descend(); @@ -1326,23 +1302,27 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: debug_assert!(left_len + right_len + 1 <= CAPACITY); unsafe { - ptr::write(left_node.keys_mut().get_unchecked_mut(left_len), - slice_remove(self.node.keys_mut(), self.idx)); + ptr::write( + left_node.keys_mut().get_unchecked_mut(left_len), + slice_remove(self.node.keys_mut(), self.idx), + ); ptr::copy_nonoverlapping( right_node.keys().as_ptr(), left_node.keys_mut().as_mut_ptr().add(left_len + 1), - right_len + right_len, + ); + ptr::write( + left_node.vals_mut().get_unchecked_mut(left_len), + slice_remove(self.node.vals_mut(), self.idx), ); - ptr::write(left_node.vals_mut().get_unchecked_mut(left_len), - slice_remove(self.node.vals_mut(), self.idx)); ptr::copy_nonoverlapping( right_node.vals().as_ptr(), left_node.vals_mut().as_mut_ptr().add(left_len + 1), - right_len + right_len, ); slice_remove(&mut self.node.as_internal_mut().edges, self.idx + 1); - for i in self.idx+1..self.node.len() { + for i in self.idx + 1..self.node.len() { Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link(); } (*self.node.as_leaf_mut()).len -= 1; @@ -1352,30 +1332,23 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: if self.node.height > 1 { ptr::copy_nonoverlapping( right_node.cast_unchecked().as_internal().edges.as_ptr(), - left_node.cast_unchecked() - .as_internal_mut() - .edges - .as_mut_ptr() - .add(left_len + 1), - right_len + 1 + left_node + .cast_unchecked() + .as_internal_mut() + .edges + .as_mut_ptr() + .add(left_len + 1), + right_len + 1, ); - for i in left_len+1..left_len+right_len+2 { - Handle::new_edge( - left_node.cast_unchecked().reborrow_mut(), - i - ).correct_parent_link(); + for i in left_len + 1..left_len + right_len + 2 { + Handle::new_edge(left_node.cast_unchecked().reborrow_mut(), i) + .correct_parent_link(); } - Global.dealloc( - right_node.node.cast(), - Layout::new::>(), - ); + Global.dealloc(right_node.node.cast(), Layout::new::>()); } else { - Global.dealloc( - right_node.node.cast(), - Layout::new::>(), - ); + Global.dealloc(right_node.node.cast(), Layout::new::>()); } Handle::new_edge(self.node, self.idx) @@ -1394,7 +1367,7 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: match self.reborrow_mut().right_edge().descend().force() { ForceResult::Leaf(mut leaf) => leaf.push_front(k, v), - ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap()) + ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap()), } } } @@ -1411,7 +1384,7 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: match self.reborrow_mut().left_edge().descend().force() { ForceResult::Leaf(mut leaf) => leaf.push(k, v), - ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap()) + ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap()), } } } @@ -1440,12 +1413,8 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: }; // Make room for stolen elements in the right child. 
- ptr::copy(right_kv.0, - right_kv.0.add(count), - right_len); - ptr::copy(right_kv.1, - right_kv.1.add(count), - right_len); + ptr::copy(right_kv.0, right_kv.0.add(count), right_len); + ptr::copy(right_kv.1, right_kv.1.add(count), right_len); // Move elements from the left child to the right one. move_kv(left_kv, new_left_len + 1, right_kv, 0, count - 1); @@ -1464,15 +1433,15 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: (ForceResult::Internal(left), ForceResult::Internal(mut right)) => { // Make room for stolen edges. let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr(); - ptr::copy(right_edges, - right_edges.add(count), - right_len + 1); + ptr::copy(right_edges, right_edges.add(count), right_len + 1); right.correct_childrens_parent_links(count, count + right_len + 1); move_edges(left, new_left_len + 1, right, 0, count); - }, - (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { } - _ => { unreachable!(); } + } + (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {} + _ => { + unreachable!(); + } } } } @@ -1510,12 +1479,8 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: move_kv(right_kv, count - 1, parent_kv, 0, 1); // Fix right indexing - ptr::copy(right_kv.0.add(count), - right_kv.0, - new_right_len); - ptr::copy(right_kv.1.add(count), - right_kv.1, - new_right_len); + ptr::copy(right_kv.0.add(count), right_kv.0, new_right_len); + ptr::copy(right_kv.1.add(count), right_kv.1, new_right_len); } (*left_node.reborrow_mut().as_leaf_mut()).len += count as u16; @@ -1527,64 +1492,68 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: // Fix right indexing. let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr(); - ptr::copy(right_edges.add(count), - right_edges, - new_right_len + 1); + ptr::copy(right_edges.add(count), right_edges, new_right_len + 1); right.correct_childrens_parent_links(0, new_right_len + 1); - }, - (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { } - _ => { unreachable!(); } + } + (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {} + _ => { + unreachable!(); + } } } } } unsafe fn move_kv( - source: (*mut K, *mut V), source_offset: usize, - dest: (*mut K, *mut V), dest_offset: usize, - count: usize) -{ - ptr::copy_nonoverlapping(source.0.add(source_offset), - dest.0.add(dest_offset), - count); - ptr::copy_nonoverlapping(source.1.add(source_offset), - dest.1.add(dest_offset), - count); + source: (*mut K, *mut V), + source_offset: usize, + dest: (*mut K, *mut V), + dest_offset: usize, + count: usize, +) { + ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count); + ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count); } // Source and destination must have the same height. unsafe fn move_edges( - mut source: NodeRef, source_offset: usize, - mut dest: NodeRef, dest_offset: usize, - count: usize) -{ + mut source: NodeRef, + source_offset: usize, + mut dest: NodeRef, + dest_offset: usize, + count: usize, +) { let source_ptr = source.as_internal_mut().edges.as_mut_ptr(); let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr(); - ptr::copy_nonoverlapping(source_ptr.add(source_offset), - dest_ptr.add(dest_offset), - count); + ptr::copy_nonoverlapping( + source_ptr.add(source_offset), + dest_ptr.add(dest_offset), + count, + ); dest.correct_childrens_parent_links(dest_offset, dest_offset + count); } impl - Handle, HandleType> { - + Handle, HandleType> +{ /// Check whether the underlying node is an `Internal` node or a `Leaf` node. 
- pub fn force(self) -> ForceResult< + pub fn force( + self, + ) -> ForceResult< Handle, HandleType>, - Handle, HandleType> + Handle, HandleType>, > { match self.node.force() { ForceResult::Leaf(node) => ForceResult::Leaf(Handle { node, idx: self.idx, - _marker: PhantomData + _marker: PhantomData, }), ForceResult::Internal(node) => ForceResult::Internal(Handle { node, idx: self.idx, - _marker: PhantomData - }) + _marker: PhantomData, + }), } } } @@ -1592,8 +1561,10 @@ impl impl<'a, K, V> Handle, K, V, marker::LeafOrInternal>, marker::Edge> { /// Move the suffix after `self` from one node to another one. `right` must be empty. /// The first edge of `right` remains unchanged. - pub fn move_suffix(&mut self, - right: &mut NodeRef, K, V, marker::LeafOrInternal>) { + pub fn move_suffix( + &mut self, + right: &mut NodeRef, K, V, marker::LeafOrInternal>, + ) { unsafe { let left_new_len = self.idx; let mut left_node = self.reborrow_mut().into_node(); @@ -1607,7 +1578,6 @@ impl<'a, K, V> Handle, K, V, marker::LeafOrInternal>, ma let left_kv = left_node.reborrow_mut().into_kv_pointers_mut(); let right_kv = right_node.reborrow_mut().into_kv_pointers_mut(); - move_kv(left_kv, left_new_len, right_kv, 0, right_new_len); (*left_node.reborrow_mut().as_leaf_mut()).len = left_new_len as u16; @@ -1616,9 +1586,11 @@ impl<'a, K, V> Handle, K, V, marker::LeafOrInternal>, ma match (left_node.force(), right_node.force()) { (ForceResult::Internal(left), ForceResult::Internal(right)) => { move_edges(left, left_new_len + 1, right, 1, right_new_len); - }, - (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { } - _ => { unreachable!(); } + } + (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {} + _ => { + unreachable!(); + } } } } @@ -1626,34 +1598,34 @@ impl<'a, K, V> Handle, K, V, marker::LeafOrInternal>, ma pub enum ForceResult { Leaf(Leaf), - Internal(Internal) + Internal(Internal), } pub enum InsertResult<'a, K, V, Type> { Fit(Handle, K, V, Type>, marker::KV>), - Split(NodeRef, K, V, Type>, K, V, Root) + Split(NodeRef, K, V, Type>, K, V, Root), } pub mod marker { use core::marker::PhantomData; - pub enum Leaf { } - pub enum Internal { } - pub enum LeafOrInternal { } + pub enum Leaf {} + pub enum Internal {} + pub enum LeafOrInternal {} - pub enum Owned { } + pub enum Owned {} pub struct Immut<'a>(PhantomData<&'a ()>); pub struct Mut<'a>(PhantomData<&'a mut ()>); - pub enum KV { } - pub enum Edge { } + pub enum KV {} + pub enum Edge {} } unsafe fn slice_insert(slice: &mut [T], idx: usize, val: T) { ptr::copy( slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), - slice.len() - idx + slice.len() - idx, ); ptr::write(slice.get_unchecked_mut(idx), val); } @@ -1663,7 +1635,7 @@ unsafe fn slice_remove(slice: &mut [T], idx: usize) -> T { ptr::copy( slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), - slice.len() - idx - 1 + slice.len() - idx - 1, ); ret } diff --git a/src/liballoc/collections/btree/search.rs b/src/liballoc/collections/btree/search.rs index 9010de7c16ac3..2091c390bd02f 100644 --- a/src/liballoc/collections/btree/search.rs +++ b/src/liballoc/collections/btree/search.rs @@ -2,22 +2,24 @@ use core::cmp::Ordering; use borrow::Borrow; -use super::node::{Handle, NodeRef, marker}; +use super::node::{marker, Handle, NodeRef}; -use super::node::ForceResult::*; use self::SearchResult::*; +use super::node::ForceResult::*; pub enum SearchResult { Found(Handle, marker::KV>), - GoDown(Handle, marker::Edge>) + GoDown(Handle, marker::Edge>), } pub fn search_tree( mut node: NodeRef, - key: &Q + key: &Q, ) -> 
SearchResult - where Q: Ord, K: Borrow { - +where + Q: Ord, + K: Borrow, +{ loop { match search_node(node, key) { Found(handle) => return Found(handle), @@ -27,38 +29,38 @@ pub fn search_tree( node = internal.descend(); continue; } - } + }, } } } pub fn search_node( node: NodeRef, - key: &Q + key: &Q, ) -> SearchResult - where Q: Ord, K: Borrow { - +where + Q: Ord, + K: Borrow, +{ match search_linear(&node, key) { - (idx, true) => Found( - Handle::new_kv(node, idx) - ), - (idx, false) => SearchResult::GoDown( - Handle::new_edge(node, idx) - ) + (idx, true) => Found(Handle::new_kv(node, idx)), + (idx, false) => SearchResult::GoDown(Handle::new_edge(node, idx)), } } pub fn search_linear( node: &NodeRef, - key: &Q + key: &Q, ) -> (usize, bool) - where Q: Ord, K: Borrow { - +where + Q: Ord, + K: Borrow, +{ for (i, k) in node.keys().iter().enumerate() { match key.cmp(k.borrow()) { - Ordering::Greater => {}, + Ordering::Greater => {} Ordering::Equal => return (i, true), - Ordering::Less => return (i, false) + Ordering::Less => return (i, false), } } (node.keys().len(), false) diff --git a/src/liballoc/collections/btree/set.rs b/src/liballoc/collections/btree/set.rs index 71fec7da9a5ed..c82f273effb89 100644 --- a/src/liballoc/collections/btree/set.rs +++ b/src/liballoc/collections/btree/set.rs @@ -1,16 +1,16 @@ // This is pretty much entirely stolen from TreeSet, since BTreeMap has an identical interface // to TreeMap -use core::cmp::Ordering::{self, Less, Greater, Equal}; -use core::cmp::{min, max}; -use core::fmt::Debug; +use core::cmp::Ordering::{self, Equal, Greater, Less}; +use core::cmp::{max, min}; use core::fmt; -use core::iter::{Peekable, FromIterator, FusedIterator}; -use core::ops::{BitOr, BitAnd, BitXor, Sub, RangeBounds}; +use core::fmt::Debug; +use core::iter::{FromIterator, FusedIterator, Peekable}; +use core::ops::{BitAnd, BitOr, BitXor, RangeBounds, Sub}; +use super::Recover; use borrow::Borrow; use collections::btree_map::{self, BTreeMap, Keys}; -use super::Recover; // FIXME(conventions): implement bounded iterators @@ -78,9 +78,7 @@ pub struct Iter<'a, T: 'a> { #[stable(feature = "collection_debug", since = "1.17.0")] impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("Iter") - .field(&self.iter.clone()) - .finish() + f.debug_tuple("Iter").field(&self.iter.clone()).finish() } } @@ -127,9 +125,9 @@ pub struct Difference<'a, T: 'a> { impl<'a, T: 'a + fmt::Debug> fmt::Debug for Difference<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("Difference") - .field(&self.a) - .field(&self.b) - .finish() + .field(&self.a) + .field(&self.b) + .finish() } } @@ -150,9 +148,9 @@ pub struct SymmetricDifference<'a, T: 'a> { impl<'a, T: 'a + fmt::Debug> fmt::Debug for SymmetricDifference<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("SymmetricDifference") - .field(&self.a) - .field(&self.b) - .finish() + .field(&self.a) + .field(&self.b) + .finish() } } @@ -173,9 +171,9 @@ pub struct Intersection<'a, T: 'a> { impl<'a, T: 'a + fmt::Debug> fmt::Debug for Intersection<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("Intersection") - .field(&self.a) - .field(&self.b) - .finish() + .field(&self.a) + .field(&self.b) + .finish() } } @@ -196,9 +194,9 @@ pub struct Union<'a, T: 'a> { impl<'a, T: 'a + fmt::Debug> fmt::Debug for Union<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("Union") - .field(&self.a) - 
.field(&self.b) - .finish() + .field(&self.a) + .field(&self.b) + .finish() } } @@ -215,7 +213,9 @@ impl BTreeSet { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn new() -> BTreeSet { - BTreeSet { map: BTreeMap::new() } + BTreeSet { + map: BTreeMap::new(), + } } /// Constructs a double-ended iterator over a sub-range of elements in the set. @@ -242,9 +242,14 @@ impl BTreeSet { /// ``` #[stable(feature = "btree_range", since = "1.17.0")] pub fn range(&self, range: R) -> Range - where K: Ord, T: Borrow, R: RangeBounds + where + K: Ord, + T: Borrow, + R: RangeBounds, { - Range { iter: self.map.range(range) } + Range { + iter: self.map.range(range), + } } /// Visits the values representing the difference, @@ -296,9 +301,10 @@ impl BTreeSet { /// assert_eq!(sym_diff, [1, 3]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn symmetric_difference<'a>(&'a self, - other: &'a BTreeSet) - -> SymmetricDifference<'a, T> { + pub fn symmetric_difference<'a>( + &'a self, + other: &'a BTreeSet, + ) -> SymmetricDifference<'a, T> { SymmetricDifference { a: self.iter().peekable(), b: other.iter().peekable(), @@ -393,8 +399,9 @@ impl BTreeSet { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn contains(&self, value: &Q) -> bool - where T: Borrow, - Q: Ord + where + T: Borrow, + Q: Ord, { self.map.contains_key(value) } @@ -416,8 +423,9 @@ impl BTreeSet { /// ``` #[stable(feature = "set_recovery", since = "1.9.0")] pub fn get(&self, value: &Q) -> Option<&T> - where T: Borrow, - Q: Ord + where + T: Borrow, + Q: Ord, { Recover::get(&self.map, value) } @@ -577,8 +585,9 @@ impl BTreeSet { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn remove(&mut self, value: &Q) -> bool - where T: Borrow, - Q: Ord + where + T: Borrow, + Q: Ord, { self.map.remove(value).is_some() } @@ -600,8 +609,9 @@ impl BTreeSet { /// ``` #[stable(feature = "set_recovery", since = "1.9.0")] pub fn take(&mut self, value: &Q) -> Option - where T: Borrow, - Q: Ord + where + T: Borrow, + Q: Ord, { Recover::take(&mut self.map, value) } @@ -669,8 +679,13 @@ impl BTreeSet { /// assert!(b.contains(&41)); /// ``` #[stable(feature = "btree_split_off", since = "1.11.0")] - pub fn split_off(&mut self, key: &Q) -> Self where T: Borrow { - BTreeSet { map: self.map.split_off(key) } + pub fn split_off(&mut self, key: &Q) -> Self + where + T: Borrow, + { + BTreeSet { + map: self.map.split_off(key), + } } } @@ -704,7 +719,9 @@ impl BTreeSet { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn iter(&self) -> Iter { - Iter { iter: self.map.keys() } + Iter { + iter: self.map.keys(), + } } /// Returns the number of elements in the set. 
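
The set.rs hunks above only reflow the signatures of lookup and ordering methods such as `range`, `get`, `take`, and `split_off`; behavior is unchanged. As a small illustrative check of how those `Borrow`/`Ord` bounds are exercised from caller code, here is a standalone sketch against the stable `std::collections::BTreeSet` API (the values and variable names are made up for the example; nothing below comes from the patch):

use std::collections::BTreeSet;

fn main() {
    let mut set: BTreeSet<String> = ["ant", "bee", "cat", "dog"]
        .iter()
        .map(|s| s.to_string())
        .collect();

    // `contains` and `get` accept any `Q` with `String: Borrow<Q>` and `Q: Ord`,
    // so plain `&str` keys work against a `BTreeSet<String>`.
    assert!(set.contains("bee"));
    assert_eq!(set.get("cat").map(|s| s.as_str()), Some("cat"));

    // `range` takes anything implementing `RangeBounds<K>` for a borrowed key type.
    let middle: Vec<&String> = set
        .range("bee".to_string().."dog".to_string())
        .collect();
    assert_eq!(middle.len(), 2); // "bee" and "cat"

    // `take` removes and returns the stored value; `split_off` keeps keys below
    // the split point and moves the rest into a new set.
    assert_eq!(set.take("ant"), Some("ant".to_string()));
    let upper = set.split_off("cat");
    assert!(set.iter().all(|s| s.as_str() < "cat"));
    assert!(upper.contains("cat") && upper.contains("dog"));
}
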
@@ -769,7 +786,9 @@ impl IntoIterator for BTreeSet { /// assert_eq!(v, [1, 2, 3, 4]); /// ``` fn into_iter(self) -> IntoIter { - IntoIter { iter: self.map.into_iter() } + IntoIter { + iter: self.map.into_iter(), + } } } @@ -910,7 +929,9 @@ impl Debug for BTreeSet { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Clone for Iter<'a, T> { fn clone(&self) -> Iter<'a, T> { - Iter { iter: self.iter.clone() } + Iter { + iter: self.iter.clone(), + } } } #[stable(feature = "rust1", since = "1.0.0")] @@ -932,7 +953,9 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> { } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> ExactSizeIterator for Iter<'a, T> { - fn len(&self) -> usize { self.iter.len() } + fn len(&self) -> usize { + self.iter.len() + } } #[stable(feature = "fused", since = "1.26.0")] @@ -957,7 +980,9 @@ impl DoubleEndedIterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for IntoIter { - fn len(&self) -> usize { self.iter.len() } + fn len(&self) -> usize { + self.iter.len() + } } #[stable(feature = "fused", since = "1.26.0")] @@ -966,7 +991,9 @@ impl FusedIterator for IntoIter {} #[stable(feature = "btree_range", since = "1.17.0")] impl<'a, T> Clone for Range<'a, T> { fn clone(&self) -> Range<'a, T> { - Range { iter: self.iter.clone() } + Range { + iter: self.iter.clone(), + } } } diff --git a/src/liballoc/collections/linked_list.rs b/src/liballoc/collections/linked_list.rs index 804a2e9c8873b..485c7fd72283a 100644 --- a/src/liballoc/collections/linked_list.rs +++ b/src/liballoc/collections/linked_list.rs @@ -14,14 +14,14 @@ use core::cmp::Ordering; use core::fmt; -use core::hash::{Hasher, Hash}; +use core::hash::{Hash, Hasher}; use core::iter::{FromIterator, FusedIterator}; use core::marker::PhantomData; use core::mem; use core::ptr::NonNull; -use boxed::Box; use super::SpecExtend; +use boxed::Box; /// A doubly-linked list with owned nodes. 
/// @@ -63,9 +63,7 @@ pub struct Iter<'a, T: 'a> { #[stable(feature = "collection_debug", since = "1.17.0")] impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("Iter") - .field(&self.len) - .finish() + f.debug_tuple("Iter").field(&self.len).finish() } } @@ -96,9 +94,9 @@ pub struct IterMut<'a, T: 'a> { impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("IterMut") - .field(&self.list) - .field(&self.len) - .finish() + .field(&self.list) + .field(&self.len) + .finish() } } @@ -118,9 +116,7 @@ pub struct IntoIter { #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("IntoIter") - .field(&self.list) - .finish() + f.debug_tuple("IntoIter").field(&self.list).finish() } } @@ -466,7 +462,8 @@ impl LinkedList { /// ``` #[stable(feature = "linked_list_contains", since = "1.12.0")] pub fn contains(&self, x: &T) -> bool - where T: PartialEq + where + T: PartialEq, { self.iter().any(|e| e == x) } @@ -488,9 +485,7 @@ impl LinkedList { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn front(&self) -> Option<&T> { - unsafe { - self.head.as_ref().map(|node| &node.as_ref().element) - } + unsafe { self.head.as_ref().map(|node| &node.as_ref().element) } } /// Provides a mutable reference to the front element, or `None` if the list @@ -516,9 +511,7 @@ impl LinkedList { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn front_mut(&mut self) -> Option<&mut T> { - unsafe { - self.head.as_mut().map(|node| &mut node.as_mut().element) - } + unsafe { self.head.as_mut().map(|node| &mut node.as_mut().element) } } /// Provides a reference to the back element, or `None` if the list is @@ -538,9 +531,7 @@ impl LinkedList { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn back(&self) -> Option<&T> { - unsafe { - self.tail.as_ref().map(|node| &node.as_ref().element) - } + unsafe { self.tail.as_ref().map(|node| &node.as_ref().element) } } /// Provides a mutable reference to the back element, or `None` if the list @@ -566,9 +557,7 @@ impl LinkedList { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn back_mut(&mut self) -> Option<&mut T> { - unsafe { - self.tail.as_mut().map(|node| &mut node.as_mut().element) - } + unsafe { self.tail.as_mut().map(|node| &mut node.as_mut().element) } } /// Adds an element first in the list. @@ -765,7 +754,8 @@ impl LinkedList { /// ``` #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] pub fn drain_filter(&mut self, filter: F) -> DrainFilter - where F: FnMut(&mut T) -> bool + where + F: FnMut(&mut T) -> bool, { // avoid borrow issues. 
let it = self.head; @@ -911,9 +901,11 @@ impl<'a, T> IterMut<'a, T> { /// } /// ``` #[inline] - #[unstable(feature = "linked_list_extras", - reason = "this is probably better handled by a cursor type -- we'll see", - issue = "27794")] + #[unstable( + feature = "linked_list_extras", + reason = "this is probably better handled by a cursor type -- we'll see", + issue = "27794" + )] pub fn insert_next(&mut self, element: T) { match self.head { None => self.list.push_back(element), @@ -955,16 +947,16 @@ impl<'a, T> IterMut<'a, T> { /// assert_eq!(it.next().unwrap(), &2); /// ``` #[inline] - #[unstable(feature = "linked_list_extras", - reason = "this is probably better handled by a cursor type -- we'll see", - issue = "27794")] + #[unstable( + feature = "linked_list_extras", + reason = "this is probably better handled by a cursor type -- we'll see", + issue = "27794" + )] pub fn peek_next(&mut self) -> Option<&mut T> { if self.len == 0 { None } else { - unsafe { - self.head.as_mut().map(|node| &mut node.as_mut().element) - } + unsafe { self.head.as_mut().map(|node| &mut node.as_mut().element) } } } } @@ -972,7 +964,8 @@ impl<'a, T> IterMut<'a, T> { /// An iterator produced by calling `drain_filter` on LinkedList. #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] pub struct DrainFilter<'a, T: 'a, F: 'a> - where F: FnMut(&mut T) -> bool, +where + F: FnMut(&mut T) -> bool, { list: &'a mut LinkedList, it: Option>>, @@ -983,7 +976,8 @@ pub struct DrainFilter<'a, T: 'a, F: 'a> #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] impl<'a, T, F> Iterator for DrainFilter<'a, T, F> - where F: FnMut(&mut T) -> bool, +where + F: FnMut(&mut T) -> bool, { type Item = T; @@ -1010,7 +1004,8 @@ impl<'a, T, F> Iterator for DrainFilter<'a, T, F> #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] impl<'a, T, F> Drop for DrainFilter<'a, T, F> - where F: FnMut(&mut T) -> bool, +where + F: FnMut(&mut T) -> bool, { fn drop(&mut self) { self.for_each(drop); @@ -1019,12 +1014,11 @@ impl<'a, T, F> Drop for DrainFilter<'a, T, F> #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] impl<'a, T: 'a + fmt::Debug, F> fmt::Debug for DrainFilter<'a, T, F> - where F: FnMut(&mut T) -> bool +where + F: FnMut(&mut T) -> bool, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("DrainFilter") - .field(&self.list) - .finish() + f.debug_tuple("DrainFilter").field(&self.list).finish() } } @@ -1350,8 +1344,10 @@ mod tests { } check_links(&m); assert_eq!(m.len(), 3 + len * 2); - assert_eq!(m.into_iter().collect::>(), - [-2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1]); + assert_eq!( + m.into_iter().collect::>(), + [-2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1] + ); } #[test] @@ -1359,13 +1355,13 @@ mod tests { fn test_send() { let n = list_from(&[1, 2, 3]); thread::spawn(move || { - check_links(&n); - let a: &[_] = &[&1, &2, &3]; - assert_eq!(a, &*n.iter().collect::>()); - }) - .join() - .ok() - .unwrap(); + check_links(&n); + let a: &[_] = &[&1, &2, &3]; + assert_eq!(a, &*n.iter().collect::>()); + }) + .join() + .ok() + .unwrap(); } #[test] diff --git a/src/liballoc/collections/mod.rs b/src/liballoc/collections/mod.rs index 138f5d79bb008..d37265d95750b 100644 --- a/src/liballoc/collections/mod.rs +++ b/src/liballoc/collections/mod.rs @@ -45,7 +45,7 @@ use alloc::{AllocErr, LayoutErr}; /// Augments `AllocErr` with a CapacityOverflow variant. 
#[derive(Clone, PartialEq, Eq, Debug)] -#[unstable(feature = "try_reserve", reason = "new API", issue="48043")] +#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] pub enum CollectionAllocErr { /// Error due to the computed capacity exceeding the collection's maximum /// (usually `isize::MAX` bytes). @@ -54,7 +54,7 @@ pub enum CollectionAllocErr { AllocErr, } -#[unstable(feature = "try_reserve", reason = "new API", issue="48043")] +#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] impl From for CollectionAllocErr { #[inline] fn from(AllocErr: AllocErr) -> Self { @@ -62,7 +62,7 @@ impl From for CollectionAllocErr { } } -#[unstable(feature = "try_reserve", reason = "new API", issue="48043")] +#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] impl From for CollectionAllocErr { #[inline] fn from(_: LayoutErr) -> Self { diff --git a/src/liballoc/collections/vec_deque.rs b/src/liballoc/collections/vec_deque.rs index b3f7ef5fd6ecc..5ff93b0d306d0 100644 --- a/src/liballoc/collections/vec_deque.rs +++ b/src/liballoc/collections/vec_deque.rs @@ -17,8 +17,8 @@ use core::ptr; use core::ptr::NonNull; use core::slice; -use core::hash::{Hash, Hasher}; use core::cmp; +use core::hash::{Hash, Hasher}; use collections::CollectionAllocErr; use raw_vec::RawVec; @@ -157,41 +157,45 @@ impl VecDeque { /// Copies a contiguous block of memory len long from src to dst #[inline] unsafe fn copy(&self, dst: usize, src: usize, len: usize) { - debug_assert!(dst + len <= self.cap(), - "cpy dst={} src={} len={} cap={}", - dst, - src, - len, - self.cap()); - debug_assert!(src + len <= self.cap(), - "cpy dst={} src={} len={} cap={}", - dst, - src, - len, - self.cap()); - ptr::copy(self.ptr().add(src), - self.ptr().add(dst), - len); + debug_assert!( + dst + len <= self.cap(), + "cpy dst={} src={} len={} cap={}", + dst, + src, + len, + self.cap() + ); + debug_assert!( + src + len <= self.cap(), + "cpy dst={} src={} len={} cap={}", + dst, + src, + len, + self.cap() + ); + ptr::copy(self.ptr().add(src), self.ptr().add(dst), len); } /// Copies a contiguous block of memory len long from src to dst #[inline] unsafe fn copy_nonoverlapping(&self, dst: usize, src: usize, len: usize) { - debug_assert!(dst + len <= self.cap(), - "cno dst={} src={} len={} cap={}", - dst, - src, - len, - self.cap()); - debug_assert!(src + len <= self.cap(), - "cno dst={} src={} len={} cap={}", - dst, - src, - len, - self.cap()); - ptr::copy_nonoverlapping(self.ptr().add(src), - self.ptr().add(dst), - len); + debug_assert!( + dst + len <= self.cap(), + "cno dst={} src={} len={} cap={}", + dst, + src, + len, + self.cap() + ); + debug_assert!( + src + len <= self.cap(), + "cno dst={} src={} len={} cap={}", + dst, + src, + len, + self.cap() + ); + ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len); } /// Copies a potentially wrapping block of memory len long from src to dest. 
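
The vec_deque.rs hunks above are likewise pure reflows of the internal ring-buffer helpers; the `debug_assert!`s they wrap guard the invariant that the capacity is a power of two with one slot always left empty, so indices can wrap with a mask. The following standalone toy calculation illustrates that invariant; `wrap_index` here is a hypothetical stand-in written for the example, not the private helper from the patch:

// Toy model of the ring-buffer indexing: with a power-of-two capacity,
// wrapping an index is a single bitwise AND instead of a modulo.
fn wrap_index(index: usize, capacity: usize) -> usize {
    debug_assert!(capacity.is_power_of_two());
    index & (capacity - 1)
}

fn main() {
    let capacity = 8; // always a power of two in this model
    let tail = 6;     // physical slot of the first live element
    let len = 5;      // number of live elements

    // Logical positions 0..len wrap past the end of the buffer, which is
    // exactly the case the wrapping copy helpers above have to handle.
    let slots: Vec<usize> = (0..len).map(|i| wrap_index(tail + i, capacity)).collect();
    assert_eq!(slots, vec![6, 7, 0, 1, 2]);

    // Keeping one slot empty means a "full" buffer holds capacity - 1
    // elements, so head == tail can only ever mean "empty".
    let head = wrap_index(tail + len, capacity);
    assert_ne!(head, tail);
}
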
@@ -200,14 +204,20 @@ impl VecDeque { unsafe fn wrap_copy(&self, dst: usize, src: usize, len: usize) { #[allow(dead_code)] fn diff(a: usize, b: usize) -> usize { - if a <= b { b - a } else { a - b } + if a <= b { + b - a + } else { + a - b + } } - debug_assert!(cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(), - "wrc dst={} src={} len={} cap={}", - dst, - src, - len, - self.cap()); + debug_assert!( + cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(), + "wrc dst={} src={} len={} cap={}", + dst, + src, + len, + self.cap() + ); if src == dst || len == 0 { return; @@ -475,10 +485,7 @@ impl VecDeque { assert!(j < self.len()); let ri = self.wrap_add(self.tail, i); let rj = self.wrap_add(self.tail, j); - unsafe { - ptr::swap(self.ptr().add(ri), - self.ptr().add(rj)) - } + unsafe { ptr::swap(self.ptr().add(ri), self.ptr().add(rj)) } } /// Returns the number of elements the `VecDeque` can hold without @@ -545,7 +552,8 @@ impl VecDeque { pub fn reserve(&mut self, additional: usize) { let old_cap = self.cap(); let used_cap = self.len() + 1; - let new_cap = used_cap.checked_add(additional) + let new_cap = used_cap + .checked_add(additional) .and_then(|needed_cap| needed_cap.checked_next_power_of_two()) .expect("capacity overflow"); @@ -593,8 +601,8 @@ impl VecDeque { /// } /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` - #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] - pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] + pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { self.try_reserve(additional) } @@ -631,11 +639,12 @@ impl VecDeque { /// } /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` - #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] + #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { let old_cap = self.cap(); let used_cap = self.len() + 1; - let new_cap = used_cap.checked_add(additional) + let new_cap = used_cap + .checked_add(additional) .and_then(|needed_cap| needed_cap.checked_next_power_of_two()) .ok_or(CollectionAllocErr::CapacityOverflow)?; @@ -691,16 +700,17 @@ impl VecDeque { /// buf.shrink_to(0); /// assert!(buf.capacity() >= 4); /// ``` - #[unstable(feature = "shrink_to", reason = "new API", issue="56431")] + #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")] pub fn shrink_to(&mut self, min_capacity: usize) { - assert!(self.capacity() >= min_capacity, "Tried to shrink to a larger capacity"); + assert!( + self.capacity() >= min_capacity, + "Tried to shrink to a larger capacity" + ); // +1 since the ringbuffer always leaves one space empty // len + 1 can't overflow for an existing, well-formed ringbuffer. 
- let target_cap = cmp::max( - cmp::max(min_capacity, self.len()) + 1, - MINIMUM_CAPACITY + 1 - ).next_power_of_two(); + let target_cap = cmp::max(cmp::max(min_capacity, self.len()) + 1, MINIMUM_CAPACITY + 1) + .next_power_of_two(); if target_cap < self.cap() { // There are three cases of interest: @@ -959,7 +969,8 @@ impl VecDeque { #[inline] #[stable(feature = "drain", since = "1.6.0")] pub fn drain(&mut self, range: R) -> Drain - where R: RangeBounds + where + R: RangeBounds, { // Memory safety // @@ -975,12 +986,12 @@ impl VecDeque { let start = match range.start_bound() { Included(&n) => n, Excluded(&n) => n + 1, - Unbounded => 0, + Unbounded => 0, }; let end = match range.end_bound() { Included(&n) => n + 1, Excluded(&n) => n, - Unbounded => len, + Unbounded => len, }; assert!(start <= end, "drain lower bound was too large"); assert!(end <= len, "drain upper bound was too large"); @@ -1060,7 +1071,8 @@ impl VecDeque { /// ``` #[stable(feature = "vec_deque_contains", since = "1.12.0")] pub fn contains(&self, x: &T) -> bool - where T: PartialEq + where + T: PartialEq, { let (a, b) = self.as_slices(); a.contains(x) || b.contains(x) @@ -1399,7 +1411,11 @@ impl VecDeque { let contiguous = self.is_contiguous(); - match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) { + match ( + contiguous, + distance_to_tail <= distance_to_head, + idx >= self.tail, + ) { (true, true, _) if index == 0 => { // push_front // @@ -1618,7 +1634,11 @@ impl VecDeque { let contiguous = self.is_contiguous(); - match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) { + match ( + contiguous, + distance_to_tail <= distance_to_head, + idx >= self.tail, + ) { (true, true, _) => { unsafe { // contiguous, remove closer to tail: @@ -1783,22 +1803,24 @@ impl VecDeque { // `at` lies in the first half. let amount_in_first = first_len - at; - ptr::copy_nonoverlapping(first_half.as_ptr().add(at), - other.ptr(), - amount_in_first); + ptr::copy_nonoverlapping(first_half.as_ptr().add(at), other.ptr(), amount_in_first); // just take all of the second half. - ptr::copy_nonoverlapping(second_half.as_ptr(), - other.ptr().add(amount_in_first), - second_len); + ptr::copy_nonoverlapping( + second_half.as_ptr(), + other.ptr().add(amount_in_first), + second_len, + ); } else { // `at` lies in the second half, need to factor in the elements we skipped // in the first half. 
let offset = at - first_len; let amount_in_second = second_len - offset; - ptr::copy_nonoverlapping(second_half.as_ptr().add(offset), - other.ptr(), - amount_in_second); + ptr::copy_nonoverlapping( + second_half.as_ptr().add(offset), + other.ptr(), + amount_in_second, + ); } } @@ -1851,7 +1873,8 @@ impl VecDeque { /// ``` #[stable(feature = "vec_deque_retain", since = "1.4.0")] pub fn retain(&mut self, mut f: F) - where F: FnMut(&T) -> bool + where + F: FnMut(&T) -> bool, { let len = self.len(); let mut del = 0; @@ -1906,7 +1929,7 @@ impl VecDeque { /// assert_eq!(buf, [5, 10, 101, 102, 103]); /// ``` #[stable(feature = "vec_resize_with", since = "1.33.0")] - pub fn resize_with(&mut self, new_len: usize, generator: impl FnMut()->T) { + pub fn resize_with(&mut self, new_len: usize, generator: impl FnMut() -> T) { let len = self.len(); if new_len > len { @@ -2126,10 +2149,7 @@ pub struct Iter<'a, T: 'a> { impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); - f.debug_tuple("Iter") - .field(&front) - .field(&back) - .finish() + f.debug_tuple("Iter").field(&front).field(&back).finish() } } @@ -2166,7 +2186,8 @@ impl<'a, T> Iterator for Iter<'a, T> { } fn fold(self, mut accum: Acc, mut f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, { let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); accum = front.iter().fold(accum, &mut f); @@ -2186,7 +2207,8 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> { } fn rfold(self, mut accum: Acc, mut f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, { let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); accum = back.iter().rfold(accum, &mut f); @@ -2204,7 +2226,6 @@ impl<'a, T> ExactSizeIterator for Iter<'a, T> { #[stable(feature = "fused", since = "1.26.0")] impl<'a, T> FusedIterator for Iter<'a, T> {} - /// A mutable iterator over the elements of a `VecDeque`. /// /// This `struct` is created by the [`iter_mut`] method on [`VecDeque`]. 
See its @@ -2223,10 +2244,7 @@ pub struct IterMut<'a, T: 'a> { impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let (front, back) = RingSlices::ring_slices(&*self.ring, self.head, self.tail); - f.debug_tuple("IterMut") - .field(&front) - .field(&back) - .finish() + f.debug_tuple("IterMut").field(&front).field(&back).finish() } } @@ -2255,7 +2273,8 @@ impl<'a, T> Iterator for IterMut<'a, T> { } fn fold(self, mut accum: Acc, mut f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, { let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); accum = front.iter_mut().fold(accum, &mut f); @@ -2279,7 +2298,8 @@ impl<'a, T> DoubleEndedIterator for IterMut<'a, T> { } fn rfold(self, mut accum: Acc, mut f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, { let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); accum = back.iter_mut().rfold(accum, &mut f); @@ -2313,9 +2333,7 @@ pub struct IntoIter { #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("IntoIter") - .field(&self.inner) - .finish() + f.debug_tuple("IntoIter").field(&self.inner).finish() } } @@ -2372,10 +2390,10 @@ pub struct Drain<'a, T: 'a> { impl<'a, T: 'a + fmt::Debug> fmt::Debug for Drain<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("Drain") - .field(&self.after_tail) - .field(&self.after_head) - .field(&self.iter) - .finish() + .field(&self.after_tail) + .field(&self.after_head) + .field(&self.iter) + .finish() } } @@ -2508,7 +2526,10 @@ macro_rules! __impl_slice_eq1 { }; ($Lhs: ty, $Rhs: ty, $Bound: ident) => { #[stable(feature = "vec_deque_partial_eq_slice", since = "1.17.0")] - impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs where A: PartialEq { + impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs + where + A: PartialEq, + { fn eq(&self, other: &$Rhs) -> bool { if self.len() != other.len() { return false; @@ -2518,7 +2539,7 @@ macro_rules! __impl_slice_eq1 { sa == oa && sb == ob } } - } + }; } __impl_slice_eq1! { VecDeque, Vec } @@ -2662,8 +2683,10 @@ impl From> for VecDeque { // We need to extend the buf if it's not a power of two, too small // or doesn't have at least one free space - if !buf.cap().is_power_of_two() || (buf.cap() < (MINIMUM_CAPACITY + 1)) || - (buf.cap() == len) { + if !buf.cap().is_power_of_two() + || (buf.cap() < (MINIMUM_CAPACITY + 1)) + || (buf.cap() == len) + { let cap = cmp::max(buf.cap() + 1, MINIMUM_CAPACITY + 1).next_power_of_two(); buf.reserve_exact(len, cap - len); } @@ -2696,9 +2719,7 @@ impl From> for Vec { // do this in at most three copy moves. 
if (cap - tail) > head { // right hand block is the long one; move that enough for the left - ptr::copy(buf.add(tail), - buf.add(tail - head), - cap - tail); + ptr::copy(buf.add(tail), buf.add(tail - head), cap - tail); // copy left in the end ptr::copy(buf, buf.add(cap - head), head); // shift the new thing to the start @@ -2736,10 +2757,8 @@ impl From> for Vec { let n_ops = right_edge - left_edge; left_edge += n_ops; right_edge += right_offset + 1; - } } - } let out = Vec::from_raw_parts(buf, len, cap); mem::forget(other); @@ -2859,7 +2878,6 @@ mod tests { // this test isn't covering what it wants to let cap = tester.capacity(); - // len is the length *after* insertion for len in 1..cap { // 0, 1, 2, .., len - 1 @@ -2946,9 +2964,8 @@ mod tests { assert!(tester.head < tester.cap()); // We should see the correct values in the VecDeque - let expected: VecDeque<_> = (0..drain_start) - .chain(drain_end..len) - .collect(); + let expected: VecDeque<_> = + (0..drain_start).chain(drain_end..len).collect(); assert_eq!(expected, tester); } } diff --git a/src/liballoc/fmt.rs b/src/liballoc/fmt.rs index a1e7533449c69..06b8b4fcf0472 100644 --- a/src/liballoc/fmt.rs +++ b/src/liballoc/fmt.rs @@ -508,24 +508,24 @@ #[unstable(feature = "fmt_internals", issue = "0")] pub use core::fmt::rt; +#[stable(feature = "fmt_flags_align", since = "1.28.0")] +pub use core::fmt::Alignment; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::fmt::{Formatter, Result, Write}; +pub use core::fmt::Error; +#[stable(feature = "rust1", since = "1.0.0")] +pub use core::fmt::{write, ArgumentV1, Arguments}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::{Binary, Octal}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::{Debug, Display}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::fmt::{LowerHex, Pointer, UpperHex}; -#[stable(feature = "rust1", since = "1.0.0")] -pub use core::fmt::{LowerExp, UpperExp}; +pub use core::fmt::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::fmt::Error; +pub use core::fmt::{Formatter, Result, Write}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::fmt::{write, ArgumentV1, Arguments}; +pub use core::fmt::{LowerExp, UpperExp}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::fmt::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple}; -#[stable(feature = "fmt_flags_align", since = "1.28.0")] -pub use core::fmt::{Alignment}; +pub use core::fmt::{LowerHex, Pointer, UpperHex}; use string; diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index 3050a93ef39a0..167f96b30fb06 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -51,25 +51,26 @@ //! default global allocator. It is not compatible with the libc allocator API. 
#![allow(unused_attributes)] -#![unstable(feature = "alloc", - reason = "this library is unlikely to be stabilized in its current \ - form or name", - issue = "27783")] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", - issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/", - test(no_crate_inject, attr(allow(unused_variables), deny(warnings))))] +#![unstable( + feature = "alloc", + reason = "this library is unlikely to be stabilized in its current \ + form or name", + issue = "27783" +)] +#![doc( + html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/", + issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/", + test(no_crate_inject, attr(allow(unused_variables), deny(warnings))) +)] #![no_std] #![needs_allocator] - #![deny(intra_doc_link_resolution_failure)] #![deny(missing_debug_implementations)] - #![cfg_attr(not(test), feature(fn_traits))] #![cfg_attr(not(test), feature(generator_trait))] #![cfg_attr(test, feature(test))] - #![feature(allocator_api)] #![feature(allow_internal_unstable)] #![feature(arbitrary_self_types)] @@ -119,9 +120,9 @@ #[macro_use] extern crate std; #[cfg(test)] -extern crate test; -#[cfg(test)] extern crate rand; +#[cfg(test)] +extern crate test; // Module with internal macros used by other modules (needs to be included before other modules). #[macro_use] @@ -131,9 +132,11 @@ mod macros; pub mod alloc; -#[unstable(feature = "futures_api", - reason = "futures in libcore are unstable", - issue = "50547")] +#[unstable( + feature = "futures_api", + reason = "futures in libcore are unstable", + issue = "50547" +)] pub mod task; // Primitive types using the heaps above @@ -146,22 +149,22 @@ pub mod boxed; mod boxed { pub use std::boxed::Box; } +pub mod borrow; #[cfg(test)] mod boxed_test; pub mod collections; -#[cfg(all(target_has_atomic = "ptr", target_has_atomic = "cas"))] -pub mod sync; -pub mod rc; -pub mod raw_vec; -pub mod prelude; -pub mod borrow; pub mod fmt; +pub mod prelude; +pub mod raw_vec; +pub mod rc; pub mod slice; pub mod str; pub mod string; +#[cfg(all(target_has_atomic = "ptr", target_has_atomic = "cas"))] +pub mod sync; pub mod vec; #[cfg(not(test))] mod std { - pub use core::ops; // RangeFull + pub use core::ops; // RangeFull } diff --git a/src/liballoc/prelude.rs b/src/liballoc/prelude.rs index 7cd22095de417..b4695aa0f25dc 100644 --- a/src/liballoc/prelude.rs +++ b/src/liballoc/prelude.rs @@ -12,8 +12,13 @@ #![unstable(feature = "alloc", issue = "27783")] -#[unstable(feature = "alloc", issue = "27783")] pub use borrow::ToOwned; -#[unstable(feature = "alloc", issue = "27783")] pub use boxed::Box; -#[unstable(feature = "alloc", issue = "27783")] pub use slice::SliceConcatExt; -#[unstable(feature = "alloc", issue = "27783")] pub use string::{String, ToString}; -#[unstable(feature = "alloc", issue = "27783")] pub use vec::Vec; +#[unstable(feature = "alloc", issue = "27783")] +pub use borrow::ToOwned; +#[unstable(feature = "alloc", issue = "27783")] +pub use boxed::Box; +#[unstable(feature = "alloc", issue = "27783")] +pub use slice::SliceConcatExt; +#[unstable(feature = "alloc", issue = "27783")] +pub use string::{String, ToString}; +#[unstable(feature = "alloc", issue = "27783")] +pub use vec::Vec; diff --git 
a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index c05452bcf4d5d..0ac50a45bd8a0 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -7,10 +7,10 @@ use core::ops::Drop; use core::ptr::{self, NonNull, Unique}; use core::slice; -use alloc::{Alloc, Layout, Global, handle_alloc_error}; +use alloc::{handle_alloc_error, Alloc, Global, Layout}; +use boxed::Box; use collections::CollectionAllocErr; use collections::CollectionAllocErr::*; -use boxed::Box; /// A low-level utility for more ergonomically allocating, reallocating, and deallocating /// a buffer of memory on the heap without having to worry about all the corner cases @@ -81,7 +81,9 @@ impl RawVec { unsafe { let elem_size = mem::size_of::(); - let alloc_size = cap.checked_mul(elem_size).unwrap_or_else(|| capacity_overflow()); + let alloc_size = cap + .checked_mul(elem_size) + .unwrap_or_else(|| capacity_overflow()); alloc_guard(alloc_size).unwrap_or_else(|_| capacity_overflow()); // handles ZSTs and `cap = 0` alike @@ -307,14 +309,15 @@ impl RawVec { let new_cap = 2 * self.cap; let new_size = new_cap * elem_size; alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow()); - let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(), - cur, - new_size); + let ptr_res = self + .a + .realloc(NonNull::from(self.ptr).cast(), cur, new_size); match ptr_res { Ok(ptr) => (new_cap, ptr.cast().into()), - Err(_) => handle_alloc_error( - Layout::from_size_align_unchecked(new_size, cur.align()) - ), + Err(_) => handle_alloc_error(Layout::from_size_align_unchecked( + new_size, + cur.align(), + )), } } None => { @@ -368,23 +371,26 @@ impl RawVec { let new_cap = 2 * self.cap; let new_size = new_cap * elem_size; alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow()); - match self.a.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_size) { + match self + .a + .grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_size) + { Ok(_) => { // We can't directly divide `size`. self.cap = new_cap; true } - Err(_) => { - false - } + Err(_) => false, } } } /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting. - pub fn try_reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) - -> Result<(), CollectionAllocErr> { - + pub fn try_reserve_exact( + &mut self, + used_cap: usize, + needed_extra_cap: usize, + ) -> Result<(), CollectionAllocErr> { self.reserve_internal(used_cap, needed_extra_cap, Fallible, Exact) } @@ -413,17 +419,21 @@ impl RawVec { Err(CapacityOverflow) => capacity_overflow(), Err(AllocErr) => unreachable!(), Ok(()) => { /* yay */ } - } - } + } + } /// Calculates the buffer's new size given that it'll hold `used_cap + /// needed_extra_cap` elements. This logic is used in amortized reserve methods. /// Returns `(new_capacity, new_alloc_size)`. - fn amortized_new_size(&self, used_cap: usize, needed_extra_cap: usize) - -> Result { - + fn amortized_new_size( + &self, + used_cap: usize, + needed_extra_cap: usize, + ) -> Result { // Nothing we can really do about these checks :( - let required_cap = used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?; + let required_cap = used_cap + .checked_add(needed_extra_cap) + .ok_or(CapacityOverflow)?; // Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`. let double_cap = self.cap * 2; // `double_cap` guarantees exponential growth. @@ -431,8 +441,11 @@ impl RawVec { } /// The same as `reserve`, but returns on errors instead of panicking or aborting. 
- pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize) - -> Result<(), CollectionAllocErr> { + pub fn try_reserve( + &mut self, + used_cap: usize, + needed_extra_cap: usize, + ) -> Result<(), CollectionAllocErr> { self.reserve_internal(used_cap, needed_extra_cap, Fallible, Amortized) } @@ -530,7 +543,8 @@ impl RawVec { return false; } - let new_cap = self.amortized_new_size(used_cap, needed_extra_cap) + let new_cap = self + .amortized_new_size(used_cap, needed_extra_cap) .unwrap_or_else(|_| capacity_overflow()); // Here, `cap < used_cap + needed_extra_cap <= new_cap` @@ -541,15 +555,15 @@ impl RawVec { // FIXME: may crash and burn on over-reserve alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow()); match self.a.grow_in_place( - NonNull::from(self.ptr).cast(), old_layout, new_layout.size(), + NonNull::from(self.ptr).cast(), + old_layout, + new_layout.size(), ) { Ok(_) => { self.cap = new_cap; true } - Err(_) => { - false - } + Err(_) => false, } } } @@ -602,13 +616,14 @@ impl RawVec { let new_size = elem_size * amount; let align = mem::align_of::(); let old_layout = Layout::from_size_align_unchecked(old_size, align); - match self.a.realloc(NonNull::from(self.ptr).cast(), - old_layout, - new_size) { + match self + .a + .realloc(NonNull::from(self.ptr).cast(), old_layout, new_size) + { Ok(p) => self.ptr = p.cast().into(), - Err(_) => handle_alloc_error( - Layout::from_size_align_unchecked(new_size, align) - ), + Err(_) => { + handle_alloc_error(Layout::from_size_align_unchecked(new_size, align)) + } } } self.cap = amount; @@ -654,7 +669,9 @@ impl RawVec { // Nothing we can really do about these checks :( let new_cap = match strategy { - Exact => used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?, + Exact => used_cap + .checked_add(needed_extra_cap) + .ok_or(CapacityOverflow)?, Amortized => self.amortized_new_size(used_cap, needed_extra_cap)?, }; let new_layout = Layout::array::(new_cap).map_err(|_| CapacityOverflow)?; @@ -664,7 +681,8 @@ impl RawVec { let res = match self.current_layout() { Some(layout) => { debug_assert!(new_layout.align() == layout.align()); - self.a.realloc(NonNull::from(self.ptr).cast(), layout, new_layout.size()) + self.a + .realloc(NonNull::from(self.ptr).cast(), layout, new_layout.size()) } None => self.a.alloc(new_layout), }; @@ -680,7 +698,6 @@ impl RawVec { Ok(()) } } - } impl RawVec { @@ -716,12 +733,12 @@ impl RawVec { unsafe impl<#[may_dangle] T, A: Alloc> Drop for RawVec { /// Frees the memory owned by the RawVec *without* trying to Drop its contents. fn drop(&mut self) { - unsafe { self.dealloc_buffer(); } + unsafe { + self.dealloc_buffer(); + } } } - - // We need to guarantee the following: // * We don't ever allocate `> isize::MAX` byte-size objects // * We don't overflow `usize::MAX` and actually allocate too little @@ -767,7 +784,9 @@ mod tests { // A dumb allocator that consumes a fixed amount of fuel // before allocation attempts start failing. 
- struct BoundedAlloc { fuel: usize } + struct BoundedAlloc { + fuel: usize, + } unsafe impl Alloc for BoundedAlloc { unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr> { let size = layout.size(); @@ -775,7 +794,10 @@ mod tests { return Err(AllocErr); } match Global.alloc(layout) { - ok @ Ok(_) => { self.fuel -= size; ok } + ok @ Ok(_) => { + self.fuel -= size; + ok + } err @ Err(_) => err, } } @@ -823,5 +845,4 @@ mod tests { } } - } diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index d3a55c59ff69c..a0d83916497fe 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -237,20 +237,20 @@ use core::any::Any; use core::borrow; use core::cell::Cell; use core::cmp::Ordering; +use core::convert::From; use core::fmt; use core::hash::{Hash, Hasher}; use core::intrinsics::abort; use core::marker; -use core::marker::{Unpin, Unsize, PhantomData}; +use core::marker::{PhantomData, Unpin, Unsize}; use core::mem::{self, align_of_val, forget, size_of_val}; -use core::ops::{Deref, Receiver}; use core::ops::{CoerceUnsized, DispatchFromDyn}; +use core::ops::{Deref, Receiver}; use core::pin::Pin; use core::ptr::{self, NonNull}; -use core::convert::From; use core::usize; -use alloc::{Global, Alloc, Layout, box_free, handle_alloc_error}; +use alloc::{box_free, handle_alloc_error, Alloc, Global, Layout}; use string::String; use vec::Vec; @@ -533,9 +533,7 @@ impl Rc { #[stable(feature = "rc_unique", since = "1.4.0")] pub fn get_mut(this: &mut Self) -> Option<&mut T> { if Rc::is_unique(this) { - unsafe { - Some(&mut this.ptr.as_mut().value) - } + unsafe { Some(&mut this.ptr.as_mut().value) } } else { None } @@ -616,9 +614,7 @@ impl Rc { // reference count is guaranteed to be 1 at this point, and we required // the `Rc` itself to be `mut`, so we're returning the only possible // reference to the inner value. - unsafe { - &mut this.ptr.as_mut().value - } + unsafe { &mut this.ptr.as_mut().value } } } @@ -649,7 +645,10 @@ impl Rc { if (*self).is::() { let ptr = self.ptr.cast::>(); forget(self); - Ok(Rc { ptr, phantom: PhantomData }) + Ok(Rc { + ptr, + phantom: PhantomData, + }) } else { Err(self) } @@ -664,10 +663,14 @@ impl Rc { // `&*(ptr as *const RcBox)`, but this created a misaligned // reference (see #54908). 
let layout = Layout::new::>() - .extend(Layout::for_value(&*ptr)).unwrap().0 - .pad_to_align().unwrap(); - - let mem = Global.alloc(layout) + .extend(Layout::for_value(&*ptr)) + .unwrap() + .0 + .pad_to_align() + .unwrap(); + + let mem = Global + .alloc(layout) .unwrap_or_else(|_| handle_alloc_error(layout)); // Initialize the RcBox @@ -692,12 +695,16 @@ impl Rc { ptr::copy_nonoverlapping( bptr as *const T as *const u8, &mut (*ptr).value as *mut _ as *mut u8, - value_size); + value_size, + ); // Free the allocation without dropping its contents box_free(box_unique); - Rc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + Rc { + ptr: NonNull::new_unchecked(ptr), + phantom: PhantomData, + } } } } @@ -719,12 +726,12 @@ impl Rc<[T]> { let v_ptr = v as *const [T]; let ptr = Self::allocate_for_ptr(v_ptr); - ptr::copy_nonoverlapping( - v.as_ptr(), - &mut (*ptr).value as *mut [T] as *mut T, - v.len()); + ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len()); - Rc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + Rc { + ptr: NonNull::new_unchecked(ptr), + phantom: PhantomData, + } } } @@ -768,7 +775,7 @@ impl RcFromSlice for Rc<[T]> { // Pointer to first element let elems = &mut (*ptr).value as *mut [T] as *mut T; - let mut guard = Guard{ + let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems: elems, layout: layout, @@ -783,7 +790,10 @@ impl RcFromSlice for Rc<[T]> { // All clear. Forget the guard so it doesn't free the new RcBox. forget(guard); - Rc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + Rc { + ptr: NonNull::new_unchecked(ptr), + phantom: PhantomData, + } } } } @@ -875,7 +885,10 @@ impl Clone for Rc { #[inline] fn clone(&self) -> Rc { self.inc_strong(); - Rc { ptr: self.ptr, phantom: PhantomData } + Rc { + ptr: self.ptr, + phantom: PhantomData, + } } } @@ -1282,7 +1295,10 @@ impl Weak { None } else { inner.inc_strong(); - Some(Rc { ptr: self.ptr, phantom: PhantomData }) + Some(Rc { + ptr: self.ptr, + phantom: PhantomData, + }) } } @@ -1459,7 +1475,9 @@ trait RcBoxPtr { // nevertheless, we insert an abort here to hint LLVM at // an otherwise missed optimization. if self.strong() == 0 || self.strong() == usize::max_value() { - unsafe { abort(); } + unsafe { + abort(); + } } self.inner().strong.set(self.strong() + 1); } @@ -1481,7 +1499,9 @@ trait RcBoxPtr { // nevertheless, we insert an abort here to hint LLVM at // an otherwise missed optimization. 
if self.weak() == 0 || self.weak() == usize::max_value() { - unsafe { abort(); } + unsafe { + abort(); + } } self.inner().weak.set(self.weak() + 1); } @@ -1495,9 +1515,7 @@ trait RcBoxPtr { impl RcBoxPtr for Rc { #[inline(always)] fn inner(&self) -> &RcBox { - unsafe { - self.ptr.as_ref() - } + unsafe { self.ptr.as_ref() } } } @@ -1513,12 +1531,12 @@ mod tests { use super::{Rc, Weak}; use std::boxed::Box; use std::cell::RefCell; + use std::clone::Clone; + use std::convert::From; + use std::mem::drop; use std::option::Option; use std::option::Option::{None, Some}; use std::result::Result::{Err, Ok}; - use std::mem::drop; - use std::clone::Clone; - use std::convert::From; #[test] fn test_clone() { @@ -1569,7 +1587,9 @@ mod tests { x: RefCell>>, } - let a = Rc::new(Cycle { x: RefCell::new(None) }); + let a = Rc::new(Cycle { + x: RefCell::new(None), + }); let b = Rc::downgrade(&a.clone()); *a.x.borrow_mut() = Some(b); @@ -1927,4 +1947,4 @@ impl AsRef for Rc { } #[stable(feature = "pin", since = "1.33.0")] -impl Unpin for Rc { } +impl Unpin for Rc {} diff --git a/src/liballoc/slice.rs b/src/liballoc/slice.rs index db19f778617f4..40a8b02358e9e 100644 --- a/src/liballoc/slice.rs +++ b/src/liballoc/slice.rs @@ -88,35 +88,35 @@ #![cfg_attr(test, allow(unused_imports, dead_code))] use core::cmp::Ordering::{self, Less}; -use core::mem::size_of; use core::mem; +use core::mem::size_of; use core::ptr; -use core::{u8, u16, u32}; +use core::{u16, u32, u8}; use borrow::{Borrow, BorrowMut, ToOwned}; use boxed::Box; use vec::Vec; +#[stable(feature = "slice_get_slice", since = "1.28.0")] +pub use core::slice::SliceIndex; +#[stable(feature = "from_ref", since = "1.28.0")] +pub use core::slice::{from_mut, from_ref}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::slice::{Chunks, Windows}; +pub use core::slice::{from_raw_parts, from_raw_parts_mut}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::slice::{Iter, IterMut}; +pub use core::slice::{Chunks, Windows}; +#[stable(feature = "chunks_exact", since = "1.31.0")] +pub use core::slice::{ChunksExact, ChunksExactMut}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::slice::{SplitMut, ChunksMut, Split}; +pub use core::slice::{ChunksMut, Split, SplitMut}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::slice::{SplitN, RSplitN, SplitNMut, RSplitNMut}; +pub use core::slice::{Iter, IterMut}; +#[stable(feature = "rchunks", since = "1.31.0")] +pub use core::slice::{RChunks, RChunksExact, RChunksExactMut, RChunksMut}; #[stable(feature = "slice_rsplit", since = "1.27.0")] pub use core::slice::{RSplit, RSplitMut}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::slice::{from_raw_parts, from_raw_parts_mut}; -#[stable(feature = "from_ref", since = "1.28.0")] -pub use core::slice::{from_ref, from_mut}; -#[stable(feature = "slice_get_slice", since = "1.28.0")] -pub use core::slice::SliceIndex; -#[stable(feature = "chunks_exact", since = "1.31.0")] -pub use core::slice::{ChunksExact, ChunksExactMut}; -#[stable(feature = "rchunks", since = "1.31.0")] -pub use core::slice::{RChunks, RChunksMut, RChunksExact, RChunksExactMut}; +pub use core::slice::{RSplitN, RSplitNMut, SplitN, SplitNMut}; //////////////////////////////////////////////////////////////////////////////// // Basic slice extension methods @@ -154,7 +154,8 @@ mod hack { #[inline] pub fn to_vec(s: &[T]) -> Vec - where T: Clone + where + T: Clone, { let mut vector = Vec::with_capacity(s.len()); vector.extend_from_slice(s); @@ -194,7 +195,8 @@ impl [T] { 
#[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn sort(&mut self) - where T: Ord + where + T: Ord, { merge_sort(self, |a, b| a.lt(b)); } @@ -247,7 +249,8 @@ impl [T] { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn sort_by(&mut self, mut compare: F) - where F: FnMut(&T, &T) -> Ordering + where + F: FnMut(&T, &T) -> Ordering, { merge_sort(self, |a, b| compare(a, b) == Less); } @@ -282,7 +285,9 @@ impl [T] { #[stable(feature = "slice_sort_by_key", since = "1.7.0")] #[inline] pub fn sort_by_key(&mut self, mut f: F) - where F: FnMut(&T) -> K, K: Ord + where + F: FnMut(&T) -> K, + K: Ord, { merge_sort(self, |a, b| f(a).lt(&f(b))); } @@ -323,13 +328,19 @@ impl [T] { #[unstable(feature = "slice_sort_by_cached_key", issue = "34447")] #[inline] pub fn sort_by_cached_key(&mut self, f: F) - where F: FnMut(&T) -> K, K: Ord + where + F: FnMut(&T) -> K, + K: Ord, { // Helper macro for indexing our vector by the smallest possible type, to reduce allocation. macro_rules! sort_by_key { - ($t:ty, $slice:ident, $f:ident) => ({ - let mut indices: Vec<_> = - $slice.iter().map($f).enumerate().map(|(i, k)| (k, i as $t)).collect(); + ($t:ty, $slice:ident, $f:ident) => {{ + let mut indices: Vec<_> = $slice + .iter() + .map($f) + .enumerate() + .map(|(i, k)| (k, i as $t)) + .collect(); // The elements of `indices` are unique, as they are indexed, so any sort will be // stable with respect to the original slice. We use `sort_unstable` here because // it requires less memory allocation. @@ -342,19 +353,27 @@ impl [T] { indices[i].1 = index; $slice.swap(i, index as usize); } - }) + }}; } - let sz_u8 = mem::size_of::<(K, u8)>(); - let sz_u16 = mem::size_of::<(K, u16)>(); - let sz_u32 = mem::size_of::<(K, u32)>(); + let sz_u8 = mem::size_of::<(K, u8)>(); + let sz_u16 = mem::size_of::<(K, u16)>(); + let sz_u32 = mem::size_of::<(K, u32)>(); let sz_usize = mem::size_of::<(K, usize)>(); let len = self.len(); - if len < 2 { return } - if sz_u8 < sz_u16 && len <= ( u8::MAX as usize) { return sort_by_key!( u8, self, f) } - if sz_u16 < sz_u32 && len <= (u16::MAX as usize) { return sort_by_key!(u16, self, f) } - if sz_u32 < sz_usize && len <= (u32::MAX as usize) { return sort_by_key!(u32, self, f) } + if len < 2 { + return; + } + if sz_u8 < sz_u16 && len <= (u8::MAX as usize) { + return sort_by_key!(u8, self, f); + } + if sz_u16 < sz_u32 && len <= (u16::MAX as usize) { + return sort_by_key!(u16, self, f); + } + if sz_u32 < sz_usize && len <= (u32::MAX as usize) { + return sort_by_key!(u32, self, f); + } sort_by_key!(usize, self, f) } @@ -371,7 +390,8 @@ impl [T] { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn to_vec(&self) -> Vec - where T: Clone + where + T: Clone, { // NB see hack module in this file hack::to_vec(self) @@ -425,10 +445,15 @@ impl [T] { /// b"0123456789abcdef".repeat(usize::max_value()); /// } /// ``` - #[unstable(feature = "repeat_generic_slice", - reason = "it's on str, why not on slice?", - issue = "48784")] - pub fn repeat(&self, n: usize) -> Vec where T: Copy { + #[unstable( + feature = "repeat_generic_slice", + reason = "it's on str, why not on slice?", + issue = "48784" + )] + pub fn repeat(&self, n: usize) -> Vec + where + T: Copy, + { if n == 0 { return Vec::new(); } @@ -525,9 +550,11 @@ impl [u8] { //////////////////////////////////////////////////////////////////////////////// // Extension traits for slices over specific kinds of data //////////////////////////////////////////////////////////////////////////////// -#[unstable(feature = "slice_concat_ext", 
- reason = "trait should not have to exist", - issue = "27747")] +#[unstable( + feature = "slice_concat_ext", + reason = "trait should not have to exist", + issue = "27747" +)] /// An extension trait for concatenating slices /// /// While this trait is unstable, the methods are stable. `SliceConcatExt` is @@ -538,9 +565,11 @@ impl [u8] { /// [`join()`]: #tymethod.join /// [`concat()`]: #tymethod.concat pub trait SliceConcatExt { - #[unstable(feature = "slice_concat_ext", - reason = "trait should not have to exist", - issue = "27747")] + #[unstable( + feature = "slice_concat_ext", + reason = "trait should not have to exist", + issue = "27747" + )] /// The resulting type after concatenation type Output; @@ -572,9 +601,11 @@ pub trait SliceConcatExt { fn connect(&self, sep: &T) -> Self::Output; } -#[unstable(feature = "slice_concat_ext", - reason = "trait should not have to exist", - issue = "27747")] +#[unstable( + feature = "slice_concat_ext", + reason = "trait should not have to exist", + issue = "27747" +)] impl> SliceConcatExt for [V] { type Output = Vec; @@ -662,7 +693,8 @@ impl ToOwned for [T] { /// /// This is the integral subroutine of insertion sort. fn insert_head(v: &mut [T], is_less: &mut F) - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { if v.len() >= 2 && is_less(&v[1], &v[0]) { unsafe { @@ -720,7 +752,9 @@ fn insert_head(v: &mut [T], is_less: &mut F) impl Drop for InsertionHole { fn drop(&mut self) { - unsafe { ptr::copy_nonoverlapping(self.src, self.dest, 1); } + unsafe { + ptr::copy_nonoverlapping(self.src, self.dest, 1); + } } } } @@ -733,7 +767,8 @@ fn insert_head(v: &mut [T], is_less: &mut F) /// The two slices must be non-empty and `mid` must be in bounds. Buffer `buf` must be long enough /// to hold a copy of the shorter slice. Also, `T` must not be a zero-sized type. unsafe fn merge(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F) - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { let len = v.len(); let v = v.as_mut_ptr(); @@ -833,7 +868,9 @@ unsafe fn merge(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F) fn drop(&mut self) { // `T` is not a zero-sized type, so it's okay to divide by its size. let len = (self.end as usize - self.start as usize) / mem::size_of::(); - unsafe { ptr::copy_nonoverlapping(self.start, self.dest, len); } + unsafe { + ptr::copy_nonoverlapping(self.start, self.dest, len); + } } } } @@ -851,7 +888,8 @@ unsafe fn merge(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F) /// /// The invariants ensure that the total running time is `O(n log n)` worst-case. fn merge_sort(v: &mut [T], mut is_less: F) - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { // Slices of up to this length get sorted using insertion sort. const MAX_INSERTION: usize = 20; @@ -868,7 +906,7 @@ fn merge_sort(v: &mut [T], mut is_less: F) // Short arrays get sorted in-place via insertion sort to avoid allocations. 
if len <= MAX_INSERTION { if len >= 2 { - for i in (0..len-1).rev() { + for i in (0..len - 1).rev() { insert_head(&mut v[i..], &mut is_less); } } @@ -894,14 +932,13 @@ fn merge_sort(v: &mut [T], mut is_less: F) start -= 1; unsafe { if is_less(v.get_unchecked(start + 1), v.get_unchecked(start)) { - while start > 0 && is_less(v.get_unchecked(start), - v.get_unchecked(start - 1)) { + while start > 0 && is_less(v.get_unchecked(start), v.get_unchecked(start - 1)) { start -= 1; } v[start..end].reverse(); } else { - while start > 0 && !is_less(v.get_unchecked(start), - v.get_unchecked(start - 1)) { + while start > 0 && !is_less(v.get_unchecked(start), v.get_unchecked(start - 1)) + { start -= 1; } } @@ -927,8 +964,12 @@ fn merge_sort(v: &mut [T], mut is_less: F) let left = runs[r + 1]; let right = runs[r]; unsafe { - merge(&mut v[left.start .. right.start + right.len], left.len, buf.as_mut_ptr(), - &mut is_less); + merge( + &mut v[left.start..right.start + right.len], + left.len, + buf.as_mut_ptr(), + &mut is_less, + ); } runs[r] = Run { start: left.start, @@ -958,10 +999,12 @@ fn merge_sort(v: &mut [T], mut is_less: F) #[inline] fn collapse(runs: &[Run]) -> Option { let n = runs.len(); - if n >= 2 && (runs[n - 1].start == 0 || - runs[n - 2].len <= runs[n - 1].len || - (n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len) || - (n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len)) { + if n >= 2 + && (runs[n - 1].start == 0 + || runs[n - 2].len <= runs[n - 1].len + || (n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len) + || (n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len)) + { if n >= 3 && runs[n - 3].len < runs[n - 1].len { Some(n - 3) } else { diff --git a/src/liballoc/str.rs b/src/liballoc/str.rs index 60d9f1626138e..a0210e5429f76 100644 --- a/src/liballoc/str.rs +++ b/src/liballoc/str.rs @@ -29,12 +29,12 @@ #![allow(unused_imports)] use core::fmt; -use core::str as core_str; -use core::str::pattern::Pattern; -use core::str::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher}; +use core::iter::FusedIterator; use core::mem; use core::ptr; -use core::iter::FusedIterator; +use core::str as core_str; +use core::str::pattern::Pattern; +use core::str::pattern::{DoubleEndedSearcher, ReverseSearcher, Searcher}; use core::unicode::conversions; use borrow::{Borrow, ToOwned}; @@ -44,36 +44,38 @@ use string::String; use vec::Vec; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::str::{FromStr, Utf8Error}; -#[allow(deprecated)] +pub use core::str::pattern; +#[stable(feature = "encode_utf16", since = "1.8.0")] +pub use core::str::EncodeUtf16; +#[unstable(feature = "split_ascii_whitespace", issue = "48656")] +pub use core::str::SplitAsciiWhitespace; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::str::{Lines, LinesAny}; +pub use core::str::SplitWhitespace; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::str::{Split, RSplit}; +pub use core::str::{from_utf8, from_utf8_mut, Bytes, CharIndices, Chars}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::str::{SplitN, RSplitN}; +pub use core::str::{from_utf8_unchecked, from_utf8_unchecked_mut, ParseBoolError}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::str::{SplitTerminator, RSplitTerminator}; +pub use core::str::{FromStr, Utf8Error}; +#[allow(deprecated)] #[stable(feature = "rust1", since = "1.0.0")] -pub use core::str::{Matches, RMatches}; +pub use core::str::{Lines, LinesAny}; #[stable(feature = "rust1", since = "1.0.0")] pub use 
core::str::{MatchIndices, RMatchIndices}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::str::{from_utf8, from_utf8_mut, Chars, CharIndices, Bytes}; +pub use core::str::{Matches, RMatches}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::str::{from_utf8_unchecked, from_utf8_unchecked_mut, ParseBoolError}; +pub use core::str::{RSplit, Split}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::str::SplitWhitespace; +pub use core::str::{RSplitN, SplitN}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::str::pattern; -#[stable(feature = "encode_utf16", since = "1.8.0")] -pub use core::str::EncodeUtf16; -#[unstable(feature = "split_ascii_whitespace", issue = "48656")] -pub use core::str::SplitAsciiWhitespace; +pub use core::str::{RSplitTerminator, SplitTerminator}; -#[unstable(feature = "slice_concat_ext", - reason = "trait should not have to exist", - issue = "27747")] +#[unstable( + feature = "slice_concat_ext", + reason = "trait should not have to exist", + issue = "27747" +)] impl> SliceConcatExt for [S] { type Output = String; @@ -82,9 +84,7 @@ impl> SliceConcatExt for [S] { } fn join(&self, sep: &str) -> String { - unsafe { - String::from_utf8_unchecked( join_generic_copy(self, sep.as_bytes()) ) - } + unsafe { String::from_utf8_unchecked(join_generic_copy(self, sep.as_bytes())) } } fn connect(&self, sep: &str) -> String { @@ -122,10 +122,10 @@ macro_rules! spezialize_for_lengths { macro_rules! copy_slice_and_advance { ($target:expr, $bytes:expr) => { let len = $bytes.len(); - let (head, tail) = {$target}.split_at_mut(len); + let (head, tail) = { $target }.split_at_mut(len); head.copy_from_slice($bytes); $target = tail; - } + }; } // Optimized join implementation that works for both Vec (T: Copy) and String's inner vec @@ -155,11 +155,15 @@ where // if the `len` calculation overflows, we'll panic // we would have run out of memory anyway and the rest of the function requires // the entire Vec pre-allocated for safety - let len = sep_len.checked_mul(iter.len()).and_then(|n| { - slice.iter() + let len = sep_len + .checked_mul(iter.len()) + .and_then(|n| { + slice + .iter() .map(|s| s.borrow().as_ref().len()) .try_fold(n, usize::checked_add) - }).expect("attempt to join into collection with len > usize::MAX"); + }) + .expect("attempt to join into collection with len > usize::MAX"); // crucial for safety let mut result = Vec::with_capacity(len); @@ -381,13 +385,13 @@ impl str { // See http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992 // for the definition of `Final_Sigma`. debug_assert!('Σ'.len_utf8() == 2); - let is_word_final = case_ignoreable_then_cased(from[..i].chars().rev()) && - !case_ignoreable_then_cased(from[i + 2..].chars()); + let is_word_final = case_ignoreable_then_cased(from[..i].chars().rev()) + && !case_ignoreable_then_cased(from[i + 2..].chars()); to.push_str(if is_word_final { "ς" } else { "σ" }); } fn case_ignoreable_then_cased>(iter: I) -> bool { - use core::unicode::derived_property::{Cased, Case_Ignorable}; + use core::unicode::derived_property::{Case_Ignorable, Cased}; match iter.skip_while(|&c| Case_Ignorable(c)).next() { Some(c) => Cased(c), None => false, @@ -449,9 +453,11 @@ impl str { /// escaped. 
/// /// [`char::escape_debug`]: primitive.char.html#method.escape_debug - #[unstable(feature = "str_escape", - reason = "return type may change to be an iterator", - issue = "27791")] + #[unstable( + feature = "str_escape", + reason = "return type may change to be an iterator", + issue = "27791" + )] pub fn escape_debug(&self) -> String { let mut string = String::with_capacity(self.len()); let mut chars = self.chars(); @@ -465,9 +471,11 @@ impl str { /// Escapes each char in `s` with [`char::escape_default`]. /// /// [`char::escape_default`]: primitive.char.html#method.escape_default - #[unstable(feature = "str_escape", - reason = "return type may change to be an iterator", - issue = "27791")] + #[unstable( + feature = "str_escape", + reason = "return type may change to be an iterator", + issue = "27791" + )] pub fn escape_default(&self) -> String { self.chars().flat_map(|c| c.escape_default()).collect() } @@ -475,9 +483,11 @@ impl str { /// Escapes each char in `s` with [`char::escape_unicode`]. /// /// [`char::escape_unicode`]: primitive.char.html#method.escape_unicode - #[unstable(feature = "str_escape", - reason = "return type may change to be an iterator", - issue = "27791")] + #[unstable( + feature = "str_escape", + reason = "return type may change to be an iterator", + issue = "27791" + )] pub fn escape_unicode(&self) -> String { self.chars().flat_map(|c| c.escape_unicode()).collect() } diff --git a/src/liballoc/string.rs b/src/liballoc/string.rs index fa15e9ad9018e..e63aece581839 100644 --- a/src/liballoc/string.rs +++ b/src/liballoc/string.rs @@ -53,13 +53,13 @@ use core::iter::{FromIterator, FusedIterator}; use core::ops::Bound::{Excluded, Included, Unbounded}; use core::ops::{self, Add, AddAssign, Index, IndexMut, RangeBounds}; use core::ptr; -use core::str::pattern::Pattern; use core::str::lossy; +use core::str::pattern::Pattern; -use collections::CollectionAllocErr; use borrow::{Cow, ToOwned}; use boxed::Box; -use str::{self, from_boxed_utf8_unchecked, FromStr, Utf8Error, Chars}; +use collections::CollectionAllocErr; +use str::{self, from_boxed_utf8_unchecked, Chars, FromStr, Utf8Error}; use vec::Vec; /// A UTF-8 encoded, growable string. @@ -415,7 +415,9 @@ impl String { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn with_capacity(capacity: usize) -> String { - String { vec: Vec::with_capacity(capacity) } + String { + vec: Vec::with_capacity(capacity), + } } // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is @@ -493,12 +495,10 @@ impl String { pub fn from_utf8(vec: Vec) -> Result { match str::from_utf8(&vec) { Ok(..) => Ok(String { vec }), - Err(e) => { - Err(FromUtf8Error { - bytes: vec, - error: e, - }) - } + Err(e) => Err(FromUtf8Error { + bytes: vec, + error: e, + }), } } @@ -648,7 +648,9 @@ impl String { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn from_utf16_lossy(v: &[u16]) -> String { - decode_utf16(v.iter().cloned()).map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)).collect() + decode_utf16(v.iter().cloned()) + .map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)) + .collect() } /// Creates a new `String` from a length, capacity, and pointer. 
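(Reviewer aside, not part of the patch: the string.rs hunks above are formatting-only. Below is a minimal hypothetical sketch of the two layouts rustfmt is normalizing here, single-line struct literals and inline `where` bounds; `Wrapper`, `with_capacity`, and `retain_chars` are made-up names used only for illustration.)

struct Wrapper {
    vec: Vec<u8>,
}

// A literal such as `String { vec: Vec::with_capacity(capacity) }` stays on
// one line only while it fits the width limit; otherwise each field
// initializer gets its own line, as in the hunks above.
fn with_capacity(capacity: usize) -> Wrapper {
    Wrapper {
        vec: Vec::with_capacity(capacity),
    }
}

// An inline bound like `fn retain<F>(&mut self, f: F) where F: FnMut(char) -> bool`
// is rewritten with the `where` clause on its own indented lines.
fn retain_chars<F>(s: &mut String, f: F)
where
    F: FnMut(char) -> bool,
{
    s.retain(f);
}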
@@ -695,7 +697,9 @@ impl String { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn from_raw_parts(buf: *mut u8, length: usize, capacity: usize) -> String { - String { vec: Vec::from_raw_parts(buf, length, capacity) } + String { + vec: Vec::from_raw_parts(buf, length, capacity), + } } /// Converts a vector of bytes to a `String` without checking that the @@ -953,7 +957,7 @@ impl String { /// } /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?"); /// ``` - #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] + #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { self.vec.try_reserve(additional) } @@ -991,8 +995,8 @@ impl String { /// } /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?"); /// ``` - #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] - pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] + pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { self.vec.try_reserve_exact(additional) } @@ -1040,7 +1044,7 @@ impl String { /// assert!(s.capacity() >= 3); /// ``` #[inline] - #[unstable(feature = "shrink_to", reason = "new API", issue="56431")] + #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")] pub fn shrink_to(&mut self, min_capacity: usize) { self.vec.shrink_to(min_capacity) } @@ -1067,7 +1071,9 @@ impl String { pub fn push(&mut self, ch: char) { match ch.len_utf8() { 1 => self.vec.push(ch as u8), - _ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()), + _ => self + .vec + .extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()), } } @@ -1190,9 +1196,11 @@ impl String { let next = idx + ch.len_utf8(); let len = self.len(); unsafe { - ptr::copy(self.vec.as_ptr().add(next), - self.vec.as_mut_ptr().add(idx), - len - next); + ptr::copy( + self.vec.as_ptr().add(next), + self.vec.as_mut_ptr().add(idx), + len - next, + ); self.vec.set_len(len - (next - idx)); } ch @@ -1216,25 +1224,26 @@ impl String { #[inline] #[stable(feature = "string_retain", since = "1.26.0")] pub fn retain(&mut self, mut f: F) - where F: FnMut(char) -> bool + where + F: FnMut(char) -> bool, { let len = self.len(); let mut del_bytes = 0; let mut idx = 0; while idx < len { - let ch = unsafe { - self.get_unchecked(idx..len).chars().next().unwrap() - }; + let ch = unsafe { self.get_unchecked(idx..len).chars().next().unwrap() }; let ch_len = ch.len_utf8(); if !f(ch) { del_bytes += ch_len; } else if del_bytes > 0 { unsafe { - ptr::copy(self.vec.as_ptr().add(idx), - self.vec.as_mut_ptr().add(idx - del_bytes), - ch_len); + ptr::copy( + self.vec.as_ptr().add(idx), + self.vec.as_mut_ptr().add(idx - del_bytes), + ch_len, + ); } } @@ -1243,7 +1252,9 @@ impl String { } if del_bytes > 0 { - unsafe { self.vec.set_len(len - del_bytes); } + unsafe { + self.vec.set_len(len - del_bytes); + } } } @@ -1289,12 +1300,12 @@ impl String { let amt = bytes.len(); self.vec.reserve(amt); - ptr::copy(self.vec.as_ptr().add(idx), - self.vec.as_mut_ptr().add(idx + amt), - len - idx); - ptr::copy(bytes.as_ptr(), - self.vec.as_mut_ptr().add(idx), - amt); + ptr::copy( + self.vec.as_ptr().add(idx), + self.vec.as_mut_ptr().add(idx + amt), + len - idx, + ); + ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt); self.vec.set_len(len + 
amt); } @@ -1486,7 +1497,8 @@ impl String { /// ``` #[stable(feature = "drain", since = "1.6.0")] pub fn drain(&mut self, range: R) -> Drain - where R: RangeBounds + where + R: RangeBounds, { // Memory safety // @@ -1546,7 +1558,8 @@ impl String { /// ``` #[stable(feature = "splice", since = "1.27.0")] pub fn replace_range(&mut self, range: R, replace_with: &str) - where R: RangeBounds + where + R: RangeBounds, { // Memory safety // @@ -1554,19 +1567,17 @@ impl String { // of the vector version. The data is just plain bytes. match range.start_bound() { - Included(&n) => assert!(self.is_char_boundary(n)), - Excluded(&n) => assert!(self.is_char_boundary(n + 1)), - Unbounded => {}, + Included(&n) => assert!(self.is_char_boundary(n)), + Excluded(&n) => assert!(self.is_char_boundary(n + 1)), + Unbounded => {} }; match range.end_bound() { - Included(&n) => assert!(self.is_char_boundary(n + 1)), - Excluded(&n) => assert!(self.is_char_boundary(n)), - Unbounded => {}, + Included(&n) => assert!(self.is_char_boundary(n + 1)), + Excluded(&n) => assert!(self.is_char_boundary(n)), + Unbounded => {} }; - unsafe { - self.as_mut_vec() - }.splice(range, replace_with.bytes()); + unsafe { self.as_mut_vec() }.splice(range, replace_with.bytes()); } /// Converts this `String` into a [`Box`]`<`[`str`]`>`. @@ -1684,7 +1695,9 @@ impl fmt::Display for FromUtf16Error { #[stable(feature = "rust1", since = "1.0.0")] impl Clone for String { fn clone(&self) -> Self { - String { vec: self.vec.clone() } + String { + vec: self.vec.clone(), + } } fn clone_from(&mut self, source: &Self) { @@ -1795,9 +1808,11 @@ impl<'a> Extend> for String { } /// A convenience impl that delegates to the impl for `&str` -#[unstable(feature = "pattern", - reason = "API not fully fleshed out and ready to be stabilized", - issue = "27721")] +#[unstable( + feature = "pattern", + reason = "API not fully fleshed out and ready to be stabilized", + issue = "27721" +)] impl<'a, 'b> Pattern<'a> for &'b String { type Searcher = <&'b str as Pattern<'a>>::Searcher; @@ -1833,20 +1848,27 @@ macro_rules! impl_eq { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, 'b> PartialEq<$rhs> for $lhs { #[inline] - fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&self[..], &other[..]) } + fn eq(&self, other: &$rhs) -> bool { + PartialEq::eq(&self[..], &other[..]) + } #[inline] - fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&self[..], &other[..]) } + fn ne(&self, other: &$rhs) -> bool { + PartialEq::ne(&self[..], &other[..]) + } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, 'b> PartialEq<$lhs> for $rhs { #[inline] - fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&self[..], &other[..]) } + fn eq(&self, other: &$lhs) -> bool { + PartialEq::eq(&self[..], &other[..]) + } #[inline] - fn ne(&self, other: &$lhs) -> bool { PartialEq::ne(&self[..], &other[..]) } + fn ne(&self, other: &$lhs) -> bool { + PartialEq::ne(&self[..], &other[..]) + } } - - } + }; } impl_eq! { String, str } @@ -2159,7 +2181,7 @@ impl ToString for T { use core::fmt::Write; let mut buf = String::new(); buf.write_fmt(format_args!("{}", self)) - .expect("a Display implementation returned an error unexpectedly"); + .expect("a Display implementation returned an error unexpectedly"); buf.shrink_to_fit(); buf } diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs index 390a079165054..52b2181089654 100644 --- a/src/liballoc/sync.rs +++ b/src/liballoc/sync.rs @@ -7,23 +7,23 @@ //! 
[arc]: struct.Arc.html use core::any::Any; -use core::sync::atomic; -use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst}; use core::borrow; -use core::fmt; use core::cmp::Ordering; +use core::convert::From; +use core::fmt; +use core::hash::{Hash, Hasher}; use core::intrinsics::abort; +use core::marker::{PhantomData, Unpin, Unsize}; use core::mem::{self, align_of_val, size_of_val}; -use core::ops::{Deref, Receiver}; use core::ops::{CoerceUnsized, DispatchFromDyn}; +use core::ops::{Deref, Receiver}; use core::pin::Pin; use core::ptr::{self, NonNull}; -use core::marker::{Unpin, Unsize, PhantomData}; -use core::hash::{Hash, Hasher}; +use core::sync::atomic; +use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst}; use core::{isize, usize}; -use core::convert::From; -use alloc::{Global, Alloc, Layout, box_free, handle_alloc_error}; +use alloc::{box_free, handle_alloc_error, Alloc, Global, Layout}; use boxed::Box; use rc::is_dangling; use string::String; @@ -290,7 +290,10 @@ impl Arc { weak: atomic::AtomicUsize::new(1), data, }; - Arc { ptr: Box::into_raw_non_null(x), phantom: PhantomData } + Arc { + ptr: Box::into_raw_non_null(x), + phantom: PhantomData, + } } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then @@ -325,7 +328,12 @@ impl Arc { #[stable(feature = "arc_unique", since = "1.4.0")] pub fn try_unwrap(this: Self) -> Result { // See `drop` for why all these atomics are like this - if this.inner().strong.compare_exchange(1, 0, Release, Relaxed).is_err() { + if this + .inner() + .strong + .compare_exchange(1, 0, Release, Relaxed) + .is_err() + { return Err(this); } @@ -446,7 +454,11 @@ impl Arc { // Unlike with Clone(), we need this to be an Acquire read to // synchronize with the write coming from `is_unique`, so that the // events prior to that write happen before this read. - match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) { + match this + .inner() + .weak + .compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) + { Ok(_) => { // Make sure we do not create a dangling Weak debug_assert!(!is_dangling(this.ptr)); @@ -485,7 +497,11 @@ impl Arc { let cnt = this.inner().weak.load(SeqCst); // If the weak count is currently locked, the value of the // count was 0 just before taking the lock. - if cnt == usize::MAX { 0 } else { cnt - 1 } + if cnt == usize::MAX { + 0 + } else { + cnt - 1 + } } /// Gets the number of strong (`Arc`) pointers to this value. @@ -567,10 +583,14 @@ impl Arc { // `&*(ptr as *const ArcInner)`, but this created a misaligned // reference (see #54908). 
let layout = Layout::new::>() - .extend(Layout::for_value(&*ptr)).unwrap().0 - .pad_to_align().unwrap(); - - let mem = Global.alloc(layout) + .extend(Layout::for_value(&*ptr)) + .unwrap() + .0 + .pad_to_align() + .unwrap(); + + let mem = Global + .alloc(layout) .unwrap_or_else(|_| handle_alloc_error(layout)); // Initialize the ArcInner @@ -595,12 +615,16 @@ impl Arc { ptr::copy_nonoverlapping( bptr as *const T as *const u8, &mut (*ptr).data as *mut _ as *mut u8, - value_size); + value_size, + ); // Free the allocation without dropping its contents box_free(box_unique); - Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + Arc { + ptr: NonNull::new_unchecked(ptr), + phantom: PhantomData, + } } } } @@ -622,12 +646,12 @@ impl Arc<[T]> { let v_ptr = v as *const [T]; let ptr = Self::allocate_for_ptr(v_ptr); - ptr::copy_nonoverlapping( - v.as_ptr(), - &mut (*ptr).data as *mut [T] as *mut T, - v.len()); + ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len()); - Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + Arc { + ptr: NonNull::new_unchecked(ptr), + phantom: PhantomData, + } } } @@ -672,7 +696,7 @@ impl ArcFromSlice for Arc<[T]> { // Pointer to first element let elems = &mut (*ptr).data as *mut [T] as *mut T; - let mut guard = Guard{ + let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems: elems, layout: layout, @@ -687,7 +711,10 @@ impl ArcFromSlice for Arc<[T]> { // All clear. Forget the guard so it doesn't free the new ArcInner. mem::forget(guard); - Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + Arc { + ptr: NonNull::new_unchecked(ptr), + phantom: PhantomData, + } } } } @@ -745,7 +772,10 @@ impl Clone for Arc { } } - Arc { ptr: self.ptr, phantom: PhantomData } + Arc { + ptr: self.ptr, + phantom: PhantomData, + } } } @@ -803,7 +833,12 @@ impl Arc { // before release writes (i.e., decrements) to `strong`. Since we hold a // weak count, there's no chance the ArcInner itself could be // deallocated. - if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() { + if this + .inner() + .strong + .compare_exchange(1, 0, Acquire, Relaxed) + .is_err() + { // Another strong pointer exists; clone *this = Arc::new((**this).clone()); } else if this.inner().weak.load(Relaxed) != 1 { @@ -841,9 +876,7 @@ impl Arc { // As with `get_mut()`, the unsafety is ok because our reference was // either unique to begin with, or became one upon cloning the contents. - unsafe { - &mut this.ptr.as_mut().data - } + unsafe { &mut this.ptr.as_mut().data } } } @@ -883,9 +916,7 @@ impl Arc { // reference count is guaranteed to be 1 at this point, and we required // the Arc itself to be `mut`, so we're returning the only possible // reference to the inner data. - unsafe { - Some(&mut this.ptr.as_mut().data) - } + unsafe { Some(&mut this.ptr.as_mut().data) } } else { None } @@ -903,7 +934,12 @@ impl Arc { // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded // weak ref was never dropped, the CAS here will fail so we do not care to synchronize. - if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { + if self + .inner() + .weak + .compare_exchange(1, usize::MAX, Acquire, Relaxed) + .is_ok() + { // This needs to be an `Acquire` to synchronize with the decrement of the `strong` // counter in `drop` -- the only access that happens when any but the last reference // is being dropped. 
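(Another reviewer aside, not from the patch: the sync.rs hunks above only re-wrap long method chains and struct literals. A hedged standalone sketch of the chain layout rustfmt emits once the receiver expression grows; `try_acquire_unique` is a hypothetical name.)

use std::sync::atomic::{AtomicUsize, Ordering};

// A short chain such as `strong.compare_exchange(1, 0, ..).is_ok()` stays on
// one line, but a longer receiver like `this.inner().strong` pushes each `.`
// segment onto its own line, which is all the diff above changes.
fn try_acquire_unique(strong: &AtomicUsize) -> bool {
    strong
        .compare_exchange(1, 0, Ordering::Acquire, Ordering::Relaxed)
        .is_ok()
}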
@@ -1024,7 +1060,10 @@ impl Arc { if (*self).is::() { let ptr = self.ptr.cast::>(); mem::forget(self); - Ok(Arc { ptr, phantom: PhantomData }) + Ok(Arc { + ptr, + phantom: PhantomData, + }) } else { Err(self) } @@ -1106,12 +1145,17 @@ impl Weak { } // Relaxed is valid for the same reason it is on Arc's Clone impl - match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) { - Ok(_) => return Some(Arc { - // null checked above - ptr: self.ptr, - phantom: PhantomData, - }), + match inner + .strong + .compare_exchange_weak(n, n + 1, Relaxed, Relaxed) + { + Ok(_) => { + return Some(Arc { + // null checked above + ptr: self.ptr, + phantom: PhantomData, + }) + } Err(old) => n = old, } } @@ -1273,14 +1317,12 @@ impl Drop for Weak { let inner = if let Some(inner) = self.inner() { inner } else { - return + return; }; if inner.weak.fetch_sub(1, Release) == 1 { atomic::fence(Acquire); - unsafe { - Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) - } + unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) } } } } @@ -1576,16 +1618,16 @@ impl From> for Arc<[T]> { mod tests { use std::boxed::Box; use std::clone::Clone; - use std::sync::mpsc::channel; + use std::convert::From; use std::mem::drop; use std::ops::Drop; use std::option::Option; use std::option::Option::{None, Some}; use std::sync::atomic; use std::sync::atomic::Ordering::{Acquire, SeqCst}; - use std::thread; + use std::sync::mpsc::channel; use std::sync::Mutex; - use std::convert::From; + use std::thread; use super::{Arc, Weak}; use vec::Vec; @@ -1768,7 +1810,9 @@ mod tests { x: Mutex>>, } - let a = Arc::new(Cycle { x: Mutex::new(None) }); + let a = Arc::new(Cycle { + x: Mutex::new(None), + }); let b = Arc::downgrade(&a.clone()); *a.x.lock().unwrap() = Some(b); @@ -2043,4 +2087,4 @@ impl AsRef for Arc { } #[stable(feature = "pin", since = "1.33.0")] -impl Unpin for Arc { } +impl Unpin for Arc {} diff --git a/src/liballoc/task.rs b/src/liballoc/task.rs index 604c56dd01762..db84648870138 100644 --- a/src/liballoc/task.rs +++ b/src/liballoc/task.rs @@ -69,7 +69,8 @@ mod if_arc { } impl From> for Waker - where T: Wake + 'static, + where + T: Wake + 'static, { fn from(rc: Arc) -> Self { unsafe { diff --git a/src/liballoc/tests/arc.rs b/src/liballoc/tests/arc.rs index 2759b1b1cac27..6087d2f5322c6 100644 --- a/src/liballoc/tests/arc.rs +++ b/src/liballoc/tests/arc.rs @@ -1,7 +1,7 @@ use std::any::Any; -use std::sync::{Arc, Weak}; use std::cell::RefCell; use std::cmp::PartialEq; +use std::sync::{Arc, Weak}; #[test] fn uninhabited() { @@ -10,7 +10,7 @@ fn uninhabited() { a = a.clone(); assert!(a.upgrade().is_none()); - let mut a: Weak = a; // Unsizing + let mut a: Weak = a; // Unsizing a = a.clone(); assert!(a.upgrade().is_none()); } @@ -18,8 +18,8 @@ fn uninhabited() { #[test] fn slice() { let a: Arc<[u32; 3]> = Arc::new([3, 2, 1]); - let a: Arc<[u32]> = a; // Unsizing - let b: Arc<[u32]> = Arc::from(&[3, 2, 1][..]); // Conversion + let a: Arc<[u32]> = a; // Unsizing + let b: Arc<[u32]> = Arc::from(&[3, 2, 1][..]); // Conversion assert_eq!(a, b); // Exercise is_dangling() with a DST @@ -31,7 +31,7 @@ fn slice() { #[test] fn trait_object() { let a: Arc = Arc::new(4); - let a: Arc = a; // Unsizing + let a: Arc = a; // Unsizing // Exercise is_dangling() with a DST let mut a = Arc::downgrade(&a); @@ -41,7 +41,7 @@ fn trait_object() { let mut b = Weak::::new(); b = b.clone(); assert!(b.upgrade().is_none()); - let mut b: Weak = b; // Unsizing + let mut b: Weak = b; // Unsizing b = b.clone(); 
assert!(b.upgrade().is_none()); } @@ -55,7 +55,7 @@ fn float_nan_ne() { #[test] fn partial_eq() { - struct TestPEq (RefCell); + struct TestPEq(RefCell); impl PartialEq for TestPEq { fn eq(&self, other: &TestPEq) -> bool { *self.0.borrow_mut() += 1; @@ -72,7 +72,7 @@ fn partial_eq() { #[test] fn eq() { #[derive(Eq)] - struct TestEq (RefCell); + struct TestEq(RefCell); impl PartialEq for TestEq { fn eq(&self, other: &TestEq) -> bool { *self.0.borrow_mut() += 1; diff --git a/src/liballoc/tests/binary_heap.rs b/src/liballoc/tests/binary_heap.rs index 6af1cf4080947..fd2cfea51e24a 100644 --- a/src/liballoc/tests/binary_heap.rs +++ b/src/liballoc/tests/binary_heap.rs @@ -1,10 +1,10 @@ use std::cmp; -use std::collections::BinaryHeap; use std::collections::binary_heap::{Drain, PeekMut}; +use std::collections::BinaryHeap; use std::panic::{self, AssertUnwindSafe}; -use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering}; +use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT}; -use rand::{thread_rng, seq::SliceRandom}; +use rand::{seq::SliceRandom, thread_rng}; #[test] fn test_iterator() { @@ -315,10 +315,11 @@ fn panic_safe() { for i in 1..=DATASZ { DROP_COUNTER.store(0, Ordering::SeqCst); - let mut panic_ords: Vec<_> = data.iter() - .filter(|&&x| x != i) - .map(|&x| PanicOrd(x, false)) - .collect(); + let mut panic_ords: Vec<_> = data + .iter() + .filter(|&&x| x != i) + .map(|&x| PanicOrd(x, false)) + .collect(); let panic_item = PanicOrd(i, true); // heapify the sane items diff --git a/src/liballoc/tests/btree/map.rs b/src/liballoc/tests/btree/map.rs index 05e0bdffaa86b..4835050f9a00d 100644 --- a/src/liballoc/tests/btree/map.rs +++ b/src/liballoc/tests/btree/map.rs @@ -1,10 +1,10 @@ -use std::collections::BTreeMap; use std::collections::btree_map::Entry::{Occupied, Vacant}; +use std::collections::BTreeMap; use std::ops::Bound::{self, Excluded, Included, Unbounded}; use std::rc::Rc; -use std::iter::FromIterator; use super::DeterministicRng; +use std::iter::FromIterator; #[test] fn test_basic_large() { @@ -75,7 +75,8 @@ fn test_iter() { let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect(); fn test(size: usize, mut iter: T) - where T: Iterator + where + T: Iterator, { for i in 0..size { assert_eq!(iter.size_hint(), (size - i, Some(size - i))); @@ -97,7 +98,8 @@ fn test_iter_rev() { let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect(); fn test(size: usize, mut iter: T) - where T: Iterator + where + T: Iterator, { for i in 0..size { assert_eq!(iter.size_hint(), (size - i, Some(size - i))); @@ -133,7 +135,8 @@ fn test_iter_mixed() { let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect(); fn test(size: usize, mut iter: T) - where T: Iterator + DoubleEndedIterator + where + T: Iterator + DoubleEndedIterator, { for i in 0..size / 4 { assert_eq!(iter.size_hint(), (size - i * 2, Some(size - i * 2))); @@ -175,8 +178,9 @@ fn test_range_inclusive() { let map: BTreeMap<_, _> = (0..=size).map(|i| (i, i)).collect(); fn check<'a, L, R>(lhs: L, rhs: R) - where L: IntoIterator, - R: IntoIterator, + where + L: IntoIterator, + R: IntoIterator, { let lhs: Vec<_> = lhs.into_iter().collect(); let rhs: Vec<_> = rhs.into_iter().collect(); @@ -192,7 +196,10 @@ fn test_range_inclusive() { check(map.range(-1..=size), map.range(..)); check(map.range(..=size), map.range(..)); check(map.range(..=200), map.range(..201)); - check(map.range(5..=8), vec![(&5, &5), (&6, &6), (&7, &7), (&8, &8)]); + check( + map.range(5..=8), + vec![(&5, &5), (&6, &6), (&7, &7), (&8, &8)], + ); 
check(map.range(-1..=0), vec![(&0, &0)]); check(map.range(-1..=2), vec![(&0, &0), (&1, &1), (&2, &2)]); } @@ -278,7 +285,7 @@ fn test_range_borrowed_key() { map.insert("coyote".to_string(), 3); map.insert("dingo".to_string(), 4); // NOTE: would like to use simply "b".."d" here... - let mut iter = map.range::((Included("b"),Excluded("d"))); + let mut iter = map.range::((Included("b"), Excluded("d"))); assert_eq!(iter.next(), Some((&"baboon".to_string(), &2))); assert_eq!(iter.next(), Some((&"coyote".to_string(), &3))); assert_eq!(iter.next(), None); @@ -291,7 +298,9 @@ fn test_range() { for i in 0..size { for j in i..size { - let mut kvs = map.range((Included(&i), Included(&j))).map(|(&k, &v)| (k, v)); + let mut kvs = map + .range((Included(&i), Included(&j))) + .map(|(&k, &v)| (k, v)); let mut pairs = (i..=j).map(|i| (i, i)); for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) { @@ -310,7 +319,9 @@ fn test_range_mut() { for i in 0..size { for j in i..size { - let mut kvs = map.range_mut((Included(&i), Included(&j))).map(|(&k, &mut v)| (k, v)); + let mut kvs = map + .range_mut((Included(&i), Included(&j))) + .map(|(&k, &mut v)| (k, v)); let mut pairs = (i..=j).map(|i| (i, i)); for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) { @@ -367,7 +378,6 @@ fn test_entry() { assert_eq!(map.get(&1).unwrap(), &100); assert_eq!(map.len(), 6); - // Existing key (update) match map.entry(2) { Vacant(_) => unreachable!(), @@ -389,7 +399,6 @@ fn test_entry() { assert_eq!(map.get(&3), None); assert_eq!(map.len(), 5); - // Inexistent key (insert) match map.entry(10) { Occupied(_) => unreachable!(), @@ -511,7 +520,7 @@ fn test_clone() { #[test] #[allow(dead_code)] fn test_variance() { - use std::collections::btree_map::{Iter, IntoIter, Range, Keys, Values}; + use std::collections::btree_map::{IntoIter, Iter, Keys, Range, Values}; fn map_key<'new>(v: BTreeMap<&'static str, ()>) -> BTreeMap<&'new str, ()> { v @@ -592,7 +601,7 @@ macro_rules! create_append_test { let mut b = BTreeMap::new(); for i in 5..$len { - b.insert(i, 2*i); + b.insert(i, 2 * i); } a.append(&mut b); @@ -604,12 +613,12 @@ macro_rules! 
create_append_test { if i < 5 { assert_eq!(a[&i], i); } else { - assert_eq!(a[&i], 2*i); + assert_eq!(a[&i], 2 * i); } } - assert_eq!(a.remove(&($len-1)), Some(2*($len-1))); - assert_eq!(a.insert($len-1, 20), None); + assert_eq!(a.remove(&($len - 1)), Some(2 * ($len - 1))); + assert_eq!(a.insert($len - 1, 20), None); } }; } @@ -672,6 +681,10 @@ fn test_split_off_large_random_sorted() { let key = data[data.len() / 2].0; let right = map.split_off(&key); - assert!(map.into_iter().eq(data.clone().into_iter().filter(|x| x.0 < key))); - assert!(right.into_iter().eq(data.into_iter().filter(|x| x.0 >= key))); + assert!(map + .into_iter() + .eq(data.clone().into_iter().filter(|x| x.0 < key))); + assert!(right + .into_iter() + .eq(data.into_iter().filter(|x| x.0 >= key))); } diff --git a/src/liballoc/tests/btree/set.rs b/src/liballoc/tests/btree/set.rs index e24c04fd8acb3..79dcf2762bfca 100644 --- a/src/liballoc/tests/btree/set.rs +++ b/src/liballoc/tests/btree/set.rs @@ -1,7 +1,7 @@ use std::collections::BTreeSet; -use std::iter::FromIterator; use super::DeterministicRng; +use std::iter::FromIterator; #[test] fn test_clone_eq() { @@ -30,7 +30,8 @@ fn test_hash() { } fn check(a: &[i32], b: &[i32], expected: &[i32], f: F) - where F: FnOnce(&BTreeSet, &BTreeSet, &mut dyn FnMut(&i32) -> bool) -> bool +where + F: FnOnce(&BTreeSet, &BTreeSet, &mut dyn FnMut(&i32) -> bool) -> bool, { let mut set_a = BTreeSet::new(); let mut set_b = BTreeSet::new(); @@ -43,13 +44,11 @@ fn check(a: &[i32], b: &[i32], expected: &[i32], f: F) } let mut i = 0; - f(&set_a, - &set_b, - &mut |&x| { - assert_eq!(x, expected[i]); - i += 1; - true - }); + f(&set_a, &set_b, &mut |&x| { + assert_eq!(x, expected[i]); + i += 1; + true + }); assert_eq!(i, expected.len()); } @@ -64,9 +63,11 @@ fn test_intersection() { check_intersection(&[], &[1, 2, 3], &[]); check_intersection(&[2], &[1, 2, 3], &[2]); check_intersection(&[1, 2, 3], &[2], &[2]); - check_intersection(&[11, 1, 3, 77, 103, 5, -5], - &[2, 11, 77, -9, -42, 5, 3], - &[3, 5, 11, 77]); + check_intersection( + &[11, 1, 3, 77, 103, 5, -5], + &[2, 11, 77, -9, -42, 5, 3], + &[3, 5, 11, 77], + ); } #[test] @@ -79,9 +80,11 @@ fn test_difference() { check_difference(&[1, 12], &[], &[1, 12]); check_difference(&[], &[1, 2, 3, 9], &[]); check_difference(&[1, 3, 5, 9, 11], &[3, 9], &[1, 5, 11]); - check_difference(&[-5, 11, 22, 33, 40, 42], - &[-12, -5, 14, 23, 34, 38, 39, 50], - &[11, 22, 33, 40, 42]); + check_difference( + &[-5, 11, 22, 33, 40, 42], + &[-12, -5, 14, 23, 34, 38, 39, 50], + &[11, 22, 33, 40, 42], + ); } #[test] @@ -93,9 +96,11 @@ fn test_symmetric_difference() { check_symmetric_difference(&[], &[], &[]); check_symmetric_difference(&[1, 2, 3], &[2], &[1, 3]); check_symmetric_difference(&[2], &[1, 2, 3], &[1, 3]); - check_symmetric_difference(&[1, 3, 5, 9, 11], - &[-2, 3, 9, 14, 22], - &[-2, 1, 5, 11, 14, 22]); + check_symmetric_difference( + &[1, 3, 5, 9, 11], + &[-2, 3, 9, 14, 22], + &[-2, 1, 5, 11, 14, 22], + ); } #[test] @@ -107,9 +112,11 @@ fn test_union() { check_union(&[], &[], &[]); check_union(&[1, 2, 3], &[2], &[1, 2, 3]); check_union(&[2], &[1, 2, 3], &[1, 2, 3]); - check_union(&[1, 3, 5, 9, 11, 16, 19, 24], - &[-2, 1, 5, 9, 13, 19], - &[-2, 1, 3, 5, 9, 11, 13, 16, 19, 24]); + check_union( + &[1, 3, 5, 9, 11, 16, 19, 24], + &[-2, 1, 5, 9, 13, 19], + &[-2, 1, 3, 5, 9, 11, 13, 16, 19, 24], + ); } #[test] @@ -316,6 +323,8 @@ fn test_split_off_large_random_sorted() { let key = data[data.len() / 2]; let right = set.split_off(&key); - 
assert!(set.into_iter().eq(data.clone().into_iter().filter(|x| *x < key))); + assert!(set + .into_iter() + .eq(data.clone().into_iter().filter(|x| *x < key))); assert!(right.into_iter().eq(data.into_iter().filter(|x| *x >= key))); } diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs index 24eea1d294965..a06c25617dbf3 100644 --- a/src/liballoc/tests/heap.rs +++ b/src/liballoc/tests/heap.rs @@ -1,4 +1,4 @@ -use std::alloc::{Global, Alloc, Layout, System}; +use std::alloc::{Alloc, Global, Layout, System}; /// https://github.com/rust-lang/rust/issues/45955 #[test] @@ -16,12 +16,19 @@ fn check_overalign_requests(mut allocator: T) { let align = 16; // greater than size let iterations = 100; unsafe { - let pointers: Vec<_> = (0..iterations).map(|_| { - allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap() - }).collect(); + let pointers: Vec<_> = (0..iterations) + .map(|_| { + allocator + .alloc(Layout::from_size_align(size, align).unwrap()) + .unwrap() + }) + .collect(); for &ptr in &pointers { - assert_eq!((ptr.as_ptr() as usize) % align, 0, - "Got a pointer less aligned than requested") + assert_eq!( + (ptr.as_ptr() as usize) % align, + 0, + "Got a pointer less aligned than requested" + ) } // Clean up diff --git a/src/liballoc/tests/lib.rs b/src/liballoc/tests/lib.rs index a76fd87a1a92d..643330b6afe04 100644 --- a/src/liballoc/tests/lib.rs +++ b/src/liballoc/tests/lib.rs @@ -13,8 +13,8 @@ extern crate core; extern crate rand; -use std::hash::{Hash, Hasher}; use std::collections::hash_map::DefaultHasher; +use std::hash::{Hash, Hasher}; mod arc; mod binary_heap; @@ -27,8 +27,8 @@ mod rc; mod slice; mod str; mod string; -mod vec_deque; mod vec; +mod vec_deque; fn hash(t: &T) -> u64 { let mut s = DefaultHasher::new(); diff --git a/src/liballoc/tests/linked_list.rs b/src/liballoc/tests/linked_list.rs index 6e775f9650d12..1c7ed20aaad35 100644 --- a/src/liballoc/tests/linked_list.rs +++ b/src/liballoc/tests/linked_list.rs @@ -104,7 +104,6 @@ fn test_split_off() { assert_eq!(m.back(), Some(&1)); assert_eq!(m.front(), Some(&1)); } - } #[test] @@ -304,9 +303,14 @@ fn test_show() { let list: LinkedList<_> = (0..10).collect(); assert_eq!(format!("{:?}", list), "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]"); - let list: LinkedList<_> = vec!["just", "one", "test", "more"].iter().cloned().collect(); - assert_eq!(format!("{:?}", list), - "[\"just\", \"one\", \"test\", \"more\"]"); + let list: LinkedList<_> = vec!["just", "one", "test", "more"] + .iter() + .cloned() + .collect(); + assert_eq!( + format!("{:?}", list), + "[\"just\", \"one\", \"test\", \"more\"]" + ); } #[test] @@ -417,7 +421,10 @@ fn drain_filter_false() { assert_eq!(count, 0); assert_eq!(list.len(), initial_len); - assert_eq!(list.into_iter().collect::>(), vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); + assert_eq!( + list.into_iter().collect::>(), + vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + ); } #[test] @@ -446,19 +453,14 @@ fn drain_filter_true() { #[test] fn drain_filter_complex() { - - { // [+xxx++++++xxxxx++++x+x++] + { + // [+xxx++++++xxxxx++++x+x++] let mut list = vec![ - 1, - 2, 4, 6, - 7, 9, 11, 13, 15, 17, - 18, 20, 22, 24, 26, - 27, 29, 31, 33, - 34, - 35, - 36, - 37, 39 - ].into_iter().collect::>(); + 1, 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, + 39, + ] + .into_iter() + .collect::>(); let removed = list.drain_filter(|x| *x % 2 == 0).collect::>(); assert_eq!(removed.len(), 10); @@ -471,17 +473,13 @@ fn drain_filter_complex() { ); } - { // [xxx++++++xxxxx++++x+x++] + { + // 
[xxx++++++xxxxx++++x+x++] let mut list = vec![ - 2, 4, 6, - 7, 9, 11, 13, 15, 17, - 18, 20, 22, 24, 26, - 27, 29, 31, 33, - 34, - 35, - 36, - 37, 39 - ].into_iter().collect::>(); + 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39, + ] + .into_iter() + .collect::>(); let removed = list.drain_filter(|x| *x % 2 == 0).collect::>(); assert_eq!(removed.len(), 10); @@ -494,16 +492,13 @@ fn drain_filter_complex() { ); } - { // [xxx++++++xxxxx++++x+x] + { + // [xxx++++++xxxxx++++x+x] let mut list = vec![ - 2, 4, 6, - 7, 9, 11, 13, 15, 17, - 18, 20, 22, 24, 26, - 27, 29, 31, 33, - 34, - 35, - 36 - ].into_iter().collect::>(); + 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, + ] + .into_iter() + .collect::>(); let removed = list.drain_filter(|x| *x % 2 == 0).collect::>(); assert_eq!(removed.len(), 10); @@ -516,31 +511,41 @@ fn drain_filter_complex() { ); } - { // [xxxxxxxxxx+++++++++++] + { + // [xxxxxxxxxx+++++++++++] let mut list = vec![ - 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, - 1, 3, 5, 7, 9, 11, 13, 15, 17, 19 - ].into_iter().collect::>(); + 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, + ] + .into_iter() + .collect::>(); let removed = list.drain_filter(|x| *x % 2 == 0).collect::>(); assert_eq!(removed.len(), 10); assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]); assert_eq!(list.len(), 10); - assert_eq!(list.into_iter().collect::>(), vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]); + assert_eq!( + list.into_iter().collect::>(), + vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19] + ); } - { // [+++++++++++xxxxxxxxxx] + { + // [+++++++++++xxxxxxxxxx] let mut list = vec![ - 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, - 2, 4, 6, 8, 10, 12, 14, 16, 18, 20 - ].into_iter().collect::>(); + 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, + ] + .into_iter() + .collect::>(); let removed = list.drain_filter(|x| *x % 2 == 0).collect::>(); assert_eq!(removed.len(), 10); assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]); assert_eq!(list.len(), 10); - assert_eq!(list.into_iter().collect::>(), vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]); + assert_eq!( + list.into_iter().collect::>(), + vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19] + ); } } diff --git a/src/liballoc/tests/rc.rs b/src/liballoc/tests/rc.rs index 18f82e8041008..27141e2043a84 100644 --- a/src/liballoc/tests/rc.rs +++ b/src/liballoc/tests/rc.rs @@ -1,7 +1,7 @@ use std::any::Any; -use std::rc::{Rc, Weak}; use std::cell::RefCell; use std::cmp::PartialEq; +use std::rc::{Rc, Weak}; #[test] fn uninhabited() { @@ -10,7 +10,7 @@ fn uninhabited() { a = a.clone(); assert!(a.upgrade().is_none()); - let mut a: Weak = a; // Unsizing + let mut a: Weak = a; // Unsizing a = a.clone(); assert!(a.upgrade().is_none()); } @@ -18,8 +18,8 @@ fn uninhabited() { #[test] fn slice() { let a: Rc<[u32; 3]> = Rc::new([3, 2, 1]); - let a: Rc<[u32]> = a; // Unsizing - let b: Rc<[u32]> = Rc::from(&[3, 2, 1][..]); // Conversion + let a: Rc<[u32]> = a; // Unsizing + let b: Rc<[u32]> = Rc::from(&[3, 2, 1][..]); // Conversion assert_eq!(a, b); // Exercise is_dangling() with a DST @@ -31,7 +31,7 @@ fn slice() { #[test] fn trait_object() { let a: Rc = Rc::new(4); - let a: Rc = a; // Unsizing + let a: Rc = a; // Unsizing // Exercise is_dangling() with a DST let mut a = Rc::downgrade(&a); @@ -41,7 +41,7 @@ fn trait_object() { let mut b = Weak::::new(); b = b.clone(); assert!(b.upgrade().is_none()); - let mut b: Weak = b; // Unsizing + let mut b: Weak = b; // Unsizing b = b.clone(); 
assert!(b.upgrade().is_none()); } @@ -55,7 +55,7 @@ fn float_nan_ne() { #[test] fn partial_eq() { - struct TestPEq (RefCell); + struct TestPEq(RefCell); impl PartialEq for TestPEq { fn eq(&self, other: &TestPEq) -> bool { *self.0.borrow_mut() += 1; @@ -72,7 +72,7 @@ fn partial_eq() { #[test] fn eq() { #[derive(Eq)] - struct TestEq (RefCell); + struct TestEq(RefCell); impl PartialEq for TestEq { fn eq(&self, other: &TestEq) -> bool { *self.0.borrow_mut() += 1; diff --git a/src/liballoc/tests/slice.rs b/src/liballoc/tests/slice.rs index 8ecd17236c048..b530c8d9cdc52 100644 --- a/src/liballoc/tests/slice.rs +++ b/src/liballoc/tests/slice.rs @@ -1,15 +1,15 @@ use std::cell::Cell; -use std::cmp::Ordering::{Equal, Greater, Less}; use std::cmp::Ordering; +use std::cmp::Ordering::{Equal, Greater, Less}; use std::mem; use std::panic; use std::rc::Rc; use std::sync::atomic::Ordering::Relaxed; -use std::sync::atomic::{ATOMIC_USIZE_INIT, AtomicUsize}; +use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT}; use std::thread; -use rand::{Rng, RngCore, thread_rng, seq::SliceRandom}; use rand::distributions::Standard; +use rand::{seq::SliceRandom, thread_rng, Rng, RngCore}; fn square(n: usize) -> usize { n * n @@ -233,7 +233,6 @@ fn test_slice_to() { assert_eq!(&vec[..0], b); } - #[test] fn test_pop() { let mut v = vec![5]; @@ -396,7 +395,8 @@ fn test_sort() { for len in (2..25).chain(500..510) { for &modulus in &[5, 10, 100, 1000] { for _ in 0..10 { - let orig: Vec<_> = rng.sample_iter::(&Standard) + let orig: Vec<_> = rng + .sample_iter::(&Standard) .map(|x| x % modulus) .take(len) .collect(); @@ -534,7 +534,7 @@ fn test_rotate_left() { // non-small prime rotation, has a few rounds of swapping v = (389..1000).chain(0..389).collect(); - v.rotate_left(1000-389); + v.rotate_left(1000 - 389); assert_eq!(v, expected); } @@ -605,7 +605,10 @@ fn test_join_nocopy() { let v: [String; 0] = []; assert_eq!(v.join(","), ""); assert_eq!(["a".to_string(), "ab".into()].join(","), "a,ab"); - assert_eq!(["a".to_string(), "ab".into(), "abc".into()].join(","), "a,ab,abc"); + assert_eq!( + ["a".to_string(), "ab".into(), "abc".into()].join(","), + "a,ab,abc" + ); assert_eq!(["a".to_string(), "ab".into(), "".into()].join(","), "a,ab,"); } @@ -688,7 +691,7 @@ macro_rules! 
assert_order { (Equal, $a:expr, $b:expr) => { assert_eq!($a.cmp($b), Equal); assert_eq!($a, $b); - } + }; } #[test] @@ -705,7 +708,6 @@ fn test_total_ord_u8() { assert_order!(Greater, &[2u8, 2][..], &c[..]); } - #[test] fn test_total_ord_i32() { let c = &[1, 2, 3]; @@ -795,7 +797,6 @@ fn test_mut_iterator() { #[test] fn test_rev_iterator() { - let xs = [1, 2, 5, 10, 11]; let ys = [11, 10, 5, 2, 1]; let mut i = 0; @@ -818,15 +819,21 @@ fn test_mut_rev_iterator() { #[test] fn test_move_iterator() { let xs = vec![1, 2, 3, 4, 5]; - assert_eq!(xs.into_iter().fold(0, |a: usize, b: usize| 10 * a + b), - 12345); + assert_eq!( + xs.into_iter().fold(0, |a: usize, b: usize| 10 * a + b), + 12345 + ); } #[test] fn test_move_rev_iterator() { let xs = vec![1, 2, 3, 4, 5]; - assert_eq!(xs.into_iter().rev().fold(0, |a: usize, b: usize| 10 * a + b), - 54321); + assert_eq!( + xs.into_iter() + .rev() + .fold(0, |a: usize, b: usize| 10 * a + b), + 54321 + ); } #[test] @@ -870,11 +877,15 @@ fn test_splitnator_mut() { let xs = &mut [1, 2, 3, 4, 5]; let splits: &[&mut [_]] = &[&mut [1, 2, 3, 4, 5]]; - assert_eq!(xs.splitn_mut(1, |x| *x % 2 == 0).collect::>(), - splits); + assert_eq!( + xs.splitn_mut(1, |x| *x % 2 == 0).collect::>(), + splits + ); let splits: &[&mut [_]] = &[&mut [1], &mut [3, 4, 5]]; - assert_eq!(xs.splitn_mut(2, |x| *x % 2 == 0).collect::>(), - splits); + assert_eq!( + xs.splitn_mut(2, |x| *x % 2 == 0).collect::>(), + splits + ); let splits: &[&mut [_]] = &[&mut [], &mut [], &mut [], &mut [4, 5]]; assert_eq!(xs.splitn_mut(4, |_| true).collect::>(), splits); @@ -1046,11 +1057,11 @@ fn test_reverse_part() { #[test] fn test_show() { macro_rules! test_show_vec { - ($x:expr, $x_str:expr) => ({ + ($x:expr, $x_str:expr) => {{ let (x, x_str) = ($x, $x_str); assert_eq!(format!("{:?}", x), x_str); assert_eq!(format!("{:?}", x), x_str); - }) + }}; } let empty = Vec::::new(); test_show_vec!(empty, "[]"); @@ -1074,7 +1085,7 @@ fn test_vec_default() { ($ty:ty) => {{ let v: $ty = Default::default(); assert!(v.is_empty()); - }} + }}; } t!(&[i32]); @@ -1397,8 +1408,8 @@ fn test_box_slice_clone() { #[allow(unused_must_use)] // here, we care about the side effects of `.clone()` #[cfg_attr(target_os = "emscripten", ignore)] fn test_box_slice_clone_panics() { - use std::sync::Arc; use std::sync::atomic::{AtomicUsize, Ordering}; + use std::sync::Arc; use std::thread::spawn; struct Canary { @@ -1436,15 +1447,21 @@ fn test_box_slice_clone_panics() { }; spawn(move || { - // When xs is dropped, +5. - let xs = vec![canary.clone(), canary.clone(), canary.clone(), panic, canary] - .into_boxed_slice(); - - // When panic is cloned, +3. - xs.clone(); - }) - .join() - .unwrap_err(); + // When xs is dropped, +5. + let xs = vec![ + canary.clone(), + canary.clone(), + canary.clone(), + panic, + canary, + ] + .into_boxed_slice(); + + // When panic is cloned, +3. + xs.clone(); + }) + .join() + .unwrap_err(); // Total = 8 assert_eq!(drop_count.load(Ordering::SeqCst), 8); @@ -1478,26 +1495,86 @@ const MAX_LEN: usize = 80; static DROP_COUNTS: [AtomicUsize; MAX_LEN] = [ // FIXME(RFC 1109): AtomicUsize is not Copy. 
- AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), - AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + AtomicUsize::new(0), + 
AtomicUsize::new(0), ]; static VERSIONS: AtomicUsize = ATOMIC_USIZE_INIT; @@ -1544,7 +1621,10 @@ macro_rules! test { // Work out the total number of comparisons required to sort // this array... let mut count = 0usize; - $input.to_owned().$func(|a, b| { count += 1; a.cmp(b) }); + $input.to_owned().$func(|a, b| { + count += 1; + a.cmp(b) + }); // ... and then panic on each and every single one. for panic_countdown in 0..count { @@ -1566,21 +1646,26 @@ macro_rules! test { panic_countdown -= 1; a.cmp(b) }) - }).join(); + }) + .join(); // Check that the number of things dropped is exactly // what we expect (i.e., the contents of `v`). for (i, c) in DROP_COUNTS.iter().enumerate().take(len) { let count = c.load(Relaxed); - assert!(count == 1, - "found drop count == {} for i == {}, len == {}", - count, i, len); + assert!( + count == 1, + "found drop count == {} for i == {}, len == {}", + count, + i, + len + ); } // Check that the most recent versions of values were dropped. assert_eq!(VERSIONS.load(Relaxed), 0); } - } + }; } thread_local!(static SILENCE_PANIC: Cell = Cell::new(false)); @@ -1601,12 +1686,10 @@ fn panic_safe() { for &modulus in &[5, 20, 50] { for &has_runs in &[false, true] { let mut input = (0..len) - .map(|id| { - DropCounter { - x: rng.next_u32() % modulus, - id: id, - version: Cell::new(0), - } + .map(|id| DropCounter { + x: rng.next_u32() % modulus, + id: id, + version: Cell::new(0), }) .collect::>(); diff --git a/src/liballoc/tests/str.rs b/src/liballoc/tests/str.rs index 66a1b947a7d3a..cabad3d024f13 100644 --- a/src/liballoc/tests/str.rs +++ b/src/liballoc/tests/str.rs @@ -13,21 +13,33 @@ fn test_le() { #[test] fn test_find() { assert_eq!("hello".find('l'), Some(2)); - assert_eq!("hello".find(|c:char| c == 'o'), Some(4)); + assert_eq!("hello".find(|c: char| c == 'o'), Some(4)); assert!("hello".find('x').is_none()); - assert!("hello".find(|c:char| c == 'x').is_none()); - assert_eq!("ประเทศไทย中华Việt Nam".find('华'), Some(30)); - assert_eq!("ประเทศไทย中华Việt Nam".find(|c: char| c == '华'), Some(30)); + assert!("hello".find(|c: char| c == 'x').is_none()); + assert_eq!( + "ประเทศไทย中华Việt Nam".find('华'), + Some(30) + ); + assert_eq!( + "ประเทศไทย中华Việt Nam".find(|c: char| c == '华'), + Some(30) + ); } #[test] fn test_rfind() { assert_eq!("hello".rfind('l'), Some(3)); - assert_eq!("hello".rfind(|c:char| c == 'o'), Some(4)); + assert_eq!("hello".rfind(|c: char| c == 'o'), Some(4)); assert!("hello".rfind('x').is_none()); - assert!("hello".rfind(|c:char| c == 'x').is_none()); - assert_eq!("ประเทศไทย中华Việt Nam".rfind('华'), Some(30)); - assert_eq!("ประเทศไทย中华Việt Nam".rfind(|c: char| c == '华'), Some(30)); + assert!("hello".rfind(|c: char| c == 'x').is_none()); + assert_eq!( + "ประเทศไทย中华Việt Nam".rfind('华'), + Some(30) + ); + assert_eq!( + "ประเทศไทย中华Việt Nam".rfind(|c: char| c == '华'), + Some(30) + ); } #[test] @@ -65,7 +77,7 @@ fn test_find_str() { assert_eq!(data[0..43].find(""), Some(0)); assert_eq!(data[6..43].find(""), Some(6 - 6)); - assert_eq!(data[0..43].find("ประ"), Some( 0)); + assert_eq!(data[0..43].find("ประ"), Some(0)); assert_eq!(data[0..43].find("ทศไ"), Some(12)); assert_eq!(data[0..43].find("ย中"), Some(24)); assert_eq!(data[0..43].find("iệt"), Some(34)); @@ -81,9 +93,10 @@ fn test_find_str() { let string = "Việt Namacbaabcaabaaba"; for (i, ci) in string.char_indices() { let ip = i + ci.len_utf8(); - for j in string[ip..].char_indices() - .map(|(i, _)| i) - .chain(Some(string.len() - ip)) + for j in string[ip..] 
+ .char_indices() + .map(|(i, _)| i) + .chain(Some(string.len() - ip)) { let pat = &string[i..ip + j]; assert!(match string.find(pat) { @@ -98,15 +111,15 @@ fn test_find_str() { } } -fn s(x: &str) -> String { x.to_string() } +fn s(x: &str) -> String { + x.to_string() +} macro_rules! test_concat { - ($expected: expr, $string: expr) => { - { - let s: String = $string.concat(); - assert_eq!($expected, s); - } - } + ($expected: expr, $string: expr) => {{ + let s: String = $string.concat(); + assert_eq!($expected, s); + }}; } #[test] @@ -125,12 +138,10 @@ fn test_concat_for_different_lengths() { } macro_rules! test_join { - ($expected: expr, $string: expr, $delim: expr) => { - { - let s = $string.join($delim); - assert_eq!($expected, s); - } - } + ($expected: expr, $string: expr, $delim: expr) => {{ + let s = $string.join($delim); + assert_eq!($expected, s); + }}; } #[test] @@ -162,14 +173,18 @@ fn test_join_for_different_lengths_with_long_separator() { test_join!("", empty, "~~~~~"); test_join!("a", ["a"], "~~~~~"); test_join!("a~~~~~b", ["a", "b"], "~~~~~"); - test_join!("~~~~~a~~~~~bc", ["", "a", "bc"], "~~~~~"); + test_join!( + "~~~~~a~~~~~bc", + ["", "a", "bc"], + "~~~~~" + ); } #[test] fn test_unsafe_slice() { - assert_eq!("ab", unsafe {"abc".get_unchecked(0..2)}); - assert_eq!("bc", unsafe {"abc".get_unchecked(1..3)}); - assert_eq!("", unsafe {"abc".get_unchecked(1..1)}); + assert_eq!("ab", unsafe { "abc".get_unchecked(0..2) }); + assert_eq!("bc", unsafe { "abc".get_unchecked(1..3) }); + assert_eq!("", unsafe { "abc".get_unchecked(1..1) }); fn a_million_letter_a() -> String { let mut i = 0; let mut rs = String::new(); @@ -189,8 +204,9 @@ fn test_unsafe_slice() { rs } let letters = a_million_letter_a(); - assert_eq!(half_a_million_letter_a(), - unsafe { letters.get_unchecked(0..500000)}); + assert_eq!(half_a_million_letter_a(), unsafe { + letters.get_unchecked(0..500000) + }); } #[test] @@ -232,7 +248,10 @@ fn test_replacen() { assert_eq!(" test test ".replacen(test, "toast", 0), " test test "); assert_eq!(" test test ".replacen(test, "", 5), " "); - assert_eq!("qwer123zxc789".replacen(char::is_numeric, "", 3), "qwerzxc789"); + assert_eq!( + "qwer123zxc789".replacen(char::is_numeric, "", 3), + "qwerzxc789" + ); } #[test] @@ -288,10 +307,22 @@ fn test_replace_2d() { #[test] fn test_replace_pattern() { let data = "abcdαβγδabcdαβγδ"; - assert_eq!(data.replace("dαβ", "😺😺😺"), "abc😺😺😺γδabc😺😺😺γδ"); - assert_eq!(data.replace('γ', "😺😺😺"), "abcdαβ😺😺😺δabcdαβ😺😺😺δ"); - assert_eq!(data.replace(&['a', 'γ'] as &[_], "😺😺😺"), "😺😺😺bcdαβ😺😺😺δ😺😺😺bcdαβ😺😺😺δ"); - assert_eq!(data.replace(|c| c == 'γ', "😺😺😺"), "abcdαβ😺😺😺δabcdαβ😺😺😺δ"); + assert_eq!( + data.replace("dαβ", "😺😺😺"), + "abc😺😺😺γδabc😺😺😺γδ" + ); + assert_eq!( + data.replace('γ', "😺😺😺"), + "abcdαβ😺😺😺δabcdαβ😺😺😺δ" + ); + assert_eq!( + data.replace(&['a', 'γ'] as &[_], "😺😺😺"), + "😺😺😺bcdαβ😺😺😺δ😺😺😺bcdαβ😺😺😺δ" + ); + assert_eq!( + data.replace(|c| c == 'γ', "😺😺😺"), + "abcdαβ😺😺😺δabcdαβ😺😺😺δ" + ); } // The current implementation of SliceIndex fails to handle methods @@ -303,8 +334,7 @@ mod slice_index { // // This is not suitable for testing failure on invalid inputs. macro_rules! 
assert_range_eq { - ($s:expr, $range:expr, $expected:expr) - => { + ($s:expr, $range:expr, $expected:expr) => { let mut s: String = $s.to_owned(); let mut expected: String = $expected.to_owned(); { @@ -315,7 +345,8 @@ mod slice_index { assert_eq!(s.get($range), Some(expected), "(in assertion for: get)"); unsafe { assert_eq!( - s.get_unchecked($range), expected, + s.get_unchecked($range), + expected, "(in assertion for: get_unchecked)", ); } @@ -324,22 +355,21 @@ mod slice_index { let s: &mut str = &mut s; let expected: &mut str = &mut expected; + assert_eq!(&mut s[$range], expected, "(in assertion for: index_mut)",); assert_eq!( - &mut s[$range], expected, - "(in assertion for: index_mut)", - ); - assert_eq!( - s.get_mut($range), Some(&mut expected[..]), + s.get_mut($range), + Some(&mut expected[..]), "(in assertion for: get_mut)", ); unsafe { assert_eq!( - s.get_unchecked_mut($range), expected, + s.get_unchecked_mut($range), + expected, "(in assertion for: get_unchecked_mut)", ); } } - } + }; } // Make sure the macro can actually detect bugs, @@ -459,15 +489,15 @@ mod slice_index { assert_range_eq!(data, 30..33, "华"); /*0: 中 - 3: 华 - 6: V - 7: i - 8: ệ - 11: t - 12: - 13: N - 14: a - 15: m */ + 3: 华 + 6: V + 7: i + 8: ệ + 11: t + 12: + 13: N + 14: a + 15: m */ let ss = "中华Việt Nam"; assert_range_eq!(ss, 3..6, "华"); assert_range_eq!(ss, 6..16, "Việt Nam"); @@ -648,23 +678,24 @@ mod slice_index { } } - const LOREM_PARAGRAPH: &'static str = "\ - Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem \ - sit amet dolor ultricies condimentum. Praesent iaculis purus elit, ac malesuada \ - quam malesuada in. Duis sed orci eros. Suspendisse sit amet magna mollis, mollis \ - nunc luctus, imperdiet mi. Integer fringilla non sem ut lacinia. Fusce varius \ - tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec tempus vel, \ - gravida nec quam."; + const LOREM_PARAGRAPH: &'static str = + "\ + Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem \ + sit amet dolor ultricies condimentum. Praesent iaculis purus elit, ac malesuada \ + quam malesuada in. Duis sed orci eros. Suspendisse sit amet magna mollis, mollis \ + nunc luctus, imperdiet mi. Integer fringilla non sem ut lacinia. Fusce varius \ + tortor a risus porttitor hendrerit. 
Morbi mauris dui, ultricies nec tempus vel, \ + gravida nec quam."; // check the panic includes the prefix of the sliced string #[test] - #[should_panic(expected="byte index 1024 is out of bounds of `Lorem ipsum dolor sit amet")] + #[should_panic(expected = "byte index 1024 is out of bounds of `Lorem ipsum dolor sit amet")] fn test_slice_fail_truncated_1() { &LOREM_PARAGRAPH[..1024]; } // check the truncation in the panic message #[test] - #[should_panic(expected="luctus, im`[...]")] + #[should_panic(expected = "luctus, im`[...]")] fn test_slice_fail_truncated_2() { &LOREM_PARAGRAPH[..1024]; } @@ -710,8 +741,12 @@ fn test_is_char_boundary() { // ensure character locations are boundaries and continuation bytes are not assert!(s.is_char_boundary(i), "{} is a char boundary in {:?}", i, s); for j in 1..ch.len_utf8() { - assert!(!s.is_char_boundary(i + j), - "{} should not be a char boundary in {:?}", i + j, s); + assert!( + !s.is_char_boundary(i + j), + "{} should not be a char boundary in {:?}", + i + j, + s + ); } } } @@ -728,7 +763,10 @@ fn test_trim_start_matches() { assert_eq!("11foo1bar11".trim_start_matches('1'), "foo1bar11"); let chars: &[char] = &['1', '2']; assert_eq!("12foo1bar12".trim_start_matches(chars), "foo1bar12"); - assert_eq!("123foo1bar123".trim_start_matches(|c: char| c.is_numeric()), "foo1bar123"); + assert_eq!( + "123foo1bar123".trim_start_matches(|c: char| c.is_numeric()), + "foo1bar123" + ); } #[test] @@ -743,7 +781,10 @@ fn test_trim_end_matches() { assert_eq!("11foo1bar11".trim_end_matches('1'), "11foo1bar"); let chars: &[char] = &['1', '2']; assert_eq!("12foo1bar12".trim_end_matches(chars), "12foo1bar"); - assert_eq!("123foo1bar123".trim_end_matches(|c: char| c.is_numeric()), "123foo1bar"); + assert_eq!( + "123foo1bar123".trim_end_matches(|c: char| c.is_numeric()), + "123foo1bar" + ); } #[test] @@ -758,7 +799,10 @@ fn test_trim_matches() { assert_eq!("11foo1bar11".trim_matches('1'), "foo1bar"); let chars: &[char] = &['1', '2']; assert_eq!("12foo1bar12".trim_matches(chars), "foo1bar"); - assert_eq!("123foo1bar123".trim_matches(|c: char| c.is_numeric()), "foo1bar"); + assert_eq!( + "123foo1bar123".trim_matches(|c: char| c.is_numeric()), + "foo1bar" + ); } #[test] @@ -844,7 +888,7 @@ fn from_utf8_error() { let error = from_utf8($input).unwrap_err(); assert_eq!(error.valid_up_to(), $expected_valid_up_to); assert_eq!(error.error_len(), $expected_error_len); - } + }; } test!(b"A\xC3\xA9 \xFF ", 4, Some(1)); test!(b"A\xC3\xA9 \x80 ", 4, Some(1)); @@ -871,9 +915,8 @@ fn from_utf8_error() { fn test_as_bytes() { // no null let v = [ - 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, - 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97, - 109 + 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142, + 86, 105, 225, 187, 135, 116, 32, 78, 97, 109, ]; let b: &[u8] = &[]; assert_eq!("".as_bytes(), b); @@ -983,9 +1026,15 @@ fn test_escape_unicode() { assert_eq!("a c".escape_unicode(), "\\u{61}\\u{20}\\u{63}"); assert_eq!("\r\n\t".escape_unicode(), "\\u{d}\\u{a}\\u{9}"); assert_eq!("'\"\\".escape_unicode(), "\\u{27}\\u{22}\\u{5c}"); - assert_eq!("\x00\x01\u{fe}\u{ff}".escape_unicode(), "\\u{0}\\u{1}\\u{fe}\\u{ff}"); + assert_eq!( + "\x00\x01\u{fe}\u{ff}".escape_unicode(), + "\\u{0}\\u{1}\\u{fe}\\u{ff}" + ); assert_eq!("\u{100}\u{ffff}".escape_unicode(), "\\u{100}\\u{ffff}"); - assert_eq!("\u{10000}\u{10ffff}".escape_unicode(), "\\u{10000}\\u{10ffff}"); + assert_eq!( + "\u{10000}\u{10ffff}".escape_unicode(), + 
"\\u{10000}\\u{10ffff}" + ); assert_eq!("ab\u{fb00}".escape_unicode(), "\\u{61}\\u{62}\\u{fb00}"); assert_eq!("\u{1d4ea}\r".escape_unicode(), "\\u{1d4ea}\\u{d}"); } @@ -1008,7 +1057,10 @@ fn test_escape_debug() { assert_eq!("\u{10000}\u{10ffff}".escape_debug(), "\u{10000}\\u{10ffff}"); assert_eq!("ab\u{200b}".escape_debug(), "ab\\u{200b}"); assert_eq!("\u{10d4ea}\r".escape_debug(), "\\u{10d4ea}\\r"); - assert_eq!("\u{301}a\u{301}bé\u{e000}".escape_debug(), "\\u{301}a\u{301}bé\\u{e000}"); + assert_eq!( + "\u{301}a\u{301}bé\u{e000}".escape_debug(), + "\\u{301}a\u{301}bé\\u{e000}" + ); } #[test] @@ -1020,7 +1072,10 @@ fn test_escape_default() { assert_eq!("'\"\\".escape_default(), "\\'\\\"\\\\"); assert_eq!("\u{7f}\u{ff}".escape_default(), "\\u{7f}\\u{ff}"); assert_eq!("\u{100}\u{ffff}".escape_default(), "\\u{100}\\u{ffff}"); - assert_eq!("\u{10000}\u{10ffff}".escape_default(), "\\u{10000}\\u{10ffff}"); + assert_eq!( + "\u{10000}\u{10ffff}".escape_default(), + "\\u{10000}\\u{10ffff}" + ); assert_eq!("ab\u{200b}".escape_default(), "ab\\u{200b}"); assert_eq!("\u{10d4ea}\r".escape_default(), "\\u{10d4ea}\\r"); } @@ -1037,7 +1092,9 @@ fn test_total_ord() { #[test] fn test_iterator() { let s = "ศไทย中华Việt Nam"; - let v = ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m']; + let v = [ + 'ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm', + ]; let mut pos = 0; let it = s.chars(); @@ -1053,7 +1110,9 @@ fn test_iterator() { #[test] fn test_rev_iterator() { let s = "ศไทย中华Việt Nam"; - let v = ['m', 'a', 'N', ' ', 't', 'ệ','i','V','华','中','ย','ท','ไ','ศ']; + let v = [ + 'm', 'a', 'N', ' ', 't', 'ệ', 'i', 'V', '华', '中', 'ย', 'ท', 'ไ', 'ศ', + ]; let mut pos = 0; let it = s.chars().rev(); @@ -1092,7 +1151,7 @@ fn test_iterator_clone() { let s = "ศไทย中华Việt Nam"; let mut it = s.chars(); it.next(); - assert!(it.clone().zip(it).all(|(x,y)| x == y)); + assert!(it.clone().zip(it).all(|(x, y)| x == y)); } #[test] @@ -1107,9 +1166,8 @@ fn test_iterator_last() { fn test_bytesator() { let s = "ศไทย中华Việt Nam"; let v = [ - 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, - 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97, - 109 + 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142, + 86, 105, 225, 187, 135, 116, 32, 78, 97, 109, ]; let mut pos = 0; @@ -1123,9 +1181,8 @@ fn test_bytesator() { fn test_bytes_revator() { let s = "ศไทย中华Việt Nam"; let v = [ - 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, - 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97, - 109 + 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142, + 86, 105, 225, 187, 135, 116, 32, 78, 97, 109, ]; let mut pos = v.len(); @@ -1139,9 +1196,8 @@ fn test_bytes_revator() { fn test_bytesator_nth() { let s = "ศไทย中华Việt Nam"; let v = [ - 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, - 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97, - 109 + 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142, + 86, 105, 225, 187, 135, 116, 32, 78, 97, 109, ]; let mut b = s.bytes(); @@ -1170,7 +1226,9 @@ fn test_bytesator_last() { fn test_char_indicesator() { let s = "ศไทย中华Việt Nam"; let p = [0, 3, 6, 9, 12, 15, 18, 19, 20, 23, 24, 25, 26, 27]; - let v = ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m']; + let v = [ + 'ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm', + ]; let mut pos = 0; let it = s.char_indices(); 
@@ -1187,7 +1245,9 @@ fn test_char_indicesator() { fn test_char_indices_revator() { let s = "ศไทย中华Việt Nam"; let p = [27, 26, 25, 24, 23, 20, 19, 18, 15, 12, 9, 6, 3, 0]; - let v = ['m', 'a', 'N', ' ', 't', 'ệ','i','V','华','中','ย','ท','ไ','ศ']; + let v = [ + 'm', 'a', 'N', ' ', 't', 'ệ', 'i', 'V', '华', '中', 'ย', 'ท', 'ไ', 'ศ', + ]; let mut pos = 0; let it = s.char_indices().rev(); @@ -1213,17 +1273,29 @@ fn test_splitn_char_iterator() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.splitn(4, ' ').collect(); - assert_eq!(split, ["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]); + assert_eq!( + split, + ["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"] + ); let split: Vec<&str> = data.splitn(4, |c: char| c == ' ').collect(); - assert_eq!(split, ["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]); + assert_eq!( + split, + ["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"] + ); // Unicode let split: Vec<&str> = data.splitn(4, 'ä').collect(); - assert_eq!(split, ["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]); + assert_eq!( + split, + ["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"] + ); let split: Vec<&str> = data.splitn(4, |c: char| c == 'ä').collect(); - assert_eq!(split, ["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]); + assert_eq!( + split, + ["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"] + ); } #[test] @@ -1231,7 +1303,10 @@ fn test_split_char_iterator_no_trailing() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.split('\n').collect(); - assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb", ""]); + assert_eq!( + split, + ["", "Märy häd ä little lämb", "Little lämb", ""] + ); let split: Vec<&str> = data.split_terminator('\n').collect(); assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb"]); @@ -1242,13 +1317,26 @@ fn test_rsplit() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.rsplit(' ').collect(); - assert_eq!(split, ["lämb\n", "lämb\nLittle", "little", "ä", "häd", "\nMäry"]); + assert_eq!( + split, + [ + "lämb\n", + "lämb\nLittle", + "little", + "ä", + "häd", + "\nMäry" + ] + ); let split: Vec<&str> = data.rsplit("lämb").collect(); assert_eq!(split, ["\n", "\nLittle ", "\nMäry häd ä little "]); let split: Vec<&str> = data.rsplit(|c: char| c == 'ä').collect(); - assert_eq!(split, ["mb\n", "mb\nLittle l", " little l", "d ", "ry h", "\nM"]); + assert_eq!( + split, + ["mb\n", "mb\nLittle l", " little l", "d ", "ry h", "\nM"] + ); } #[test] @@ -1269,18 +1357,27 @@ fn test_rsplitn() { fn test_split_whitespace() { let data = "\n \tMäry häd\tä little lämb\nLittle lämb\n"; let words: Vec<&str> = data.split_whitespace().collect(); - assert_eq!(words, ["Märy", "häd", "ä", "little", "lämb", "Little", "lämb"]) + assert_eq!( + words, + ["Märy", "häd", "ä", "little", "lämb", "Little", "lämb"] + ) } #[test] fn test_lines() { let data = "\nMäry häd ä little lämb\n\r\nLittle lämb\n"; let lines: Vec<&str> = data.lines().collect(); - assert_eq!(lines, ["", "Märy häd ä little lämb", "", "Little lämb"]); + assert_eq!( + lines, + ["", "Märy häd ä little lämb", "", "Little lämb"] + ); let data = "\r\nMäry häd ä little lämb\n\nLittle lämb"; // no trailing \n let lines: Vec<&str> = data.lines().collect(); - assert_eq!(lines, ["", "Märy häd ä little lämb", "", "Little lämb"]); + assert_eq!( + lines, + ["", "Märy häd ä little lämb", "", "Little lämb"] + ); } #[test] @@ -1294,15 +1391,19 @@ fn test_splitator() { t("::hello::there", "::", &["", "hello", 
"there"]); t("hello::there::", "::", &["hello", "there", ""]); t("::hello::there::", "::", &["", "hello", "there", ""]); - t("ประเทศไทย中华Việt Nam", "中华", &["ประเทศไทย", "Việt Nam"]); + t( + "ประเทศไทย中华Việt Nam", + "中华", + &["ประเทศไทย", "Việt Nam"], + ); t("zzXXXzzYYYzz", "zz", &["", "XXX", "YYY", ""]); t("zzXXXzYYYz", "XXX", &["zz", "zYYYz"]); t(".XXX.YYY.", ".", &["", "XXX", "YYY", ""]); t("", ".", &[""]); - t("zz", "zz", &["",""]); + t("zz", "zz", &["", ""]); t("ok", "z", &["ok"]); - t("zzz", "zz", &["","z"]); - t("zzzzz", "zz", &["","","z"]); + t("zzz", "zz", &["", "z"]); + t("zzzzz", "zz", &["", "", "z"]); } #[test] @@ -1368,7 +1469,7 @@ fn test_bool_from_str() { fn check_contains_all_substrings(s: &str) { assert!(s.contains("")); for i in 0..s.len() { - for j in i+1..=s.len() { + for j in i + 1..=s.len() { assert!(s.contains(&s[i..j])); } } @@ -1389,7 +1490,6 @@ fn strslice_issue_16878() { assert!(!"00abc01234567890123456789abc".contains("bcabc")); } - #[test] fn test_strslice_contains() { let x = "There are moments, Jeeves, when one asks oneself, 'Do trousers matter?'"; @@ -1402,20 +1502,32 @@ fn test_rsplitn_char_iterator() { let mut split: Vec<&str> = data.rsplitn(4, ' ').collect(); split.reverse(); - assert_eq!(split, ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]); + assert_eq!( + split, + ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"] + ); let mut split: Vec<&str> = data.rsplitn(4, |c: char| c == ' ').collect(); split.reverse(); - assert_eq!(split, ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]); + assert_eq!( + split, + ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"] + ); // Unicode let mut split: Vec<&str> = data.rsplitn(4, 'ä').collect(); split.reverse(); - assert_eq!(split, ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]); + assert_eq!( + split, + ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"] + ); let mut split: Vec<&str> = data.rsplitn(4, |c: char| c == 'ä').collect(); split.reverse(); - assert_eq!(split, ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]); + assert_eq!( + split, + ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"] + ); } #[test] @@ -1423,33 +1535,85 @@ fn test_split_char_iterator() { let data = "\nMäry häd ä little lämb\nLittle lämb\n"; let split: Vec<&str> = data.split(' ').collect(); - assert_eq!( split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); + assert_eq!( + split, + [ + "\nMäry", + "häd", + "ä", + "little", + "lämb\nLittle", + "lämb\n" + ] + ); let mut rsplit: Vec<&str> = data.split(' ').rev().collect(); rsplit.reverse(); - assert_eq!(rsplit, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); + assert_eq!( + rsplit, + [ + "\nMäry", + "häd", + "ä", + "little", + "lämb\nLittle", + "lämb\n" + ] + ); let split: Vec<&str> = data.split(|c: char| c == ' ').collect(); - assert_eq!( split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); + assert_eq!( + split, + [ + "\nMäry", + "häd", + "ä", + "little", + "lämb\nLittle", + "lämb\n" + ] + ); let mut rsplit: Vec<&str> = data.split(|c: char| c == ' ').rev().collect(); rsplit.reverse(); - assert_eq!(rsplit, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]); + assert_eq!( + rsplit, + [ + "\nMäry", + "häd", + "ä", + "little", + "lämb\nLittle", + "lämb\n" + ] + ); // Unicode let split: Vec<&str> = data.split('ä').collect(); - assert_eq!( split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); + assert_eq!( + split, + ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"] + ); let mut rsplit: Vec<&str> = 
data.split('ä').rev().collect(); rsplit.reverse(); - assert_eq!(rsplit, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); + assert_eq!( + rsplit, + ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"] + ); let split: Vec<&str> = data.split(|c: char| c == 'ä').collect(); - assert_eq!( split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); + assert_eq!( + split, + ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"] + ); let mut rsplit: Vec<&str> = data.split(|c: char| c == 'ä').rev().collect(); rsplit.reverse(); - assert_eq!(rsplit, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]); + assert_eq!( + rsplit, + ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"] + ); } #[test] @@ -1458,7 +1622,10 @@ fn test_rev_split_char_iterator_no_trailing() { let mut split: Vec<&str> = data.split('\n').rev().collect(); split.reverse(); - assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb", ""]); + assert_eq!( + split, + ["", "Märy häd ä little lämb", "Little lämb", ""] + ); let mut split: Vec<&str> = data.split_terminator('\n').rev().collect(); split.reverse(); @@ -1467,8 +1634,10 @@ fn test_rev_split_char_iterator_no_trailing() { #[test] fn test_utf16_code_units() { - assert_eq!("é\u{1F4A9}".encode_utf16().collect::>(), - [0xE9, 0xD83D, 0xDCA9]) + assert_eq!( + "é\u{1F4A9}".encode_utf16().collect::>(), + [0xE9, 0xD83D, 0xDCA9] + ) } #[test] @@ -1504,26 +1673,40 @@ fn contains_weird_cases() { #[test] fn trim_ws() { - assert_eq!(" \t a \t ".trim_start_matches(|c: char| c.is_whitespace()), - "a \t "); - assert_eq!(" \t a \t ".trim_end_matches(|c: char| c.is_whitespace()), - " \t a"); - assert_eq!(" \t a \t ".trim_start_matches(|c: char| c.is_whitespace()), - "a \t "); - assert_eq!(" \t a \t ".trim_end_matches(|c: char| c.is_whitespace()), - " \t a"); - assert_eq!(" \t a \t ".trim_matches(|c: char| c.is_whitespace()), - "a"); - assert_eq!(" \t \t ".trim_start_matches(|c: char| c.is_whitespace()), - ""); - assert_eq!(" \t \t ".trim_end_matches(|c: char| c.is_whitespace()), - ""); - assert_eq!(" \t \t ".trim_start_matches(|c: char| c.is_whitespace()), - ""); - assert_eq!(" \t \t ".trim_end_matches(|c: char| c.is_whitespace()), - ""); - assert_eq!(" \t \t ".trim_matches(|c: char| c.is_whitespace()), - ""); + assert_eq!( + " \t a \t ".trim_start_matches(|c: char| c.is_whitespace()), + "a \t " + ); + assert_eq!( + " \t a \t ".trim_end_matches(|c: char| c.is_whitespace()), + " \t a" + ); + assert_eq!( + " \t a \t ".trim_start_matches(|c: char| c.is_whitespace()), + "a \t " + ); + assert_eq!( + " \t a \t ".trim_end_matches(|c: char| c.is_whitespace()), + " \t a" + ); + assert_eq!(" \t a \t ".trim_matches(|c: char| c.is_whitespace()), "a"); + assert_eq!( + " \t \t ".trim_start_matches(|c: char| c.is_whitespace()), + "" + ); + assert_eq!( + " \t \t ".trim_end_matches(|c: char| c.is_whitespace()), + "" + ); + assert_eq!( + " \t \t ".trim_start_matches(|c: char| c.is_whitespace()), + "" + ); + assert_eq!( + " \t \t ".trim_end_matches(|c: char| c.is_whitespace()), + "" + ); + assert_eq!(" \t \t ".trim_matches(|c: char| c.is_whitespace()), ""); } #[test] @@ -1600,8 +1783,8 @@ fn test_repeat() { mod pattern { use std::str::pattern::Pattern; - use std::str::pattern::{Searcher, ReverseSearcher}; - use std::str::pattern::SearchStep::{self, Match, Reject, Done}; + use std::str::pattern::SearchStep::{self, Done, Match, Reject}; + use std::str::pattern::{ReverseSearcher, Searcher}; macro_rules! 
make_test { ($name:ident, $p:expr, $h:expr, [$($e:expr,)*]) => { @@ -1621,14 +1804,22 @@ mod pattern { } } - fn cmp_search_to_vec<'a, P: Pattern<'a>>(rev: bool, pat: P, haystack: &'a str, - right: Vec) - where P::Searcher: ReverseSearcher<'a> + fn cmp_search_to_vec<'a, P: Pattern<'a>>( + rev: bool, + pat: P, + haystack: &'a str, + right: Vec, + ) where + P::Searcher: ReverseSearcher<'a>, { let mut searcher = pat.into_searcher(haystack); let mut v = vec![]; loop { - match if !rev {searcher.next()} else {searcher.next_back()} { + match if !rev { + searcher.next() + } else { + searcher.next_back() + } { Match(a, b) => v.push(Match(a, b)), Reject(a, b) => v.push(Reject(a, b)), Done => break, @@ -1643,8 +1834,7 @@ mod pattern { for (i, e) in right.iter().enumerate() { match *e { - Match(a, b) | Reject(a, b) - if a <= b && a == first_index => { + Match(a, b) | Reject(a, b) if a <= b && a == first_index => { first_index = b; } _ => { @@ -1665,76 +1855,106 @@ mod pattern { assert_eq!(v, right); } - make_test!(str_searcher_ascii_haystack, "bb", "abbcbbd", [ - Reject(0, 1), - Match (1, 3), - Reject(3, 4), - Match (4, 6), - Reject(6, 7), - ]); - make_test!(str_searcher_ascii_haystack_seq, "bb", "abbcbbbbd", [ - Reject(0, 1), - Match (1, 3), - Reject(3, 4), - Match (4, 6), - Match (6, 8), - Reject(8, 9), - ]); - make_test!(str_searcher_empty_needle_ascii_haystack, "", "abbcbbd", [ - Match (0, 0), - Reject(0, 1), - Match (1, 1), - Reject(1, 2), - Match (2, 2), - Reject(2, 3), - Match (3, 3), - Reject(3, 4), - Match (4, 4), - Reject(4, 5), - Match (5, 5), - Reject(5, 6), - Match (6, 6), - Reject(6, 7), - Match (7, 7), - ]); - make_test!(str_searcher_multibyte_haystack, " ", "├──", [ - Reject(0, 3), - Reject(3, 6), - Reject(6, 9), - ]); - make_test!(str_searcher_empty_needle_multibyte_haystack, "", "├──", [ - Match (0, 0), - Reject(0, 3), - Match (3, 3), - Reject(3, 6), - Match (6, 6), - Reject(6, 9), - Match (9, 9), - ]); - make_test!(str_searcher_empty_needle_empty_haystack, "", "", [ - Match(0, 0), - ]); - make_test!(str_searcher_nonempty_needle_empty_haystack, "├", "", [ - ]); - make_test!(char_searcher_ascii_haystack, 'b', "abbcbbd", [ - Reject(0, 1), - Match (1, 2), - Match (2, 3), - Reject(3, 4), - Match (4, 5), - Match (5, 6), - Reject(6, 7), - ]); - make_test!(char_searcher_multibyte_haystack, ' ', "├──", [ - Reject(0, 3), - Reject(3, 6), - Reject(6, 9), - ]); - make_test!(char_searcher_short_haystack, '\u{1F4A9}', "* \t", [ - Reject(0, 1), - Reject(1, 2), - Reject(2, 3), - ]); + make_test!( + str_searcher_ascii_haystack, + "bb", + "abbcbbd", + [ + Reject(0, 1), + Match(1, 3), + Reject(3, 4), + Match(4, 6), + Reject(6, 7), + ] + ); + make_test!( + str_searcher_ascii_haystack_seq, + "bb", + "abbcbbbbd", + [ + Reject(0, 1), + Match(1, 3), + Reject(3, 4), + Match(4, 6), + Match(6, 8), + Reject(8, 9), + ] + ); + make_test!( + str_searcher_empty_needle_ascii_haystack, + "", + "abbcbbd", + [ + Match(0, 0), + Reject(0, 1), + Match(1, 1), + Reject(1, 2), + Match(2, 2), + Reject(2, 3), + Match(3, 3), + Reject(3, 4), + Match(4, 4), + Reject(4, 5), + Match(5, 5), + Reject(5, 6), + Match(6, 6), + Reject(6, 7), + Match(7, 7), + ] + ); + make_test!( + str_searcher_multibyte_haystack, + " ", + "├──", + [Reject(0, 3), Reject(3, 6), Reject(6, 9),] + ); + make_test!( + str_searcher_empty_needle_multibyte_haystack, + "", + "├──", + [ + Match(0, 0), + Reject(0, 3), + Match(3, 3), + Reject(3, 6), + Match(6, 6), + Reject(6, 9), + Match(9, 9), + ] + ); + make_test!( + str_searcher_empty_needle_empty_haystack, + "", + 
"", + [Match(0, 0),] + ); + make_test!(str_searcher_nonempty_needle_empty_haystack, "├", "", []); + make_test!( + char_searcher_ascii_haystack, + 'b', + "abbcbbd", + [ + Reject(0, 1), + Match(1, 2), + Match(2, 3), + Reject(3, 4), + Match(4, 5), + Match(5, 6), + Reject(6, 7), + ] + ); + make_test!( + char_searcher_multibyte_haystack, + ' ', + "├──", + [Reject(0, 3), Reject(3, 6), Reject(6, 9),] + ); + make_test!( + char_searcher_short_haystack, + '\u{1F4A9}', + "* \t", + [Reject(0, 1), Reject(1, 2), Reject(2, 3),] + ); } @@ -1832,7 +2052,10 @@ generate_iterator_test! { fn different_str_pattern_forwarding_lifetimes() { use std::str::pattern::Pattern; - fn foo<'a, P>(p: P) where for<'b> &'b P: Pattern<'a> { + fn foo<'a, P>(p: P) + where + for<'b> &'b P: Pattern<'a>, + { for _ in 0..3 { "asdf".find(&p); } diff --git a/src/liballoc/tests/string.rs b/src/liballoc/tests/string.rs index 8a5bfca8b7db5..8011a8de31d7b 100644 --- a/src/liballoc/tests/string.rs +++ b/src/liballoc/tests/string.rs @@ -1,9 +1,12 @@ use std::borrow::Cow; use std::collections::CollectionAllocErr::*; use std::mem::size_of; -use std::{usize, isize}; +use std::{isize, usize}; -pub trait IntoCow<'a, B: ?Sized> where B: ToOwned { +pub trait IntoCow<'a, B: ?Sized> +where + B: ToOwned, +{ fn into_cow(self) -> Cow<'a, B>; } @@ -43,8 +46,10 @@ fn test_from_utf8() { assert_eq!(String::from_utf8(xs).unwrap(), String::from("hello")); let xs = "ศไทย中华Việt Nam".as_bytes().to_vec(); - assert_eq!(String::from_utf8(xs).unwrap(), - String::from("ศไทย中华Việt Nam")); + assert_eq!( + String::from_utf8(xs).unwrap(), + String::from("ศไทย中华Việt Nam") + ); let xs = b"hello\xFF".to_vec(); let err = String::from_utf8(xs).unwrap_err(); @@ -62,60 +67,89 @@ fn test_from_utf8_lossy() { assert_eq!(String::from_utf8_lossy(xs), ys); let xs = b"Hello\xC2 There\xFF Goodbye"; - assert_eq!(String::from_utf8_lossy(xs), - String::from("Hello\u{FFFD} There\u{FFFD} Goodbye").into_cow()); + assert_eq!( + String::from_utf8_lossy(xs), + String::from("Hello\u{FFFD} There\u{FFFD} Goodbye").into_cow() + ); let xs = b"Hello\xC0\x80 There\xE6\x83 Goodbye"; - assert_eq!(String::from_utf8_lossy(xs), - String::from("Hello\u{FFFD}\u{FFFD} There\u{FFFD} Goodbye").into_cow()); + assert_eq!( + String::from_utf8_lossy(xs), + String::from("Hello\u{FFFD}\u{FFFD} There\u{FFFD} Goodbye").into_cow() + ); let xs = b"\xF5foo\xF5\x80bar"; - assert_eq!(String::from_utf8_lossy(xs), - String::from("\u{FFFD}foo\u{FFFD}\u{FFFD}bar").into_cow()); + assert_eq!( + String::from_utf8_lossy(xs), + String::from("\u{FFFD}foo\u{FFFD}\u{FFFD}bar").into_cow() + ); let xs = b"\xF1foo\xF1\x80bar\xF1\x80\x80baz"; - assert_eq!(String::from_utf8_lossy(xs), - String::from("\u{FFFD}foo\u{FFFD}bar\u{FFFD}baz").into_cow()); + assert_eq!( + String::from_utf8_lossy(xs), + String::from("\u{FFFD}foo\u{FFFD}bar\u{FFFD}baz").into_cow() + ); let xs = b"\xF4foo\xF4\x80bar\xF4\xBFbaz"; - assert_eq!(String::from_utf8_lossy(xs), - String::from("\u{FFFD}foo\u{FFFD}bar\u{FFFD}\u{FFFD}baz").into_cow()); + assert_eq!( + String::from_utf8_lossy(xs), + String::from("\u{FFFD}foo\u{FFFD}bar\u{FFFD}\u{FFFD}baz").into_cow() + ); let xs = b"\xF0\x80\x80\x80foo\xF0\x90\x80\x80bar"; - assert_eq!(String::from_utf8_lossy(xs), - String::from("\u{FFFD}\u{FFFD}\u{FFFD}\u{FFFD}foo\u{10000}bar").into_cow()); + assert_eq!( + String::from_utf8_lossy(xs), + String::from("\u{FFFD}\u{FFFD}\u{FFFD}\u{FFFD}foo\u{10000}bar").into_cow() + ); // surrogates let xs = b"\xED\xA0\x80foo\xED\xBF\xBFbar"; - assert_eq!(String::from_utf8_lossy(xs), - 
String::from("\u{FFFD}\u{FFFD}\u{FFFD}foo\u{FFFD}\u{FFFD}\u{FFFD}bar").into_cow()); + assert_eq!( + String::from_utf8_lossy(xs), + String::from("\u{FFFD}\u{FFFD}\u{FFFD}foo\u{FFFD}\u{FFFD}\u{FFFD}bar").into_cow() + ); } #[test] fn test_from_utf16() { - let pairs = [(String::from("𐍅𐌿𐌻𐍆𐌹𐌻𐌰\n"), - vec![0xd800, 0xdf45, 0xd800, 0xdf3f, 0xd800, 0xdf3b, 0xd800, 0xdf46, 0xd800, - 0xdf39, 0xd800, 0xdf3b, 0xd800, 0xdf30, 0x000a]), - - (String::from("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n"), - vec![0xd801, 0xdc12, 0xd801, 0xdc49, 0xd801, 0xdc2e, 0xd801, 0xdc40, 0xd801, - 0xdc32, 0xd801, 0xdc4b, 0x0020, 0xd801, 0xdc0f, 0xd801, 0xdc32, 0xd801, - 0xdc4d, 0x000a]), - - (String::from("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n"), - vec![0xd800, 0xdf00, 0xd800, 0xdf16, 0xd800, 0xdf0b, 0xd800, 0xdf04, 0xd800, - 0xdf11, 0xd800, 0xdf09, 0x00b7, 0xd800, 0xdf0c, 0xd800, 0xdf04, 0xd800, - 0xdf15, 0xd800, 0xdf04, 0xd800, 0xdf0b, 0xd800, 0xdf09, 0xd800, 0xdf11, - 0x000a]), - - (String::from("𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n"), - vec![0xd801, 0xdc8b, 0xd801, 0xdc98, 0xd801, 0xdc88, 0xd801, 0xdc91, 0xd801, - 0xdc9b, 0xd801, 0xdc92, 0x0020, 0xd801, 0xdc95, 0xd801, 0xdc93, 0x0020, - 0xd801, 0xdc88, 0xd801, 0xdc9a, 0xd801, 0xdc8d, 0x0020, 0xd801, 0xdc8f, - 0xd801, 0xdc9c, 0xd801, 0xdc92, 0xd801, 0xdc96, 0xd801, 0xdc86, 0x0020, - 0xd801, 0xdc95, 0xd801, 0xdc86, 0x000a]), - // Issue #12318, even-numbered non-BMP planes - (String::from("\u{20000}"), vec![0xD840, 0xDC00])]; + let pairs = [ + ( + String::from("𐍅𐌿𐌻𐍆𐌹𐌻𐌰\n"), + vec![ + 0xd800, 0xdf45, 0xd800, 0xdf3f, 0xd800, 0xdf3b, 0xd800, 0xdf46, 0xd800, 0xdf39, + 0xd800, 0xdf3b, 0xd800, 0xdf30, 0x000a, + ], + ), + ( + String::from("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n"), + vec![ + 0xd801, 0xdc12, 0xd801, 0xdc49, 0xd801, 0xdc2e, 0xd801, 0xdc40, 0xd801, 0xdc32, + 0xd801, 0xdc4b, 0x0020, 0xd801, 0xdc0f, 0xd801, 0xdc32, 0xd801, 0xdc4d, 0x000a, + ], + ), + ( + String::from("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n"), + vec![ + 0xd800, 0xdf00, 0xd800, 0xdf16, 0xd800, 0xdf0b, 0xd800, 0xdf04, 0xd800, 0xdf11, + 0xd800, 0xdf09, 0x00b7, 0xd800, 0xdf0c, 0xd800, 0xdf04, 0xd800, 0xdf15, 0xd800, + 0xdf04, 0xd800, 0xdf0b, 0xd800, 0xdf09, 0xd800, 0xdf11, 0x000a, + ], + ), + ( + String::from( + "𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n", + ), + vec![ + 0xd801, 0xdc8b, 0xd801, 0xdc98, 0xd801, 0xdc88, 0xd801, 0xdc91, 0xd801, 0xdc9b, + 0xd801, 0xdc92, 0x0020, 0xd801, 0xdc95, 0xd801, 0xdc93, 0x0020, 0xd801, 0xdc88, + 0xd801, 0xdc9a, 0xd801, 0xdc8d, 0x0020, 0xd801, 0xdc8f, 0xd801, 0xdc9c, 0xd801, + 0xdc92, 0xd801, 0xdc96, 0xd801, 0xdc86, 0x0020, 0xd801, 0xdc95, 0xd801, 0xdc86, + 0x000a, + ], + ), + // Issue #12318, even-numbered non-BMP planes + (String::from("\u{20000}"), vec![0xD840, 0xDC00]), + ]; for p in &pairs { let (s, u) = (*p).clone(); @@ -152,19 +186,27 @@ fn test_utf16_invalid() { fn test_from_utf16_lossy() { // completely positive cases tested above. 
// lead + eof - assert_eq!(String::from_utf16_lossy(&[0xD800]), - String::from("\u{FFFD}")); + assert_eq!( + String::from_utf16_lossy(&[0xD800]), + String::from("\u{FFFD}") + ); // lead + lead - assert_eq!(String::from_utf16_lossy(&[0xD800, 0xD800]), - String::from("\u{FFFD}\u{FFFD}")); + assert_eq!( + String::from_utf16_lossy(&[0xD800, 0xD800]), + String::from("\u{FFFD}\u{FFFD}") + ); // isolated trail - assert_eq!(String::from_utf16_lossy(&[0x0061, 0xDC00]), - String::from("a\u{FFFD}")); + assert_eq!( + String::from_utf16_lossy(&[0x0061, 0xDC00]), + String::from("a\u{FFFD}") + ); // general - assert_eq!(String::from_utf16_lossy(&[0xD800, 0xd801, 0xdc8b, 0xD800]), - String::from("\u{FFFD}𐒋\u{FFFD}")); + assert_eq!( + String::from_utf16_lossy(&[0xD800, 0xd801, 0xdc8b, 0xD800]), + String::from("\u{FFFD}𐒋\u{FFFD}") + ); } #[test] @@ -524,7 +566,6 @@ fn test_reserve_exact() { #[test] fn test_try_reserve() { - // These are the interesting cases: // * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM) // * > isize::MAX should always fail @@ -558,23 +599,30 @@ fn test_try_reserve() { if guards_against_isize { // Check isize::MAX + 1 does count as overflow if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_CAP + 1) { - } else { panic!("isize::MAX + 1 should trigger an overflow!") } + } else { + panic!("isize::MAX + 1 should trigger an overflow!") + } // Check usize::MAX does count as overflow if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } else { // Check isize::MAX + 1 is an OOM if let Err(AllocErr) = empty_string.try_reserve(MAX_CAP + 1) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } // Check usize::MAX is an OOM if let Err(AllocErr) = empty_string.try_reserve(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an OOM!") } + } else { + panic!("usize::MAX should trigger an OOM!") + } } } - { // Same basic idea, but with non-zero len let mut ten_bytes: String = String::from("0123456789"); @@ -587,21 +635,25 @@ fn test_try_reserve() { } if guards_against_isize { if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) { - } else { panic!("isize::MAX + 1 should trigger an overflow!"); } + } else { + panic!("isize::MAX + 1 should trigger an overflow!"); + } } else { if let Err(AllocErr) = ten_bytes.try_reserve(MAX_CAP - 9) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } } // Should always overflow in the add-to-len if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } - } #[test] fn test_try_reserve_exact() { - // This is exactly the same as test_try_reserve with the method changed. // See that test for comments. 
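
The UTF-16 tests above pin down the lossy-decoding rule: every unpaired surrogate becomes U+FFFD while valid surrogate pairs decode normally. A minimal standalone check of that behavior (not part of this patch, just an illustration using the same inputs as the tests):

```rust
fn main() {
    // An isolated lead surrogate at the end of input becomes one U+FFFD.
    assert_eq!(String::from_utf16_lossy(&[0xD800]), "\u{FFFD}");

    // The pair (0xD801, 0xDC8B) is valid and decodes to U+1048B; the stray
    // lead surrogates on either side each become a replacement character.
    assert_eq!(
        String::from_utf16_lossy(&[0xD800, 0xD801, 0xDC8B, 0xD800]),
        "\u{FFFD}\u{1048B}\u{FFFD}"
    );
}
```
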
@@ -622,20 +674,27 @@ fn test_try_reserve_exact() { if guards_against_isize { if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_CAP + 1) { - } else { panic!("isize::MAX + 1 should trigger an overflow!") } + } else { + panic!("isize::MAX + 1 should trigger an overflow!") + } if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } else { if let Err(AllocErr) = empty_string.try_reserve_exact(MAX_CAP + 1) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } if let Err(AllocErr) = empty_string.try_reserve_exact(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an OOM!") } + } else { + panic!("usize::MAX should trigger an OOM!") + } } } - { let mut ten_bytes: String = String::from("0123456789"); @@ -647,13 +706,18 @@ fn test_try_reserve_exact() { } if guards_against_isize { if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) { - } else { panic!("isize::MAX + 1 should trigger an overflow!"); } + } else { + panic!("isize::MAX + 1 should trigger an overflow!"); + } } else { if let Err(AllocErr) = ten_bytes.try_reserve_exact(MAX_CAP - 9) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } } if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } - } diff --git a/src/liballoc/tests/vec.rs b/src/liballoc/tests/vec.rs index 0fdcf34c783a8..a4103b42a8823 100644 --- a/src/liballoc/tests/vec.rs +++ b/src/liballoc/tests/vec.rs @@ -1,8 +1,8 @@ use std::borrow::Cow; +use std::collections::CollectionAllocErr::*; use std::mem::size_of; -use std::{usize, isize}; use std::vec::{Drain, IntoIter}; -use std::collections::CollectionAllocErr::*; +use std::{isize, usize}; struct DropCounter<'a> { count: &'a mut u32, @@ -32,8 +32,12 @@ fn test_double_drop() { x: Vec::new(), y: Vec::new(), }; - tv.x.push(DropCounter { count: &mut count_x }); - tv.y.push(DropCounter { count: &mut count_y }); + tv.x.push(DropCounter { + count: &mut count_x, + }); + tv.y.push(DropCounter { + count: &mut count_y, + }); // If Vec had a drop flag, here is where it would be zeroed. 
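
The `DropCounter` used by `test_double_drop` above exists to prove each element's destructor runs exactly once. A self-contained sketch of the same pattern, simplified to use `Cell` so a single counter can be shared by several elements (the tests themselves use one `&mut u32` counter per element):

```rust
use std::cell::Cell;

struct DropCounter<'a> {
    count: &'a Cell<u32>,
}

impl<'a> Drop for DropCounter<'a> {
    fn drop(&mut self) {
        // Each value bumps the shared counter exactly once when dropped.
        self.count.set(self.count.get() + 1);
    }
}

fn main() {
    let count = Cell::new(0);
    {
        let mut v = Vec::new();
        v.push(DropCounter { count: &count });
        v.push(DropCounter { count: &count });
    } // `v` is dropped here, dropping both elements exactly once.
    assert_eq!(count.get(), 2);
}
```
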
// Instead, it should rely on its internal state to prevent @@ -113,7 +117,9 @@ fn test_extend() { let mut count_x = 0; { let mut x = Vec::new(); - let y = vec![DropCounter { count: &mut count_x }]; + let y = vec![DropCounter { + count: &mut count_x, + }]; x.extend(y); } assert_eq!(count_x, 1); @@ -271,7 +277,12 @@ fn test_dedup_by() { assert_eq!(vec, ["foo", "bar", "baz", "bar"]); let mut vec = vec![("foo", 1), ("foo", 2), ("bar", 3), ("bar", 4), ("bar", 5)]; - vec.dedup_by(|a, b| a.0 == b.0 && { b.1 += a.1; true }); + vec.dedup_by(|a, b| { + a.0 == b.0 && { + b.1 += a.1; + true + } + }); assert_eq!(vec, [("foo", 3), ("bar", 12)]); } @@ -323,14 +334,22 @@ fn zero_sized_values() { #[test] fn test_partition() { - assert_eq!(vec![].into_iter().partition(|x: &i32| *x < 3), - (vec![], vec![])); - assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 4), - (vec![1, 2, 3], vec![])); - assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 2), - (vec![1], vec![2, 3])); - assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 0), - (vec![], vec![1, 2, 3])); + assert_eq!( + vec![].into_iter().partition(|x: &i32| *x < 3), + (vec![], vec![]) + ); + assert_eq!( + vec![1, 2, 3].into_iter().partition(|x| *x < 4), + (vec![1, 2, 3], vec![]) + ); + assert_eq!( + vec![1, 2, 3].into_iter().partition(|x| *x < 2), + (vec![1], vec![2, 3]) + ); + assert_eq!( + vec![1, 2, 3].into_iter().partition(|x| *x < 0), + (vec![], vec![1, 2, 3]) + ); } #[test] @@ -509,66 +528,59 @@ fn test_drain_out_of_bounds() { #[test] fn test_drain_range() { let mut v = vec![1, 2, 3, 4, 5]; - for _ in v.drain(4..) { - } + for _ in v.drain(4..) {} assert_eq!(v, &[1, 2, 3, 4]); let mut v: Vec<_> = (1..6).map(|x| x.to_string()).collect(); - for _ in v.drain(1..4) { - } + for _ in v.drain(1..4) {} assert_eq!(v, &[1.to_string(), 5.to_string()]); let mut v: Vec<_> = (1..6).map(|x| x.to_string()).collect(); - for _ in v.drain(1..4).rev() { - } + for _ in v.drain(1..4).rev() {} assert_eq!(v, &[1.to_string(), 5.to_string()]); let mut v: Vec<_> = vec![(); 5]; - for _ in v.drain(1..4).rev() { - } + for _ in v.drain(1..4).rev() {} assert_eq!(v, &[(), ()]); } #[test] fn test_drain_inclusive_range() { let mut v = vec!['a', 'b', 'c', 'd', 'e']; - for _ in v.drain(1..=3) { - } + for _ in v.drain(1..=3) {} assert_eq!(v, &['a', 'e']); let mut v: Vec<_> = (0..=5).map(|x| x.to_string()).collect(); - for _ in v.drain(1..=5) { - } + for _ in v.drain(1..=5) {} assert_eq!(v, &["0".to_string()]); let mut v: Vec = (0..=5).map(|x| x.to_string()).collect(); - for _ in v.drain(0..=5) { - } + for _ in v.drain(0..=5) {} assert_eq!(v, Vec::::new()); let mut v: Vec<_> = (0..=5).map(|x| x.to_string()).collect(); - for _ in v.drain(0..=3) { - } + for _ in v.drain(0..=3) {} assert_eq!(v, &["4".to_string(), "5".to_string()]); let mut v: Vec<_> = (0..=1).map(|x| x.to_string()).collect(); - for _ in v.drain(..=0) { - } + for _ in v.drain(..=0) {} assert_eq!(v, &["1".to_string()]); } #[test] fn test_drain_max_vec_size() { let mut v = Vec::<()>::with_capacity(usize::max_value()); - unsafe { v.set_len(usize::max_value()); } - for _ in v.drain(usize::max_value() - 1..) { + unsafe { + v.set_len(usize::max_value()); } + for _ in v.drain(usize::max_value() - 1..) 
{} assert_eq!(v.len(), usize::max_value() - 1); let mut v = Vec::<()>::with_capacity(usize::max_value()); - unsafe { v.set_len(usize::max_value()); } - for _ in v.drain(usize::max_value() - 1..=usize::max_value() - 1) { + unsafe { + v.set_len(usize::max_value()); } + for _ in v.drain(usize::max_value() - 1..=usize::max_value() - 1) {} assert_eq!(v.len(), usize::max_value() - 1); } @@ -734,7 +746,10 @@ fn test_cow_from() { fn test_from_cow() { let borrowed: &[_] = &["borrowed", "(slice)"]; let owned = vec!["owned", "(vec)"]; - assert_eq!(Vec::from(Cow::Borrowed(borrowed)), vec!["borrowed", "(slice)"]); + assert_eq!( + Vec::from(Cow::Borrowed(borrowed)), + vec!["borrowed", "(slice)"] + ); assert_eq!(Vec::from(Cow::Owned(owned)), vec!["owned", "(vec)"]); } @@ -864,35 +879,29 @@ fn drain_filter_true() { #[test] fn drain_filter_complex() { - - { // [+xxx++++++xxxxx++++x+x++] - let mut vec = vec![1, - 2, 4, 6, - 7, 9, 11, 13, 15, 17, - 18, 20, 22, 24, 26, - 27, 29, 31, 33, - 34, - 35, - 36, - 37, 39]; + { + // [+xxx++++++xxxxx++++x+x++] + let mut vec = vec![ + 1, 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, + 39, + ]; let removed = vec.drain_filter(|x| *x % 2 == 0).collect::>(); assert_eq!(removed.len(), 10); assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]); assert_eq!(vec.len(), 14); - assert_eq!(vec, vec![1, 7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]); + assert_eq!( + vec, + vec![1, 7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39] + ); } - { // [xxx++++++xxxxx++++x+x++] - let mut vec = vec![2, 4, 6, - 7, 9, 11, 13, 15, 17, - 18, 20, 22, 24, 26, - 27, 29, 31, 33, - 34, - 35, - 36, - 37, 39]; + { + // [xxx++++++xxxxx++++x+x++] + let mut vec = vec![ + 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39, + ]; let removed = vec.drain_filter(|x| *x % 2 == 0).collect::>(); assert_eq!(removed.len(), 10); @@ -902,14 +911,11 @@ fn drain_filter_complex() { assert_eq!(vec, vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]); } - { // [xxx++++++xxxxx++++x+x] - let mut vec = vec![2, 4, 6, - 7, 9, 11, 13, 15, 17, - 18, 20, 22, 24, 26, - 27, 29, 31, 33, - 34, - 35, - 36]; + { + // [xxx++++++xxxxx++++x+x] + let mut vec = vec![ + 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, + ]; let removed = vec.drain_filter(|x| *x % 2 == 0).collect::>(); assert_eq!(removed.len(), 10); @@ -919,9 +925,11 @@ fn drain_filter_complex() { assert_eq!(vec, vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35]); } - { // [xxxxxxxxxx+++++++++++] - let mut vec = vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20, - 1, 3, 5, 7, 9, 11, 13, 15, 17, 19]; + { + // [xxxxxxxxxx+++++++++++] + let mut vec = vec![ + 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, + ]; let removed = vec.drain_filter(|x| *x % 2 == 0).collect::>(); assert_eq!(removed.len(), 10); @@ -931,9 +939,11 @@ fn drain_filter_complex() { assert_eq!(vec, vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]); } - { // [+++++++++++xxxxxxxxxx] - let mut vec = vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19, - 2, 4, 6, 8, 10, 12, 14, 16, 18, 20]; + { + // [+++++++++++xxxxxxxxxx] + let mut vec = vec![ + 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, + ]; let removed = vec.drain_filter(|x| *x % 2 == 0).collect::>(); assert_eq!(removed.len(), 10); @@ -970,7 +980,6 @@ fn test_reserve_exact() { #[test] fn test_try_reserve() { - // These are the interesting cases: // * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM) // * > 
isize::MAX should always fail @@ -1004,23 +1013,30 @@ fn test_try_reserve() { if guards_against_isize { // Check isize::MAX + 1 does count as overflow if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP + 1) { - } else { panic!("isize::MAX + 1 should trigger an overflow!") } + } else { + panic!("isize::MAX + 1 should trigger an overflow!") + } // Check usize::MAX does count as overflow if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } else { // Check isize::MAX + 1 is an OOM if let Err(AllocErr) = empty_bytes.try_reserve(MAX_CAP + 1) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } // Check usize::MAX is an OOM if let Err(AllocErr) = empty_bytes.try_reserve(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an OOM!") } + } else { + panic!("usize::MAX should trigger an OOM!") + } } } - { // Same basic idea, but with non-zero len let mut ten_bytes: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; @@ -1033,33 +1049,42 @@ fn test_try_reserve() { } if guards_against_isize { if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) { - } else { panic!("isize::MAX + 1 should trigger an overflow!"); } + } else { + panic!("isize::MAX + 1 should trigger an overflow!"); + } } else { if let Err(AllocErr) = ten_bytes.try_reserve(MAX_CAP - 9) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } } // Should always overflow in the add-to-len if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } - { // Same basic idea, but with interesting type size let mut ten_u32s: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; - if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP/4 - 10) { + if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } - if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP/4 - 10) { + if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { - if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP/4 - 9) { - } else { panic!("isize::MAX + 1 should trigger an overflow!"); } + if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) { + } else { + panic!("isize::MAX + 1 should trigger an overflow!"); + } } else { - if let Err(AllocErr) = ten_u32s.try_reserve(MAX_CAP/4 - 9) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + if let Err(AllocErr) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) { + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } } // Should fail in the mul-by-size if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_USIZE - 20) { @@ -1067,12 +1092,10 @@ fn test_try_reserve() { panic!("usize::MAX should trigger an overflow!"); } } - } #[test] fn test_try_reserve_exact() { - // This is exactly the same as test_try_reserve with the method changed. // See that test for comments. 
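
The tests above exercise `try_reserve` with requests around `MAX_CAP` (the tests' name for `isize::MAX as usize`) to separate capacity overflow from allocator failure. At the time of this diff the API is gated behind `#![feature(try_reserve)]` (issue 48043, as the attributes in `vec.rs` show); the sketch below uses the later-stabilized surface, where the error is simply returned instead of aborting:

```rust
fn main() {
    let mut buf: Vec<u8> = Vec::new();

    // A request this large can never be satisfied: the needed capacity
    // exceeds the collection's limits, so we get an Err back instead of
    // an abort.
    assert!(buf.try_reserve(usize::MAX).is_err());

    // A reasonable reservation succeeds and guarantees room for at least
    // that many additional elements.
    buf.try_reserve(1024).expect("small reservation should succeed");
    assert!(buf.capacity() >= 1024);
}
```
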
@@ -1093,20 +1116,27 @@ fn test_try_reserve_exact() { if guards_against_isize { if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP + 1) { - } else { panic!("isize::MAX + 1 should trigger an overflow!") } + } else { + panic!("isize::MAX + 1 should trigger an overflow!") + } if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } else { if let Err(AllocErr) = empty_bytes.try_reserve_exact(MAX_CAP + 1) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } if let Err(AllocErr) = empty_bytes.try_reserve_exact(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an OOM!") } + } else { + panic!("usize::MAX should trigger an OOM!") + } } } - { let mut ten_bytes: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; @@ -1118,34 +1148,44 @@ fn test_try_reserve_exact() { } if guards_against_isize { if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) { - } else { panic!("isize::MAX + 1 should trigger an overflow!"); } + } else { + panic!("isize::MAX + 1 should trigger an overflow!"); + } } else { if let Err(AllocErr) = ten_bytes.try_reserve_exact(MAX_CAP - 9) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } } if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } - { let mut ten_u32s: Vec = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; - if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 10) { + if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } - if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 10) { + if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { - if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 9) { - } else { panic!("isize::MAX + 1 should trigger an overflow!"); } + if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) { + } else { + panic!("isize::MAX + 1 should trigger an overflow!"); + } } else { - if let Err(AllocErr) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 9) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + if let Err(AllocErr) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) { + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } } if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_USIZE - 20) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } - } diff --git a/src/liballoc/tests/vec_deque.rs b/src/liballoc/tests/vec_deque.rs index 76831ba65e3b8..5bd9c02785a79 100644 --- a/src/liballoc/tests/vec_deque.rs +++ b/src/liballoc/tests/vec_deque.rs @@ -1,9 +1,9 @@ +use std::collections::vec_deque::Drain; +use std::collections::CollectionAllocErr::*; use std::collections::VecDeque; use std::fmt::Debug; -use std::collections::vec_deque::{Drain}; -use std::collections::CollectionAllocErr::*; use std::mem::size_of; -use std::{usize, isize}; +use std::{isize, usize}; use self::Taggy::*; use self::Taggypar::*; @@ -148,10 +148,12 @@ fn test_param_taggy() { 
#[test] fn test_param_taggypar() { - test_parameterized::>(Onepar::(1), - Twopar::(1, 2), - Threepar::(1, 2, 3), - Twopar::(17, 42)); + test_parameterized::>( + Onepar::(1), + Twopar::(1, 2), + Threepar::(1, 2, 3), + Twopar::(17, 42), + ); } #[test] @@ -320,8 +322,10 @@ fn test_mut_rev_iter_wrap() { assert_eq!(d.pop_front(), Some(1)); d.push_back(4); - assert_eq!(d.iter_mut().rev().map(|x| *x).collect::>(), - vec![4, 3, 2]); + assert_eq!( + d.iter_mut().rev().map(|x| *x).collect::>(), + vec![4, 3, 2] + ); } #[test] @@ -372,7 +376,6 @@ fn test_mut_rev_iter() { #[test] fn test_into_iter() { - // Empty iter { let d: VecDeque = VecDeque::new(); @@ -431,7 +434,6 @@ fn test_into_iter() { #[test] fn test_drain() { - // Empty iter { let mut d: VecDeque = VecDeque::new(); @@ -654,8 +656,10 @@ fn test_show() { .iter() .cloned() .collect(); - assert_eq!(format!("{:?}", ringbuf), - "[\"just\", \"one\", \"test\", \"more\"]"); + assert_eq!( + format!("{:?}", ringbuf), + "[\"just\", \"one\", \"test\", \"more\"]" + ); } #[test] @@ -952,7 +956,6 @@ fn test_append_permutations() { // doesn't pop more values than are pushed for src_pop_back in 0..(src_push_back + src_push_front) { for src_pop_front in 0..(src_push_back + src_push_front - src_pop_back) { - let src = construct_vec_deque( src_push_back, src_pop_back, @@ -963,8 +966,8 @@ fn test_append_permutations() { for dst_push_back in 0..MAX { for dst_push_front in 0..MAX { for dst_pop_back in 0..(dst_push_back + dst_push_front) { - for dst_pop_front - in 0..(dst_push_back + dst_push_front - dst_pop_back) + for dst_pop_front in + 0..(dst_push_back + dst_push_front - dst_pop_back) { let mut dst = construct_vec_deque( dst_push_back, @@ -1010,8 +1013,12 @@ fn test_append_double_drop() { { let mut a = VecDeque::new(); let mut b = VecDeque::new(); - a.push_back(DropCounter { count: &mut count_a }); - b.push_back(DropCounter { count: &mut count_b }); + a.push_back(DropCounter { + count: &mut count_a, + }); + b.push_back(DropCounter { + count: &mut count_b, + }); a.append(&mut b); } @@ -1120,7 +1127,6 @@ fn test_reserve_exact_2() { #[test] fn test_try_reserve() { - // These are the interesting cases: // * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM) // * > isize::MAX should always fail @@ -1154,22 +1160,27 @@ fn test_try_reserve() { if guards_against_isize { // Check isize::MAX + 1 does count as overflow if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP + 1) { - } else { panic!("isize::MAX + 1 should trigger an overflow!") } + } else { + panic!("isize::MAX + 1 should trigger an overflow!") + } // Check usize::MAX does count as overflow if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } else { // Check isize::MAX is an OOM // VecDeque starts with capacity 7, always adds 1 to the capacity // and also rounds the number to next power of 2 so this is the // furthest we can go without triggering CapacityOverflow if let Err(AllocErr) = empty_bytes.try_reserve(MAX_CAP) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } } } - { // Same basic idea, but with non-zero len let mut ten_bytes: VecDeque = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect(); @@ -1182,33 +1193,42 @@ fn test_try_reserve() { } if guards_against_isize { if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) { - } else { 
panic!("isize::MAX + 1 should trigger an overflow!"); } + } else { + panic!("isize::MAX + 1 should trigger an overflow!"); + } } else { if let Err(AllocErr) = ten_bytes.try_reserve(MAX_CAP - 9) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } } // Should always overflow in the add-to-len if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } - { // Same basic idea, but with interesting type size let mut ten_u32s: VecDeque = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect(); - if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP/4 - 10) { + if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } - if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP/4 - 10) { + if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { - if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP/4 - 9) { - } else { panic!("isize::MAX + 1 should trigger an overflow!"); } + if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) { + } else { + panic!("isize::MAX + 1 should trigger an overflow!"); + } } else { - if let Err(AllocErr) = ten_u32s.try_reserve(MAX_CAP/4 - 9) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + if let Err(AllocErr) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) { + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } } // Should fail in the mul-by-size if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_USIZE - 20) { @@ -1216,12 +1236,10 @@ fn test_try_reserve() { panic!("usize::MAX should trigger an overflow!"); } } - } #[test] fn test_try_reserve_exact() { - // This is exactly the same as test_try_reserve with the method changed. // See that test for comments. 
@@ -1242,21 +1260,26 @@ fn test_try_reserve_exact() { if guards_against_isize { if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP + 1) { - } else { panic!("isize::MAX + 1 should trigger an overflow!") } + } else { + panic!("isize::MAX + 1 should trigger an overflow!") + } if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } else { // Check isize::MAX is an OOM // VecDeque starts with capacity 7, always adds 1 to the capacity // and also rounds the number to next power of 2 so this is the // furthest we can go without triggering CapacityOverflow if let Err(AllocErr) = empty_bytes.try_reserve_exact(MAX_CAP) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } } } - { let mut ten_bytes: VecDeque = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect(); @@ -1268,36 +1291,46 @@ fn test_try_reserve_exact() { } if guards_against_isize { if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) { - } else { panic!("isize::MAX + 1 should trigger an overflow!"); } + } else { + panic!("isize::MAX + 1 should trigger an overflow!"); + } } else { if let Err(AllocErr) = ten_bytes.try_reserve_exact(MAX_CAP - 9) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } } if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } - { let mut ten_u32s: VecDeque = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect(); - if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 10) { + if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } - if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 10) { + if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) { panic!("isize::MAX shouldn't trigger an overflow!"); } if guards_against_isize { - if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 9) { - } else { panic!("isize::MAX + 1 should trigger an overflow!"); } + if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) { + } else { + panic!("isize::MAX + 1 should trigger an overflow!"); + } } else { - if let Err(AllocErr) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 9) { - } else { panic!("isize::MAX + 1 should trigger an OOM!") } + if let Err(AllocErr) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) { + } else { + panic!("isize::MAX + 1 should trigger an OOM!") + } } if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_USIZE - 20) { - } else { panic!("usize::MAX should trigger an overflow!") } + } else { + panic!("usize::MAX should trigger an overflow!") + } } - } #[test] @@ -1399,9 +1432,8 @@ fn test_rotate_right_parts() { #[test] fn test_rotate_left_random() { let shifts = [ - 6, 1, 0, 11, 12, 1, 11, 7, 9, 3, 6, 1, - 4, 0, 5, 1, 3, 1, 12, 8, 3, 1, 11, 11, - 9, 4, 12, 3, 12, 9, 11, 1, 7, 9, 7, 2, + 6, 1, 0, 11, 12, 1, 11, 7, 9, 3, 6, 1, 4, 0, 5, 1, 3, 1, 12, 8, 3, 1, 11, 11, 9, 4, 12, 3, + 12, 9, 11, 1, 7, 9, 7, 2, ]; let n = 12; let mut v: VecDeque<_> = (0..n).collect(); @@ -1418,9 +1450,8 @@ fn test_rotate_left_random() { #[test] fn test_rotate_right_random() { let shifts = 
[ - 6, 1, 0, 11, 12, 1, 11, 7, 9, 3, 6, 1, - 4, 0, 5, 1, 3, 1, 12, 8, 3, 1, 11, 11, - 9, 4, 12, 3, 12, 9, 11, 1, 7, 9, 7, 2, + 6, 1, 0, 11, 12, 1, 11, 7, 9, 3, 6, 1, 4, 0, 5, 1, 3, 1, 12, 8, 3, 1, 11, 11, 9, 4, 12, 3, + 12, 9, 11, 1, 7, 9, 7, 2, ]; let n = 12; let mut v: VecDeque<_> = (0..n).collect(); diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs index b69c114ed4594..b008d6a236863 100644 --- a/src/liballoc/vec.rs +++ b/src/liballoc/vec.rs @@ -63,17 +63,17 @@ use core::intrinsics::{arith_offset, assume}; use core::iter::{FromIterator, FusedIterator, TrustedLen}; use core::marker::PhantomData; use core::mem; +use core::ops; use core::ops::Bound::{Excluded, Included, Unbounded}; use core::ops::{Index, IndexMut, RangeBounds}; -use core::ops; use core::ptr; use core::ptr::NonNull; use core::slice; -use collections::CollectionAllocErr; -use borrow::ToOwned; use borrow::Cow; +use borrow::ToOwned; use boxed::Box; +use collections::CollectionAllocErr; use raw_vec::RawVec; /// A contiguous growable array type, written `Vec` but pronounced 'vector'. @@ -517,7 +517,7 @@ impl Vec { /// } /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` - #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] + #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { self.buf.try_reserve(self.len, additional) } @@ -557,8 +557,8 @@ impl Vec { /// } /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` - #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] - pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] + pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { self.buf.try_reserve_exact(self.len, additional) } @@ -603,7 +603,7 @@ impl Vec { /// vec.shrink_to(0); /// assert!(vec.capacity() >= 3); /// ``` - #[unstable(feature = "shrink_to", reason = "new API", issue="56431")] + #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")] pub fn shrink_to(&mut self, min_capacity: usize) { self.buf.shrink_to_fit(cmp::max(self.len, min_capacity)); } @@ -915,7 +915,8 @@ impl Vec { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn retain(&mut self, mut f: F) - where F: FnMut(&T) -> bool + where + F: FnMut(&T) -> bool, { self.drain_filter(|x| !f(x)); } @@ -936,7 +937,11 @@ impl Vec { /// ``` #[stable(feature = "dedup_by", since = "1.16.0")] #[inline] - pub fn dedup_by_key(&mut self, mut key: F) where F: FnMut(&mut T) -> K, K: PartialEq { + pub fn dedup_by_key(&mut self, mut key: F) + where + F: FnMut(&mut T) -> K, + K: PartialEq, + { self.dedup_by(|a, b| key(a) == key(b)) } @@ -959,7 +964,10 @@ impl Vec { /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]); /// ``` #[stable(feature = "dedup_by", since = "1.16.0")] - pub fn dedup_by(&mut self, same_bucket: F) where F: FnMut(&mut T, &mut T) -> bool { + pub fn dedup_by(&mut self, same_bucket: F) + where + F: FnMut(&mut T, &mut T) -> bool, + { let len = { let (dedup, _) = self.as_mut_slice().partition_dedup_by(same_bucket); dedup.len() @@ -1082,7 +1090,8 @@ impl Vec { /// ``` #[stable(feature = "drain", since = "1.6.0")] pub fn drain(&mut self, range: R) -> Drain - where R: RangeBounds + where + R: RangeBounds, { // Memory safety // @@ -1098,12 +1107,12 @@ impl Vec { let start = 
match range.start_bound() { Included(&n) => n, Excluded(&n) => n + 1, - Unbounded => 0, + Unbounded => 0, }; let end = match range.end_bound() { Included(&n) => n + 1, Excluded(&n) => n, - Unbounded => len, + Unbounded => len, }; assert!(start <= end); assert!(end <= len); @@ -1113,8 +1122,7 @@ impl Vec { self.set_len(start); // Use the borrow in the IterMut to indicate borrowing behavior of the // whole Drain iterator (like &mut T). - let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), - end - start); + let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start); Drain { tail_start: end, tail_len: len - end, @@ -1207,9 +1215,7 @@ impl Vec { self.set_len(at); other.set_len(other_len); - ptr::copy_nonoverlapping(self.as_ptr().add(at), - other.as_mut_ptr(), - other.len()); + ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len()); } other } @@ -1245,7 +1251,8 @@ impl Vec { /// [`Clone`]: ../../std/clone/trait.Clone.html #[stable(feature = "vec_resize_with", since = "1.33.0")] pub fn resize_with(&mut self, new_len: usize, f: F) - where F: FnMut() -> T + where + F: FnMut() -> T, { let len = self.len(); if new_len > len { @@ -1366,20 +1373,32 @@ trait ExtendWith { struct ExtendElement(T); impl ExtendWith for ExtendElement { - fn next(&mut self) -> T { self.0.clone() } - fn last(self) -> T { self.0 } + fn next(&mut self) -> T { + self.0.clone() + } + fn last(self) -> T { + self.0 + } } struct ExtendDefault; impl ExtendWith for ExtendDefault { - fn next(&mut self) -> T { Default::default() } - fn last(self) -> T { Default::default() } + fn next(&mut self) -> T { + Default::default() + } + fn last(self) -> T { + Default::default() + } } struct ExtendFunc(F); impl T> ExtendWith for ExtendFunc { - fn next(&mut self) -> T { (self.0)() } - fn last(mut self) -> T { (self.0)() } + fn next(&mut self) -> T { + (self.0)() + } + fn last(mut self) -> T { + (self.0)() + } } impl Vec { @@ -1426,7 +1445,10 @@ struct SetLenOnDrop<'a> { impl<'a> SetLenOnDrop<'a> { #[inline] fn new(len: &'a mut usize) -> Self { - SetLenOnDrop { local_len: *len, len: len } + SetLenOnDrop { + local_len: *len, + len: len, + } } #[inline] @@ -1480,7 +1502,11 @@ impl Vec { /// /// assert_eq!(vec, vec![2, 3, 1]); /// ``` - #[unstable(feature = "vec_remove_item", reason = "recently added", issue = "40062")] + #[unstable( + feature = "vec_remove_item", + reason = "recently added", + issue = "40062" + )] pub fn remove_item(&mut self, item: &T) -> Option { let pos = self.iter().position(|x| *x == *item)?; Some(self.remove(pos)) @@ -1517,7 +1543,7 @@ impl SpecFromElem for u8 { return Vec { buf: RawVec::with_capacity_zeroed(n), len: n, - } + }; } unsafe { let mut v = Vec::with_capacity(n); @@ -1535,7 +1561,7 @@ impl SpecFromElem for T { return Vec { buf: RawVec::with_capacity_zeroed(n), len: n, - } + }; } let mut v = Vec::with_capacity(n); v.extend_with(n, ExtendElement(elem)); @@ -1556,7 +1582,7 @@ macro_rules! 
impl_is_zero { $is_zero(*self) } } - } + }; } impl_is_zero!(i8, |x| x == 0); @@ -1591,7 +1617,6 @@ unsafe impl IsZero for *mut T { } } - //////////////////////////////////////////////////////////////////////////////// // Common trait implementations for Vec //////////////////////////////////////////////////////////////////////////////// @@ -1627,8 +1652,8 @@ impl Hash for Vec { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented( - message="vector indices are of type `usize` or ranges of `usize`", - label="vector indices are of type `usize` or ranges of `usize`", + message = "vector indices are of type `usize` or ranges of `usize`", + label = "vector indices are of type `usize` or ranges of `usize`" )] impl Index for Vec where @@ -1644,8 +1669,8 @@ where #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented( - message="vector indices are of type `usize` or ranges of `usize`", - label="vector indices are of type `usize` or ranges of `usize`", + message = "vector indices are of type `usize` or ranges of `usize`", + label = "vector indices are of type `usize` or ranges of `usize`" )] impl IndexMut for Vec where @@ -1765,7 +1790,8 @@ trait SpecExtend { } impl SpecExtend for Vec - where I: Iterator, +where + I: Iterator, { default fn from_iter(mut iterator: I) -> Self { // Unroll the first iteration, as the vector is going to be @@ -1795,7 +1821,8 @@ impl SpecExtend for Vec } impl SpecExtend for Vec - where I: TrustedLen, +where + I: TrustedLen, { default fn from_iter(iterator: I) -> Self { let mut vector = Vec::new(); @@ -1807,9 +1834,12 @@ impl SpecExtend for Vec // This is the case for a TrustedLen iterator. let (low, high) = iterator.size_hint(); if let Some(high_value) = high { - debug_assert_eq!(low, high_value, - "TrustedLen iterator's size hint is not exact: {:?}", - (low, high)); + debug_assert_eq!( + low, + high_value, + "TrustedLen iterator's size hint is not exact: {:?}", + (low, high) + ); } if let Some(additional) = high { self.reserve(additional); @@ -1836,9 +1866,7 @@ impl SpecExtend> for Vec { // has not been advanced at all. 
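
The `SpecExtend::from_iter` code above unrolls the first iteration (so empty iterators never allocate) and sizes the vector from the iterator's `size_hint`, with the `TrustedLen` specialization trusting the upper bound outright. A stable-Rust sketch of that reservation strategy, without the specialization machinery; `collect_with_hint` is an illustrative name, not an API from this patch:

```rust
fn collect_with_hint<I: Iterator>(mut iter: I) -> Vec<I::Item> {
    match iter.next() {
        // No first element: return without allocating at all.
        None => Vec::new(),
        Some(first) => {
            // Reserve for the lower bound plus the element already taken.
            let (lower, _upper) = iter.size_hint();
            let mut v = Vec::with_capacity(lower.saturating_add(1));
            v.push(first);
            v.extend(iter);
            v
        }
    }
}

fn main() {
    let v = collect_with_hint((0..100).map(|x| x * 2));
    assert_eq!(v.len(), 100);
    assert_eq!(v[99], 198);
}
```
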
if iterator.buf.as_ptr() as *const _ == iterator.ptr { unsafe { - let vec = Vec::from_raw_parts(iterator.buf.as_ptr(), - iterator.len(), - iterator.cap); + let vec = Vec::from_raw_parts(iterator.buf.as_ptr(), iterator.len(), iterator.cap); mem::forget(iterator); vec } @@ -1858,8 +1886,9 @@ impl SpecExtend> for Vec { } impl<'a, T: 'a, I> SpecExtend<&'a T, I> for Vec - where I: Iterator, - T: Clone, +where + I: Iterator, + T: Clone, { default fn from_iter(iterator: I) -> Self { SpecExtend::from_iter(iterator.cloned()) @@ -1871,7 +1900,8 @@ impl<'a, T: 'a, I> SpecExtend<&'a T, I> for Vec } impl<'a, T: 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec - where T: Copy, +where + T: Copy, { fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) { let slice = iterator.as_slice(); @@ -1945,7 +1975,9 @@ impl Vec { #[inline] #[stable(feature = "vec_splice", since = "1.21.0")] pub fn splice(&mut self, range: R, replace_with: I) -> Splice - where R: RangeBounds, I: IntoIterator + where + R: RangeBounds, + I: IntoIterator, { Splice { drain: self.drain(range), @@ -2000,12 +2032,15 @@ impl Vec { /// ``` #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] pub fn drain_filter(&mut self, filter: F) -> DrainFilter - where F: FnMut(&mut T) -> bool, + where + F: FnMut(&mut T) -> bool, { let old_len = self.len(); // Guard against us getting leaked (leak amplification) - unsafe { self.set_len(0); } + unsafe { + self.set_len(0); + } DrainFilter { vec: self, @@ -2036,13 +2071,20 @@ macro_rules! __impl_slice_eq1 { }; ($Lhs: ty, $Rhs: ty, $Bound: ident) => { #[stable(feature = "rust1", since = "1.0.0")] - impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs where A: PartialEq { + impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs + where + A: PartialEq, + { #[inline] - fn eq(&self, other: &$Rhs) -> bool { self[..] == other[..] } + fn eq(&self, other: &$Rhs) -> bool { + self[..] == other[..] + } #[inline] - fn ne(&self, other: &$Rhs) -> bool { self[..] != other[..] } + fn ne(&self, other: &$Rhs) -> bool { + self[..] != other[..] + } } - } + }; } __impl_slice_eq1! { Vec, Vec } @@ -2173,7 +2215,10 @@ impl<'a, T: Clone> From<&'a mut [T]> for Vec { } #[stable(feature = "vec_from_cow_slice", since = "1.14.0")] -impl<'a, T> From> for Vec where [T]: ToOwned> { +impl<'a, T> From> for Vec +where + [T]: ToOwned>, +{ fn from(s: Cow<'a, [T]>) -> Vec { s.into_owned() } @@ -2230,7 +2275,10 @@ impl<'a, T: Clone> From<&'a Vec> for Cow<'a, [T]> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> FromIterator for Cow<'a, [T]> where T: Clone { +impl<'a, T> FromIterator for Cow<'a, [T]> +where + T: Clone, +{ fn from_iter>(it: I) -> Cow<'a, [T]> { Cow::Owned(FromIterator::from_iter(it)) } @@ -2259,9 +2307,7 @@ pub struct IntoIter { #[stable(feature = "vec_intoiter_debug", since = "1.13.0")] impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("IntoIter") - .field(&self.as_slice()) - .finish() + f.debug_tuple("IntoIter").field(&self.as_slice()).finish() } } @@ -2279,9 +2325,7 @@ impl IntoIter { /// ``` #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] pub fn as_slice(&self) -> &[T] { - unsafe { - slice::from_raw_parts(self.ptr, self.len()) - } + unsafe { slice::from_raw_parts(self.ptr, self.len()) } } /// Returns the remaining items of this iterator as a mutable slice. 
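
`IntoIter::as_slice` and `as_mut_slice`, whose bodies are condensed above, expose the not-yet-yielded tail of the iterator (stable since 1.15, per the attributes shown). A short usage sketch:

```rust
fn main() {
    let mut into_iter = vec!['a', 'b', 'c'].into_iter();

    // All remaining elements are visible as a slice.
    assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);

    let _ = into_iter.next();
    assert_eq!(into_iter.as_slice(), &['b', 'c']);

    // The mutable view can rewrite elements that have not been yielded yet.
    into_iter.as_mut_slice()[0] = 'z';
    assert_eq!(into_iter.next(), Some('z'));
}
```
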
@@ -2299,9 +2343,7 @@ impl IntoIter { /// ``` #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] pub fn as_mut_slice(&mut self) -> &mut [T] { - unsafe { - slice::from_raw_parts_mut(self.ptr as *mut T, self.len()) - } + unsafe { slice::from_raw_parts_mut(self.ptr as *mut T, self.len()) } } } @@ -2429,9 +2471,7 @@ pub struct Drain<'a, T: 'a> { #[stable(feature = "collection_debug", since = "1.17.0")] impl<'a, T: 'a + fmt::Debug> fmt::Debug for Drain<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("Drain") - .field(&self.iter.as_slice()) - .finish() + f.debug_tuple("Drain").field(&self.iter.as_slice()).finish() } } @@ -2446,7 +2486,9 @@ impl<'a, T> Iterator for Drain<'a, T> { #[inline] fn next(&mut self) -> Option { - self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) }) + self.iter + .next() + .map(|elt| unsafe { ptr::read(elt as *const _) }) } fn size_hint(&self) -> (usize, Option) { @@ -2458,7 +2500,9 @@ impl<'a, T> Iterator for Drain<'a, T> { impl<'a, T> DoubleEndedIterator for Drain<'a, T> { #[inline] fn next_back(&mut self) -> Option { - self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) }) + self.iter + .next_back() + .map(|elt| unsafe { ptr::read(elt as *const _) }) } } @@ -2485,7 +2529,6 @@ impl<'a, T> Drop for Drain<'a, T> { } } - #[stable(feature = "drain", since = "1.6.0")] impl<'a, T> ExactSizeIterator for Drain<'a, T> { fn is_empty(&self) -> bool { @@ -2533,7 +2576,6 @@ impl<'a, I: Iterator> DoubleEndedIterator for Splice<'a, I> { #[stable(feature = "vec_splice", since = "1.21.0")] impl<'a, I: Iterator> ExactSizeIterator for Splice<'a, I> {} - #[stable(feature = "vec_splice", since = "1.21.0")] impl<'a, I: Iterator> Drop for Splice<'a, I> { fn drop(&mut self) { @@ -2542,27 +2584,31 @@ impl<'a, I: Iterator> Drop for Splice<'a, I> { unsafe { if self.drain.tail_len == 0 { self.drain.vec.as_mut().extend(self.replace_with.by_ref()); - return + return; } // First fill the range left by drain(). if !self.drain.fill(&mut self.replace_with) { - return + return; } // There may be more elements. Use the lower bound as an estimate. // FIXME: Is the upper bound a better guess? Or something else? let (lower_bound, _upper_bound) = self.replace_with.size_hint(); - if lower_bound > 0 { + if lower_bound > 0 { self.drain.move_tail(lower_bound); if !self.drain.fill(&mut self.replace_with) { - return + return; } } // Collect any remaining elements. // This is a zero-length vector which does not allocate if `lower_bound` was exact. - let mut collected = self.replace_with.by_ref().collect::>().into_iter(); + let mut collected = self + .replace_with + .by_ref() + .collect::>() + .into_iter(); // Now we have an exact count. if collected.len() > 0 { self.drain.move_tail(collected.len()); @@ -2581,20 +2627,19 @@ impl<'a, T> Drain<'a, T> { /// that have been moved out. /// Fill that range as much as possible with new elements from the `replace_with` iterator. /// Return whether we filled the entire range. (`replace_with.next()` didn’t return `None`.) 
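
The `Splice` drop logic reformatted above is what lets `Vec::splice` lazily replace a drained range with an iterator of arbitrary length. Its observable behavior, shown with the stable API (splice has been stable since 1.21, per the attributes in this hunk):

```rust
fn main() {
    let mut v = vec![1, 2, 3, 4, 5];

    // Replace the middle of the vector; the removed elements come back
    // out of the returned Splice iterator.
    let removed: Vec<i32> = v.splice(1..4, [10, 20].iter().cloned()).collect();

    assert_eq!(removed, [2, 3, 4]);
    // The replacement iterator was shorter than the drained range, so the
    // tail simply shifts left.
    assert_eq!(v, [1, 10, 20, 5]);
}
```
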
- unsafe fn fill>(&mut self, replace_with: &mut I) -> bool { + unsafe fn fill>(&mut self, replace_with: &mut I) -> bool { let vec = self.vec.as_mut(); let range_start = vec.len; let range_end = self.tail_start; - let range_slice = slice::from_raw_parts_mut( - vec.as_mut_ptr().add(range_start), - range_end - range_start); + let range_slice = + slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start); for place in range_slice { if let Some(new_item) = replace_with.next() { ptr::write(place, new_item); vec.len += 1; } else { - return false + return false; } } true @@ -2618,7 +2663,8 @@ impl<'a, T> Drain<'a, T> { #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] #[derive(Debug)] pub struct DrainFilter<'a, T: 'a, F> - where F: FnMut(&mut T) -> bool, +where + F: FnMut(&mut T) -> bool, { vec: &'a mut Vec, idx: usize, @@ -2629,7 +2675,8 @@ pub struct DrainFilter<'a, T: 'a, F> #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] impl<'a, T, F> Iterator for DrainFilter<'a, T, F> - where F: FnMut(&mut T) -> bool, +where + F: FnMut(&mut T) -> bool, { type Item = T; @@ -2663,7 +2710,8 @@ impl<'a, T, F> Iterator for DrainFilter<'a, T, F> #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] impl<'a, T, F> Drop for DrainFilter<'a, T, F> - where F: FnMut(&mut T) -> bool, +where + F: FnMut(&mut T) -> bool, { fn drop(&mut self) { self.for_each(drop); diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index b40e905620de9..47ac203a244ab 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -8,18 +8,18 @@ //! This crate implements `TypedArena`, a simple arena that can only hold //! objects of a single type. -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", - test(no_crate_inject, attr(deny(warnings))))] - +#![doc( + html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/", + test(no_crate_inject, attr(deny(warnings))) +)] #![feature(alloc)] #![feature(core_intrinsics)] #![feature(dropck_eyepatch)] #![feature(nll)] #![feature(raw_vec_internals)] #![cfg_attr(test, feature(test))] - #![allow(deprecated)] extern crate alloc; @@ -129,8 +129,7 @@ impl TypedArena { unsafe { if mem::size_of::() == 0 { self.ptr - .set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) - as *mut T); + .set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T); let ptr = mem::align_of::() as *mut T; // Don't drop the object. This `write` is equivalent to `forget`. ptr::write(ptr, object); @@ -218,7 +217,7 @@ impl TypedArena { self.clear_last_chunk(&mut last_chunk); let len = chunks_borrow.len(); // If `T` is ZST, code below has no effect. 
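
The `DrainFilter` iterator defined in the `vec.rs` hunk above is still unstable here (feature `drain_filter`, issue 43244). A sketch that emulates its observable effect with stable `Vec` methods, matching what the `drain_filter_*` tests assert: removed elements are yielded in order, kept elements stay in place. `drain_evens` is an illustrative helper, not part of the patch:

```rust
fn drain_evens(v: &mut Vec<i32>) -> Vec<i32> {
    let mut removed = Vec::new();
    let mut i = 0;
    while i < v.len() {
        if v[i] % 2 == 0 {
            // O(n) per removal; the real DrainFilter does this in one pass.
            removed.push(v.remove(i));
        } else {
            i += 1;
        }
    }
    removed
}

fn main() {
    let mut v = vec![1, 2, 4, 6, 7, 9];
    let removed = drain_evens(&mut v);
    assert_eq!(removed, [2, 4, 6]);
    assert_eq!(v, [1, 7, 9]);
}
```
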
- for mut chunk in chunks_borrow.drain(..len-1) { + for mut chunk in chunks_borrow.drain(..len - 1) { let cap = chunk.storage.cap(); chunk.destroy(cap); } @@ -302,7 +301,10 @@ impl DroplessArena { pub fn in_arena(&self, ptr: *const T) -> bool { let ptr = ptr as *const u8 as *mut u8; - self.chunks.borrow().iter().any(|chunk| chunk.start() <= ptr && ptr < chunk.end()) + self.chunks + .borrow() + .iter() + .any(|chunk| chunk.start() <= ptr && ptr < chunk.end()) } #[inline] @@ -359,9 +361,8 @@ impl DroplessArena { let ptr = self.ptr.get(); // Set the pointer past ourselves - self.ptr.set( - intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8, - ); + self.ptr + .set(intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8); slice::from_raw_parts_mut(ptr, bytes) } } @@ -370,9 +371,7 @@ impl DroplessArena { pub fn alloc(&self, object: T) -> &mut T { assert!(!mem::needs_drop::()); - let mem = self.alloc_raw( - mem::size_of::(), - mem::align_of::()) as *mut _ as *mut T; + let mem = self.alloc_raw(mem::size_of::(), mem::align_of::()) as *mut _ as *mut T; unsafe { // Write into uninitialized memory. @@ -397,9 +396,8 @@ impl DroplessArena { assert!(mem::size_of::() != 0); assert!(!slice.is_empty()); - let mem = self.alloc_raw( - slice.len() * mem::size_of::(), - mem::align_of::()) as *mut _ as *mut T; + let mem = self.alloc_raw(slice.len() * mem::size_of::(), mem::align_of::()) as *mut _ + as *mut T; unsafe { let arena_slice = slice::from_raw_parts_mut(mem, slice.len()); diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs index 66a3094d77d01..423960e30a1be 100644 --- a/src/libcore/alloc.rs +++ b/src/libcore/alloc.rs @@ -5,9 +5,9 @@ use cmp; use fmt; use mem; -use usize; -use ptr::{self, NonNull}; use num::NonZeroUsize; +use ptr::{self, NonNull}; +use usize; /// Represents the combination of a starting address and /// a total capacity of the returned block. @@ -86,9 +86,7 @@ impl Layout { return Err(LayoutErr { private: () }); } - unsafe { - Ok(Layout::from_size_align_unchecked(size, align)) - } + unsafe { Ok(Layout::from_size_align_unchecked(size, align)) } } /// Creates a layout, bypassing all checks. @@ -100,18 +98,25 @@ impl Layout { #[stable(feature = "alloc_layout", since = "1.28.0")] #[inline] pub unsafe fn from_size_align_unchecked(size: usize, align: usize) -> Self { - Layout { size_: size, align_: NonZeroUsize::new_unchecked(align) } + Layout { + size_: size, + align_: NonZeroUsize::new_unchecked(align), + } } /// The minimum size in bytes for a memory block of this layout. #[stable(feature = "alloc_layout", since = "1.28.0")] #[inline] - pub fn size(&self) -> usize { self.size_ } + pub fn size(&self) -> usize { + self.size_ + } /// The minimum byte alignment for a memory block of this layout. #[stable(feature = "alloc_layout", since = "1.28.0")] #[inline] - pub fn align(&self) -> usize { self.align_.get() } + pub fn align(&self) -> usize { + self.align_.get() + } /// Constructs a `Layout` suitable for holding a value of type `T`. #[stable(feature = "alloc_layout", since = "1.28.0")] @@ -123,9 +128,7 @@ impl Layout { // result use the unchecked constructor here to avoid inserting code // that panics if it isn't optimized well enough. 
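
`Layout::from_size_align`, whose validation path is touched above, rejects non-power-of-two alignments and overflowing sizes; the checked constructor is stable since 1.28 per the attributes shown. A quick illustration:

```rust
use std::alloc::Layout;

fn main() {
    // A valid request: size 13, alignment 8 (a power of two).
    let layout = Layout::from_size_align(13, 8).unwrap();
    assert_eq!((layout.size(), layout.align()), (13, 8));

    // Alignment must be a power of two, otherwise construction fails.
    assert!(Layout::from_size_align(13, 3).is_err());

    // Layout::new::<T>() is the shorthand for a single value of T.
    let of_u32 = Layout::new::<u32>();
    assert_eq!((of_u32.size(), of_u32.align()), (4, 4));
}
```
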
debug_assert!(Layout::from_size_align(size, align).is_ok()); - unsafe { - Layout::from_size_align_unchecked(size, align) - } + unsafe { Layout::from_size_align_unchecked(size, align) } } /// Produces layout describing a record that could be used to @@ -137,9 +140,7 @@ impl Layout { let (size, align) = (mem::size_of_val(t), mem::align_of_val(t)); // See rationale in `new` for why this us using an unsafe variant below debug_assert!(Layout::from_size_align(size, align).is_ok()); - unsafe { - Layout::from_size_align_unchecked(size, align) - } + unsafe { Layout::from_size_align_unchecked(size, align) } } /// Creates a layout describing the record that can hold a value @@ -203,8 +204,7 @@ impl Layout { // size and padding overflow in the above manner should cause // the allocator to yield an error anyway.) - let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) - & !align.wrapping_sub(1); + let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1); len_rounded_up.wrapping_sub(len) } @@ -219,7 +219,9 @@ impl Layout { #[inline] pub fn pad_to_align(&self) -> Result { let pad = self.padding_needed_for(self.align()); - let new_size = self.size().checked_add(pad) + let new_size = self + .size() + .checked_add(pad) .ok_or(LayoutErr { private: () })?; Layout::from_size_align(new_size, self.align()) @@ -236,15 +238,21 @@ impl Layout { #[unstable(feature = "alloc_layout_extra", issue = "55724")] #[inline] pub fn repeat(&self, n: usize) -> Result<(Self, usize), LayoutErr> { - let padded_size = self.size().checked_add(self.padding_needed_for(self.align())) + let padded_size = self + .size() + .checked_add(self.padding_needed_for(self.align())) .ok_or(LayoutErr { private: () })?; - let alloc_size = padded_size.checked_mul(n) + let alloc_size = padded_size + .checked_mul(n) .ok_or(LayoutErr { private: () })?; unsafe { // self.align is already known to be valid and alloc_size has been // padded already. 
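
The bit trick in `padding_needed_for` above rounds `len` up to the next multiple of a power-of-two `align` and returns the difference. A small standalone check of that formula against the obvious modular computation (both helper names here are only for the illustration):

```rust
fn padding_needed_for(len: usize, align: usize) -> usize {
    // Same expression as in the Layout method above.
    let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
    len_rounded_up.wrapping_sub(len)
}

fn naive_padding(len: usize, align: usize) -> usize {
    (align - len % align) % align
}

fn main() {
    for &align in &[1usize, 2, 4, 8, 16] {
        for len in 0..100 {
            assert_eq!(padding_needed_for(len, align), naive_padding(len, align));
        }
    }
    // e.g. a 13-byte field followed by an 8-aligned field needs 3 bytes of padding.
    assert_eq!(padding_needed_for(13, 8), 3);
}
```
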
- Ok((Layout::from_size_align_unchecked(alloc_size, self.align()), padded_size)) + Ok(( + Layout::from_size_align_unchecked(alloc_size, self.align()), + padded_size, + )) } } @@ -268,9 +276,12 @@ impl Layout { let new_align = cmp::max(self.align(), next.align()); let pad = self.padding_needed_for(next.align()); - let offset = self.size().checked_add(pad) + let offset = self + .size() + .checked_add(pad) .ok_or(LayoutErr { private: () })?; - let new_size = offset.checked_add(next.size()) + let new_size = offset + .checked_add(next.size()) .ok_or(LayoutErr { private: () })?; let layout = Layout::from_size_align(new_size, new_align)?; @@ -292,7 +303,10 @@ impl Layout { #[unstable(feature = "alloc_layout_extra", issue = "55724")] #[inline] pub fn repeat_packed(&self, n: usize) -> Result { - let size = self.size().checked_mul(n).ok_or(LayoutErr { private: () })?; + let size = self + .size() + .checked_mul(n) + .ok_or(LayoutErr { private: () })?; Layout::from_size_align(size, self.align()) } @@ -305,7 +319,9 @@ impl Layout { #[unstable(feature = "alloc_layout_extra", issue = "55724")] #[inline] pub fn extend_packed(&self, next: Self) -> Result { - let new_size = self.size().checked_add(next.size()) + let new_size = self + .size() + .checked_add(next.size()) .ok_or(LayoutErr { private: () })?; let layout = Layout::from_size_align(new_size, self.align())?; Ok(layout) @@ -317,12 +333,10 @@ impl Layout { #[unstable(feature = "alloc_layout_extra", issue = "55724")] #[inline] pub fn array(n: usize) -> Result { - Layout::new::() - .repeat(n) - .map(|(k, offs)| { - debug_assert!(offs == mem::size_of::()); - k - }) + Layout::new::().repeat(n).map(|(k, offs)| { + debug_assert!(offs == mem::size_of::()); + k + }) } } @@ -332,7 +346,7 @@ impl Layout { #[stable(feature = "alloc_layout", since = "1.28.0")] #[derive(Clone, PartialEq, Eq, Debug)] pub struct LayoutErr { - private: () + private: (), } // (we need this for downstream impl of trait Error) @@ -568,11 +582,7 @@ pub unsafe trait GlobalAlloc { let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let new_ptr = self.alloc(new_layout); if !new_ptr.is_null() { - ptr::copy_nonoverlapping( - ptr, - new_ptr, - cmp::min(layout.size(), new_size), - ); + ptr::copy_nonoverlapping(ptr, new_ptr, cmp::min(layout.size(), new_size)); self.dealloc(ptr, layout); } new_ptr @@ -660,7 +670,6 @@ pub unsafe trait GlobalAlloc { /// the future. #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe trait Alloc { - // (Note: some existing allocators have unspecified but well-defined // behavior in response to a zero size allocation request ; // e.g., in C, `malloc` of 0 will either return a null pointer or a @@ -817,10 +826,12 @@ pub unsafe trait Alloc { /// rather than directly invoking `panic!` or similar. 
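
The default `GlobalAlloc::realloc` reformatted above falls back to allocate–copy–free, copying `min(old, new)` bytes. Roughly the same sequence written against the stable free functions in `std::alloc`; `grow` is an illustrative helper with error handling reduced to an assert, not an API from this patch:

```rust
use std::alloc::{alloc, dealloc, Layout};
use std::ptr;

unsafe fn grow(ptr: *mut u8, old_layout: Layout, new_size: usize) -> *mut u8 {
    let new_layout = Layout::from_size_align(new_size, old_layout.align()).unwrap();
    let new_ptr = alloc(new_layout);
    assert!(!new_ptr.is_null(), "allocation failed");
    // Copy only the bytes that fit in both the old and the new block.
    ptr::copy_nonoverlapping(ptr, new_ptr, old_layout.size().min(new_size));
    dealloc(ptr, old_layout);
    new_ptr
}

fn main() {
    unsafe {
        let layout = Layout::from_size_align(4, 1).unwrap();
        let p = alloc(layout);
        assert!(!p.is_null());
        ptr::copy_nonoverlapping(b"abcd".as_ptr(), p, 4);

        // Grow from 4 to 8 bytes; the original contents are preserved.
        let q = grow(p, layout, 8);
        assert_eq!(std::slice::from_raw_parts(q, 4), b"abcd");

        dealloc(q, Layout::from_size_align(8, 1).unwrap());
    }
}
```
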
/// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn realloc(&mut self, - ptr: NonNull, - layout: Layout, - new_size: usize) -> Result, AllocErr> { + unsafe fn realloc( + &mut self, + ptr: NonNull, + layout: Layout, + new_size: usize, + ) -> Result, AllocErr> { let old_size = layout.size(); if new_size >= old_size { @@ -837,9 +848,7 @@ pub unsafe trait Alloc { let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let result = self.alloc(new_layout); if let Ok(new_ptr) = result { - ptr::copy_nonoverlapping(ptr.as_ptr(), - new_ptr.as_ptr(), - cmp::min(old_size, new_size)); + ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), cmp::min(old_size, new_size)); self.dealloc(ptr, layout); } result @@ -915,10 +924,12 @@ pub unsafe trait Alloc { /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn realloc_excess(&mut self, - ptr: NonNull, - layout: Layout, - new_size: usize) -> Result { + unsafe fn realloc_excess( + &mut self, + ptr: NonNull, + layout: Layout, + new_size: usize, + ) -> Result { let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let usable_size = self.usable_size(&new_layout); self.realloc(ptr, layout, new_size) @@ -960,10 +971,12 @@ pub unsafe trait Alloc { /// function; clients are expected either to be able to recover from /// `grow_in_place` failures without aborting, or to fall back on /// another reallocation method before resorting to an abort. - unsafe fn grow_in_place(&mut self, - ptr: NonNull, - layout: Layout, - new_size: usize) -> Result<(), CannotReallocInPlace> { + unsafe fn grow_in_place( + &mut self, + ptr: NonNull, + layout: Layout, + new_size: usize, + ) -> Result<(), CannotReallocInPlace> { let _ = ptr; // this default implementation doesn't care about the actual address. debug_assert!(new_size >= layout.size()); let (_l, u) = self.usable_size(&layout); @@ -1015,10 +1028,12 @@ pub unsafe trait Alloc { /// function; clients are expected either to be able to recover from /// `shrink_in_place` failures without aborting, or to fall back /// on another reallocation method before resorting to an abort. - unsafe fn shrink_in_place(&mut self, - ptr: NonNull, - layout: Layout, - new_size: usize) -> Result<(), CannotReallocInPlace> { + unsafe fn shrink_in_place( + &mut self, + ptr: NonNull, + layout: Layout, + new_size: usize, + ) -> Result<(), CannotReallocInPlace> { let _ = ptr; // this default implementation doesn't care about the actual address. debug_assert!(new_size <= layout.size()); let (l, _u) = self.usable_size(&layout); @@ -1031,7 +1046,6 @@ pub unsafe trait Alloc { } } - // == COMMON USAGE PATTERNS == // alloc_one, dealloc_one, alloc_array, realloc_array. dealloc_array @@ -1065,7 +1079,8 @@ pub unsafe trait Alloc { /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html fn alloc_one(&mut self) -> Result, AllocErr> - where Self: Sized + where + Self: Sized, { let k = Layout::new::(); if k.size() > 0 { @@ -1093,7 +1108,8 @@ pub unsafe trait Alloc { /// /// * the layout of `T` must *fit* that block of memory. 
unsafe fn dealloc_one(&mut self, ptr: NonNull) - where Self: Sized + where + Self: Sized, { let k = Layout::new::(); if k.size() > 0 { @@ -1134,14 +1150,13 @@ pub unsafe trait Alloc { /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html fn alloc_array(&mut self, n: usize) -> Result, AllocErr> - where Self: Sized + where + Self: Sized, { match Layout::array::(n) { - Ok(ref layout) if layout.size() > 0 => { - unsafe { - self.alloc(layout.clone()).map(|p| p.cast()) - } - } + Ok(ref layout) if layout.size() > 0 => unsafe { + self.alloc(layout.clone()).map(|p| p.cast()) + }, _ => Err(AllocErr), } } @@ -1180,20 +1195,22 @@ pub unsafe trait Alloc { /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn realloc_array(&mut self, - ptr: NonNull, - n_old: usize, - n_new: usize) -> Result, AllocErr> - where Self: Sized + unsafe fn realloc_array( + &mut self, + ptr: NonNull, + n_old: usize, + n_new: usize, + ) -> Result, AllocErr> + where + Self: Sized, { match (Layout::array::(n_old), Layout::array::(n_new)) { (Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => { debug_assert!(k_old.align() == k_new.align()); - self.realloc(ptr.cast(), k_old.clone(), k_new.size()).map(NonNull::cast) - } - _ => { - Err(AllocErr) + self.realloc(ptr.cast(), k_old.clone(), k_new.size()) + .map(NonNull::cast) } + _ => Err(AllocErr), } } @@ -1218,15 +1235,12 @@ pub unsafe trait Alloc { /// /// Always returns `Err` on arithmetic overflow. unsafe fn dealloc_array(&mut self, ptr: NonNull, n: usize) -> Result<(), AllocErr> - where Self: Sized + where + Self: Sized, { match Layout::array::(n) { - Ok(ref k) if k.size() > 0 => { - Ok(self.dealloc(ptr.cast(), k.clone())) - } - _ => { - Err(AllocErr) - } + Ok(ref k) if k.size() > 0 => Ok(self.dealloc(ptr.cast(), k.clone())), + _ => Err(AllocErr), } } } diff --git a/src/libcore/any.rs b/src/libcore/any.rs index 6a863ff369a87..e8e8c4c082a97 100644 --- a/src/libcore/any.rs +++ b/src/libcore/any.rs @@ -94,15 +94,19 @@ pub trait Any: 'static { /// assert_eq!(is_string(&"cookie monster".to_string()), true); /// } /// ``` - #[unstable(feature = "get_type_id", - reason = "this method will likely be replaced by an associated static", - issue = "27745")] + #[unstable( + feature = "get_type_id", + reason = "this method will likely be replaced by an associated static", + issue = "27745" + )] fn get_type_id(&self) -> TypeId; } #[stable(feature = "rust1", since = "1.0.0")] -impl Any for T { - fn get_type_id(&self) -> TypeId { TypeId::of::() } +impl Any for T { + fn get_type_id(&self) -> TypeId { + TypeId::of::() + } } /////////////////////////////////////////////////////////////////////////////// @@ -192,9 +196,7 @@ impl dyn Any { #[inline] pub fn downcast_ref(&self) -> Option<&T> { if self.is::() { - unsafe { - Some(&*(self as *const dyn Any as *const T)) - } + unsafe { Some(&*(self as *const dyn Any as *const T)) } } else { None } @@ -229,16 +231,14 @@ impl dyn Any { #[inline] pub fn downcast_mut(&mut self) -> Option<&mut T> { if self.is::() { - unsafe { - Some(&mut *(self as *mut dyn Any as *mut T)) - } + unsafe { Some(&mut *(self as *mut dyn Any as *mut T)) } } else { None } } } -impl dyn Any+Send { +impl dyn Any + Send { /// Forwards to the method defined on the type `Any`. /// /// # Examples @@ -322,7 +322,7 @@ impl dyn Any+Send { } } -impl dyn Any+Send+Sync { +impl dyn Any + Send + Sync { /// Forwards to the method defined on the type `Any`. 
/// /// # Examples @@ -447,7 +447,7 @@ impl TypeId { /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_const_unstable(feature="const_type_id")] + #[rustc_const_unstable(feature = "const_type_id")] pub const fn of() -> TypeId { TypeId { t: unsafe { intrinsics::type_id::() }, diff --git a/src/libcore/array.rs b/src/libcore/array.rs index 3a27a39af4ace..55b76804e3148 100644 --- a/src/libcore/array.rs +++ b/src/libcore/array.rs @@ -13,7 +13,7 @@ use borrow::{Borrow, BorrowMut}; use cmp::Ordering; use convert::TryFrom; use fmt; -use hash::{Hash, self}; +use hash::{self, Hash}; use marker::Unsize; use slice::{Iter, IterMut}; @@ -61,10 +61,12 @@ impl fmt::Display for TryFromSliceError { } impl TryFromSliceError { - #[unstable(feature = "array_error_internals", - reason = "available through Error trait and this method should not \ - be exposed publicly", - issue = "0")] + #[unstable( + feature = "array_error_internals", + reason = "available through Error trait and this method should not \ + be exposed publicly", + issue = "0" + )] #[inline] #[doc(hidden)] pub fn __description(&self) -> &str { @@ -78,13 +80,20 @@ macro_rules! __impl_slice_eq1 { }; ($Lhs: ty, $Rhs: ty, $Bound: ident) => { #[stable(feature = "rust1", since = "1.0.0")] - impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs where A: PartialEq { + impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs + where + A: PartialEq, + { #[inline] - fn eq(&self, other: &$Rhs) -> bool { self[..] == other[..] } + fn eq(&self, other: &$Rhs) -> bool { + self[..] == other[..] + } #[inline] - fn ne(&self, other: &$Rhs) -> bool { self[..] != other[..] } + fn ne(&self, other: &$Rhs) -> bool { + self[..] != other[..] + } } - } + }; } macro_rules! __impl_slice_eq2 { @@ -95,13 +104,20 @@ macro_rules! __impl_slice_eq2 { __impl_slice_eq1!($Lhs, $Rhs, $Bound); #[stable(feature = "rust1", since = "1.0.0")] - impl<'a, 'b, A: $Bound, B> PartialEq<$Lhs> for $Rhs where B: PartialEq { + impl<'a, 'b, A: $Bound, B> PartialEq<$Lhs> for $Rhs + where + B: PartialEq, + { #[inline] - fn eq(&self, other: &$Lhs) -> bool { self[..] == other[..] } + fn eq(&self, other: &$Lhs) -> bool { + self[..] == other[..] + } #[inline] - fn ne(&self, other: &$Lhs) -> bool { self[..] != other[..] } + fn ne(&self, other: &$Lhs) -> bool { + self[..] != other[..] + } } - } + }; } // macro for implementing n-element array functions and operations @@ -283,4 +299,4 @@ macro_rules! array_impl_default { }; } -array_impl_default!{32, T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T} +array_impl_default! {32, T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T T} diff --git a/src/libcore/ascii.rs b/src/libcore/ascii.rs index 7a06aa2b0d397..c2fe1736056a4 100644 --- a/src/libcore/ascii.rs +++ b/src/libcore/ascii.rs @@ -12,8 +12,8 @@ #![stable(feature = "core_ascii", since = "1.26.0")] use fmt; -use ops::Range; use iter::FusedIterator; +use ops::Range; /// An iterator over the escaped version of a byte. 
/// @@ -98,15 +98,18 @@ pub fn escape_default(c: u8) -> EscapeDefault { b'\\' => ([b'\\', b'\\', 0, 0], 2), b'\'' => ([b'\\', b'\'', 0, 0], 2), b'"' => ([b'\\', b'"', 0, 0], 2), - b'\x20' ..= b'\x7e' => ([c, 0, 0, 0], 1), + b'\x20'..=b'\x7e' => ([c, 0, 0, 0], 1), _ => ([b'\\', b'x', hexify(c >> 4), hexify(c & 0xf)], 4), }; - return EscapeDefault { range: 0..len, data }; + return EscapeDefault { + range: 0..len, + data, + }; fn hexify(b: u8) -> u8 { match b { - 0 ..= 9 => b'0' + b, + 0..=9 => b'0' + b, _ => b'a' + b - 10, } } @@ -115,8 +118,12 @@ pub fn escape_default(c: u8) -> EscapeDefault { #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for EscapeDefault { type Item = u8; - fn next(&mut self) -> Option { self.range.next().map(|i| self.data[i]) } - fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } + fn next(&mut self) -> Option { + self.range.next().map(|i| self.data[i]) + } + fn size_hint(&self) -> (usize, Option) { + self.range.size_hint() + } } #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for EscapeDefault { diff --git a/src/libcore/benches/any.rs b/src/libcore/benches/any.rs index ceb507aad38f4..53099b78266f8 100644 --- a/src/libcore/benches/any.rs +++ b/src/libcore/benches/any.rs @@ -1,5 +1,5 @@ use core::any::*; -use test::{Bencher, black_box}; +use test::{black_box, Bencher}; #[bench] fn bench_downcast_ref(b: &mut Bencher) { diff --git a/src/libcore/benches/char/methods.rs b/src/libcore/benches/char/methods.rs index af934c1171577..9c1501fcda657 100644 --- a/src/libcore/benches/char/methods.rs +++ b/src/libcore/benches/char/methods.rs @@ -5,28 +5,61 @@ const RADIX: [u32; 5] = [2, 8, 10, 16, 32]; #[bench] fn bench_to_digit_radix_2(b: &mut Bencher) { - b.iter(|| CHARS.iter().cycle().take(10_000).map(|c| c.to_digit(2)).min()) + b.iter(|| { + CHARS + .iter() + .cycle() + .take(10_000) + .map(|c| c.to_digit(2)) + .min() + }) } #[bench] fn bench_to_digit_radix_10(b: &mut Bencher) { - b.iter(|| CHARS.iter().cycle().take(10_000).map(|c| c.to_digit(10)).min()) + b.iter(|| { + CHARS + .iter() + .cycle() + .take(10_000) + .map(|c| c.to_digit(10)) + .min() + }) } #[bench] fn bench_to_digit_radix_16(b: &mut Bencher) { - b.iter(|| CHARS.iter().cycle().take(10_000).map(|c| c.to_digit(16)).min()) + b.iter(|| { + CHARS + .iter() + .cycle() + .take(10_000) + .map(|c| c.to_digit(16)) + .min() + }) } #[bench] fn bench_to_digit_radix_36(b: &mut Bencher) { - b.iter(|| CHARS.iter().cycle().take(10_000).map(|c| c.to_digit(36)).min()) + b.iter(|| { + CHARS + .iter() + .cycle() + .take(10_000) + .map(|c| c.to_digit(36)) + .min() + }) } #[bench] fn bench_to_digit_radix_var(b: &mut Bencher) { - b.iter(|| CHARS.iter().cycle() - .zip(RADIX.iter().cycle()) - .take(10_000) - .map(|(c, radix)| c.to_digit(*radix)).min()) + b.iter(|| { + CHARS + .iter() + .cycle() + .zip(RADIX.iter().cycle()) + .take(10_000) + .map(|(c, radix)| c.to_digit(*radix)) + .min() + }) } diff --git a/src/libcore/benches/hash/sip.rs b/src/libcore/benches/hash/sip.rs index 5baba42763e10..725c864dce9f1 100644 --- a/src/libcore/benches/hash/sip.rs +++ b/src/libcore/benches/hash/sip.rs @@ -1,7 +1,7 @@ #![allow(deprecated)] use core::hash::*; -use test::{Bencher, black_box}; +use test::{black_box, Bencher}; fn hash_bytes(mut s: H, x: &[u8]) -> u64 { Hasher::write(&mut s, x); @@ -44,11 +44,11 @@ fn bench_str_over_8_bytes(b: &mut Bencher) { #[bench] fn bench_long_str(b: &mut Bencher) { let s = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor \ -incididunt ut 
labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud \ -exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute \ -irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla \ -pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui \ -officia deserunt mollit anim id est laborum."; + incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud \ + exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute \ + irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla \ + pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui \ + officia deserunt mollit anim id est laborum."; b.iter(|| { assert_eq!(hash(&s), 17717065544121360093); }) @@ -58,9 +58,7 @@ officia deserunt mollit anim id est laborum."; fn bench_u32(b: &mut Bencher) { let u = 162629500u32; let u = black_box(u); - b.iter(|| { - hash(&u) - }); + b.iter(|| hash(&u)); b.bytes = 8; } @@ -70,9 +68,7 @@ fn bench_u32_keyed(b: &mut Bencher) { let u = black_box(u); let k1 = black_box(0x1); let k2 = black_box(0x2); - b.iter(|| { - hash_with(SipHasher::new_with_keys(k1, k2), &u) - }); + b.iter(|| hash_with(SipHasher::new_with_keys(k1, k2), &u)); b.bytes = 8; } @@ -80,62 +76,48 @@ fn bench_u32_keyed(b: &mut Bencher) { fn bench_u64(b: &mut Bencher) { let u = 16262950014981195938u64; let u = black_box(u); - b.iter(|| { - hash(&u) - }); + b.iter(|| hash(&u)); b.bytes = 8; } #[bench] fn bench_bytes_4(b: &mut Bencher) { let data = black_box([b' '; 4]); - b.iter(|| { - hash_bytes(SipHasher::default(), &data) - }); + b.iter(|| hash_bytes(SipHasher::default(), &data)); b.bytes = 4; } #[bench] fn bench_bytes_7(b: &mut Bencher) { let data = black_box([b' '; 7]); - b.iter(|| { - hash_bytes(SipHasher::default(), &data) - }); + b.iter(|| hash_bytes(SipHasher::default(), &data)); b.bytes = 7; } #[bench] fn bench_bytes_8(b: &mut Bencher) { let data = black_box([b' '; 8]); - b.iter(|| { - hash_bytes(SipHasher::default(), &data) - }); + b.iter(|| hash_bytes(SipHasher::default(), &data)); b.bytes = 8; } #[bench] fn bench_bytes_a_16(b: &mut Bencher) { let data = black_box([b' '; 16]); - b.iter(|| { - hash_bytes(SipHasher::default(), &data) - }); + b.iter(|| hash_bytes(SipHasher::default(), &data)); b.bytes = 16; } #[bench] fn bench_bytes_b_32(b: &mut Bencher) { let data = black_box([b' '; 32]); - b.iter(|| { - hash_bytes(SipHasher::default(), &data) - }); + b.iter(|| hash_bytes(SipHasher::default(), &data)); b.bytes = 32; } #[bench] fn bench_bytes_c_128(b: &mut Bencher) { let data = black_box([b' '; 128]); - b.iter(|| { - hash_bytes(SipHasher::default(), &data) - }); + b.iter(|| hash_bytes(SipHasher::default(), &data)); b.bytes = 128; } diff --git a/src/libcore/benches/iter.rs b/src/libcore/benches/iter.rs index fe852e42b5cd3..353f6172751d1 100644 --- a/src/libcore/benches/iter.rs +++ b/src/libcore/benches/iter.rs @@ -1,5 +1,5 @@ use core::iter::*; -use test::{Bencher, black_box}; +use test::{black_box, Bencher}; #[bench] fn bench_rposition(b: &mut Bencher) { @@ -14,7 +14,11 @@ fn bench_skip_while(b: &mut Bencher) { b.iter(|| { let it = 0..100; let mut sum = 0; - it.skip_while(|&x| { sum += x; sum < 4000 }).all(|_| true); + it.skip_while(|&x| { + sum += x; + sum < 4000 + }) + .all(|_| true); }); } @@ -29,7 +33,9 @@ fn bench_multiple_take(b: &mut Bencher) { }); } -fn scatter(x: i32) -> i32 { (x * 31) % 127 } +fn scatter(x: i32) -> i32 { + (x * 31) % 127 +} #[bench] fn bench_max_by_key(b: &mut 
Bencher) { @@ -43,7 +49,12 @@ fn bench_max_by_key(b: &mut Bencher) { #[bench] fn bench_max_by_key2(b: &mut Bencher) { fn max_index_iter(array: &[i32]) -> usize { - array.iter().enumerate().max_by_key(|&(_, item)| item).unwrap().0 + array + .iter() + .enumerate() + .max_by_key(|&(_, item)| item) + .unwrap() + .0 } let mut data = vec![0; 1638]; @@ -76,23 +87,21 @@ pub fn add_zip(xs: &[f32], ys: &mut [f32]) { fn bench_zip_copy(b: &mut Bencher) { let source = vec![0u8; 16 * 1024]; let mut dst = black_box(vec![0u8; 16 * 1024]); - b.iter(|| { - copy_zip(&source, &mut dst) - }) + b.iter(|| copy_zip(&source, &mut dst)) } #[bench] fn bench_zip_add(b: &mut Bencher) { let source = vec![1.; 16 * 1024]; let mut dst = vec![0.; 16 * 1024]; - b.iter(|| { - add_zip(&source, &mut dst) - }); + b.iter(|| add_zip(&source, &mut dst)); } /// `Iterator::for_each` implemented as a plain loop. -fn for_each_loop(iter: I, mut f: F) where - I: Iterator, F: FnMut(I::Item) +fn for_each_loop(iter: I, mut f: F) +where + I: Iterator, + F: FnMut(I::Item), { for item in iter { f(item); @@ -101,8 +110,10 @@ fn for_each_loop(iter: I, mut f: F) where /// `Iterator::for_each` implemented with `fold` for internal iteration. /// (except when `by_ref()` effectively disables that optimization.) -fn for_each_fold(iter: I, mut f: F) where - I: Iterator, F: FnMut(I::Item) +fn for_each_fold(iter: I, mut f: F) +where + I: Iterator, + F: FnMut(I::Item), { iter.fold((), move |(), item| f(item)); } @@ -137,25 +148,20 @@ fn bench_for_each_chain_ref_fold(b: &mut Bencher) { }); } - /// Helper to benchmark `sum` for iterators taken by value which /// can optimize `fold`, and by reference which cannot. macro_rules! bench_sums { ($bench_sum:ident, $bench_ref_sum:ident, $iter:expr) => { #[bench] fn $bench_sum(b: &mut Bencher) { - b.iter(|| -> i64 { - $iter.map(black_box).sum() - }); + b.iter(|| -> i64 { $iter.map(black_box).sum() }); } #[bench] fn $bench_ref_sum(b: &mut Bencher) { - b.iter(|| -> i64 { - $iter.map(black_box).by_ref().sum() - }); + b.iter(|| -> i64 { $iter.map(black_box).by_ref().sum() }); } - } + }; } bench_sums! 
{ @@ -286,7 +292,10 @@ fn bench_zip_then_skip(b: &mut Bencher) { let t: Vec<_> = (0..100_000).collect(); b.iter(|| { - let s = v.iter().zip(t.iter()).skip(10000) + let s = v + .iter() + .zip(t.iter()) + .skip(10000) .take_while(|t| *t.0 < 10100) .map(|(a, b)| *a + *b) .sum::(); @@ -299,7 +308,10 @@ fn bench_skip_then_zip(b: &mut Bencher) { let t: Vec<_> = (0..100_000).collect(); b.iter(|| { - let s = v.iter().skip(10000).zip(t.iter().skip(10000)) + let s = v + .iter() + .skip(10000) + .zip(t.iter().skip(10000)) .take_while(|t| *t.0 < 10100) .map(|(a, b)| *a + *b) .sum::(); diff --git a/src/libcore/benches/num/flt2dec/mod.rs b/src/libcore/benches/num/flt2dec/mod.rs index 4153745d0424a..b810dd12ab61b 100644 --- a/src/libcore/benches/num/flt2dec/mod.rs +++ b/src/libcore/benches/num/flt2dec/mod.rs @@ -3,17 +3,17 @@ mod strategy { mod grisu; } +use core::num::flt2dec::MAX_SIG_DIGITS; +use core::num::flt2dec::{decode, DecodableFloat, Decoded, FullDecoded}; use std::f64; use std::io::Write; use std::vec::Vec; use test::Bencher; -use core::num::flt2dec::{decode, DecodableFloat, FullDecoded, Decoded}; -use core::num::flt2dec::MAX_SIG_DIGITS; pub fn decode_finite(v: T) -> Decoded { match decode(v).1 { FullDecoded::Finite(decoded) => decoded, - full_decoded => panic!("expected finite, got {:?} instead", full_decoded) + full_decoded => panic!("expected finite, got {:?} instead", full_decoded), } } diff --git a/src/libcore/benches/num/flt2dec/strategy/dragon.rs b/src/libcore/benches/num/flt2dec/strategy/dragon.rs index 60660b1da1118..4052fec33813c 100644 --- a/src/libcore/benches/num/flt2dec/strategy/dragon.rs +++ b/src/libcore/benches/num/flt2dec/strategy/dragon.rs @@ -1,6 +1,6 @@ -use std::{i16, f64}; use super::super::*; use core::num::flt2dec::strategy::dragon::*; +use std::{f64, i16}; use test::Bencher; #[bench] diff --git a/src/libcore/benches/num/flt2dec/strategy/grisu.rs b/src/libcore/benches/num/flt2dec/strategy/grisu.rs index 841feba50dd5b..495074747c283 100644 --- a/src/libcore/benches/num/flt2dec/strategy/grisu.rs +++ b/src/libcore/benches/num/flt2dec/strategy/grisu.rs @@ -1,12 +1,12 @@ -use std::{i16, f64}; use super::super::*; use core::num::flt2dec::strategy::grisu::*; +use std::{f64, i16}; use test::Bencher; pub fn decode_finite(v: T) -> Decoded { match decode(v).1 { FullDecoded::Finite(decoded) => decoded, - full_decoded => panic!("expected finite, got {:?} instead", full_decoded) + full_decoded => panic!("expected finite, got {:?} instead", full_decoded), } } diff --git a/src/libcore/benches/num/mod.rs b/src/libcore/benches/num/mod.rs index f5c49ea5bf0d5..74f6c8b6374df 100644 --- a/src/libcore/benches/num/mod.rs +++ b/src/libcore/benches/num/mod.rs @@ -1,8 +1,8 @@ -mod flt2dec; mod dec2flt; +mod flt2dec; -use test::Bencher; use std::str::FromStr; +use test::Bencher; const ASCII_NUMBERS: [&str; 19] = [ "0", @@ -27,7 +27,7 @@ const ASCII_NUMBERS: [&str; 19] = [ ]; macro_rules! from_str_bench { - ($mac:ident, $t:ty) => ( + ($mac:ident, $t:ty) => { #[bench] fn $mac(b: &mut Bencher) { b.iter(|| { @@ -39,11 +39,11 @@ macro_rules! from_str_bench { .max() }) } - ) + }; } macro_rules! from_str_radix_bench { - ($mac:ident, $t:ty, $radix:expr) => ( + ($mac:ident, $t:ty, $radix:expr) => { #[bench] fn $mac(b: &mut Bencher) { b.iter(|| { @@ -55,7 +55,7 @@ macro_rules! 
from_str_radix_bench { .max() }) } - ) + }; } from_str_bench!(bench_u8_from_str, u8); diff --git a/src/libcore/benches/ops.rs b/src/libcore/benches/ops.rs index 80649f33562f2..0a2be8a28819f 100644 --- a/src/libcore/benches/ops.rs +++ b/src/libcore/benches/ops.rs @@ -4,17 +4,16 @@ use test::Bencher; // Overhead of dtors struct HasDtor { - _x: isize + _x: isize, } impl Drop for HasDtor { - fn drop(&mut self) { - } + fn drop(&mut self) {} } #[bench] fn alloc_obj_with_dtor(b: &mut Bencher) { b.iter(|| { - HasDtor { _x : 10 }; + HasDtor { _x: 10 }; }) } diff --git a/src/libcore/benches/slice.rs b/src/libcore/benches/slice.rs index 484753c1a045e..7501695b1e6de 100644 --- a/src/libcore/benches/slice.rs +++ b/src/libcore/benches/slice.rs @@ -8,11 +8,12 @@ enum Cache { } fn binary_search(b: &mut Bencher, cache: Cache, mapper: F) - where F: Fn(usize) -> usize +where + F: Fn(usize) -> usize, { let size = match cache { - Cache::L1 => 1000, // 8kb - Cache::L2 => 10_000, // 80kb + Cache::L1 => 1000, // 8kb + Cache::L2 => 10_000, // 80kb Cache::L3 => 1_000_000, // 8Mb }; let v = (0..size).map(&mapper).collect::>(); diff --git a/src/libcore/borrow.rs b/src/libcore/borrow.rs index 89668dc06505b..bb6bcccc80d41 100644 --- a/src/libcore/borrow.rs +++ b/src/libcore/borrow.rs @@ -185,7 +185,7 @@ pub trait Borrow { /// /// [`Borrow`]: trait.Borrow.html #[stable(feature = "rust1", since = "1.0.0")] -pub trait BorrowMut : Borrow { +pub trait BorrowMut: Borrow { /// Mutably borrows from an owned value. /// /// # Examples @@ -207,25 +207,35 @@ pub trait BorrowMut : Borrow { #[stable(feature = "rust1", since = "1.0.0")] impl Borrow for T { - fn borrow(&self) -> &T { self } + fn borrow(&self) -> &T { + self + } } #[stable(feature = "rust1", since = "1.0.0")] impl BorrowMut for T { - fn borrow_mut(&mut self) -> &mut T { self } + fn borrow_mut(&mut self) -> &mut T { + self + } } #[stable(feature = "rust1", since = "1.0.0")] impl Borrow for &T { - fn borrow(&self) -> &T { &**self } + fn borrow(&self) -> &T { + &**self + } } #[stable(feature = "rust1", since = "1.0.0")] impl Borrow for &mut T { - fn borrow(&self) -> &T { &**self } + fn borrow(&self) -> &T { + &**self + } } #[stable(feature = "rust1", since = "1.0.0")] impl BorrowMut for &mut T { - fn borrow_mut(&mut self) -> &mut T { &mut **self } + fn borrow_mut(&mut self) -> &mut T { + &mut **self + } } diff --git a/src/libcore/cell.rs b/src/libcore/cell.rs index d57ca13a334e8..98e6a4af2a1cc 100644 --- a/src/libcore/cell.rs +++ b/src/libcore/cell.rs @@ -190,7 +190,7 @@ use cmp::Ordering; use fmt::{self, Debug, Display}; use marker::Unsize; use mem; -use ops::{Deref, DerefMut, CoerceUnsized}; +use ops::{CoerceUnsized, Deref, DerefMut}; use ptr; /// A mutable memory location. @@ -231,7 +231,7 @@ pub struct Cell { value: UnsafeCell, } -impl Cell { +impl Cell { /// Returns a copy of the contained value. /// /// # Examples @@ -246,7 +246,7 @@ impl Cell { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn get(&self) -> T { - unsafe{ *self.value.get() } + unsafe { *self.value.get() } } /// Updates the contained value using a function and returns the new value. 
@@ -284,7 +284,7 @@ unsafe impl Send for Cell where T: Send {} impl !Sync for Cell {} #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for Cell { +impl Clone for Cell { #[inline] fn clone(&self) -> Cell { Cell::new(self.get()) @@ -292,7 +292,7 @@ impl Clone for Cell { } #[stable(feature = "rust1", since = "1.0.0")] -impl Default for Cell { +impl Default for Cell { /// Creates a `Cell`, with the `Default` value for T. #[inline] fn default() -> Cell { @@ -301,7 +301,7 @@ impl Default for Cell { } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for Cell { +impl PartialEq for Cell { #[inline] fn eq(&self, other: &Cell) -> bool { self.get() == other.get() @@ -309,10 +309,10 @@ impl PartialEq for Cell { } #[stable(feature = "cell_eq", since = "1.2.0")] -impl Eq for Cell {} +impl Eq for Cell {} #[stable(feature = "cell_ord", since = "1.10.0")] -impl PartialOrd for Cell { +impl PartialOrd for Cell { #[inline] fn partial_cmp(&self, other: &Cell) -> Option { self.get().partial_cmp(&other.get()) @@ -340,7 +340,7 @@ impl PartialOrd for Cell { } #[stable(feature = "cell_ord", since = "1.10.0")] -impl Ord for Cell { +impl Ord for Cell { #[inline] fn cmp(&self, other: &Cell) -> Ordering { self.get().cmp(&other.get()) @@ -486,9 +486,7 @@ impl Cell { #[inline] #[stable(feature = "cell_get_mut", since = "1.11.0")] pub fn get_mut(&mut self) -> &mut T { - unsafe { - &mut *self.value.get() - } + unsafe { &mut *self.value.get() } } /// Returns a `&Cell` from a `&mut T` @@ -506,11 +504,9 @@ impl Cell { /// assert_eq!(slice_cell.len(), 3); /// ``` #[inline] - #[unstable(feature = "as_cell", issue="43038")] + #[unstable(feature = "as_cell", issue = "43038")] pub fn from_mut(t: &mut T) -> &Cell { - unsafe { - &*(t as *mut T as *const Cell) - } + unsafe { &*(t as *mut T as *const Cell) } } } @@ -552,11 +548,9 @@ impl Cell<[T]> { /// /// assert_eq!(slice_cell.len(), 3); /// ``` - #[unstable(feature = "as_cell", issue="43038")] + #[unstable(feature = "as_cell", issue = "43038")] pub fn as_slice_of_cells(&self) -> &[Cell] { - unsafe { - &*(self as *const Cell<[T]> as *const [Cell]) - } + unsafe { &*(self as *const Cell<[T]> as *const [Cell]) } } } @@ -694,7 +688,7 @@ impl RefCell { /// assert_eq!(cell, RefCell::new(6)); /// ``` #[inline] - #[stable(feature = "refcell_replace", since="1.24.0")] + #[stable(feature = "refcell_replace", since = "1.24.0")] pub fn replace(&self, t: T) -> T { mem::replace(&mut *self.borrow_mut(), t) } @@ -719,7 +713,7 @@ impl RefCell { /// assert_eq!(cell, RefCell::new(6)); /// ``` #[inline] - #[unstable(feature = "refcell_replace_swap", issue="43570")] + #[unstable(feature = "refcell_replace_swap", issue = "43570")] pub fn replace_with T>(&self, f: F) -> T { let mut_borrow = &mut *self.borrow_mut(); let replacement = f(mut_borrow); @@ -746,7 +740,7 @@ impl RefCell { /// assert_eq!(d, RefCell::new(5)); /// ``` #[inline] - #[stable(feature = "refcell_swap", since="1.24.0")] + #[stable(feature = "refcell_swap", since = "1.24.0")] pub fn swap(&self, other: &Self) { mem::swap(&mut *self.borrow_mut(), &mut *other.borrow_mut()) } @@ -954,9 +948,7 @@ impl RefCell { #[inline] #[stable(feature = "cell_get_mut", since = "1.11.0")] pub fn get_mut(&mut self) -> &mut T { - unsafe { - &mut *self.value.get() - } + unsafe { &mut *self.value.get() } } } @@ -978,7 +970,7 @@ impl Clone for RefCell { } #[stable(feature = "rust1", since = "1.0.0")] -impl Default for RefCell { +impl Default for RefCell { /// Creates a `RefCell`, with the `Default` value for T. 
#[inline] fn default() -> RefCell { @@ -1103,7 +1095,9 @@ impl Clone for BorrowRef<'_> { // a writing borrow. assert!(borrow != isize::max_value()); self.borrow.set(borrow + 1); - BorrowRef { borrow: self.borrow } + BorrowRef { + borrow: self.borrow, + } } } @@ -1166,7 +1160,8 @@ impl<'b, T: ?Sized> Ref<'b, T> { #[stable(feature = "cell_map", since = "1.8.0")] #[inline] pub fn map(orig: Ref<'b, T>, f: F) -> Ref<'b, U> - where F: FnOnce(&T) -> &U + where + F: FnOnce(&T) -> &U, { Ref { value: f(orig.value), @@ -1198,11 +1193,18 @@ impl<'b, T: ?Sized> Ref<'b, T> { #[unstable(feature = "refcell_map_split", issue = "51476")] #[inline] pub fn map_split(orig: Ref<'b, T>, f: F) -> (Ref<'b, U>, Ref<'b, V>) - where F: FnOnce(&T) -> (&U, &V) + where + F: FnOnce(&T) -> (&U, &V), { let (a, b) = f(orig.value); let borrow = orig.borrow.clone(); - (Ref { value: a, borrow }, Ref { value: b, borrow: orig.borrow }) + ( + Ref { value: a, borrow }, + Ref { + value: b, + borrow: orig.borrow, + }, + ) } } @@ -1243,7 +1245,8 @@ impl<'b, T: ?Sized> RefMut<'b, T> { #[stable(feature = "cell_map", since = "1.8.0")] #[inline] pub fn map(orig: RefMut<'b, T>, f: F) -> RefMut<'b, U> - where F: FnOnce(&mut T) -> &mut U + where + F: FnOnce(&mut T) -> &mut U, { // FIXME(nll-rfc#40): fix borrow-check let RefMut { value, borrow } = orig; @@ -1282,13 +1285,21 @@ impl<'b, T: ?Sized> RefMut<'b, T> { #[unstable(feature = "refcell_map_split", issue = "51476")] #[inline] pub fn map_split( - orig: RefMut<'b, T>, f: F + orig: RefMut<'b, T>, + f: F, ) -> (RefMut<'b, U>, RefMut<'b, V>) - where F: FnOnce(&mut T) -> (&mut U, &mut V) + where + F: FnOnce(&mut T) -> (&mut U, &mut V), { let (a, b) = f(orig.value); let borrow = orig.borrow.clone(); - (RefMut { value: a, borrow }, RefMut { value: b, borrow: orig.borrow }) + ( + RefMut { value: a, borrow }, + RefMut { + value: b, + borrow: orig.borrow, + }, + ) } } @@ -1316,7 +1327,7 @@ impl<'b> BorrowRefMut<'b> { UNUSED => { borrow.set(UNUSED - 1); Some(BorrowRefMut { borrow }) - }, + } _ => None, } } @@ -1333,7 +1344,9 @@ impl<'b> BorrowRefMut<'b> { // Prevent the borrow counter from underflowing. assert!(borrow != isize::min_value()); self.borrow.set(borrow - 1); - BorrowRefMut { borrow: self.borrow } + BorrowRefMut { + borrow: self.borrow, + } } } diff --git a/src/libcore/char/convert.rs b/src/libcore/char/convert.rs index 4a1a236b669f0..1383c01057739 100644 --- a/src/libcore/char/convert.rs +++ b/src/libcore/char/convert.rs @@ -1,10 +1,10 @@ //! Character conversions. +use super::MAX; use convert::TryFrom; use fmt; use mem::transmute; use str::FromStr; -use super::MAX; /// Converts a `u32` to a `char`. /// @@ -161,7 +161,6 @@ impl From for char { } } - /// An error which can be returned when parsing a char. 
#[stable(feature = "char_from_str", since = "1.20.0")] #[derive(Clone, Debug, PartialEq, Eq)] @@ -170,16 +169,16 @@ pub struct ParseCharError { } impl ParseCharError { - #[unstable(feature = "char_error_internals", - reason = "this method should not be available publicly", - issue = "0")] + #[unstable( + feature = "char_error_internals", + reason = "this method should not be available publicly", + issue = "0" + )] #[doc(hidden)] pub fn __description(&self) -> &str { match self.kind { - CharErrorKind::EmptyString => { - "cannot parse char from empty string" - }, - CharErrorKind::TooManyChars => "too many characters in string" + CharErrorKind::EmptyString => "cannot parse char from empty string", + CharErrorKind::TooManyChars => "too many characters in string", } } } @@ -197,7 +196,6 @@ impl fmt::Display for ParseCharError { } } - #[stable(feature = "char_from_str", since = "1.20.0")] impl FromStr for char { type Err = ParseCharError; @@ -206,18 +204,17 @@ impl FromStr for char { fn from_str(s: &str) -> Result { let mut chars = s.chars(); match (chars.next(), chars.next()) { - (None, _) => { - Err(ParseCharError { kind: CharErrorKind::EmptyString }) - }, + (None, _) => Err(ParseCharError { + kind: CharErrorKind::EmptyString, + }), (Some(c), None) => Ok(c), - _ => { - Err(ParseCharError { kind: CharErrorKind::TooManyChars }) - } + _ => Err(ParseCharError { + kind: CharErrorKind::TooManyChars, + }), } } } - #[unstable(feature = "try_from", issue = "33417")] impl TryFrom for char { type Error = CharTryFromError; @@ -315,4 +312,3 @@ pub fn from_digit(num: u32, radix: u32) -> Option { None } } - diff --git a/src/libcore/char/decode.rs b/src/libcore/char/decode.rs index 510c46cdca0ed..06af4c57ab0d2 100644 --- a/src/libcore/char/decode.rs +++ b/src/libcore/char/decode.rs @@ -1,13 +1,14 @@ //! UTF-8 and UTF-16 decoding iterators -use fmt; use super::from_u32_unchecked; +use fmt; /// An iterator that decodes UTF-16 encoded code points from an iterator of `u16`s. #[stable(feature = "decode_utf16", since = "1.9.0")] #[derive(Clone, Debug)] pub struct DecodeUtf16 - where I: Iterator +where + I: Iterator, { iter: I, buf: Option, @@ -80,7 +81,7 @@ impl> Iterator for DecodeUtf16 { fn next(&mut self) -> Option> { let u = match self.buf.take() { Some(buf) => buf, - None => self.iter.next()? + None => self.iter.next()?, }; if u < 0xD800 || 0xDFFF < u { diff --git a/src/libcore/char/methods.rs b/src/libcore/char/methods.rs index fbc9a4a6b8efa..c27a43e5b1833 100644 --- a/src/libcore/char/methods.rs +++ b/src/libcore/char/methods.rs @@ -1,8 +1,8 @@ //! 
impl char {} +use super::*; use slice; use str::from_utf8_unchecked_mut; -use super::*; use unicode::printable::is_printable; use unicode::tables::{conversions, derived_property, general_category, property}; @@ -115,9 +115,9 @@ impl char { // the code is split up here to improve execution speed for cases where // the `radix` is constant and 10 or smaller - let val = if radix <= 10 { + let val = if radix <= 10 { match self { - '0' ..= '9' => self as u32 - '0' as u32, + '0'..='9' => self as u32 - '0' as u32, _ => return None, } } else { @@ -129,8 +129,11 @@ impl char { } }; - if val < radix { Some(val) } - else { None } + if val < radix { + Some(val) + } else { + None + } } /// Returns an iterator that yields the hexadecimal Unicode escape of a @@ -304,8 +307,8 @@ impl char { '\r' => EscapeDefaultState::Backslash('r'), '\n' => EscapeDefaultState::Backslash('n'), '\\' | '\'' | '"' => EscapeDefaultState::Backslash(self), - '\x20' ..= '\x7e' => EscapeDefaultState::Char(self), - _ => EscapeDefaultState::Unicode(self.escape_unicode()) + '\x20'..='\x7e' => EscapeDefaultState::Char(self), + _ => EscapeDefaultState::Unicode(self.escape_unicode()), }; EscapeDefault { state: init_state } } @@ -393,7 +396,11 @@ impl char { #[inline] pub fn len_utf16(self) -> usize { let ch = self as u32; - if (ch & 0xFFFF) == ch { 1 } else { 2 } + if (ch & 0xFFFF) == ch { + 1 + } else { + 2 + } } /// Encodes this character as UTF-8 into the provided byte buffer, @@ -437,30 +444,31 @@ impl char { pub fn encode_utf8(self, dst: &mut [u8]) -> &mut str { let code = self as u32; unsafe { - let len = - if code < MAX_ONE_B && !dst.is_empty() { + let len = if code < MAX_ONE_B && !dst.is_empty() { *dst.get_unchecked_mut(0) = code as u8; 1 } else if code < MAX_TWO_B && dst.len() >= 2 { *dst.get_unchecked_mut(0) = (code >> 6 & 0x1F) as u8 | TAG_TWO_B; *dst.get_unchecked_mut(1) = (code & 0x3F) as u8 | TAG_CONT; 2 - } else if code < MAX_THREE_B && dst.len() >= 3 { + } else if code < MAX_THREE_B && dst.len() >= 3 { *dst.get_unchecked_mut(0) = (code >> 12 & 0x0F) as u8 | TAG_THREE_B; - *dst.get_unchecked_mut(1) = (code >> 6 & 0x3F) as u8 | TAG_CONT; + *dst.get_unchecked_mut(1) = (code >> 6 & 0x3F) as u8 | TAG_CONT; *dst.get_unchecked_mut(2) = (code & 0x3F) as u8 | TAG_CONT; 3 } else if dst.len() >= 4 { *dst.get_unchecked_mut(0) = (code >> 18 & 0x07) as u8 | TAG_FOUR_B; *dst.get_unchecked_mut(1) = (code >> 12 & 0x3F) as u8 | TAG_CONT; - *dst.get_unchecked_mut(2) = (code >> 6 & 0x3F) as u8 | TAG_CONT; + *dst.get_unchecked_mut(2) = (code >> 6 & 0x3F) as u8 | TAG_CONT; *dst.get_unchecked_mut(3) = (code & 0x3F) as u8 | TAG_CONT; 4 } else { - panic!("encode_utf8: need {} bytes to encode U+{:X}, but the buffer has {}", + panic!( + "encode_utf8: need {} bytes to encode U+{:X}, but the buffer has {}", from_u32_unchecked(code).len_utf8(), code, - dst.len()) + dst.len() + ) }; from_utf8_unchecked_mut(dst.get_unchecked_mut(..len)) } @@ -516,10 +524,12 @@ impl char { *dst.get_unchecked_mut(1) = 0xDC00 | ((code as u16) & 0x3FF); slice::from_raw_parts_mut(dst.as_mut_ptr(), 2) } else { - panic!("encode_utf16: need {} units to encode U+{:X}, but the buffer has {}", + panic!( + "encode_utf16: need {} units to encode U+{:X}, but the buffer has {}", from_u32_unchecked(code).len_utf16(), code, - dst.len()) + dst.len() + ) } } } @@ -554,9 +564,11 @@ impl char { /// 'XID_Start' is a Unicode Derived Property specified in /// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications), /// mostly similar to `ID_Start` but modified for closure under `NFKx`. 
- #[unstable(feature = "rustc_private", - reason = "mainly needed for compiler internals", - issue = "27812")] + #[unstable( + feature = "rustc_private", + reason = "mainly needed for compiler internals", + issue = "27812" + )] #[inline] pub fn is_xid_start(self) -> bool { derived_property::XID_Start(self) @@ -568,9 +580,11 @@ impl char { /// 'XID_Continue' is a Unicode Derived Property specified in /// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications), /// mostly similar to 'ID_Continue' but modified for closure under NFKx. - #[unstable(feature = "rustc_private", - reason = "mainly needed for compiler internals", - issue = "27812")] + #[unstable( + feature = "rustc_private", + reason = "mainly needed for compiler internals", + issue = "27812" + )] #[inline] pub fn is_xid_continue(self) -> bool { derived_property::XID_Continue(self) diff --git a/src/libcore/char/mod.rs b/src/libcore/char/mod.rs index 15e153bdfada2..db2948c3f6366 100644 --- a/src/libcore/char/mod.rs +++ b/src/libcore/char/mod.rs @@ -24,14 +24,14 @@ mod decode; mod methods; // stable re-exports -#[stable(feature = "rust1", since = "1.0.0")] -pub use self::convert::{from_u32, from_digit}; #[stable(feature = "char_from_unchecked", since = "1.5.0")] pub use self::convert::from_u32_unchecked; -#[stable(feature = "char_from_str", since = "1.20.0")] -pub use self::convert::ParseCharError; #[unstable(feature = "try_from", issue = "33417")] pub use self::convert::CharTryFromError; +#[stable(feature = "char_from_str", since = "1.20.0")] +pub use self::convert::ParseCharError; +#[stable(feature = "rust1", since = "1.0.0")] +pub use self::convert::{from_digit, from_u32}; #[stable(feature = "decode_utf16", since = "1.9.0")] pub use self::decode::{decode_utf16, DecodeUtf16, DecodeUtf16Error}; @@ -45,13 +45,13 @@ use fmt::{self, Write}; use iter::FusedIterator; // UTF-8 ranges and tags for encoding characters -const TAG_CONT: u8 = 0b1000_0000; -const TAG_TWO_B: u8 = 0b1100_0000; -const TAG_THREE_B: u8 = 0b1110_0000; -const TAG_FOUR_B: u8 = 0b1111_0000; -const MAX_ONE_B: u32 = 0x80; -const MAX_TWO_B: u32 = 0x800; -const MAX_THREE_B: u32 = 0x10000; +const TAG_CONT: u8 = 0b1000_0000; +const TAG_TWO_B: u8 = 0b1100_0000; +const TAG_THREE_B: u8 = 0b1110_0000; +const TAG_FOUR_B: u8 = 0b1111_0000; +const MAX_ONE_B: u32 = 0x80; +const MAX_TWO_B: u32 = 0x800; +const MAX_THREE_B: u32 = 0x10000; /* Lu Uppercase_Letter an uppercase letter @@ -190,11 +190,11 @@ impl Iterator for EscapeUnicode { match self.state { EscapeUnicodeState::Done => None, - EscapeUnicodeState::RightBrace | - EscapeUnicodeState::Value | - EscapeUnicodeState::LeftBrace | - EscapeUnicodeState::Type | - EscapeUnicodeState::Backslash => Some('}'), + EscapeUnicodeState::RightBrace + | EscapeUnicodeState::Value + | EscapeUnicodeState::LeftBrace + | EscapeUnicodeState::Type + | EscapeUnicodeState::Backslash => Some('}'), } } } @@ -204,14 +204,15 @@ impl ExactSizeIterator for EscapeUnicode { #[inline] fn len(&self) -> usize { // The match is a single memory access with no branching - self.hex_digit_idx + match self.state { - EscapeUnicodeState::Done => 0, - EscapeUnicodeState::RightBrace => 1, - EscapeUnicodeState::Value => 2, - EscapeUnicodeState::LeftBrace => 3, - EscapeUnicodeState::Type => 4, - EscapeUnicodeState::Backslash => 5, - } + self.hex_digit_idx + + match self.state { + EscapeUnicodeState::Done => 0, + EscapeUnicodeState::RightBrace => 1, + EscapeUnicodeState::Value => 2, + EscapeUnicodeState::LeftBrace => 3, + EscapeUnicodeState::Type => 4, + 
EscapeUnicodeState::Backslash => 5, + } } } @@ -238,7 +239,7 @@ impl fmt::Display for EscapeUnicode { #[derive(Clone, Debug)] #[stable(feature = "rust1", since = "1.0.0")] pub struct EscapeDefault { - state: EscapeDefaultState + state: EscapeDefaultState, } #[derive(Clone, Debug)] @@ -284,15 +285,15 @@ impl Iterator for EscapeDefault { EscapeDefaultState::Backslash(c) if n == 0 => { self.state = EscapeDefaultState::Char(c); Some('\\') - }, + } EscapeDefaultState::Backslash(c) if n == 1 => { self.state = EscapeDefaultState::Done; Some(c) - }, + } EscapeDefaultState::Backslash(_) => { self.state = EscapeDefaultState::Done; None - }, + } EscapeDefaultState::Char(c) => { self.state = EscapeDefaultState::Done; @@ -301,7 +302,7 @@ impl Iterator for EscapeDefault { } else { None } - }, + } EscapeDefaultState::Done => None, EscapeDefaultState::Unicode(ref mut i) => i.nth(n), } @@ -355,12 +356,16 @@ pub struct EscapeDebug(EscapeDefault); #[stable(feature = "char_escape_debug", since = "1.20.0")] impl Iterator for EscapeDebug { type Item = char; - fn next(&mut self) -> Option { self.0.next() } - fn size_hint(&self) -> (usize, Option) { self.0.size_hint() } + fn next(&mut self) -> Option { + self.0.next() + } + fn size_hint(&self) -> (usize, Option) { + self.0.size_hint() + } } #[stable(feature = "char_escape_debug", since = "1.20.0")] -impl ExactSizeIterator for EscapeDebug { } +impl ExactSizeIterator for EscapeDebug {} #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for EscapeDebug {} @@ -428,7 +433,7 @@ impl CaseMappingIter { fn new(chars: [char; 3]) -> CaseMappingIter { if chars[2] == '\0' { if chars[1] == '\0' { - CaseMappingIter::One(chars[0]) // Including if chars[0] == '\0' + CaseMappingIter::One(chars[0]) // Including if chars[0] == '\0' } else { CaseMappingIter::Two(chars[0], chars[1]) } @@ -471,9 +476,7 @@ impl fmt::Display for CaseMappingIter { f.write_char(b)?; f.write_char(c) } - CaseMappingIter::One(c) => { - f.write_char(c) - } + CaseMappingIter::One(c) => f.write_char(c), CaseMappingIter::Zero => Ok(()), } } diff --git a/src/libcore/clone.rs b/src/libcore/clone.rs index ed90b7de26417..aaf6101746569 100644 --- a/src/libcore/clone.rs +++ b/src/libcore/clone.rs @@ -107,7 +107,7 @@ /// [impls]: #implementors #[stable(feature = "rust1", since = "1.0.0")] #[lang = "clone"] -pub trait Clone : Sized { +pub trait Clone: Sized { /// Returns a copy of the value. /// /// # Examples @@ -139,16 +139,24 @@ pub trait Clone : Sized { // These structs should never appear in user code. #[doc(hidden)] #[allow(missing_debug_implementations)] -#[unstable(feature = "derive_clone_copy", - reason = "deriving hack, should not be public", - issue = "0")] -pub struct AssertParamIsClone { _field: ::marker::PhantomData } +#[unstable( + feature = "derive_clone_copy", + reason = "deriving hack, should not be public", + issue = "0" +)] +pub struct AssertParamIsClone { + _field: ::marker::PhantomData, +} #[doc(hidden)] #[allow(missing_debug_implementations)] -#[unstable(feature = "derive_clone_copy", - reason = "deriving hack, should not be public", - issue = "0")] -pub struct AssertParamIsCopy { _field: ::marker::PhantomData } +#[unstable( + feature = "derive_clone_copy", + reason = "deriving hack, should not be public", + issue = "0" +)] +pub struct AssertParamIsCopy { + _field: ::marker::PhantomData, +} /// Implementations of `Clone` for primitive types. 
/// diff --git a/src/libcore/cmp.rs b/src/libcore/cmp.rs index f420d0d00a401..1162d5b0d5d1c 100644 --- a/src/libcore/cmp.rs +++ b/src/libcore/cmp.rs @@ -182,8 +182,8 @@ use self::Ordering::*; #[doc(alias = "==")] #[doc(alias = "!=")] #[rustc_on_unimplemented( - message="can't compare `{Self}` with `{Rhs}`", - label="no implementation for `{Self} == {Rhs}`", + message = "can't compare `{Self}` with `{Rhs}`", + label = "no implementation for `{Self} == {Rhs}`" )] pub trait PartialEq { /// This method tests for `self` and `other` values to be equal, and is used @@ -196,7 +196,9 @@ pub trait PartialEq { #[inline] #[must_use] #[stable(feature = "rust1", since = "1.0.0")] - fn ne(&self, other: &Rhs) -> bool { !self.eq(other) } + fn ne(&self, other: &Rhs) -> bool { + !self.eq(other) + } } /// Trait for equality comparisons which are [equivalence relations]( @@ -261,10 +263,14 @@ pub trait Eq: PartialEq { // This struct should never appear in user code. #[doc(hidden)] #[allow(missing_debug_implementations)] -#[unstable(feature = "derive_eq", - reason = "deriving hack, should not be public", - issue = "0")] -pub struct AssertParamIsEq { _field: ::marker::PhantomData } +#[unstable( + feature = "derive_eq", + reason = "deriving hack, should not be public", + issue = "0" +)] +pub struct AssertParamIsEq { + _field: ::marker::PhantomData, +} /// An `Ordering` is the result of a comparison between two values. /// @@ -435,13 +441,21 @@ impl PartialOrd for Reverse { } #[inline] - fn lt(&self, other: &Self) -> bool { other.0 < self.0 } + fn lt(&self, other: &Self) -> bool { + other.0 < self.0 + } #[inline] - fn le(&self, other: &Self) -> bool { other.0 <= self.0 } + fn le(&self, other: &Self) -> bool { + other.0 <= self.0 + } #[inline] - fn ge(&self, other: &Self) -> bool { other.0 >= self.0 } + fn ge(&self, other: &Self) -> bool { + other.0 >= self.0 + } #[inline] - fn gt(&self, other: &Self) -> bool { other.0 > self.0 } + fn gt(&self, other: &Self) -> bool { + other.0 > self.0 + } } #[stable(feature = "reverse_cmp_key", since = "1.19.0")] @@ -546,8 +560,14 @@ pub trait Ord: Eq + PartialOrd { #[stable(feature = "ord_max_min", since = "1.21.0")] #[inline] fn max(self, other: Self) -> Self - where Self: Sized { - if other >= self { other } else { self } + where + Self: Sized, + { + if other >= self { + other + } else { + self + } } /// Compares and returns the minimum of two values. @@ -563,8 +583,14 @@ pub trait Ord: Eq + PartialOrd { #[stable(feature = "ord_max_min", since = "1.21.0")] #[inline] fn min(self, other: Self) -> Self - where Self: Sized { - if self <= other { self } else { other } + where + Self: Sized, + { + if self <= other { + self + } else { + other + } } } @@ -692,8 +718,8 @@ impl PartialOrd for Ordering { #[doc(alias = "<=")] #[doc(alias = ">=")] #[rustc_on_unimplemented( - message="can't compare `{Self}` with `{Rhs}`", - label="no implementation for `{Self} < {Rhs}` and `{Self} > {Rhs}`", + message = "can't compare `{Self}` with `{Rhs}`", + label = "no implementation for `{Self} < {Rhs}` and `{Self} > {Rhs}`" )] pub trait PartialOrd: PartialEq { /// This method returns an ordering between `self` and `other` values if one exists. @@ -852,7 +878,7 @@ pub fn max(v1: T, v2: T) -> T { // Implementation of PartialEq, Eq, PartialOrd and Ord for primitive types mod impls { - use cmp::Ordering::{self, Less, Greater, Equal}; + use cmp::Ordering::{self, Equal, Greater, Less}; macro_rules! 
partial_eq_impl { ($($t:ty)*) => ($( @@ -869,9 +895,13 @@ mod impls { #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for () { #[inline] - fn eq(&self, _other: &()) -> bool { true } + fn eq(&self, _other: &()) -> bool { + true + } #[inline] - fn ne(&self, _other: &()) -> bool { false } + fn ne(&self, _other: &()) -> bool { + false + } } partial_eq_impl! { @@ -963,7 +993,9 @@ mod impls { #[stable(feature = "rust1", since = "1.0.0")] impl Ord for () { #[inline] - fn cmp(&self, _other: &()) -> Ordering { Equal } + fn cmp(&self, _other: &()) -> Ordering { + Equal + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1003,31 +1035,54 @@ mod impls { // & pointers #[stable(feature = "rust1", since = "1.0.0")] - impl<'a, 'b, A: ?Sized, B: ?Sized> PartialEq<&'b B> for &'a A where A: PartialEq { + impl<'a, 'b, A: ?Sized, B: ?Sized> PartialEq<&'b B> for &'a A + where + A: PartialEq, + { #[inline] - fn eq(&self, other: & &'b B) -> bool { PartialEq::eq(*self, *other) } + fn eq(&self, other: &&'b B) -> bool { + PartialEq::eq(*self, *other) + } #[inline] - fn ne(&self, other: & &'b B) -> bool { PartialEq::ne(*self, *other) } + fn ne(&self, other: &&'b B) -> bool { + PartialEq::ne(*self, *other) + } } #[stable(feature = "rust1", since = "1.0.0")] - impl<'a, 'b, A: ?Sized, B: ?Sized> PartialOrd<&'b B> for &'a A where A: PartialOrd { + impl<'a, 'b, A: ?Sized, B: ?Sized> PartialOrd<&'b B> for &'a A + where + A: PartialOrd, + { #[inline] fn partial_cmp(&self, other: &&'b B) -> Option { PartialOrd::partial_cmp(*self, *other) } #[inline] - fn lt(&self, other: & &'b B) -> bool { PartialOrd::lt(*self, *other) } + fn lt(&self, other: &&'b B) -> bool { + PartialOrd::lt(*self, *other) + } #[inline] - fn le(&self, other: & &'b B) -> bool { PartialOrd::le(*self, *other) } + fn le(&self, other: &&'b B) -> bool { + PartialOrd::le(*self, *other) + } #[inline] - fn ge(&self, other: & &'b B) -> bool { PartialOrd::ge(*self, *other) } + fn ge(&self, other: &&'b B) -> bool { + PartialOrd::ge(*self, *other) + } #[inline] - fn gt(&self, other: & &'b B) -> bool { PartialOrd::gt(*self, *other) } + fn gt(&self, other: &&'b B) -> bool { + PartialOrd::gt(*self, *other) + } } #[stable(feature = "rust1", since = "1.0.0")] - impl Ord for &A where A: Ord { + impl Ord for &A + where + A: Ord, + { #[inline] - fn cmp(&self, other: &Self) -> Ordering { Ord::cmp(*self, *other) } + fn cmp(&self, other: &Self) -> Ordering { + Ord::cmp(*self, *other) + } } #[stable(feature = "rust1", since = "1.0.0")] impl Eq for &A where A: Eq {} @@ -1035,48 +1090,85 @@ mod impls { // &mut pointers #[stable(feature = "rust1", since = "1.0.0")] - impl<'a, 'b, A: ?Sized, B: ?Sized> PartialEq<&'b mut B> for &'a mut A where A: PartialEq { + impl<'a, 'b, A: ?Sized, B: ?Sized> PartialEq<&'b mut B> for &'a mut A + where + A: PartialEq, + { #[inline] - fn eq(&self, other: &&'b mut B) -> bool { PartialEq::eq(*self, *other) } + fn eq(&self, other: &&'b mut B) -> bool { + PartialEq::eq(*self, *other) + } #[inline] - fn ne(&self, other: &&'b mut B) -> bool { PartialEq::ne(*self, *other) } + fn ne(&self, other: &&'b mut B) -> bool { + PartialEq::ne(*self, *other) + } } #[stable(feature = "rust1", since = "1.0.0")] - impl<'a, 'b, A: ?Sized, B: ?Sized> PartialOrd<&'b mut B> for &'a mut A where A: PartialOrd { + impl<'a, 'b, A: ?Sized, B: ?Sized> PartialOrd<&'b mut B> for &'a mut A + where + A: PartialOrd, + { #[inline] fn partial_cmp(&self, other: &&'b mut B) -> Option { PartialOrd::partial_cmp(*self, *other) } #[inline] - fn lt(&self, other: &&'b mut B) -> 
bool { PartialOrd::lt(*self, *other) } + fn lt(&self, other: &&'b mut B) -> bool { + PartialOrd::lt(*self, *other) + } #[inline] - fn le(&self, other: &&'b mut B) -> bool { PartialOrd::le(*self, *other) } + fn le(&self, other: &&'b mut B) -> bool { + PartialOrd::le(*self, *other) + } #[inline] - fn ge(&self, other: &&'b mut B) -> bool { PartialOrd::ge(*self, *other) } + fn ge(&self, other: &&'b mut B) -> bool { + PartialOrd::ge(*self, *other) + } #[inline] - fn gt(&self, other: &&'b mut B) -> bool { PartialOrd::gt(*self, *other) } + fn gt(&self, other: &&'b mut B) -> bool { + PartialOrd::gt(*self, *other) + } } #[stable(feature = "rust1", since = "1.0.0")] - impl Ord for &mut A where A: Ord { + impl Ord for &mut A + where + A: Ord, + { #[inline] - fn cmp(&self, other: &Self) -> Ordering { Ord::cmp(*self, *other) } + fn cmp(&self, other: &Self) -> Ordering { + Ord::cmp(*self, *other) + } } #[stable(feature = "rust1", since = "1.0.0")] impl Eq for &mut A where A: Eq {} #[stable(feature = "rust1", since = "1.0.0")] - impl<'a, 'b, A: ?Sized, B: ?Sized> PartialEq<&'b mut B> for &'a A where A: PartialEq { + impl<'a, 'b, A: ?Sized, B: ?Sized> PartialEq<&'b mut B> for &'a A + where + A: PartialEq, + { #[inline] - fn eq(&self, other: &&'b mut B) -> bool { PartialEq::eq(*self, *other) } + fn eq(&self, other: &&'b mut B) -> bool { + PartialEq::eq(*self, *other) + } #[inline] - fn ne(&self, other: &&'b mut B) -> bool { PartialEq::ne(*self, *other) } + fn ne(&self, other: &&'b mut B) -> bool { + PartialEq::ne(*self, *other) + } } #[stable(feature = "rust1", since = "1.0.0")] - impl<'a, 'b, A: ?Sized, B: ?Sized> PartialEq<&'b B> for &'a mut A where A: PartialEq { + impl<'a, 'b, A: ?Sized, B: ?Sized> PartialEq<&'b B> for &'a mut A + where + A: PartialEq, + { #[inline] - fn eq(&self, other: &&'b B) -> bool { PartialEq::eq(*self, *other) } + fn eq(&self, other: &&'b B) -> bool { + PartialEq::eq(*self, *other) + } #[inline] - fn ne(&self, other: &&'b B) -> bool { PartialEq::ne(*self, *other) } + fn ne(&self, other: &&'b B) -> bool { + PartialEq::ne(*self, *other) + } } } diff --git a/src/libcore/convert.rs b/src/libcore/convert.rs index 5bf5c858f865a..cbbf153e34cf2 100644 --- a/src/libcore/convert.rs +++ b/src/libcore/convert.rs @@ -95,7 +95,9 @@ /// ``` #[unstable(feature = "convert_id", issue = "53500")] #[inline] -pub const fn identity(x: T) -> T { x } +pub const fn identity(x: T) -> T { + x +} /// A cheap reference-to-reference conversion. Used to convert a value to a /// reference value within generic code. 
@@ -395,7 +397,9 @@ pub trait TryFrom: Sized { // As lifts over & #[stable(feature = "rust1", since = "1.0.0")] -impl AsRef for &T where T: AsRef +impl AsRef for &T +where + T: AsRef, { fn as_ref(&self) -> &U { >::as_ref(*self) @@ -404,7 +408,9 @@ impl AsRef for &T where T: AsRef // As lifts over &mut #[stable(feature = "rust1", since = "1.0.0")] -impl AsRef for &mut T where T: AsRef +impl AsRef for &mut T +where + T: AsRef, { fn as_ref(&self) -> &U { >::as_ref(*self) @@ -421,7 +427,9 @@ impl AsRef for &mut T where T: AsRef // AsMut lifts over &mut #[stable(feature = "rust1", since = "1.0.0")] -impl AsMut for &mut T where T: AsMut +impl AsMut for &mut T +where + T: AsMut, { fn as_mut(&mut self) -> &mut U { (*self).as_mut() @@ -438,7 +446,9 @@ impl AsMut for &mut T where T: AsMut // From implies Into #[stable(feature = "rust1", since = "1.0.0")] -impl Into for T where U: From +impl Into for T +where + U: From, { fn into(self) -> U { U::from(self) @@ -448,13 +458,16 @@ impl Into for T where U: From // From (and thus Into) is reflexive #[stable(feature = "rust1", since = "1.0.0")] impl From for T { - fn from(t: T) -> T { t } + fn from(t: T) -> T { + t + } } - // TryFrom implies TryInto #[unstable(feature = "try_from", issue = "33417")] -impl TryInto for T where U: TryFrom +impl TryInto for T +where + U: TryFrom, { type Error = U::Error; @@ -466,7 +479,10 @@ impl TryInto for T where U: TryFrom // Infallible conversions are semantically equivalent to fallible conversions // with an uninhabited error type. #[unstable(feature = "try_from", issue = "33417")] -impl TryFrom for T where T: From { +impl TryFrom for T +where + T: From, +{ type Error = !; fn try_from(value: U) -> Result { diff --git a/src/libcore/default.rs b/src/libcore/default.rs index 0e47c2fd0b5d0..be80cd306f35e 100644 --- a/src/libcore/default.rs +++ b/src/libcore/default.rs @@ -121,9 +121,11 @@ macro_rules! default_impl { impl Default for $t { #[inline] #[doc = $doc] - fn default() -> $t { $v } + fn default() -> $t { + $v + } } - } + }; } default_impl! { (), (), "Returns the default value of `()`" } diff --git a/src/libcore/ffi.rs b/src/libcore/ffi.rs index 899fae909467a..04ed1548b5fa0 100644 --- a/src/libcore/ffi.rs +++ b/src/libcore/ffi.rs @@ -4,7 +4,7 @@ //! Utilities related to FFI bindings. -use ::fmt; +use fmt; /// Equivalent to C's `void` type when used as a [pointer]. /// @@ -26,12 +26,20 @@ use ::fmt; #[repr(u8)] #[stable(feature = "raw_os", since = "1.1.0")] pub enum c_void { - #[unstable(feature = "c_void_variant", reason = "should not have to exist", - issue = "0")] - #[doc(hidden)] __variant1, - #[unstable(feature = "c_void_variant", reason = "should not have to exist", - issue = "0")] - #[doc(hidden)] __variant2, + #[unstable( + feature = "c_void_variant", + reason = "should not have to exist", + issue = "0" + )] + #[doc(hidden)] + __variant1, + #[unstable( + feature = "c_void_variant", + reason = "should not have to exist", + issue = "0" + )] + #[doc(hidden)] + __variant2, } #[stable(feature = "std_debug", since = "1.16.0")] @@ -42,21 +50,33 @@ impl fmt::Debug for c_void { } /// Basic implementation of a `va_list`. 
-#[cfg(any(all(not(target_arch = "aarch64"), not(target_arch = "powerpc"), - not(target_arch = "x86_64")), - all(target_arch = "aarch4", target_os = "ios"), - windows))] -#[unstable(feature = "c_variadic", - reason = "the `c_variadic` feature has not been properly tested on \ - all supported platforms", - issue = "27745")] -extern { +#[cfg(any( + all( + not(target_arch = "aarch64"), + not(target_arch = "powerpc"), + not(target_arch = "x86_64") + ), + all(target_arch = "aarch4", target_os = "ios"), + windows +))] +#[unstable( + feature = "c_variadic", + reason = "the `c_variadic` feature has not been properly tested on \ + all supported platforms", + issue = "27745" +)] +extern "C" { type VaListImpl; } -#[cfg(any(all(not(target_arch = "aarch64"), not(target_arch = "powerpc"), - not(target_arch = "x86_64")), - windows))] +#[cfg(any( + all( + not(target_arch = "aarch64"), + not(target_arch = "powerpc"), + not(target_arch = "x86_64") + ), + windows +))] impl fmt::Debug for VaListImpl { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "va_list* {:p}", self) @@ -71,10 +91,12 @@ impl fmt::Debug for VaListImpl { #[cfg(all(target_arch = "aarch64", not(windows)))] #[repr(C)] #[derive(Debug)] -#[unstable(feature = "c_variadic", - reason = "the `c_variadic` feature has not been properly tested on \ - all supported platforms", - issue = "27745")] +#[unstable( + feature = "c_variadic", + reason = "the `c_variadic` feature has not been properly tested on \ + all supported platforms", + issue = "27745" +)] struct VaListImpl { stack: *mut (), gr_top: *mut (), @@ -87,10 +109,12 @@ struct VaListImpl { #[cfg(all(target_arch = "powerpc", not(windows)))] #[repr(C)] #[derive(Debug)] -#[unstable(feature = "c_variadic", - reason = "the `c_variadic` feature has not been properly tested on \ - all supported platforms", - issue = "27745")] +#[unstable( + feature = "c_variadic", + reason = "the `c_variadic` feature has not been properly tested on \ + all supported platforms", + issue = "27745" +)] struct VaListImpl { gpr: u8, fpr: u8, @@ -103,10 +127,12 @@ struct VaListImpl { #[cfg(all(target_arch = "x86_64", not(windows)))] #[repr(C)] #[derive(Debug)] -#[unstable(feature = "c_variadic", - reason = "the `c_variadic` feature has not been properly tested on \ - all supported platforms", - issue = "27745")] +#[unstable( + feature = "c_variadic", + reason = "the `c_variadic` feature has not been properly tested on \ + all supported platforms", + issue = "27745" +)] struct VaListImpl { gp_offset: i32, fp_offset: i32, @@ -117,10 +143,12 @@ struct VaListImpl { /// A wrapper for a `va_list` #[lang = "va_list"] #[derive(Debug)] -#[unstable(feature = "c_variadic", - reason = "the `c_variadic` feature has not been properly tested on \ - all supported platforms", - issue = "27745")] +#[unstable( + feature = "c_variadic", + reason = "the `c_variadic` feature has not been properly tested on \ + all supported platforms", + issue = "27745" +)] #[repr(transparent)] pub struct VaList<'a>(&'a mut VaListImpl); @@ -137,10 +165,12 @@ mod sealed_trait { /// Trait which whitelists the allowed types to be used with [VaList::arg] /// /// [VaList::va_arg]: struct.VaList.html#method.arg - #[unstable(feature = "c_variadic", - reason = "the `c_variadic` feature has not been properly tested on \ - all supported platforms", - issue = "27745")] + #[unstable( + feature = "c_variadic", + reason = "the `c_variadic` feature has not been properly tested on \ + all supported platforms", + issue = "27745" + )] pub trait VaArgSafe {} } @@ 
-156,48 +186,75 @@ macro_rules! impl_va_arg_safe { } } -impl_va_arg_safe!{i8, i16, i32, i64, usize} -impl_va_arg_safe!{u8, u16, u32, u64, isize} -impl_va_arg_safe!{f64} +impl_va_arg_safe! {i8, i16, i32, i64, usize} +impl_va_arg_safe! {u8, u16, u32, u64, isize} +impl_va_arg_safe! {f64} -#[unstable(feature = "c_variadic", - reason = "the `c_variadic` feature has not been properly tested on \ - all supported platforms", - issue = "27745")] +#[unstable( + feature = "c_variadic", + reason = "the `c_variadic` feature has not been properly tested on \ + all supported platforms", + issue = "27745" +)] impl sealed_trait::VaArgSafe for *mut T {} -#[unstable(feature = "c_variadic", - reason = "the `c_variadic` feature has not been properly tested on \ - all supported platforms", - issue = "27745")] +#[unstable( + feature = "c_variadic", + reason = "the `c_variadic` feature has not been properly tested on \ + all supported platforms", + issue = "27745" +)] impl sealed_trait::VaArgSafe for *const T {} impl<'a> VaList<'a> { /// Advance to the next arg. - #[unstable(feature = "c_variadic", - reason = "the `c_variadic` feature has not been properly tested on \ - all supported platforms", - issue = "27745")] + #[unstable( + feature = "c_variadic", + reason = "the `c_variadic` feature has not been properly tested on \ + all supported platforms", + issue = "27745" + )] pub unsafe fn arg(&mut self) -> T { va_arg(self) } /// Copy the `va_list` at the current location. - #[unstable(feature = "c_variadic", - reason = "the `c_variadic` feature has not been properly tested on \ - all supported platforms", - issue = "27745")] + #[unstable( + feature = "c_variadic", + reason = "the `c_variadic` feature has not been properly tested on \ + all supported platforms", + issue = "27745" + )] pub unsafe fn copy(&mut self, f: F) -> R - where F: for<'copy> FnOnce(VaList<'copy>) -> R { - #[cfg(any(all(not(target_arch = "aarch64"), not(target_arch = "powerpc"), - not(target_arch = "x86_64")), - all(target_arch = "aarch4", target_os = "ios"), - windows))] + where + F: for<'copy> FnOnce(VaList<'copy>) -> R, + { + #[cfg(any( + all( + not(target_arch = "aarch64"), + not(target_arch = "powerpc"), + not(target_arch = "x86_64") + ), + all(target_arch = "aarch4", target_os = "ios"), + windows + ))] let mut ap = va_copy(self); - #[cfg(all(any(target_arch = "aarch64", target_arch = "powerpc", target_arch = "x86_64"), - not(windows)))] + #[cfg(all( + any( + target_arch = "aarch64", + target_arch = "powerpc", + target_arch = "x86_64" + ), + not(windows) + ))] let mut ap_inner = va_copy(self); - #[cfg(all(any(target_arch = "aarch64", target_arch = "powerpc", target_arch = "x86_64"), - not(windows)))] + #[cfg(all( + any( + target_arch = "aarch64", + target_arch = "powerpc", + target_arch = "x86_64" + ), + not(windows) + ))] let mut ap = VaList(&mut ap_inner); let ret = f(VaList(ap.0)); va_end(&mut ap); @@ -211,12 +268,23 @@ extern "rust-intrinsic" { fn va_end(ap: &mut VaList); /// Copy the current location of arglist `src` to the arglist `dst`. 
- #[cfg(any(all(not(target_arch = "aarch64"), not(target_arch = "powerpc"), - not(target_arch = "x86_64")), - windows))] + #[cfg(any( + all( + not(target_arch = "aarch64"), + not(target_arch = "powerpc"), + not(target_arch = "x86_64") + ), + windows + ))] fn va_copy<'a>(src: &VaList<'a>) -> VaList<'a>; - #[cfg(all(any(target_arch = "aarch64", target_arch = "powerpc", target_arch = "x86_64"), - not(windows)))] + #[cfg(all( + any( + target_arch = "aarch64", + target_arch = "powerpc", + target_arch = "x86_64" + ), + not(windows) + ))] fn va_copy(src: &VaList) -> VaListImpl; /// Loads an argument of type `T` from the `va_list` `ap` and increment the diff --git a/src/libcore/fmt/builders.rs b/src/libcore/fmt/builders.rs index 2525b47f2bde5..cda2b9946ecb1 100644 --- a/src/libcore/fmt/builders.rs +++ b/src/libcore/fmt/builders.rs @@ -6,8 +6,10 @@ struct PadAdapter<'a> { } impl<'a> PadAdapter<'a> { - fn wrap<'b, 'c: 'a+'b>(fmt: &'c mut fmt::Formatter, slot: &'b mut Option) - -> fmt::Formatter<'b> { + fn wrap<'b, 'c: 'a + 'b>( + fmt: &'c mut fmt::Formatter, + slot: &'b mut Option, + ) -> fmt::Formatter<'b> { fmt.wrap_buf(move |buf| { *slot = Some(PadAdapter { buf, @@ -83,9 +85,10 @@ pub struct DebugStruct<'a, 'b: 'a> { has_fields: bool, } -pub fn debug_struct_new<'a, 'b>(fmt: &'a mut fmt::Formatter<'b>, - name: &str) - -> DebugStruct<'a, 'b> { +pub fn debug_struct_new<'a, 'b>( + fmt: &'a mut fmt::Formatter<'b>, + name: &str, +) -> DebugStruct<'a, 'b> { let result = fmt.write_str(name); DebugStruct { fmt, @@ -99,11 +102,7 @@ impl<'a, 'b: 'a> DebugStruct<'a, 'b> { #[stable(feature = "debug_builders", since = "1.2.0")] pub fn field(&mut self, name: &str, value: &dyn fmt::Debug) -> &mut DebugStruct<'a, 'b> { self.result = self.result.and_then(|_| { - let prefix = if self.has_fields { - "," - } else { - " {" - }; + let prefix = if self.has_fields { "," } else { " {" }; if self.is_pretty() { let mut slot = None; @@ -253,11 +252,7 @@ impl<'a, 'b: 'a> DebugInner<'a, 'b> { if self.is_pretty() { let mut slot = None; let mut writer = PadAdapter::wrap(&mut self.fmt, &mut slot); - writer.write_str(if self.has_fields { - ",\n" - } else { - "\n" - })?; + writer.write_str(if self.has_fields { ",\n" } else { "\n" })?; entry.fmt(&mut writer) } else { if self.has_fields { @@ -338,8 +333,9 @@ impl<'a, 'b: 'a> DebugSet<'a, 'b> { /// Adds the contents of an iterator of entries to the set output. #[stable(feature = "debug_builders", since = "1.2.0")] pub fn entries(&mut self, entries: I) -> &mut DebugSet<'a, 'b> - where D: fmt::Debug, - I: IntoIterator + where + D: fmt::Debug, + I: IntoIterator, { for entry in entries { self.entry(&entry); @@ -351,7 +347,9 @@ impl<'a, 'b: 'a> DebugSet<'a, 'b> { #[stable(feature = "debug_builders", since = "1.2.0")] pub fn finish(&mut self) -> fmt::Result { self.inner.finish(); - self.inner.result.and_then(|_| self.inner.fmt.write_str("}")) + self.inner + .result + .and_then(|_| self.inner.fmt.write_str("}")) } } @@ -409,8 +407,9 @@ impl<'a, 'b: 'a> DebugList<'a, 'b> { /// Adds the contents of an iterator of entries to the list output. 
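The `builders.rs` changes above are indentation-only, but they run through the `DebugStruct`/`DebugList` machinery that custom `Debug` impls drive. A small sketch of that public side, with invented `Point` and `Bag` types:

```rust
use std::fmt;

struct Point {
    x: i32,
    y: i32,
}

impl fmt::Debug for Point {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // debug_struct goes through the PadAdapter indentation logic touched above,
        // so `{:#?}` pretty-printing comes for free.
        f.debug_struct("Point")
            .field("x", &self.x)
            .field("y", &self.y)
            .finish()
    }
}

struct Bag(Vec<i32>);

impl fmt::Debug for Bag {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // DebugList::entries accepts any IntoIterator of Debug items.
        f.debug_list().entries(self.0.iter()).finish()
    }
}

fn main() {
    assert_eq!(format!("{:?}", Point { x: 1, y: 2 }), "Point { x: 1, y: 2 }");
    assert_eq!(format!("{:?}", Bag(vec![1, 2, 3])), "[1, 2, 3]");
}
```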
#[stable(feature = "debug_builders", since = "1.2.0")] pub fn entries(&mut self, entries: I) -> &mut DebugList<'a, 'b> - where D: fmt::Debug, - I: IntoIterator + where + D: fmt::Debug, + I: IntoIterator, { for entry in entries { self.entry(&entry); @@ -422,7 +421,9 @@ impl<'a, 'b: 'a> DebugList<'a, 'b> { #[stable(feature = "debug_builders", since = "1.2.0")] pub fn finish(&mut self) -> fmt::Result { self.inner.finish(); - self.inner.result.and_then(|_| self.inner.fmt.write_str("]")) + self.inner + .result + .and_then(|_| self.inner.fmt.write_str("]")) } } @@ -477,11 +478,7 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> { if self.is_pretty() { let mut slot = None; let mut writer = PadAdapter::wrap(&mut self.fmt, &mut slot); - writer.write_str(if self.has_fields { - ",\n" - } else { - "\n" - })?; + writer.write_str(if self.has_fields { ",\n" } else { "\n" })?; key.fmt(&mut writer)?; writer.write_str(": ")?; value.fmt(&mut writer) @@ -502,9 +499,10 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> { /// Adds the contents of an iterator of entries to the map output. #[stable(feature = "debug_builders", since = "1.2.0")] pub fn entries(&mut self, entries: I) -> &mut DebugMap<'a, 'b> - where K: fmt::Debug, - V: fmt::Debug, - I: IntoIterator + where + K: fmt::Debug, + V: fmt::Debug, + I: IntoIterator, { for (k, v) in entries { self.entry(&k, &v); diff --git a/src/libcore/fmt/float.rs b/src/libcore/fmt/float.rs index 20c626cef1b16..7473fecaae5d1 100644 --- a/src/libcore/fmt/float.rs +++ b/src/libcore/fmt/float.rs @@ -1,13 +1,18 @@ -use fmt::{Formatter, Result, LowerExp, UpperExp, Display, Debug}; +use fmt::{Debug, Display, Formatter, LowerExp, Result, UpperExp}; use mem::MaybeUninit; use num::flt2dec; // Don't inline this so callers don't use the stack space this function // requires unless they have to. #[inline(never)] -fn float_to_decimal_common_exact(fmt: &mut Formatter, num: &T, - sign: flt2dec::Sign, precision: usize) -> Result - where T: flt2dec::DecodableFloat +fn float_to_decimal_common_exact( + fmt: &mut Formatter, + num: &T, + sign: flt2dec::Sign, + precision: usize, +) -> Result +where + T: flt2dec::DecodableFloat, { unsafe { let mut buf = MaybeUninit::<[u8; 1024]>::uninitialized(); // enough for f32 and f64 @@ -15,9 +20,15 @@ fn float_to_decimal_common_exact(fmt: &mut Formatter, num: &T, // FIXME(#53491): Technically, this is calling `get_mut` on an uninitialized // `MaybeUninit` (here and elsewhere in this file). Revisit this once // we decided whether that is valid or not. - let formatted = flt2dec::to_exact_fixed_str(flt2dec::strategy::grisu::format_exact, - *num, sign, precision, - false, buf.get_mut(), parts.get_mut()); + let formatted = flt2dec::to_exact_fixed_str( + flt2dec::strategy::grisu::format_exact, + *num, + sign, + precision, + false, + buf.get_mut(), + parts.get_mut(), + ); fmt.pad_formatted_parts(&formatted) } } @@ -25,32 +36,48 @@ fn float_to_decimal_common_exact(fmt: &mut Formatter, num: &T, // Don't inline this so callers that call both this and the above won't wind // up using the combined stack space of both functions in some cases. 
#[inline(never)] -fn float_to_decimal_common_shortest(fmt: &mut Formatter, num: &T, - sign: flt2dec::Sign, precision: usize) -> Result - where T: flt2dec::DecodableFloat +fn float_to_decimal_common_shortest( + fmt: &mut Formatter, + num: &T, + sign: flt2dec::Sign, + precision: usize, +) -> Result +where + T: flt2dec::DecodableFloat, { unsafe { // enough for f32 and f64 let mut buf = MaybeUninit::<[u8; flt2dec::MAX_SIG_DIGITS]>::uninitialized(); let mut parts = MaybeUninit::<[flt2dec::Part; 4]>::uninitialized(); - let formatted = flt2dec::to_shortest_str(flt2dec::strategy::grisu::format_shortest, *num, - sign, precision, false, buf.get_mut(), - parts.get_mut()); + let formatted = flt2dec::to_shortest_str( + flt2dec::strategy::grisu::format_shortest, + *num, + sign, + precision, + false, + buf.get_mut(), + parts.get_mut(), + ); fmt.pad_formatted_parts(&formatted) } } // Common code of floating point Debug and Display. -fn float_to_decimal_common(fmt: &mut Formatter, num: &T, - negative_zero: bool, min_precision: usize) -> Result - where T: flt2dec::DecodableFloat +fn float_to_decimal_common( + fmt: &mut Formatter, + num: &T, + negative_zero: bool, + min_precision: usize, +) -> Result +where + T: flt2dec::DecodableFloat, { let force_sign = fmt.sign_plus(); let sign = match (force_sign, negative_zero) { (false, false) => flt2dec::Sign::Minus, - (false, true) => flt2dec::Sign::MinusRaw, - (true, false) => flt2dec::Sign::MinusPlus, - (true, true) => flt2dec::Sign::MinusPlusRaw, + (false, true) => flt2dec::Sign::MinusRaw, + (true, false) => flt2dec::Sign::MinusPlus, + (true, true) => flt2dec::Sign::MinusPlusRaw, }; if let Some(precision) = fmt.precision { @@ -63,17 +90,28 @@ fn float_to_decimal_common(fmt: &mut Formatter, num: &T, // Don't inline this so callers don't use the stack space this function // requires unless they have to. #[inline(never)] -fn float_to_exponential_common_exact(fmt: &mut Formatter, num: &T, - sign: flt2dec::Sign, precision: usize, - upper: bool) -> Result - where T: flt2dec::DecodableFloat +fn float_to_exponential_common_exact( + fmt: &mut Formatter, + num: &T, + sign: flt2dec::Sign, + precision: usize, + upper: bool, +) -> Result +where + T: flt2dec::DecodableFloat, { unsafe { let mut buf = MaybeUninit::<[u8; 1024]>::uninitialized(); // enough for f32 and f64 let mut parts = MaybeUninit::<[flt2dec::Part; 6]>::uninitialized(); - let formatted = flt2dec::to_exact_exp_str(flt2dec::strategy::grisu::format_exact, - *num, sign, precision, - upper, buf.get_mut(), parts.get_mut()); + let formatted = flt2dec::to_exact_exp_str( + flt2dec::strategy::grisu::format_exact, + *num, + sign, + precision, + upper, + buf.get_mut(), + parts.get_mut(), + ); fmt.pad_formatted_parts(&formatted) } } @@ -81,30 +119,41 @@ fn float_to_exponential_common_exact(fmt: &mut Formatter, num: &T, // Don't inline this so callers that call both this and the above won't wind // up using the combined stack space of both functions in some cases. 
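The `float.rs` hunks only re-wrap argument lists, but the sign table they realign (`Minus`, `MinusRaw`, `MinusPlus`, `MinusPlusRaw`) is easiest to read from the caller's side. A few observable cases of the same code paths, assuming ordinary `format!` behaviour:

```rust
fn main() {
    // No precision: the "shortest" strategy picks the minimal round-trippable form.
    assert_eq!(format!("{}", 0.3_f64), "0.3");
    // Negative zero keeps its sign (the MinusRaw case).
    assert_eq!(format!("{}", -0.0_f64), "-0");
    // Explicit precision takes the "exact" code path.
    assert_eq!(format!("{:.3}", 1.5_f64), "1.500");
    // `+` selects MinusPlus: a sign is always printed.
    assert_eq!(format!("{:+.1}", 2.0_f64), "+2.0");
    // LowerExp / UpperExp share the exponential common path.
    assert_eq!(format!("{:e}", 1250.0_f64), "1.25e3");
    assert_eq!(format!("{:E}", 1250.0_f64), "1.25E3");
}
```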
#[inline(never)] -fn float_to_exponential_common_shortest(fmt: &mut Formatter, - num: &T, sign: flt2dec::Sign, - upper: bool) -> Result - where T: flt2dec::DecodableFloat +fn float_to_exponential_common_shortest( + fmt: &mut Formatter, + num: &T, + sign: flt2dec::Sign, + upper: bool, +) -> Result +where + T: flt2dec::DecodableFloat, { unsafe { // enough for f32 and f64 let mut buf = MaybeUninit::<[u8; flt2dec::MAX_SIG_DIGITS]>::uninitialized(); let mut parts = MaybeUninit::<[flt2dec::Part; 6]>::uninitialized(); - let formatted = flt2dec::to_shortest_exp_str(flt2dec::strategy::grisu::format_shortest, - *num, sign, (0, 0), upper, - buf.get_mut(), parts.get_mut()); + let formatted = flt2dec::to_shortest_exp_str( + flt2dec::strategy::grisu::format_shortest, + *num, + sign, + (0, 0), + upper, + buf.get_mut(), + parts.get_mut(), + ); fmt.pad_formatted_parts(&formatted) } } // Common code of floating point LowerExp and UpperExp. fn float_to_exponential_common(fmt: &mut Formatter, num: &T, upper: bool) -> Result - where T: flt2dec::DecodableFloat +where + T: flt2dec::DecodableFloat, { let force_sign = fmt.sign_plus(); let sign = match force_sign { false => flt2dec::Sign::Minus, - true => flt2dec::Sign::MinusPlus, + true => flt2dec::Sign::MinusPlus, }; if let Some(precision) = fmt.precision { @@ -116,7 +165,7 @@ fn float_to_exponential_common(fmt: &mut Formatter, num: &T, upper: bool) -> } macro_rules! floating { - ($ty:ident) => ( + ($ty:ident) => { #[stable(feature = "rust1", since = "1.0.0")] impl Debug for $ty { fn fmt(&self, fmt: &mut Formatter) -> Result { @@ -144,7 +193,7 @@ macro_rules! floating { float_to_exponential_common(fmt, self, true) } } - ) + }; } floating! { f32 } diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index ec1aeb8a7d1e9..6f4a5897da406 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -2,7 +2,7 @@ #![stable(feature = "rust1", since = "1.0.0")] -use cell::{UnsafeCell, Cell, RefCell, Ref, RefMut}; +use cell::{Cell, Ref, RefCell, RefMut, UnsafeCell}; use marker::PhantomData; use mem; use num::flt2dec; @@ -11,9 +11,9 @@ use result; use slice; use str; +mod builders; mod float; mod num; -mod builders; #[stable(feature = "fmt_flags_align", since = "1.28.0")] /// Possible alignments returned by `Formatter::align` @@ -31,10 +31,13 @@ pub enum Alignment { } #[stable(feature = "debug_builders", since = "1.2.0")] -pub use self::builders::{DebugStruct, DebugTuple, DebugSet, DebugList, DebugMap}; +pub use self::builders::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple}; -#[unstable(feature = "fmt_internals", reason = "internal to format_args!", - issue = "0")] +#[unstable( + feature = "fmt_internals", + reason = "internal to format_args!", + issue = "0" +)] #[doc(hidden)] pub mod rt { pub mod v1; @@ -195,10 +198,11 @@ pub trait Write { // This Adapter is needed to allow `self` (of type `&mut // Self`) to be cast to a Write (below) without // requiring a `Sized` bound. - struct Adapter<'a,T: ?Sized +'a>(&'a mut T); + struct Adapter<'a, T: ?Sized + 'a>(&'a mut T); impl Write for Adapter<'_, T> - where T: Write + where + T: Write, { fn write_str(&mut self, s: &str) -> Result { self.0.write_str(s) @@ -244,7 +248,7 @@ pub struct Formatter<'a> { width: Option, precision: Option, - buf: &'a mut (dyn Write+'a), + buf: &'a mut (dyn Write + 'a), curarg: slice::Iter<'a, ArgumentV1<'a>>, args: &'a [ArgumentV1<'a>], } @@ -270,16 +274,22 @@ struct Void { /// types, and then this struct is used to canonicalize arguments to one type. 
#[derive(Copy)] #[allow(missing_debug_implementations)] -#[unstable(feature = "fmt_internals", reason = "internal to format_args!", - issue = "0")] +#[unstable( + feature = "fmt_internals", + reason = "internal to format_args!", + issue = "0" +)] #[doc(hidden)] pub struct ArgumentV1<'a> { value: &'a Void, formatter: fn(&Void, &mut Formatter) -> Result, } -#[unstable(feature = "fmt_internals", reason = "internal to format_args!", - issue = "0")] +#[unstable( + feature = "fmt_internals", + reason = "internal to format_args!", + issue = "0" +)] impl Clone for ArgumentV1<'_> { fn clone(&self) -> Self { *self @@ -293,21 +303,26 @@ impl<'a> ArgumentV1<'a> { } #[doc(hidden)] - #[unstable(feature = "fmt_internals", reason = "internal to format_args!", - issue = "0")] - pub fn new<'b, T>(x: &'b T, - f: fn(&T, &mut Formatter) -> Result) -> ArgumentV1<'b> { + #[unstable( + feature = "fmt_internals", + reason = "internal to format_args!", + issue = "0" + )] + pub fn new<'b, T>(x: &'b T, f: fn(&T, &mut Formatter) -> Result) -> ArgumentV1<'b> { unsafe { ArgumentV1 { formatter: mem::transmute(f), - value: mem::transmute(x) + value: mem::transmute(x), } } } #[doc(hidden)] - #[unstable(feature = "fmt_internals", reason = "internal to format_args!", - issue = "0")] + #[unstable( + feature = "fmt_internals", + reason = "internal to format_args!", + issue = "0" + )] pub fn from_usize(x: &usize) -> ArgumentV1 { ArgumentV1::new(x, ArgumentV1::show_usize) } @@ -323,16 +338,26 @@ impl<'a> ArgumentV1<'a> { // flags available in the v1 format of format_args #[derive(Copy, Clone)] -enum FlagV1 { SignPlus, SignMinus, Alternate, SignAwareZeroPad, DebugLowerHex, DebugUpperHex } +enum FlagV1 { + SignPlus, + SignMinus, + Alternate, + SignAwareZeroPad, + DebugLowerHex, + DebugUpperHex, +} impl<'a> Arguments<'a> { /// When using the format_args!() macro, this function is used to generate the /// Arguments structure. - #[doc(hidden)] #[inline] - #[unstable(feature = "fmt_internals", reason = "internal to format_args!", - issue = "0")] - pub fn new_v1(pieces: &'a [&'a str], - args: &'a [ArgumentV1<'a>]) -> Arguments<'a> { + #[doc(hidden)] + #[inline] + #[unstable( + feature = "fmt_internals", + reason = "internal to format_args!", + issue = "0" + )] + pub fn new_v1(pieces: &'a [&'a str], args: &'a [ArgumentV1<'a>]) -> Arguments<'a> { Arguments { pieces, fmt: None, @@ -346,12 +371,18 @@ impl<'a> Arguments<'a> { /// `CountIsParam` or `CountIsNextParam` has to point to an argument /// created with `argumentusize`. However, failing to do so doesn't cause /// unsafety, but will ignore invalid . - #[doc(hidden)] #[inline] - #[unstable(feature = "fmt_internals", reason = "internal to format_args!", - issue = "0")] - pub fn new_v1_formatted(pieces: &'a [&'a str], - args: &'a [ArgumentV1<'a>], - fmt: &'a [rt::v1::Argument]) -> Arguments<'a> { + #[doc(hidden)] + #[inline] + #[unstable( + feature = "fmt_internals", + reason = "internal to format_args!", + issue = "0" + )] + pub fn new_v1_formatted( + pieces: &'a [&'a str], + args: &'a [ArgumentV1<'a>], + fmt: &'a [rt::v1::Argument], + ) -> Arguments<'a> { Arguments { pieces, fmt: Some(fmt), @@ -363,12 +394,15 @@ impl<'a> Arguments<'a> { /// /// This is intended to be used for setting initial `String` capacity /// when using `format!`. Note: this is neither the lower nor upper bound. 
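`ArgumentV1` and `Arguments::new_v1*` above are `format_args!` internals, so there is little to call directly; the public contract is just "feed an `Arguments` to something implementing `fmt::Write`". A sketch of that round trip with a hand-rolled sink (the `Sink` type is illustrative only):

```rust
use std::fmt::{self, Write};

/// A tiny sink that accumulates formatted output.
struct Sink {
    buf: String,
}

impl fmt::Write for Sink {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        self.buf.push_str(s);
        Ok(())
    }
}

fn main() {
    let mut sink = Sink { buf: String::new() };

    // `format_args!` builds the `Arguments` value whose pieces/args are being
    // reformatted above; `fmt::write` replays it into any `Write` sink.
    fmt::write(&mut sink, format_args!("{} + {} = {}", 1, 2, 1 + 2)).unwrap();
    assert_eq!(sink.buf, "1 + 2 = 3");

    // `write!` is the same machinery via the default `Write::write_fmt`.
    write!(sink, "; done").unwrap();
    assert_eq!(sink.buf, "1 + 2 = 3; done");
}
```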
- #[doc(hidden)] #[inline] - #[unstable(feature = "fmt_internals", reason = "internal to format_args!", - issue = "0")] + #[doc(hidden)] + #[inline] + #[unstable( + feature = "fmt_internals", + reason = "internal to format_args!", + issue = "0" + )] pub fn estimated_capacity(&self) -> usize { - let pieces_length: usize = self.pieces.iter() - .map(|x| x.len()).sum(); + let pieces_length: usize = self.pieces.iter().map(|x| x.len()).sum(); if self.args.is_empty() { pieces_length @@ -532,10 +566,13 @@ impl Display for Arguments<'_> { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented( - on(crate_local, label="`{Self}` cannot be formatted using `{{:?}}`", - note="add `#[derive(Debug)]` or manually implement `{Debug}`"), - message="`{Self}` doesn't implement `{Debug}`", - label="`{Self}` cannot be formatted using `{{:?}}` because it doesn't implement `{Debug}`", + on( + crate_local, + label = "`{Self}` cannot be formatted using `{{:?}}`", + note = "add `#[derive(Debug)]` or manually implement `{Debug}`" + ), + message = "`{Self}` doesn't implement `{Debug}`", + label = "`{Self}` cannot be formatted using `{{:?}}` because it doesn't implement `{Debug}`" )] #[doc(alias = "{:?}")] #[lang = "debug_trait"] @@ -600,14 +637,14 @@ pub trait Debug { /// ``` #[rustc_on_unimplemented( on( - _Self="std::path::Path", - label="`{Self}` cannot be formatted with the default formatter; call `.display()` on it", - note="call `.display()` or `.to_string_lossy()` to safely print paths, \ - as they may contain non-Unicode data" + _Self = "std::path::Path", + label = "`{Self}` cannot be formatted with the default formatter; call `.display()` on it", + note = "call `.display()` or `.to_string_lossy()` to safely print paths, \ + as they may contain non-Unicode data" ), - message="`{Self}` doesn't implement `{Display}`", - label="`{Self}` cannot be formatted with the default formatter", - note="in format strings you may be able to use `{{:?}}` (or {{:#?}} for pretty-print) instead", + message = "`{Self}` doesn't implement `{Display}`", + label = "`{Self}` cannot be formatted with the default formatter", + note = "in format strings you may be able to use `{{:?}}` (or {{:#?}} for pretty-print) instead" )] #[doc(alias = "{}")] #[stable(feature = "rust1", since = "1.0.0")] @@ -1056,7 +1093,9 @@ pub fn write(output: &mut dyn Write, args: Arguments) -> Result { impl<'a> Formatter<'a> { fn wrap_buf<'b, 'c, F>(&'b mut self, wrap: F) -> Formatter<'c> - where 'b: 'c, F: FnOnce(&'b mut (dyn Write+'b)) -> &'c mut (dyn Write+'c) + where + 'b: 'c, + F: FnOnce(&'b mut (dyn Write + 'b)) -> &'c mut (dyn Write + 'c), { Formatter { // We want to change this @@ -1089,7 +1128,7 @@ impl<'a> Formatter<'a> { // Extract the correct argument let value = match arg.position { - rt::v1::Position::Next => { *self.curarg.next().unwrap() } + rt::v1::Position::Next => *self.curarg.next().unwrap(), rt::v1::Position::At(i) => self.args[i], }; @@ -1101,12 +1140,8 @@ impl<'a> Formatter<'a> { match *cnt { rt::v1::Count::Is(n) => Some(n), rt::v1::Count::Implied => None, - rt::v1::Count::Param(i) => { - self.args[i].as_usize() - } - rt::v1::Count::NextParam => { - self.curarg.next().and_then(|arg| arg.as_usize()) - } + rt::v1::Count::Param(i) => self.args[i].as_usize(), + rt::v1::Count::NextParam => self.curarg.next().and_then(|arg| arg.as_usize()), } } @@ -1157,23 +1192,22 @@ impl<'a> Formatter<'a> { /// assert_eq!(&format!("{:0>#8}", Foo::new(-1)), "00-Foo 1"); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn 
pad_integral(&mut self, - is_nonnegative: bool, - prefix: &str, - buf: &str) - -> Result { + pub fn pad_integral(&mut self, is_nonnegative: bool, prefix: &str, buf: &str) -> Result { let mut width = buf.len(); let mut sign = None; if !is_nonnegative { - sign = Some('-'); width += 1; + sign = Some('-'); + width += 1; } else if self.sign_plus() { - sign = Some('+'); width += 1; + sign = Some('+'); + width += 1; } let mut prefixed = false; if self.alternate() { - prefixed = true; width += prefix.chars().count(); + prefixed = true; + width += prefix.chars().count(); } // Writes the sign if it exists, and then the prefix if it was requested @@ -1181,8 +1215,11 @@ impl<'a> Formatter<'a> { if let Some(c) = sign { f.buf.write_str(c.encode_utf8(&mut [0; 4]))?; } - if prefixed { f.buf.write_str(prefix) } - else { Ok(()) } + if prefixed { + f.buf.write_str(prefix) + } else { + Ok(()) + } }; // The `width` field is more of a `min-width` parameter at this point. @@ -1190,12 +1227,14 @@ impl<'a> Formatter<'a> { // If there's no minimum length requirements then we can just // write the bytes. None => { - write_prefix(self)?; self.buf.write_str(buf) + write_prefix(self)?; + self.buf.write_str(buf) } // Check if we're over the minimum width, if so then we can also // just write the bytes. Some(min) if width >= min => { - write_prefix(self)?; self.buf.write_str(buf) + write_prefix(self)?; + self.buf.write_str(buf) } // The sign and prefix goes before the padding if the fill character // is zero @@ -1208,11 +1247,10 @@ impl<'a> Formatter<'a> { }) } // Otherwise, the sign and prefix goes after the padding - Some(min) => { - self.with_padding(min - width, rt::v1::Alignment::Right, |f| { - write_prefix(f)?; f.buf.write_str(buf) - }) - } + Some(min) => self.with_padding(min - width, rt::v1::Alignment::Right, |f| { + write_prefix(f)?; + f.buf.write_str(buf) + }), } } @@ -1275,35 +1313,30 @@ impl<'a> Formatter<'a> { None => self.buf.write_str(s), // If we're under the maximum width, check if we're over the minimum // width, if so it's as easy as just emitting the string. - Some(width) if s.chars().count() >= width => { - self.buf.write_str(s) - } + Some(width) if s.chars().count() >= width => self.buf.write_str(s), // If we're under both the maximum and the minimum width, then fill // up the minimum width with the specified string + some alignment. Some(width) => { let align = rt::v1::Alignment::Left; - self.with_padding(width - s.chars().count(), align, |me| { - me.buf.write_str(s) - }) + self.with_padding(width - s.chars().count(), align, |me| me.buf.write_str(s)) } } } /// Runs a callback, emitting the correct padding either before or /// afterwards depending on whether right or left alignment is requested. 
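`pad_integral` is the workhorse being reflowed just above; implementing `Display` on top of it is how a custom numeric type gets width, fill, `#` and `0` flags for free. A minimal sketch (the `Hexish` wrapper is invented; it ignores `i32::MIN` for brevity):

```rust
use std::fmt;

struct Hexish(i32);

impl fmt::Display for Hexish {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // pad_integral writes the sign, the `#`-gated prefix and the digits,
        // then applies width / fill / sign-aware zero padding as in the code above.
        f.pad_integral(self.0 >= 0, "0x", &format!("{:x}", self.0.abs()))
    }
}

fn main() {
    assert_eq!(format!("{}", Hexish(27)), "1b");
    assert_eq!(format!("{:#}", Hexish(27)), "0x1b");
    assert_eq!(format!("{:#08}", Hexish(27)), "0x00001b");
    assert_eq!(format!("{:>6}", Hexish(-27)), "   -1b");
}
```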
- fn with_padding(&mut self, padding: usize, default: rt::v1::Alignment, - f: F) -> Result - where F: FnOnce(&mut Formatter) -> Result, + fn with_padding(&mut self, padding: usize, default: rt::v1::Alignment, f: F) -> Result + where + F: FnOnce(&mut Formatter) -> Result, { let align = match self.align { rt::v1::Alignment::Unknown => default, - _ => self.align + _ => self.align, }; let (pre_pad, post_pad) = match align { rt::v1::Alignment::Left => (0, padding), - rt::v1::Alignment::Right | - rt::v1::Alignment::Unknown => (padding, 0), + rt::v1::Alignment::Right | rt::v1::Alignment::Unknown => (padding, 0), rt::v1::Alignment::Center => (padding / 2, (padding + 1) / 2), }; @@ -1341,7 +1374,11 @@ impl<'a> Formatter<'a> { // remove the sign from the formatted parts formatted.sign = b""; - width = if width < sign.len() { 0 } else { width - sign.len() }; + width = if width < sign.len() { + 0 + } else { + width - sign.len() + }; align = rt::v1::Alignment::Right; self.fill = '0'; self.align = rt::v1::Alignment::Right; @@ -1349,12 +1386,11 @@ impl<'a> Formatter<'a> { // remaining parts go through the ordinary padding process. let len = formatted.len(); - let ret = if width <= len { // no padding + let ret = if width <= len { + // no padding self.write_formatted_parts(&formatted) } else { - self.with_padding(width - len, align, |f| { - f.write_formatted_parts(&formatted) - }) + self.with_padding(width - len, align, |f| f.write_formatted_parts(&formatted)) }; self.fill = old_fill; self.align = old_align; @@ -1454,10 +1490,14 @@ impl<'a> Formatter<'a> { /// Flags for formatting #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_deprecated(since = "1.24.0", - reason = "use the `sign_plus`, `sign_minus`, `alternate`, \ - or `sign_aware_zero_pad` methods instead")] - pub fn flags(&self) -> u32 { self.flags } + #[rustc_deprecated( + since = "1.24.0", + reason = "use the `sign_plus`, `sign_minus`, `alternate`, \ + or `sign_aware_zero_pad` methods instead" + )] + pub fn flags(&self) -> u32 { + self.flags + } /// Character used as 'fill' whenever there is alignment. /// @@ -1487,7 +1527,9 @@ impl<'a> Formatter<'a> { /// assert_eq!(&format!("{:t>6}", Foo), "tttttt"); /// ``` #[stable(feature = "fmt_flags", since = "1.5.0")] - pub fn fill(&self) -> char { self.fill } + pub fn fill(&self) -> char { + self.fill + } /// Flag indicating what form of alignment was requested. /// @@ -1557,7 +1599,9 @@ impl<'a> Formatter<'a> { /// assert_eq!(&format!("{}", Foo(23)), "Foo(23)"); /// ``` #[stable(feature = "fmt_flags", since = "1.5.0")] - pub fn width(&self) -> Option { self.width } + pub fn width(&self) -> Option { + self.width + } /// Optionally specified precision for numeric types. /// @@ -1584,7 +1628,9 @@ impl<'a> Formatter<'a> { /// assert_eq!(&format!("{}", Foo(23.2)), "Foo(23.20)"); /// ``` #[stable(feature = "fmt_flags", since = "1.5.0")] - pub fn precision(&self) -> Option { self.precision } + pub fn precision(&self) -> Option { + self.precision + } /// Determines if the `+` flag was specified. /// @@ -1612,7 +1658,9 @@ impl<'a> Formatter<'a> { /// assert_eq!(&format!("{}", Foo(23)), "Foo(23)"); /// ``` #[stable(feature = "fmt_flags", since = "1.5.0")] - pub fn sign_plus(&self) -> bool { self.flags & (1 << FlagV1::SignPlus as u32) != 0 } + pub fn sign_plus(&self) -> bool { + self.flags & (1 << FlagV1::SignPlus as u32) != 0 + } /// Determines if the `-` flag was specified. 
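The accessor methods rewrapped in this stretch (`flags`, `fill`, `width`, `precision`, `sign_plus`, `sign_minus`, `alternate`) are how a manual `Display` impl inspects the format spec. A small sketch that honours precision and the `+` flag but deliberately ignores width (the `Quantity` type is made up):

```rust
use std::fmt;

struct Quantity(f64);

impl fmt::Display for Quantity {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // precision() reflects `{:.N}`; fall back to 2 digits.
        let prec = f.precision().unwrap_or(2);
        // sign_plus() reflects the `+` flag queried above.
        if f.sign_plus() && self.0 >= 0.0 {
            write!(f, "+{:.*}", prec, self.0)
        } else {
            write!(f, "{:.*}", prec, self.0)
        }
        // width() and fill() are available the same way; this sketch ignores them.
    }
}

fn main() {
    assert_eq!(format!("{}", Quantity(3.14159)), "3.14");
    assert_eq!(format!("{:+.3}", Quantity(3.14159)), "+3.142");
    assert_eq!(format!("{:.1}", Quantity(-2.71828)), "-2.7");
}
```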
/// @@ -1638,7 +1686,9 @@ impl<'a> Formatter<'a> { /// assert_eq!(&format!("{}", Foo(23)), "Foo(23)"); /// ``` #[stable(feature = "fmt_flags", since = "1.5.0")] - pub fn sign_minus(&self) -> bool { self.flags & (1 << FlagV1::SignMinus as u32) != 0 } + pub fn sign_minus(&self) -> bool { + self.flags & (1 << FlagV1::SignMinus as u32) != 0 + } /// Determines if the `#` flag was specified. /// @@ -1663,7 +1713,9 @@ impl<'a> Formatter<'a> { /// assert_eq!(&format!("{}", Foo(23)), "23"); /// ``` #[stable(feature = "fmt_flags", since = "1.5.0")] - pub fn alternate(&self) -> bool { self.flags & (1 << FlagV1::Alternate as u32) != 0 } + pub fn alternate(&self) -> bool { + self.flags & (1 << FlagV1::Alternate as u32) != 0 + } /// Determines if the `0` flag was specified. /// @@ -1692,9 +1744,13 @@ impl<'a> Formatter<'a> { // FIXME: Decide what public API we want for these two flags. // https://github.com/rust-lang/rust/issues/48584 - fn debug_lower_hex(&self) -> bool { self.flags & (1 << FlagV1::DebugLowerHex as u32) != 0 } + fn debug_lower_hex(&self) -> bool { + self.flags & (1 << FlagV1::DebugLowerHex as u32) != 0 + } - fn debug_upper_hex(&self) -> bool { self.flags & (1 << FlagV1::DebugUpperHex as u32) != 0 } + fn debug_upper_hex(&self) -> bool { + self.flags & (1 << FlagV1::DebugUpperHex as u32) != 0 + } /// Creates a [`DebugStruct`] builder designed to assist with creation of /// [`fmt::Debug`] implementations for structs. @@ -2050,11 +2106,15 @@ impl Pointer for &mut T { #[stable(feature = "rust1", since = "1.0.0")] impl Debug for *const T { - fn fmt(&self, f: &mut Formatter) -> Result { Pointer::fmt(self, f) } + fn fmt(&self, f: &mut Formatter) -> Result { + Pointer::fmt(self, f) + } } #[stable(feature = "rust1", since = "1.0.0")] impl Debug for *mut T { - fn fmt(&self, f: &mut Formatter) -> Result { Pointer::fmt(self, f) } + fn fmt(&self, f: &mut Formatter) -> Result { + Pointer::fmt(self, f) + } } macro_rules! peel { @@ -2112,9 +2172,7 @@ impl Debug for PhantomData { #[stable(feature = "rust1", since = "1.0.0")] impl Debug for Cell { fn fmt(&self, f: &mut Formatter) -> Result { - f.debug_struct("Cell") - .field("value", &self.get()) - .finish() + f.debug_struct("Cell").field("value", &self.get()).finish() } } @@ -2122,11 +2180,7 @@ impl Debug for Cell { impl Debug for RefCell { fn fmt(&self, f: &mut Formatter) -> Result { match self.try_borrow() { - Ok(borrow) => { - f.debug_struct("RefCell") - .field("value", &borrow) - .finish() - } + Ok(borrow) => f.debug_struct("RefCell").field("value", &borrow).finish(), Err(_) => { // The RefCell is mutably borrowed so we can't look at its value // here. Show a placeholder instead. diff --git a/src/libcore/fmt/num.rs b/src/libcore/fmt/num.rs index c7c8fc50efaae..9831619a96b0e 100644 --- a/src/libcore/fmt/num.rs +++ b/src/libcore/fmt/num.rs @@ -4,15 +4,16 @@ use fmt; +use mem; use ops::{Div, Rem, Sub}; -use str; -use slice; use ptr; -use mem; +use slice; +use str; #[doc(hidden)] -trait Int: PartialEq + PartialOrd + Div + Rem + - Sub + Copy { +trait Int: + PartialEq + PartialOrd + Div + Rem + Sub + Copy +{ fn zero() -> Self; fn from_u8(u: u8) -> Self; fn to_u8(&self) -> u8; @@ -60,25 +61,25 @@ trait GenericRadix { // Accumulate each digit of the number from the least significant // to the most significant figure. for byte in buf.iter_mut().rev() { - let n = x % base; // Get the current place value. - x = x / base; // Deaccumulate the number. + let n = x % base; // Get the current place value. + x = x / base; // Deaccumulate the number. 
*byte = Self::digit(n.to_u8()); // Store the digit in the buffer. curr -= 1; if x == zero { // No more digits left to accumulate. - break + break; }; } } else { // Do the same as above, but accounting for two's complement. for byte in buf.iter_mut().rev() { - let n = zero - (x % base); // Get the current place value. - x = x / base; // Deaccumulate the number. + let n = zero - (x % base); // Get the current place value. + x = x / base; // Deaccumulate the number. *byte = Self::digit(n.to_u8()); // Store the digit in the buffer. curr -= 1; if x == zero { // No more digits left to accumulate. - break + break; }; } } @@ -121,9 +122,9 @@ macro_rules! radix { radix! { Binary, 2, "0b", x @ 0 ..= 1 => b'0' + x } radix! { Octal, 8, "0o", x @ 0 ..= 7 => b'0' + x } radix! { LowerHex, 16, "0x", x @ 0 ..= 9 => b'0' + x, - x @ 10 ..= 15 => b'a' + (x - 10) } +x @ 10 ..= 15 => b'a' + (x - 10) } radix! { UpperHex, 16, "0x", x @ 0 ..= 9 => b'0' + x, - x @ 10 ..= 15 => b'A' + (x - 10) } +x @ 10 ..= 15 => b'A' + (x - 10) } macro_rules! int_base { ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => { @@ -133,7 +134,7 @@ macro_rules! int_base { $Radix.fmt_int(*self as $U, f) } } - } + }; } macro_rules! debug { @@ -151,7 +152,7 @@ macro_rules! debug { } } } - } + }; } macro_rules! integer { @@ -167,7 +168,7 @@ macro_rules! integer { int_base! { LowerHex for $Uint as $Uint -> LowerHex } int_base! { UpperHex for $Uint as $Uint -> UpperHex } debug! { $Uint } - } + }; } integer! { isize, usize } integer! { i8, u8 } @@ -176,8 +177,7 @@ integer! { i32, u32 } integer! { i64, u64 } integer! { i128, u128 } -const DEC_DIGITS_LUT: &'static[u8] = - b"0001020304050607080910111213141516171819\ +const DEC_DIGITS_LUT: &'static [u8] = b"0001020304050607080910111213141516171819\ 2021222324252627282930313233343536373839\ 4041424344454647484950515253545556575859\ 6061626364656667686970717273747576777879\ diff --git a/src/libcore/future/future.rs b/src/libcore/future/future.rs index 0bc8a0fd26a04..d6c209ffd56c6 100644 --- a/src/libcore/future/future.rs +++ b/src/libcore/future/future.rs @@ -5,7 +5,7 @@ use marker::Unpin; use ops; use pin::Pin; -use task::{Poll, LocalWaker}; +use task::{LocalWaker, Poll}; /// A future represents an asynchronous computation. /// diff --git a/src/libcore/hash/mod.rs b/src/libcore/hash/mod.rs index d5d29c91e0346..9772556c5a0ba 100644 --- a/src/libcore/hash/mod.rs +++ b/src/libcore/hash/mod.rs @@ -191,7 +191,8 @@ pub trait Hash { /// [`Hasher`]: trait.Hasher.html #[stable(feature = "hash_slice", since = "1.3.0")] fn hash_slice(data: &[Self], state: &mut H) - where Self: Sized + where + Self: Sized, { for piece in data { piece.hash(state); @@ -545,9 +546,9 @@ impl Eq for BuildHasherDefault {} ////////////////////////////////////////////////////////////////////////////// mod impls { + use super::*; use mem; use slice; - use super::*; macro_rules! 
impl_write { ($(($ty:ident, $meth:ident),)*) => {$( @@ -657,7 +658,6 @@ mod impls { } } - #[stable(feature = "rust1", since = "1.0.0")] impl Hash for &T { fn hash(&self, state: &mut H) { @@ -680,9 +680,7 @@ mod impls { state.write_usize(*self as *const () as usize); } else { // Fat pointer - let (a, b) = unsafe { - *(self as *const Self as *const (usize, usize)) - }; + let (a, b) = unsafe { *(self as *const Self as *const (usize, usize)) }; state.write_usize(a); state.write_usize(b); } @@ -697,9 +695,7 @@ mod impls { state.write_usize(*self as *const () as usize); } else { // Fat pointer - let (a, b) = unsafe { - *(self as *const Self as *const (usize, usize)) - }; + let (a, b) = unsafe { *(self as *const Self as *const (usize, usize)) }; state.write_usize(a); state.write_usize(b); } diff --git a/src/libcore/hash/sip.rs b/src/libcore/hash/sip.rs index 3377b831a9daa..e4cea307bcdf2 100644 --- a/src/libcore/hash/sip.rs +++ b/src/libcore/hash/sip.rs @@ -2,10 +2,10 @@ #![allow(deprecated)] -use marker::PhantomData; -use ptr; use cmp; +use marker::PhantomData; use mem; +use ptr; /// An implementation of SipHash 1-3. /// @@ -14,8 +14,10 @@ use mem; /// /// See: #[unstable(feature = "hashmap_internals", issue = "0")] -#[rustc_deprecated(since = "1.13.0", - reason = "use `std::collections::hash_map::DefaultHasher` instead")] +#[rustc_deprecated( + since = "1.13.0", + reason = "use `std::collections::hash_map::DefaultHasher` instead" +)] #[derive(Debug, Clone, Default)] #[doc(hidden)] pub struct SipHasher13 { @@ -26,8 +28,10 @@ pub struct SipHasher13 { /// /// See: #[unstable(feature = "hashmap_internals", issue = "0")] -#[rustc_deprecated(since = "1.13.0", - reason = "use `std::collections::hash_map::DefaultHasher` instead")] +#[rustc_deprecated( + since = "1.13.0", + reason = "use `std::collections::hash_map::DefaultHasher` instead" +)] #[derive(Debug, Clone, Default)] struct SipHasher24 { hasher: Hasher, @@ -46,8 +50,10 @@ struct SipHasher24 { /// it is not intended for cryptographic purposes. As such, all /// cryptographic uses of this implementation are _strongly discouraged_. #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_deprecated(since = "1.13.0", - reason = "use `std::collections::hash_map::DefaultHasher` instead")] +#[rustc_deprecated( + since = "1.13.0", + reason = "use `std::collections::hash_map::DefaultHasher` instead" +)] #[derive(Debug, Clone, Default)] pub struct SipHasher(SipHasher24); @@ -56,9 +62,9 @@ struct Hasher { k0: u64, k1: u64, length: usize, // how many bytes we've processed - state: State, // hash State - tail: u64, // unprocessed bytes le - ntail: usize, // how many bytes in tail are valid + state: State, // hash State + tail: u64, // unprocessed bytes le + ntail: usize, // how many bytes in tail are valid _marker: PhantomData, } @@ -76,18 +82,25 @@ struct State { } macro_rules! 
compress { - ($state:expr) => ({ + ($state:expr) => {{ compress!($state.v0, $state.v1, $state.v2, $state.v3) - }); - ($v0:expr, $v1:expr, $v2:expr, $v3:expr) => - ({ - $v0 = $v0.wrapping_add($v1); $v1 = $v1.rotate_left(13); $v1 ^= $v0; + }}; + ($v0:expr, $v1:expr, $v2:expr, $v3:expr) => {{ + $v0 = $v0.wrapping_add($v1); + $v1 = $v1.rotate_left(13); + $v1 ^= $v0; $v0 = $v0.rotate_left(32); - $v2 = $v2.wrapping_add($v3); $v3 = $v3.rotate_left(16); $v3 ^= $v2; - $v0 = $v0.wrapping_add($v3); $v3 = $v3.rotate_left(21); $v3 ^= $v0; - $v2 = $v2.wrapping_add($v1); $v1 = $v1.rotate_left(17); $v1 ^= $v2; + $v2 = $v2.wrapping_add($v3); + $v3 = $v3.rotate_left(16); + $v3 ^= $v2; + $v0 = $v0.wrapping_add($v3); + $v3 = $v3.rotate_left(21); + $v3 ^= $v0; + $v2 = $v2.wrapping_add($v1); + $v1 = $v1.rotate_left(17); + $v1 ^= $v2; $v2 = $v2.rotate_left(32); - }); + }}; } /// Load an integer of the desired type from a byte stream, in LE order. Uses @@ -96,15 +109,16 @@ macro_rules! compress { /// /// Unsafe because: unchecked indexing at i..i+size_of(int_ty) macro_rules! load_int_le { - ($buf:expr, $i:expr, $int_ty:ident) => - ({ - debug_assert!($i + mem::size_of::<$int_ty>() <= $buf.len()); - let mut data = 0 as $int_ty; - ptr::copy_nonoverlapping($buf.get_unchecked($i), - &mut data as *mut _ as *mut u8, - mem::size_of::<$int_ty>()); - data.to_le() - }); + ($buf:expr, $i:expr, $int_ty:ident) => {{ + debug_assert!($i + mem::size_of::<$int_ty>() <= $buf.len()); + let mut data = 0 as $int_ty; + ptr::copy_nonoverlapping( + $buf.get_unchecked($i), + &mut data as *mut _ as *mut u8, + mem::size_of::<$int_ty>(), + ); + data.to_le() + }}; } /// Load an u64 using up to 7 bytes of a byte slice. @@ -135,8 +149,10 @@ impl SipHasher { /// Creates a new `SipHasher` with the two initial keys set to 0. #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_deprecated(since = "1.13.0", - reason = "use `std::collections::hash_map::DefaultHasher` instead")] + #[rustc_deprecated( + since = "1.13.0", + reason = "use `std::collections::hash_map::DefaultHasher` instead" + )] pub fn new() -> SipHasher { SipHasher::new_with_keys(0, 0) } @@ -144,11 +160,13 @@ impl SipHasher { /// Creates a `SipHasher` that is keyed off the provided keys. #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_deprecated(since = "1.13.0", - reason = "use `std::collections::hash_map::DefaultHasher` instead")] + #[rustc_deprecated( + since = "1.13.0", + reason = "use `std::collections::hash_map::DefaultHasher` instead" + )] pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher { SipHasher(SipHasher24 { - hasher: Hasher::new_with_keys(key0, key1) + hasher: Hasher::new_with_keys(key0, key1), }) } } @@ -157,8 +175,10 @@ impl SipHasher13 { /// Creates a new `SipHasher13` with the two initial keys set to 0. #[inline] #[unstable(feature = "hashmap_internals", issue = "0")] - #[rustc_deprecated(since = "1.13.0", - reason = "use `std::collections::hash_map::DefaultHasher` instead")] + #[rustc_deprecated( + since = "1.13.0", + reason = "use `std::collections::hash_map::DefaultHasher` instead" + )] pub fn new() -> SipHasher13 { SipHasher13::new_with_keys(0, 0) } @@ -166,11 +186,13 @@ impl SipHasher13 { /// Creates a `SipHasher13` that is keyed off the provided keys. 
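Every `SipHasher`/`SipHasher13` constructor in this file carries the same deprecation pointer, so the supported way to get the old "hash one value with fixed keys" behaviour is `DefaultHasher`, roughly:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn hash_one<T: Hash>(value: &T) -> u64 {
    // DefaultHasher is what the deprecation notes above point at; at the time of
    // this diff it wraps the SipHasher13 in this file with zero keys (an
    // implementation detail, not a guarantee).
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let a = hash_one(&"hello");
    let b = hash_one(&"hello");
    let c = hash_one(&"world");
    assert_eq!(a, b); // same input, same keys: same hash
    assert_ne!(a, c); // different input should (overwhelmingly) differ
}
```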
#[inline] #[unstable(feature = "hashmap_internals", issue = "0")] - #[rustc_deprecated(since = "1.13.0", - reason = "use `std::collections::hash_map::DefaultHasher` instead")] + #[rustc_deprecated( + since = "1.13.0", + reason = "use `std::collections::hash_map::DefaultHasher` instead" + )] pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher13 { SipHasher13 { - hasher: Hasher::new_with_keys(key0, key1) + hasher: Hasher::new_with_keys(key0, key1), } } } @@ -292,7 +314,7 @@ impl super::Hasher for Hasher { self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << 8 * self.ntail; if length < needed { self.ntail += length; - return + return; } else { self.state.v3 ^= self.tail; S::c_rounds(&mut self.state); diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index 7508257f7806f..99009b7a68d73 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -37,8 +37,10 @@ #![allow(missing_docs)] #[stable(feature = "drop_in_place", since = "1.8.0")] -#[rustc_deprecated(reason = "no longer an intrinsic - use `ptr::drop_in_place` directly", - since = "1.18.0")] +#[rustc_deprecated( + reason = "no longer an intrinsic - use `ptr::drop_in_place` directly", + since = "1.18.0" +)] pub use ptr::drop_in_place; extern "rust-intrinsic" { @@ -1179,8 +1181,7 @@ extern "rust-intrinsic" { /// /// The volatile parameter is set to `true`, so it will not be optimized out /// unless size is equal to zero. - pub fn volatile_copy_nonoverlapping_memory(dst: *mut T, src: *const T, - count: usize); + pub fn volatile_copy_nonoverlapping_memory(dst: *mut T, src: *const T, count: usize); /// Equivalent to the appropriate `llvm.memmove.p0i8.0i8.*` intrinsic, with /// a size of `count` * `size_of::()` and an alignment of /// `min_align_of::()` @@ -1329,7 +1330,6 @@ extern "rust-intrinsic" { /// May assume inputs are finite. pub fn frem_fast(a: T, b: T) -> T; - /// Returns the number of bits set in an integer type `T` pub fn ctpop(x: T) -> T; diff --git a/src/libcore/iter/iterator.rs b/src/libcore/iter/iterator.rs index 1ea500858ed16..65069ec469598 100644 --- a/src/libcore/iter/iterator.rs +++ b/src/libcore/iter/iterator.rs @@ -2,13 +2,13 @@ use cmp::Ordering; use ops::Try; use super::LoopState; -use super::{Chain, Cycle, Copied, Cloned, Enumerate, Filter, FilterMap, Fuse}; -use super::{Flatten, FlatMap, flatten_compat}; -use super::{Inspect, Map, Peekable, Scan, Skip, SkipWhile, StepBy, Take, TakeWhile, Rev}; -use super::{Zip, Sum, Product}; +use super::{flatten_compat, FlatMap, Flatten}; +use super::{Chain, Cloned, Copied, Cycle, Enumerate, Filter, FilterMap, Fuse}; use super::{ChainState, FromIterator, ZipImpl}; +use super::{Inspect, Map, Peekable, Rev, Scan, Skip, SkipWhile, StepBy, Take, TakeWhile}; +use super::{Product, Sum, Zip}; -fn _assert_is_object_safe(_: &dyn Iterator) {} +fn _assert_is_object_safe(_: &dyn Iterator) {} /// An interface for dealing with iterators. 
/// @@ -21,71 +21,71 @@ fn _assert_is_object_safe(_: &dyn Iterator) {} #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented( on( - _Self="[std::ops::Range; 1]", - label="if you meant to iterate between two values, remove the square brackets", - note="`[start..end]` is an array of one `Range`; you might have meant to have a `Range` \ - without the brackets: `start..end`" + _Self = "[std::ops::Range; 1]", + label = "if you meant to iterate between two values, remove the square brackets", + note = "`[start..end]` is an array of one `Range`; you might have meant to have a `Range` \ + without the brackets: `start..end`" ), on( - _Self="[std::ops::RangeFrom; 1]", - label="if you meant to iterate from a value onwards, remove the square brackets", - note="`[start..]` is an array of one `RangeFrom`; you might have meant to have a \ + _Self = "[std::ops::RangeFrom; 1]", + label = "if you meant to iterate from a value onwards, remove the square brackets", + note = "`[start..]` is an array of one `RangeFrom`; you might have meant to have a \ `RangeFrom` without the brackets: `start..`, keeping in mind that iterating over an \ unbounded iterator will run forever unless you `break` or `return` from within the \ loop" ), on( - _Self="[std::ops::RangeTo; 1]", - label="if you meant to iterate until a value, remove the square brackets and add a \ - starting value", - note="`[..end]` is an array of one `RangeTo`; you might have meant to have a bounded \ - `Range` without the brackets: `0..end`" + _Self = "[std::ops::RangeTo; 1]", + label = "if you meant to iterate until a value, remove the square brackets and add a \ + starting value", + note = "`[..end]` is an array of one `RangeTo`; you might have meant to have a bounded \ + `Range` without the brackets: `0..end`" ), on( - _Self="[std::ops::RangeInclusive; 1]", - label="if you meant to iterate between two values, remove the square brackets", - note="`[start..=end]` is an array of one `RangeInclusive`; you might have meant to have a \ + _Self = "[std::ops::RangeInclusive; 1]", + label = "if you meant to iterate between two values, remove the square brackets", + note = "`[start..=end]` is an array of one `RangeInclusive`; you might have meant to have a \ `RangeInclusive` without the brackets: `start..=end`" ), on( - _Self="[std::ops::RangeToInclusive; 1]", - label="if you meant to iterate until a value (including it), remove the square brackets \ - and add a starting value", - note="`[..=end]` is an array of one `RangeToInclusive`; you might have meant to have a \ - bounded `RangeInclusive` without the brackets: `0..=end`" + _Self = "[std::ops::RangeToInclusive; 1]", + label = "if you meant to iterate until a value (including it), remove the square brackets \ + and add a starting value", + note = "`[..=end]` is an array of one `RangeToInclusive`; you might have meant to have a \ + bounded `RangeInclusive` without the brackets: `0..=end`" ), on( - _Self="std::ops::RangeTo", - label="if you meant to iterate until a value, add a starting value", - note="`..end` is a `RangeTo`, which cannot be iterated on; you might have meant to have a \ + _Self = "std::ops::RangeTo", + label = "if you meant to iterate until a value, add a starting value", + note = "`..end` is a `RangeTo`, which cannot be iterated on; you might have meant to have a \ bounded `Range`: `0..end`" ), on( - _Self="std::ops::RangeToInclusive", - label="if you meant to iterate until a value (including it), add a starting value", - note="`..=end` is a `RangeToInclusive`, which cannot 
be iterated on; you might have meant \ + _Self = "std::ops::RangeToInclusive", + label = "if you meant to iterate until a value (including it), add a starting value", + note = "`..=end` is a `RangeToInclusive`, which cannot be iterated on; you might have meant \ to have a bounded `RangeInclusive`: `0..=end`" ), on( - _Self="&str", - label="`{Self}` is not an iterator; try calling `.chars()` or `.bytes()`" + _Self = "&str", + label = "`{Self}` is not an iterator; try calling `.chars()` or `.bytes()`" ), on( - _Self="std::string::String", - label="`{Self}` is not an iterator; try calling `.chars()` or `.bytes()`" + _Self = "std::string::String", + label = "`{Self}` is not an iterator; try calling `.chars()` or `.bytes()`" ), on( - _Self="[]", - label="borrow the array with `&` or call `.iter()` on it to iterate over it", - note="arrays are not iterators, but slices like the following are: `&[1, 2, 3]`" + _Self = "[]", + label = "borrow the array with `&` or call `.iter()` on it to iterate over it", + note = "arrays are not iterators, but slices like the following are: `&[1, 2, 3]`" ), on( - _Self="{integral}", - note="if you want to iterate between `start` until a value `end`, use the exclusive range \ + _Self = "{integral}", + note = "if you want to iterate between `start` until a value `end`, use the exclusive range \ syntax `start..end` or the inclusive range syntax `start..=end`" ), - label="`{Self}` is not an iterator", - message="`{Self}` is not an iterator" + label = "`{Self}` is not an iterator", + message = "`{Self}` is not an iterator" )] #[doc(spotlight)] #[must_use] @@ -198,7 +198,9 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn size_hint(&self) -> (usize, Option) { (0, None) } + fn size_hint(&self) -> (usize, Option) { + (0, None) + } /// Consumes the iterator, counting the number of iterations and returning it. /// @@ -237,7 +239,10 @@ pub trait Iterator { #[inline] #[rustc_inherit_overflow_checks] #[stable(feature = "rust1", since = "1.0.0")] - fn count(self) -> usize where Self: Sized { + fn count(self) -> usize + where + Self: Sized, + { // Might overflow. self.fold(0, |cnt, _| cnt + 1) } @@ -263,9 +268,14 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn last(self) -> Option where Self: Sized { + fn last(self) -> Option + where + Self: Sized, + { let mut last = None; - for x in self { last = Some(x); } + for x in self { + last = Some(x); + } last } @@ -314,7 +324,9 @@ pub trait Iterator { #[stable(feature = "rust1", since = "1.0.0")] fn nth(&mut self, mut n: usize) -> Option { for x in self { - if n == 0 { return Some(x) } + if n == 0 { + return Some(x); + } n -= 1; } None @@ -366,9 +378,16 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "iterator_step_by", since = "1.28.0")] - fn step_by(self, step: usize) -> StepBy where Self: Sized { + fn step_by(self, step: usize) -> StepBy + where + Self: Sized, + { assert!(step != 0); - StepBy{iter: self, step: step - 1, first_take: true} + StepBy { + iter: self, + step: step - 1, + first_take: true, + } } /// Takes two iterators and creates a new iterator over both in sequence. 
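The `rustc_on_unimplemented` attribute and the default method bodies above (`size_hint`, `count`, `last`, `nth`, `step_by`, `chain`) are only being re-indented; for orientation, here is what a minimal iterator with a non-default `size_hint` looks like (the `Countdown` type is invented):

```rust
struct Countdown(usize);

impl Iterator for Countdown {
    type Item = usize;

    fn next(&mut self) -> Option<usize> {
        if self.0 == 0 {
            None
        } else {
            self.0 -= 1;
            Some(self.0)
        }
    }

    // Exact bounds: consumers such as `collect` can preallocate from this.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.0, Some(self.0))
    }
}

fn main() {
    let it = Countdown(3);
    assert_eq!(it.size_hint(), (3, Some(3)));
    assert_eq!(it.collect::<Vec<_>>(), vec![2, 1, 0]);
}
```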
@@ -422,10 +441,16 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn chain(self, other: U) -> Chain where - Self: Sized, U: IntoIterator, + fn chain(self, other: U) -> Chain + where + Self: Sized, + U: IntoIterator, { - Chain{a: self, b: other.into_iter(), state: ChainState::Both} + Chain { + a: self, + b: other.into_iter(), + state: ChainState::Both, + } } /// 'Zips up' two iterators into a single iterator of pairs. @@ -500,8 +525,10 @@ pub trait Iterator { /// [`None`]: ../../std/option/enum.Option.html#variant.None #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn zip(self, other: U) -> Zip where - Self: Sized, U: IntoIterator + fn zip(self, other: U) -> Zip + where + Self: Sized, + U: IntoIterator, { Zip::new(self, other.into_iter()) } @@ -557,8 +584,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn map(self, f: F) -> Map where - Self: Sized, F: FnMut(Self::Item) -> B, + fn map(self, f: F) -> Map + where + Self: Sized, + F: FnMut(Self::Item) -> B, { Map { iter: self, f } } @@ -600,8 +629,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "iterator_for_each", since = "1.21.0")] - fn for_each(self, mut f: F) where - Self: Sized, F: FnMut(Self::Item), + fn for_each(self, mut f: F) + where + Self: Sized, + F: FnMut(Self::Item), { self.fold((), move |(), item| f(item)); } @@ -668,10 +699,15 @@ pub trait Iterator { /// of these layers. #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn filter
<P>
(self, predicate: P) -> Filter where - Self: Sized, P: FnMut(&Self::Item) -> bool, + fn filter
<P>
(self, predicate: P) -> Filter + where + Self: Sized, + P: FnMut(&Self::Item) -> bool, { - Filter {iter: self, predicate } + Filter { + iter: self, + predicate, + } } /// Creates an iterator that both filters and maps. @@ -725,8 +761,10 @@ pub trait Iterator { /// [`None`]: ../../std/option/enum.Option.html#variant.None #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn filter_map(self, f: F) -> FilterMap where - Self: Sized, F: FnMut(Self::Item) -> Option, + fn filter_map(self, f: F) -> FilterMap + where + Self: Sized, + F: FnMut(Self::Item) -> Option, { FilterMap { iter: self, f } } @@ -771,8 +809,14 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn enumerate(self) -> Enumerate where Self: Sized { - Enumerate { iter: self, count: 0 } + fn enumerate(self) -> Enumerate + where + Self: Sized, + { + Enumerate { + iter: self, + count: 0, + } } /// Creates an iterator which can use `peek` to look at the next element of @@ -817,8 +861,14 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn peekable(self) -> Peekable where Self: Sized { - Peekable{iter: self, peeked: None} + fn peekable(self) -> Peekable + where + Self: Sized, + { + Peekable { + iter: self, + peeked: None, + } } /// Creates an iterator that [`skip`]s elements based on a predicate. @@ -878,10 +928,16 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn skip_while
<P>
(self, predicate: P) -> SkipWhile where - Self: Sized, P: FnMut(&Self::Item) -> bool, + fn skip_while
<P>
(self, predicate: P) -> SkipWhile + where + Self: Sized, + P: FnMut(&Self::Item) -> bool, { - SkipWhile { iter: self, flag: false, predicate } + SkipWhile { + iter: self, + flag: false, + predicate, + } } /// Creates an iterator that yields elements based on a predicate. @@ -958,10 +1014,16 @@ pub trait Iterator { /// some similar thing. #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn take_while
<P>
(self, predicate: P) -> TakeWhile where - Self: Sized, P: FnMut(&Self::Item) -> bool, + fn take_while
<P>
(self, predicate: P) -> TakeWhile + where + Self: Sized, + P: FnMut(&Self::Item) -> bool, { - TakeWhile { iter: self, flag: false, predicate } + TakeWhile { + iter: self, + flag: false, + predicate, + } } /// Creates an iterator that skips the first `n` elements. @@ -982,7 +1044,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn skip(self, n: usize) -> Skip where Self: Sized { + fn skip(self, n: usize) -> Skip + where + Self: Sized, + { Skip { iter: self, n } } @@ -1014,7 +1079,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn take(self, n: usize) -> Take where Self: Sized, { + fn take(self, n: usize) -> Take + where + Self: Sized, + { Take { iter: self, n } } @@ -1058,9 +1126,15 @@ pub trait Iterator { #[inline] #[stable(feature = "rust1", since = "1.0.0")] fn scan(self, initial_state: St, f: F) -> Scan - where Self: Sized, F: FnMut(&mut St, Self::Item) -> Option, + where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option, { - Scan { iter: self, f, state: initial_state } + Scan { + iter: self, + f, + state: initial_state, + } } /// Creates an iterator that works like map, but flattens nested structure. @@ -1096,9 +1170,14 @@ pub trait Iterator { #[inline] #[stable(feature = "rust1", since = "1.0.0")] fn flat_map(self, f: F) -> FlatMap - where Self: Sized, U: IntoIterator, F: FnMut(Self::Item) -> U, + where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U, { - FlatMap { inner: flatten_compat(self.map(f)) } + FlatMap { + inner: flatten_compat(self.map(f)), + } } /// Creates an iterator that flattens nested structure. @@ -1165,8 +1244,13 @@ pub trait Iterator { #[inline] #[stable(feature = "iterator_flatten", since = "1.29.0")] fn flatten(self) -> Flatten - where Self: Sized, Self::Item: IntoIterator { - Flatten { inner: flatten_compat(self) } + where + Self: Sized, + Self::Item: IntoIterator, + { + Flatten { + inner: flatten_compat(self), + } } /// Creates an iterator which ends after the first [`None`]. @@ -1225,8 +1309,14 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn fuse(self) -> Fuse where Self: Sized { - Fuse{iter: self, done: false} + fn fuse(self) -> Fuse + where + Self: Sized, + { + Fuse { + iter: self, + done: false, + } } /// Do something with each element of an iterator, passing the value on. @@ -1306,8 +1396,10 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn inspect(self, f: F) -> Inspect where - Self: Sized, F: FnMut(&Self::Item), + fn inspect(self, f: F) -> Inspect + where + Self: Sized, + F: FnMut(&Self::Item), { Inspect { iter: self, f } } @@ -1349,7 +1441,12 @@ pub trait Iterator { /// assert_eq!(iter.next(), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - fn by_ref(&mut self) -> &mut Self where Self: Sized { self } + fn by_ref(&mut self) -> &mut Self + where + Self: Sized, + { + self + } /// Transforms an iterator into a collection. 
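The long run of adapter constructors above (`skip_while`, `take_while`, `skip`, `take`, `scan`, `flat_map`, `flatten`, `fuse`, `inspect`) only changes struct-literal layout; the lazy pipeline they compose, ending in `collect`, looks like this from the outside:

```rust
fn main() {
    let data = vec![1, 2, 3, 4, 5, 6];

    let result: Vec<i32> = data
        .iter()
        .skip_while(|&&x| x < 3)        // drop the leading 1, 2
        .take_while(|&&x| x < 6)        // stop before 6
        .flat_map(|&x| vec![x, x * 10]) // each item expands to two
        .collect();                     // nothing runs until here

    assert_eq!(result, vec![3, 30, 4, 40, 5, 50]);
}
```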
/// @@ -1464,7 +1561,10 @@ pub trait Iterator { #[inline] #[stable(feature = "rust1", since = "1.0.0")] #[must_use = "if you really need to exhaust the iterator, consider `.for_each(drop)` instead"] - fn collect>(self) -> B where Self: Sized { + fn collect>(self) -> B + where + Self: Sized, + { FromIterator::from_iter(self) } @@ -1489,10 +1589,11 @@ pub trait Iterator { /// assert_eq!(odd, vec![1, 3]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - fn partition(self, mut f: F) -> (B, B) where + fn partition(self, mut f: F) -> (B, B) + where Self: Sized, B: Default + Extend, - F: FnMut(&Self::Item) -> bool + F: FnMut(&Self::Item) -> bool, { let mut left: B = Default::default(); let mut right: B = Default::default(); @@ -1567,8 +1668,11 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "iterator_try_fold", since = "1.27.0")] - fn try_fold(&mut self, init: B, mut f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: B, mut f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try, { let mut accum = init; while let Some(x) = self.next() { @@ -1606,8 +1710,11 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "iterator_try_fold", since = "1.27.0")] - fn try_for_each(&mut self, mut f: F) -> R where - Self: Sized, F: FnMut(Self::Item) -> R, R: Try + fn try_for_each(&mut self, mut f: F) -> R + where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try, { self.try_fold((), move |(), x| f(x)) } @@ -1681,10 +1788,13 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn fold(mut self, init: B, mut f: F) -> B where - Self: Sized, F: FnMut(B, Self::Item) -> B, + fn fold(mut self, init: B, mut f: F) -> B + where + Self: Sized, + F: FnMut(B, Self::Item) -> B, { - self.try_fold(init, move |acc, x| Ok::(f(acc, x))).unwrap() + self.try_fold(init, move |acc, x| Ok::(f(acc, x))) + .unwrap() } /// Tests if every element of the iterator matches a predicate. @@ -1726,12 +1836,17 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn all(&mut self, mut f: F) -> bool where - Self: Sized, F: FnMut(Self::Item) -> bool + fn all(&mut self, mut f: F) -> bool + where + Self: Sized, + F: FnMut(Self::Item) -> bool, { self.try_for_each(move |x| { - if f(x) { LoopState::Continue(()) } - else { LoopState::Break(()) } + if f(x) { + LoopState::Continue(()) + } else { + LoopState::Break(()) + } }) == LoopState::Continue(()) } @@ -1774,13 +1889,17 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn any(&mut self, mut f: F) -> bool where + fn any(&mut self, mut f: F) -> bool + where Self: Sized, - F: FnMut(Self::Item) -> bool + F: FnMut(Self::Item) -> bool, { self.try_for_each(move |x| { - if f(x) { LoopState::Break(()) } - else { LoopState::Continue(()) } + if f(x) { + LoopState::Break(()) + } else { + LoopState::Continue(()) + } }) == LoopState::Break(()) } @@ -1828,14 +1947,19 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn find

(&mut self, mut predicate: P) -> Option<Self::Item> where
+    fn find<P>
(&mut self, mut predicate: P) -> Option + where Self: Sized, P: FnMut(&Self::Item) -> bool, { self.try_for_each(move |x| { - if predicate(&x) { LoopState::Break(x) } - else { LoopState::Continue(()) } - }).break_value() + if predicate(&x) { + LoopState::Break(x) + } else { + LoopState::Continue(()) + } + }) + .break_value() } /// Applies function to the elements of iterator and returns @@ -1855,16 +1979,16 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "iterator_find_map", since = "1.30.0")] - fn find_map(&mut self, mut f: F) -> Option where + fn find_map(&mut self, mut f: F) -> Option + where Self: Sized, F: FnMut(Self::Item) -> Option, { - self.try_for_each(move |x| { - match f(x) { - Some(x) => LoopState::Break(x), - None => LoopState::Continue(()), - } - }).break_value() + self.try_for_each(move |x| match f(x) { + Some(x) => LoopState::Break(x), + None => LoopState::Continue(()), + }) + .break_value() } /// Searches for an element in an iterator, returning its index. @@ -1924,15 +2048,20 @@ pub trait Iterator { #[inline] #[rustc_inherit_overflow_checks] #[stable(feature = "rust1", since = "1.0.0")] - fn position

(&mut self, mut predicate: P) -> Option<usize> where
+    fn position<P>
(&mut self, mut predicate: P) -> Option + where Self: Sized, P: FnMut(Self::Item) -> bool, { // The addition might panic on overflow self.try_fold(0, move |i, x| { - if predicate(x) { LoopState::Break(i) } - else { LoopState::Continue(i + 1) } - }).break_value() + if predicate(x) { + LoopState::Break(i) + } else { + LoopState::Continue(i + 1) + } + }) + .break_value() } /// Searches for an element in an iterator from the right, returning its @@ -1975,18 +2104,23 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn rposition

(&mut self, mut predicate: P) -> Option<usize> where
+    fn rposition<P>
(self) -> P - where Self: Sized, - P: Product, + where + Self: Sized, + P: Product, { Product::product(self) } @@ -2375,7 +2550,8 @@ pub trait Iterator { /// Lexicographically compares the elements of this `Iterator` with those /// of another. #[stable(feature = "iter_order", since = "1.5.0")] - fn cmp(mut self, other: I) -> Ordering where + fn cmp(mut self, other: I) -> Ordering + where I: IntoIterator, Self::Item: Ord, Self: Sized, @@ -2384,11 +2560,13 @@ pub trait Iterator { loop { let x = match self.next() { - None => if other.next().is_none() { - return Ordering::Equal - } else { - return Ordering::Less - }, + None => { + if other.next().is_none() { + return Ordering::Equal; + } else { + return Ordering::Less; + } + } Some(val) => val, }; @@ -2407,7 +2585,8 @@ pub trait Iterator { /// Lexicographically compares the elements of this `Iterator` with those /// of another. #[stable(feature = "iter_order", since = "1.5.0")] - fn partial_cmp(mut self, other: I) -> Option where + fn partial_cmp(mut self, other: I) -> Option + where I: IntoIterator, Self::Item: PartialOrd, Self: Sized, @@ -2416,11 +2595,13 @@ pub trait Iterator { loop { let x = match self.next() { - None => if other.next().is_none() { - return Some(Ordering::Equal) - } else { - return Some(Ordering::Less) - }, + None => { + if other.next().is_none() { + return Some(Ordering::Equal); + } else { + return Some(Ordering::Less); + } + } Some(val) => val, }; @@ -2439,7 +2620,8 @@ pub trait Iterator { /// Determines if the elements of this `Iterator` are equal to those of /// another. #[stable(feature = "iter_order", since = "1.5.0")] - fn eq(mut self, other: I) -> bool where + fn eq(mut self, other: I) -> bool + where I: IntoIterator, Self::Item: PartialEq, Self: Sized, @@ -2457,14 +2639,17 @@ pub trait Iterator { Some(val) => val, }; - if x != y { return false } + if x != y { + return false; + } } } /// Determines if the elements of this `Iterator` are unequal to those of /// another. #[stable(feature = "iter_order", since = "1.5.0")] - fn ne(mut self, other: I) -> bool where + fn ne(mut self, other: I) -> bool + where I: IntoIterator, Self::Item: PartialEq, Self: Sized, @@ -2482,14 +2667,17 @@ pub trait Iterator { Some(val) => val, }; - if x != y { return true } + if x != y { + return true; + } } } /// Determines if the elements of this `Iterator` are lexicographically /// less than those of another. #[stable(feature = "iter_order", since = "1.5.0")] - fn lt(mut self, other: I) -> bool where + fn lt(mut self, other: I) -> bool + where I: IntoIterator, Self::Item: PartialOrd, Self: Sized, @@ -2519,7 +2707,8 @@ pub trait Iterator { /// Determines if the elements of this `Iterator` are lexicographically /// less or equal to those of another. #[stable(feature = "iter_order", since = "1.5.0")] - fn le(mut self, other: I) -> bool where + fn le(mut self, other: I) -> bool + where I: IntoIterator, Self::Item: PartialOrd, Self: Sized, @@ -2528,7 +2717,10 @@ pub trait Iterator { loop { let x = match self.next() { - None => { other.next(); return true; }, + None => { + other.next(); + return true; + } Some(val) => val, }; @@ -2549,7 +2741,8 @@ pub trait Iterator { /// Determines if the elements of this `Iterator` are lexicographically /// greater than those of another. 
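// Illustrative usage sketch (not part of the patch): the lexicographic
// comparison methods whose `where` clauses are reflowed above compare
// element by element and fall back to length once one side runs out.
use std::cmp::Ordering;

fn main() {
    assert_eq!([1, 2, 3].iter().cmp([1, 2, 4].iter()), Ordering::Less);
    assert!([1, 2].iter().lt([1, 2, 0].iter()));
    assert!([1, 2, 3].iter().eq([1, 2, 3].iter()));
}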
#[stable(feature = "iter_order", since = "1.5.0")] - fn gt(mut self, other: I) -> bool where + fn gt(mut self, other: I) -> bool + where I: IntoIterator, Self::Item: PartialOrd, Self: Sized, @@ -2558,7 +2751,10 @@ pub trait Iterator { loop { let x = match self.next() { - None => { other.next(); return false; }, + None => { + other.next(); + return false; + } Some(val) => val, }; @@ -2579,7 +2775,8 @@ pub trait Iterator { /// Determines if the elements of this `Iterator` are lexicographically /// greater than or equal to those of another. #[stable(feature = "iter_order", since = "1.5.0")] - fn ge(mut self, other: I) -> bool where + fn ge(mut self, other: I) -> bool + where I: IntoIterator, Self::Item: PartialOrd, Self: Sized, @@ -2614,12 +2811,15 @@ pub trait Iterator { /// commonalities of {max,min}{,_by}. In particular, this avoids /// having to implement optimizations several times. #[inline] -fn select_fold1(mut it: I, - mut f_proj: FProj, - mut f_cmp: FCmp) -> Option<(B, I::Item)> - where I: Iterator, - FProj: FnMut(&I::Item) -> B, - FCmp: FnMut(&B, &I::Item, &B, &I::Item) -> bool +fn select_fold1( + mut it: I, + mut f_proj: FProj, + mut f_cmp: FCmp, +) -> Option<(B, I::Item)> +where + I: Iterator, + FProj: FnMut(&I::Item) -> B, + FCmp: FnMut(&B, &I::Item, &B, &I::Item) -> bool, { // start with the first element as our selection. This avoids // having to use `Option`s inside the loop, translating to a @@ -2641,8 +2841,12 @@ fn select_fold1(mut it: I, #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for &mut I { type Item = I::Item; - fn next(&mut self) -> Option { (**self).next() } - fn size_hint(&self) -> (usize, Option) { (**self).size_hint() } + fn next(&mut self) -> Option { + (**self).next() + } + fn size_hint(&self) -> (usize, Option) { + (**self).size_hint() + } fn nth(&mut self, n: usize) -> Option { (**self).nth(n) } diff --git a/src/libcore/iter/mod.rs b/src/libcore/iter/mod.rs index 03369d6c8f3fd..8ae4cfef4f227 100644 --- a/src/libcore/iter/mod.rs +++ b/src/libcore/iter/mod.rs @@ -308,38 +308,40 @@ use cmp; use fmt; +use intrinsics; use iter_private::TrustedRandomAccess; use ops::Try; use usize; -use intrinsics; #[stable(feature = "rust1", since = "1.0.0")] pub use self::iterator::Iterator; -#[unstable(feature = "step_trait", - reason = "likely to be replaced by finer-grained traits", - issue = "42168")] +#[unstable( + feature = "step_trait", + reason = "likely to be replaced by finer-grained traits", + issue = "42168" +)] pub use self::range::Step; -#[stable(feature = "rust1", since = "1.0.0")] -pub use self::sources::{Repeat, repeat}; -#[stable(feature = "iterator_repeat_with", since = "1.28.0")] -pub use self::sources::{RepeatWith, repeat_with}; #[stable(feature = "iter_empty", since = "1.2.0")] -pub use self::sources::{Empty, empty}; +pub use self::sources::{empty, Empty}; #[stable(feature = "iter_once", since = "1.2.0")] -pub use self::sources::{Once, once}; +pub use self::sources::{once, Once}; +#[stable(feature = "rust1", since = "1.0.0")] +pub use self::sources::{repeat, Repeat}; +#[stable(feature = "iterator_repeat_with", since = "1.28.0")] +pub use self::sources::{repeat_with, RepeatWith}; #[unstable(feature = "iter_unfold", issue = "55977")] -pub use self::sources::{Unfold, unfold, Successors, successors}; +pub use self::sources::{successors, unfold, Successors, Unfold}; -#[stable(feature = "rust1", since = "1.0.0")] -pub use self::traits::{FromIterator, IntoIterator, DoubleEndedIterator, Extend}; -#[stable(feature = "rust1", since = "1.0.0")] -pub use 
self::traits::{ExactSizeIterator, Sum, Product}; #[stable(feature = "fused", since = "1.26.0")] pub use self::traits::FusedIterator; #[unstable(feature = "trusted_len", issue = "37572")] pub use self::traits::TrustedLen; +#[stable(feature = "rust1", since = "1.0.0")] +pub use self::traits::{DoubleEndedIterator, Extend, FromIterator, IntoIterator}; +#[stable(feature = "rust1", since = "1.0.0")] +pub use self::traits::{ExactSizeIterator, Product, Sum}; mod iterator; mod range; @@ -364,9 +366,13 @@ impl Try for LoopState { } } #[inline] - fn from_error(v: Self::Error) -> Self { LoopState::Break(v) } + fn from_error(v: Self::Error) -> Self { + LoopState::Break(v) + } #[inline] - fn from_ok(v: Self::Ok) -> Self { LoopState::Continue(v) } + fn from_ok(v: Self::Ok) -> Self { + LoopState::Continue(v) + } } impl LoopState { @@ -407,70 +413,97 @@ impl LoopState { #[must_use = "iterator adaptors are lazy and do nothing unless consumed"] #[stable(feature = "rust1", since = "1.0.0")] pub struct Rev { - iter: T + iter: T, } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Rev where I: DoubleEndedIterator { +impl Iterator for Rev +where + I: DoubleEndedIterator, +{ type Item = ::Item; #[inline] - fn next(&mut self) -> Option<::Item> { self.iter.next_back() } + fn next(&mut self) -> Option<::Item> { + self.iter.next_back() + } #[inline] - fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } #[inline] - fn nth(&mut self, n: usize) -> Option<::Item> { self.iter.nth_back(n) } + fn nth(&mut self, n: usize) -> Option<::Item> { + self.iter.nth_back(n) + } - fn try_fold(&mut self, init: B, f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: B, f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try, { self.iter.try_rfold(init, f) } fn fold(self, init: Acc, f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, Self::Item) -> Acc, { self.iter.rfold(init, f) } #[inline] fn find

(&mut self, predicate: P) -> Option<Self::Item>
-        where P: FnMut(&Self::Item) -> bool
+    where
+        P: FnMut(&Self::Item) -> bool,
     {
         self.iter.rfind(predicate)
     }

     #[inline]
-    fn rposition<P>
(&mut self, predicate: P) -> Option<usize> where
-        P: FnMut(Self::Item) -> bool
+    fn rposition<P>
(&mut self, predicate: P) -> Option + where + P: FnMut(Self::Item) -> bool, { self.iter.position(predicate) } } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Rev where I: DoubleEndedIterator { +impl DoubleEndedIterator for Rev +where + I: DoubleEndedIterator, +{ #[inline] - fn next_back(&mut self) -> Option<::Item> { self.iter.next() } + fn next_back(&mut self) -> Option<::Item> { + self.iter.next() + } #[inline] - fn nth_back(&mut self, n: usize) -> Option<::Item> { self.iter.nth(n) } + fn nth_back(&mut self, n: usize) -> Option<::Item> { + self.iter.nth(n) + } - fn try_rfold(&mut self, init: B, f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try + fn try_rfold(&mut self, init: B, f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try, { self.iter.try_fold(init, f) } fn rfold(self, init: Acc, f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, Self::Item) -> Acc, { self.iter.fold(init, f) } fn rfind

(&mut self, predicate: P) -> Option - where P: FnMut(&Self::Item) -> bool + where + P: FnMut(&Self::Item) -> bool, { self.iter.find(predicate) } @@ -478,7 +511,8 @@ impl DoubleEndedIterator for Rev where I: DoubleEndedIterator { #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Rev - where I: ExactSizeIterator + DoubleEndedIterator +where + I: ExactSizeIterator + DoubleEndedIterator, { fn len(&self) -> usize { self.iter.len() @@ -490,12 +524,10 @@ impl ExactSizeIterator for Rev } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Rev - where I: FusedIterator + DoubleEndedIterator {} +impl FusedIterator for Rev where I: FusedIterator + DoubleEndedIterator {} #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for Rev - where I: TrustedLen + DoubleEndedIterator {} +unsafe impl TrustedLen for Rev where I: TrustedLen + DoubleEndedIterator {} /// An iterator that copies the elements of an underlying iterator. /// @@ -513,7 +545,9 @@ pub struct Copied { #[unstable(feature = "iter_copied", issue = "57127")] impl<'a, I, T: 'a> Iterator for Copied - where I: Iterator, T: Copy +where + I: Iterator, + T: Copy, { type Item = T; @@ -525,14 +559,18 @@ impl<'a, I, T: 'a> Iterator for Copied self.it.size_hint() } - fn try_fold(&mut self, init: B, mut f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: B, mut f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try, { self.it.try_fold(init, move |acc, &elt| f(acc, elt)) } fn fold(self, init: Acc, mut f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, Self::Item) -> Acc, { self.it.fold(init, move |acc, &elt| f(acc, elt)) } @@ -540,20 +578,26 @@ impl<'a, I, T: 'a> Iterator for Copied #[unstable(feature = "iter_copied", issue = "57127")] impl<'a, I, T: 'a> DoubleEndedIterator for Copied - where I: DoubleEndedIterator, T: Copy +where + I: DoubleEndedIterator, + T: Copy, { fn next_back(&mut self) -> Option { self.it.next_back().copied() } - fn try_rfold(&mut self, init: B, mut f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try + fn try_rfold(&mut self, init: B, mut f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try, { self.it.try_rfold(init, move |acc, &elt| f(acc, elt)) } fn rfold(self, init: Acc, mut f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, Self::Item) -> Acc, { self.it.rfold(init, move |acc, &elt| f(acc, elt)) } @@ -561,7 +605,9 @@ impl<'a, I, T: 'a> DoubleEndedIterator for Copied #[unstable(feature = "iter_copied", issue = "57127")] impl<'a, I, T: 'a> ExactSizeIterator for Copied - where I: ExactSizeIterator, T: Copy +where + I: ExactSizeIterator, + T: Copy, { fn len(&self) -> usize { self.it.len() @@ -574,12 +620,17 @@ impl<'a, I, T: 'a> ExactSizeIterator for Copied #[unstable(feature = "iter_copied", issue = "57127")] impl<'a, I, T: 'a> FusedIterator for Copied - where I: FusedIterator, T: Copy -{} +where + I: FusedIterator, + T: Copy, +{ +} #[doc(hidden)] unsafe impl<'a, I, T: 'a> TrustedRandomAccess for Copied - where I: TrustedRandomAccess, T: Copy +where + I: TrustedRandomAccess, + T: Copy, { unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item { *self.it.get_unchecked(i) @@ -593,9 +644,11 @@ unsafe impl<'a, I, T: 'a> TrustedRandomAccess for Copied #[unstable(feature = "iter_copied", issue = "57127")] unsafe impl<'a, I, T: 'a> TrustedLen for Copied - where I: TrustedLen, - T: Copy -{} +where + I: 
TrustedLen, + T: Copy, +{ +} /// An iterator that clones the elements of an underlying iterator. /// @@ -613,7 +666,9 @@ pub struct Cloned { #[stable(feature = "iter_cloned", since = "1.1.0")] impl<'a, I, T: 'a> Iterator for Cloned - where I: Iterator, T: Clone +where + I: Iterator, + T: Clone, { type Item = T; @@ -625,14 +680,18 @@ impl<'a, I, T: 'a> Iterator for Cloned self.it.size_hint() } - fn try_fold(&mut self, init: B, mut f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: B, mut f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try, { self.it.try_fold(init, move |acc, elt| f(acc, elt.clone())) } fn fold(self, init: Acc, mut f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, Self::Item) -> Acc, { self.it.fold(init, move |acc, elt| f(acc, elt.clone())) } @@ -640,20 +699,26 @@ impl<'a, I, T: 'a> Iterator for Cloned #[stable(feature = "iter_cloned", since = "1.1.0")] impl<'a, I, T: 'a> DoubleEndedIterator for Cloned - where I: DoubleEndedIterator, T: Clone +where + I: DoubleEndedIterator, + T: Clone, { fn next_back(&mut self) -> Option { self.it.next_back().cloned() } - fn try_rfold(&mut self, init: B, mut f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try + fn try_rfold(&mut self, init: B, mut f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try, { self.it.try_rfold(init, move |acc, elt| f(acc, elt.clone())) } fn rfold(self, init: Acc, mut f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, Self::Item) -> Acc, { self.it.rfold(init, move |acc, elt| f(acc, elt.clone())) } @@ -661,7 +726,9 @@ impl<'a, I, T: 'a> DoubleEndedIterator for Cloned #[stable(feature = "iter_cloned", since = "1.1.0")] impl<'a, I, T: 'a> ExactSizeIterator for Cloned - where I: ExactSizeIterator, T: Clone +where + I: ExactSizeIterator, + T: Clone, { fn len(&self) -> usize { self.it.len() @@ -674,24 +741,33 @@ impl<'a, I, T: 'a> ExactSizeIterator for Cloned #[stable(feature = "fused", since = "1.26.0")] impl<'a, I, T: 'a> FusedIterator for Cloned - where I: FusedIterator, T: Clone -{} +where + I: FusedIterator, + T: Clone, +{ +} #[doc(hidden)] unsafe impl<'a, I, T: 'a> TrustedRandomAccess for Cloned - where I: TrustedRandomAccess, T: Clone +where + I: TrustedRandomAccess, + T: Clone, { default unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item { self.it.get_unchecked(i).clone() } #[inline] - default fn may_have_side_effect() -> bool { true } + default fn may_have_side_effect() -> bool { + true + } } #[doc(hidden)] unsafe impl<'a, I, T: 'a> TrustedRandomAccess for Cloned - where I: TrustedRandomAccess, T: Copy +where + I: TrustedRandomAccess, + T: Copy, { unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item { *self.it.get_unchecked(i) @@ -705,9 +781,11 @@ unsafe impl<'a, I, T: 'a> TrustedRandomAccess for Cloned #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl<'a, I, T: 'a> TrustedLen for Cloned - where I: TrustedLen, - T: Clone -{} +where + I: TrustedLen, + T: Clone, +{ +} /// An iterator that repeats endlessly. 
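// Illustrative usage sketch (not part of the patch): `cloned()` turns an
// iterator over `&T` into an iterator over `T` by cloning each element,
// which is what the `Cloned` adapter reformatted above implements.
fn main() {
    let words = vec!["a".to_string(), "b".to_string()];
    let owned: Vec<String> = words.iter().cloned().collect();
    assert_eq!(owned, words);
}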
/// @@ -725,14 +803,20 @@ pub struct Cycle { } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Cycle where I: Clone + Iterator { +impl Iterator for Cycle +where + I: Clone + Iterator, +{ type Item = ::Item; #[inline] fn next(&mut self) -> Option<::Item> { match self.iter.next() { - None => { self.iter = self.orig.clone(); self.iter.next() } - y => y + None => { + self.iter = self.orig.clone(); + self.iter.next() + } + y => y, } } @@ -742,7 +826,7 @@ impl Iterator for Cycle where I: Clone + Iterator { match self.orig.size_hint() { sz @ (0, Some(0)) => sz, (0, _) => (0, None), - _ => (usize::MAX, None) + _ => (usize::MAX, None), } } } @@ -767,7 +851,10 @@ pub struct StepBy { } #[stable(feature = "iterator_step_by", since = "1.28.0")] -impl Iterator for StepBy where I: Iterator { +impl Iterator for StepBy +where + I: Iterator, +{ type Item = I::Item; #[inline] @@ -785,10 +872,16 @@ impl Iterator for StepBy where I: Iterator { let inner_hint = self.iter.size_hint(); if self.first_take { - let f = |n| if n == 0 { 0 } else { 1 + (n-1)/(self.step+1) }; + let f = |n| { + if n == 0 { + 0 + } else { + 1 + (n - 1) / (self.step + 1) + } + }; (f(inner_hint.0), inner_hint.1.map(f)) } else { - let f = |n| n / (self.step+1); + let f = |n| n / (self.step + 1); (f(inner_hint.0), inner_hint.1.map(f)) } } @@ -881,9 +974,10 @@ enum ChainState { } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Chain where +impl Iterator for Chain +where A: Iterator, - B: Iterator + B: Iterator, { type Item = A::Item; @@ -912,8 +1006,11 @@ impl Iterator for Chain where } } - fn try_fold(&mut self, init: Acc, mut f: F) -> R where - Self: Sized, F: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: Acc, mut f: F) -> R + where + Self: Sized, + F: FnMut(Acc, Self::Item) -> R, + R: Try, { let mut accum = init; match self.state { @@ -923,7 +1020,7 @@ impl Iterator for Chain where self.state = ChainState::Back; } } - _ => { } + _ => {} } if let ChainState::Back = self.state { accum = self.b.try_fold(accum, &mut f)?; @@ -932,20 +1029,21 @@ impl Iterator for Chain where } fn fold(self, init: Acc, mut f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, Self::Item) -> Acc, { let mut accum = init; match self.state { ChainState::Both | ChainState::Front => { accum = self.a.fold(accum, &mut f); } - _ => { } + _ => {} } match self.state { ChainState::Both | ChainState::Back => { accum = self.b.fold(accum, &mut f); } - _ => { } + _ => {} } accum } @@ -956,7 +1054,7 @@ impl Iterator for Chain where ChainState::Both | ChainState::Front => { for x in self.a.by_ref() { if n == 0 { - return Some(x) + return Some(x); } n -= 1; } @@ -974,7 +1072,8 @@ impl Iterator for Chain where } #[inline] - fn find

(&mut self, mut predicate: P) -> Option<Self::Item> where
+    fn find<P>
(&mut self, mut predicate: P) -> Option + where P: FnMut(&Self::Item) -> bool, { match self.state { @@ -983,7 +1082,7 @@ impl Iterator for Chain where self.state = ChainState::Back; self.b.find(predicate) } - v => v + v => v, }, ChainState::Front => self.a.find(predicate), ChainState::Back => self.b.find(predicate), @@ -998,9 +1097,9 @@ impl Iterator for Chain where let a_last = self.a.last(); let b_last = self.b.last(); b_last.or(a_last) - }, + } ChainState::Front => self.a.last(), - ChainState::Back => self.b.last() + ChainState::Back => self.b.last(), } } @@ -1013,7 +1112,7 @@ impl Iterator for Chain where let upper = match (a_upper, b_upper) { (Some(x), Some(y)) => x.checked_add(y), - _ => None + _ => None, }; (lower, upper) @@ -1021,9 +1120,10 @@ impl Iterator for Chain where } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Chain where +impl DoubleEndedIterator for Chain +where A: DoubleEndedIterator, - B: DoubleEndedIterator, + B: DoubleEndedIterator, { #[inline] fn next_back(&mut self) -> Option { @@ -1040,8 +1140,11 @@ impl DoubleEndedIterator for Chain where } } - fn try_rfold(&mut self, init: Acc, mut f: F) -> R where - Self: Sized, F: FnMut(Acc, Self::Item) -> R, R: Try + fn try_rfold(&mut self, init: Acc, mut f: F) -> R + where + Self: Sized, + F: FnMut(Acc, Self::Item) -> R, + R: Try, { let mut accum = init; match self.state { @@ -1051,7 +1154,7 @@ impl DoubleEndedIterator for Chain where self.state = ChainState::Front; } } - _ => { } + _ => {} } if let ChainState::Front = self.state { accum = self.a.try_rfold(accum, &mut f)?; @@ -1060,37 +1163,42 @@ impl DoubleEndedIterator for Chain where } fn rfold(self, init: Acc, mut f: F) -> Acc - where F: FnMut(Acc, Self::Item) -> Acc, + where + F: FnMut(Acc, Self::Item) -> Acc, { let mut accum = init; match self.state { ChainState::Both | ChainState::Back => { accum = self.b.rfold(accum, &mut f); } - _ => { } + _ => {} } match self.state { ChainState::Both | ChainState::Front => { accum = self.a.rfold(accum, &mut f); } - _ => { } + _ => {} } accum } - } // Note: *both* must be fused to handle double-ended iterators. #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Chain - where A: FusedIterator, - B: FusedIterator, -{} +where + A: FusedIterator, + B: FusedIterator, +{ +} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for Chain - where A: TrustedLen, B: TrustedLen, -{} +where + A: TrustedLen, + B: TrustedLen, +{ +} /// An iterator that iterates two other iterators simultaneously. 
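// Illustrative usage sketch (not part of the patch): `chain` yields all of
// the first iterator and then all of the second; the `ChainState` tracking
// reflowed above records which half is still live so the adaptor also works
// double-ended and fused.
fn main() {
    let a = [1, 2];
    let b = [3, 4];
    let chained: Vec<i32> = a.iter().chain(b.iter()).cloned().collect();
    assert_eq!(chained, [1, 2, 3, 4]);
}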
/// @@ -1111,7 +1219,10 @@ pub struct Zip { } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Zip where A: Iterator, B: Iterator +impl Iterator for Zip +where + A: Iterator, + B: Iterator, { type Item = (A::Item, B::Item); @@ -1132,7 +1243,8 @@ impl Iterator for Zip where A: Iterator, B: Iterator } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Zip where +impl DoubleEndedIterator for Zip +where A: DoubleEndedIterator + ExactSizeIterator, B: DoubleEndedIterator + ExactSizeIterator, { @@ -1152,20 +1264,25 @@ trait ZipImpl { fn nth(&mut self, n: usize) -> Option; fn super_nth(&mut self, mut n: usize) -> Option { while let Some(x) = self.next() { - if n == 0 { return Some(x) } + if n == 0 { + return Some(x); + } n -= 1; } None } fn next_back(&mut self) -> Option - where A: DoubleEndedIterator + ExactSizeIterator, - B: DoubleEndedIterator + ExactSizeIterator; + where + A: DoubleEndedIterator + ExactSizeIterator, + B: DoubleEndedIterator + ExactSizeIterator; } // General Zip impl #[doc(hidden)] impl ZipImpl for Zip - where A: Iterator, B: Iterator +where + A: Iterator, + B: Iterator, { type Item = (A::Item, B::Item); default fn new(a: A, b: B) -> Self { @@ -1173,17 +1290,15 @@ impl ZipImpl for Zip a, b, index: 0, // unused - len: 0, // unused + len: 0, // unused } } #[inline] default fn next(&mut self) -> Option<(A::Item, B::Item)> { - self.a.next().and_then(|x| { - self.b.next().and_then(|y| { - Some((x, y)) - }) - }) + self.a + .next() + .and_then(|x| self.b.next().and_then(|y| Some((x, y)))) } #[inline] @@ -1193,17 +1308,22 @@ impl ZipImpl for Zip #[inline] default fn next_back(&mut self) -> Option<(A::Item, B::Item)> - where A: DoubleEndedIterator + ExactSizeIterator, - B: DoubleEndedIterator + ExactSizeIterator + where + A: DoubleEndedIterator + ExactSizeIterator, + B: DoubleEndedIterator + ExactSizeIterator, { let a_sz = self.a.len(); let b_sz = self.b.len(); if a_sz != b_sz { // Adjust a, b to equal length if a_sz > b_sz { - for _ in 0..a_sz - b_sz { self.a.next_back(); } + for _ in 0..a_sz - b_sz { + self.a.next_back(); + } } else { - for _ in 0..b_sz - a_sz { self.b.next_back(); } + for _ in 0..b_sz - a_sz { + self.b.next_back(); + } } } match (self.a.next_back(), self.b.next_back()) { @@ -1221,10 +1341,10 @@ impl ZipImpl for Zip let lower = cmp::min(a_lower, b_lower); let upper = match (a_upper, b_upper) { - (Some(x), Some(y)) => Some(cmp::min(x,y)), + (Some(x), Some(y)) => Some(cmp::min(x, y)), (Some(x), None) => Some(x), (None, Some(y)) => Some(y), - (None, None) => None + (None, None) => None, }; (lower, upper) @@ -1233,7 +1353,9 @@ impl ZipImpl for Zip #[doc(hidden)] impl ZipImpl for Zip - where A: TrustedRandomAccess, B: TrustedRandomAccess +where + A: TrustedRandomAccess, + B: TrustedRandomAccess, { fn new(a: A, b: B) -> Self { let len = cmp::min(a.len(), b.len()); @@ -1250,9 +1372,7 @@ impl ZipImpl for Zip if self.index < self.len { let i = self.index; self.index += 1; - unsafe { - Some((self.a.get_unchecked(i), self.b.get_unchecked(i))) - } + unsafe { Some((self.a.get_unchecked(i), self.b.get_unchecked(i))) } } else if A::may_have_side_effect() && self.index < self.a.len() { // match the base implementation's potential side effects unsafe { @@ -1279,10 +1399,14 @@ impl ZipImpl for Zip let i = self.index; self.index += 1; if A::may_have_side_effect() { - unsafe { self.a.get_unchecked(i); } + unsafe { + self.a.get_unchecked(i); + } } if B::may_have_side_effect() { - unsafe { self.b.get_unchecked(i); } + unsafe { + 
self.b.get_unchecked(i); + } } } @@ -1291,8 +1415,9 @@ impl ZipImpl for Zip #[inline] fn next_back(&mut self) -> Option<(A::Item, B::Item)> - where A: DoubleEndedIterator + ExactSizeIterator, - B: DoubleEndedIterator + ExactSizeIterator + where + A: DoubleEndedIterator + ExactSizeIterator, + B: DoubleEndedIterator + ExactSizeIterator, { // Adjust a, b to equal length if A::may_have_side_effect() { @@ -1314,9 +1439,7 @@ impl ZipImpl for Zip if self.index < self.len { self.len -= 1; let i = self.len; - unsafe { - Some((self.a.get_unchecked(i), self.b.get_unchecked(i))) - } + unsafe { Some((self.a.get_unchecked(i), self.b.get_unchecked(i))) } } else { None } @@ -1325,12 +1448,17 @@ impl ZipImpl for Zip #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Zip - where A: ExactSizeIterator, B: ExactSizeIterator {} +where + A: ExactSizeIterator, + B: ExactSizeIterator, +{ +} #[doc(hidden)] unsafe impl TrustedRandomAccess for Zip - where A: TrustedRandomAccess, - B: TrustedRandomAccess, +where + A: TrustedRandomAccess, + B: TrustedRandomAccess, { unsafe fn get_unchecked(&mut self, i: usize) -> (A::Item, B::Item) { (self.a.get_unchecked(i), self.b.get_unchecked(i)) @@ -1343,12 +1471,19 @@ unsafe impl TrustedRandomAccess for Zip #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Zip - where A: FusedIterator, B: FusedIterator, {} +where + A: FusedIterator, + B: FusedIterator, +{ +} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for Zip - where A: TrustedLen, B: TrustedLen, -{} +where + A: TrustedLen, + B: TrustedLen, +{ +} /// An iterator that maps the values of `iter` with `f`. /// @@ -1411,14 +1546,15 @@ pub struct Map { #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for Map { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("Map") - .field("iter", &self.iter) - .finish() + f.debug_struct("Map").field("iter", &self.iter).finish() } } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Map where F: FnMut(I::Item) -> B { +impl Iterator for Map +where + F: FnMut(I::Item) -> B, +{ type Item = B; #[inline] @@ -1431,15 +1567,19 @@ impl Iterator for Map where F: FnMut(I::Item) -> B { self.iter.size_hint() } - fn try_fold(&mut self, init: Acc, mut g: G) -> R where - Self: Sized, G: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: Acc, mut g: G) -> R + where + Self: Sized, + G: FnMut(Acc, Self::Item) -> R, + R: Try, { let f = &mut self.f; self.iter.try_fold(init, move |acc, elt| g(acc, f(elt))) } fn fold(self, init: Acc, mut g: G) -> Acc - where G: FnMut(Acc, Self::Item) -> Acc, + where + G: FnMut(Acc, Self::Item) -> Acc, { let mut f = self.f; self.iter.fold(init, move |acc, elt| g(acc, f(elt))) @@ -1447,7 +1587,8 @@ impl Iterator for Map where F: FnMut(I::Item) -> B { } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Map where +impl DoubleEndedIterator for Map +where F: FnMut(I::Item) -> B, { #[inline] @@ -1455,15 +1596,19 @@ impl DoubleEndedIterator for Map where self.iter.next_back().map(&mut self.f) } - fn try_rfold(&mut self, init: Acc, mut g: G) -> R where - Self: Sized, G: FnMut(Acc, Self::Item) -> R, R: Try + fn try_rfold(&mut self, init: Acc, mut g: G) -> R + where + Self: Sized, + G: FnMut(Acc, Self::Item) -> R, + R: Try, { let f = &mut self.f; self.iter.try_rfold(init, move |acc, elt| g(acc, f(elt))) } fn rfold(self, init: Acc, mut g: G) -> Acc - where G: FnMut(Acc, Self::Item) -> Acc, + where + G: FnMut(Acc, Self::Item) -> 
Acc, { let mut f = self.f; self.iter.rfold(init, move |acc, elt| g(acc, f(elt))) @@ -1472,7 +1617,8 @@ impl DoubleEndedIterator for Map where #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Map - where F: FnMut(I::Item) -> B +where + F: FnMut(I::Item) -> B, { fn len(&self) -> usize { self.iter.len() @@ -1484,24 +1630,29 @@ impl ExactSizeIterator for Map } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Map - where F: FnMut(I::Item) -> B {} +impl FusedIterator for Map where F: FnMut(I::Item) -> B {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for Map - where I: TrustedLen, - F: FnMut(I::Item) -> B {} +where + I: TrustedLen, + F: FnMut(I::Item) -> B, +{ +} #[doc(hidden)] unsafe impl TrustedRandomAccess for Map - where I: TrustedRandomAccess, - F: FnMut(I::Item) -> B, +where + I: TrustedRandomAccess, + F: FnMut(I::Item) -> B, { unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item { (self.f)(self.iter.get_unchecked(i)) } #[inline] - fn may_have_side_effect() -> bool { true } + fn may_have_side_effect() -> bool { + true + } } /// An iterator that filters the elements of `iter` with `predicate`. @@ -1522,14 +1673,15 @@ pub struct Filter { #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for Filter { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("Filter") - .field("iter", &self.iter) - .finish() + f.debug_struct("Filter").field("iter", &self.iter).finish() } } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Filter where P: FnMut(&I::Item) -> bool { +impl Iterator for Filter +where + P: FnMut(&I::Item) -> bool, +{ type Item = I::Item; #[inline] @@ -1569,33 +1721,42 @@ impl Iterator for Filter where P: FnMut(&I::Item) -> bool } #[inline] - fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { let predicate = &mut self.predicate; - self.iter.try_fold(init, move |acc, item| if predicate(&item) { - fold(acc, item) - } else { - Try::from_ok(acc) + self.iter.try_fold(init, move |acc, item| { + if predicate(&item) { + fold(acc, item) + } else { + Try::from_ok(acc) + } }) } #[inline] fn fold(self, init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { let mut predicate = self.predicate; - self.iter.fold(init, move |acc, item| if predicate(&item) { - fold(acc, item) - } else { - acc + self.iter.fold(init, move |acc, item| { + if predicate(&item) { + fold(acc, item) + } else { + acc + } }) } } #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for Filter - where P: FnMut(&I::Item) -> bool, +where + P: FnMut(&I::Item) -> bool, { #[inline] fn next_back(&mut self) -> Option { @@ -1608,33 +1769,40 @@ impl DoubleEndedIterator for Filter } #[inline] - fn try_rfold(&mut self, init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_rfold(&mut self, init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { let predicate = &mut self.predicate; - self.iter.try_rfold(init, move |acc, item| if predicate(&item) { - fold(acc, item) - } else { - Try::from_ok(acc) + self.iter.try_rfold(init, move |acc, item| { + if predicate(&item) { + fold(acc, item) + } else { + Try::from_ok(acc) + } }) } #[inline] 
fn rfold(self, init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { let mut predicate = self.predicate; - self.iter.rfold(init, move |acc, item| if predicate(&item) { - fold(acc, item) - } else { - acc + self.iter.rfold(init, move |acc, item| { + if predicate(&item) { + fold(acc, item) + } else { + acc + } }) } } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Filter - where P: FnMut(&I::Item) -> bool {} +impl FusedIterator for Filter where P: FnMut(&I::Item) -> bool {} /// An iterator that uses `f` to both filter and map elements from `iter`. /// @@ -1662,7 +1830,8 @@ impl fmt::Debug for FilterMap { #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for FilterMap - where F: FnMut(I::Item) -> Option, +where + F: FnMut(I::Item) -> Option, { type Item = B; @@ -1683,8 +1852,11 @@ impl Iterator for FilterMap } #[inline] - fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { let f = &mut self.f; self.iter.try_fold(init, move |acc, item| match f(item) { @@ -1695,7 +1867,8 @@ impl Iterator for FilterMap #[inline] fn fold(self, init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { let mut f = self.f; self.iter.fold(init, move |acc, item| match f(item) { @@ -1707,7 +1880,8 @@ impl Iterator for FilterMap #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for FilterMap - where F: FnMut(I::Item) -> Option, +where + F: FnMut(I::Item) -> Option, { #[inline] fn next_back(&mut self) -> Option { @@ -1720,8 +1894,11 @@ impl DoubleEndedIterator for FilterMap } #[inline] - fn try_rfold(&mut self, init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_rfold(&mut self, init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { let f = &mut self.f; self.iter.try_rfold(init, move |acc, item| match f(item) { @@ -1732,7 +1909,8 @@ impl DoubleEndedIterator for FilterMap #[inline] fn rfold(self, init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { let mut f = self.f; self.iter.rfold(init, move |acc, item| match f(item) { @@ -1743,8 +1921,7 @@ impl DoubleEndedIterator for FilterMap } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for FilterMap - where F: FnMut(I::Item) -> Option {} +impl FusedIterator for FilterMap where F: FnMut(I::Item) -> Option {} /// An iterator that yields the current count and the element during iteration. 
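// Illustrative usage sketch (not part of the patch): `filter_map` applies the
// closure and keeps only the `Some` results, which is the behaviour of the
// `FilterMap` adapter whose fold/rfold bodies are reflowed above.
fn main() {
    let nums: Vec<i32> = ["1", "two", "3"]
        .iter()
        .filter_map(|s| s.parse().ok())
        .collect();
    assert_eq!(nums, [1, 3]);
}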
/// @@ -1762,7 +1939,10 @@ pub struct Enumerate { } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Enumerate where I: Iterator { +impl Iterator for Enumerate +where + I: Iterator, +{ type Item = (usize, ::Item); /// # Overflow Behavior @@ -1807,8 +1987,11 @@ impl Iterator for Enumerate where I: Iterator { #[inline] #[rustc_inherit_overflow_checks] - fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { let count = &mut self.count; self.iter.try_fold(init, move |acc, item| { @@ -1821,7 +2004,8 @@ impl Iterator for Enumerate where I: Iterator { #[inline] #[rustc_inherit_overflow_checks] fn fold(self, init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { let mut count = self.count; self.iter.fold(init, move |acc, item| { @@ -1833,8 +2017,9 @@ impl Iterator for Enumerate where I: Iterator { } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Enumerate where - I: ExactSizeIterator + DoubleEndedIterator +impl DoubleEndedIterator for Enumerate +where + I: ExactSizeIterator + DoubleEndedIterator, { #[inline] fn next_back(&mut self) -> Option<(usize, ::Item)> { @@ -1847,8 +2032,11 @@ impl DoubleEndedIterator for Enumerate where } #[inline] - fn try_rfold(&mut self, init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_rfold(&mut self, init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { // Can safely add and subtract the count, as `ExactSizeIterator` promises // that the number of elements fits into a `usize`. @@ -1861,7 +2049,8 @@ impl DoubleEndedIterator for Enumerate where #[inline] fn rfold(self, init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { // Can safely add and subtract the count, as `ExactSizeIterator` promises // that the number of elements fits into a `usize`. @@ -1874,7 +2063,10 @@ impl DoubleEndedIterator for Enumerate where } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Enumerate where I: ExactSizeIterator { +impl ExactSizeIterator for Enumerate +where + I: ExactSizeIterator, +{ fn len(&self) -> usize { self.iter.len() } @@ -1886,7 +2078,8 @@ impl ExactSizeIterator for Enumerate where I: ExactSizeIterator { #[doc(hidden)] unsafe impl TrustedRandomAccess for Enumerate - where I: TrustedRandomAccess +where + I: TrustedRandomAccess, { unsafe fn get_unchecked(&mut self, i: usize) -> (usize, I::Item) { (self.count + i, self.iter.get_unchecked(i)) @@ -1901,10 +2094,7 @@ unsafe impl TrustedRandomAccess for Enumerate impl FusedIterator for Enumerate where I: FusedIterator {} #[unstable(feature = "trusted_len", issue = "37572")] -unsafe impl TrustedLen for Enumerate - where I: TrustedLen, -{} - +unsafe impl TrustedLen for Enumerate where I: TrustedLen {} /// An iterator with a `peek()` that returns an optional reference to the next /// element. 
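// Illustrative usage sketch (not part of the patch): `enumerate` pairs each
// item with a running index, the `count` field maintained by the impl above.
fn main() {
    let letters = ["a", "b", "c"];
    for (i, s) in letters.iter().enumerate() {
        println!("{}: {}", i, s);
    }
    assert_eq!(letters.iter().enumerate().last(), Some((2, &"c")));
}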
@@ -1983,8 +2173,11 @@ impl Iterator for Peekable { } #[inline] - fn try_fold(&mut self, init: B, mut f: F) -> R where - Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: B, mut f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try, { let acc = match self.peeked.take() { Some(None) => return Try::from_ok(init), @@ -1996,7 +2189,8 @@ impl Iterator for Peekable { #[inline] fn fold(self, init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { let acc = match self.peeked { Some(None) => return init, @@ -2087,7 +2281,8 @@ impl fmt::Debug for SkipWhile { #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for SkipWhile - where P: FnMut(&I::Item) -> bool +where + P: FnMut(&I::Item) -> bool, { type Item = I::Item; @@ -2112,8 +2307,11 @@ impl Iterator for SkipWhile } #[inline] - fn try_fold(&mut self, mut init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, mut init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { if !self.flag { match self.next() { @@ -2126,7 +2324,8 @@ impl Iterator for SkipWhile #[inline] fn fold(mut self, mut init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { if !self.flag { match self.next() { @@ -2140,7 +2339,11 @@ impl Iterator for SkipWhile #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for SkipWhile - where I: FusedIterator, P: FnMut(&I::Item) -> bool {} +where + I: FusedIterator, + P: FnMut(&I::Item) -> bool, +{ +} /// An iterator that only accepts elements while `predicate` is true. /// @@ -2170,7 +2373,8 @@ impl fmt::Debug for TakeWhile { #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for TakeWhile - where P: FnMut(&I::Item) -> bool +where + P: FnMut(&I::Item) -> bool, { type Item = I::Item; @@ -2201,29 +2405,38 @@ impl Iterator for TakeWhile } #[inline] - fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { if self.flag { Try::from_ok(init) } else { let flag = &mut self.flag; let p = &mut self.predicate; - self.iter.try_fold(init, move |acc, x|{ - if p(&x) { - LoopState::from_try(fold(acc, x)) - } else { - *flag = true; - LoopState::Break(Try::from_ok(acc)) - } - }).into_try() + self.iter + .try_fold(init, move |acc, x| { + if p(&x) { + LoopState::from_try(fold(acc, x)) + } else { + *flag = true; + LoopState::Break(Try::from_ok(acc)) + } + }) + .into_try() } } } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for TakeWhile - where I: FusedIterator, P: FnMut(&I::Item) -> bool {} +where + I: FusedIterator, + P: FnMut(&I::Item) -> bool, +{ +} /// An iterator that skips over `n` elements of `iter`. 
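// Illustrative usage sketch (not part of the patch): `peek` borrows the next
// element without consuming it, which is why the `Peekable` impl above caches
// it in `peeked` before folding.
fn main() {
    let mut it = [1, 2, 3].iter().peekable();
    assert_eq!(it.peek(), Some(&&1));
    assert_eq!(it.next(), Some(&1));
    assert_eq!(it.peek(), Some(&&2));
}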
/// @@ -2237,11 +2450,14 @@ impl FusedIterator for TakeWhile #[stable(feature = "rust1", since = "1.0.0")] pub struct Skip { iter: I, - n: usize + n: usize, } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Skip where I: Iterator { +impl Iterator for Skip +where + I: Iterator, +{ type Item = ::Item; #[inline] @@ -2264,7 +2480,7 @@ impl Iterator for Skip where I: Iterator { let to_skip = self.n; self.n = 0; // nth(n) skips n+1 - if self.iter.nth(to_skip-1).is_none() { + if self.iter.nth(to_skip - 1).is_none() { return None; } self.iter.nth(n) @@ -2302,8 +2518,11 @@ impl Iterator for Skip where I: Iterator { } #[inline] - fn try_fold(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { let n = self.n; self.n = 0; @@ -2318,7 +2537,8 @@ impl Iterator for Skip where I: Iterator { #[inline] fn fold(mut self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { if self.n > 0 { // nth(n) skips n+1 @@ -2334,7 +2554,10 @@ impl Iterator for Skip where I: Iterator { impl ExactSizeIterator for Skip where I: ExactSizeIterator {} #[stable(feature = "double_ended_skip_iterator", since = "1.9.0")] -impl DoubleEndedIterator for Skip where I: DoubleEndedIterator + ExactSizeIterator { +impl DoubleEndedIterator for Skip +where + I: DoubleEndedIterator + ExactSizeIterator, +{ fn next_back(&mut self) -> Option { if self.len() > 0 { self.iter.next_back() @@ -2343,19 +2566,27 @@ impl DoubleEndedIterator for Skip where I: DoubleEndedIterator + ExactSize } } - fn try_rfold(&mut self, init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_rfold(&mut self, init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { let mut n = self.len(); if n == 0 { Try::from_ok(init) } else { - self.iter.try_rfold(init, move |acc, x| { - n -= 1; - let r = fold(acc, x); - if n == 0 { LoopState::Break(r) } - else { LoopState::from_try(r) } - }).into_try() + self.iter + .try_rfold(init, move |acc, x| { + n -= 1; + let r = fold(acc, x); + if n == 0 { + LoopState::Break(r) + } else { + LoopState::from_try(r) + } + }) + .into_try() } } } @@ -2375,11 +2606,14 @@ impl FusedIterator for Skip where I: FusedIterator {} #[stable(feature = "rust1", since = "1.0.0")] pub struct Take { iter: I, - n: usize + n: usize, } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Take where I: Iterator{ +impl Iterator for Take +where + I: Iterator, +{ type Item = ::Item; #[inline] @@ -2418,26 +2652,34 @@ impl Iterator for Take where I: Iterator{ let upper = match upper { Some(x) if x < self.n => Some(x), - _ => Some(self.n) + _ => Some(self.n), }; (lower, upper) } #[inline] - fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { if self.n == 0 { Try::from_ok(init) } else { let n = &mut self.n; - self.iter.try_fold(init, move |acc, x| { - *n -= 1; - let r = fold(acc, x); - if *n == 0 { LoopState::Break(r) } - else { LoopState::from_try(r) } - }).into_try() + self.iter + .try_fold(init, move |acc, x| { + *n -= 1; + let r = fold(acc, x); + if *n == 0 { + LoopState::Break(r) + } else { + LoopState::from_try(r) 
+ } + }) + .into_try() } } } @@ -2478,7 +2720,8 @@ impl fmt::Debug for Scan { } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Scan where +impl Iterator for Scan +where I: Iterator, F: FnMut(&mut St, I::Item) -> Option, { @@ -2496,17 +2739,20 @@ impl Iterator for Scan where } #[inline] - fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { let state = &mut self.state; let f = &mut self.f; - self.iter.try_fold(init, move |acc, x| { - match f(state, x) { + self.iter + .try_fold(init, move |acc, x| match f(state, x) { None => LoopState::Break(Try::from_ok(acc)), Some(x) => LoopState::from_try(fold(acc, x)), - } - }).into_try() + }) + .into_try() } } @@ -2521,47 +2767,64 @@ impl Iterator for Scan where #[must_use = "iterator adaptors are lazy and do nothing unless consumed"] #[stable(feature = "rust1", since = "1.0.0")] pub struct FlatMap { - inner: FlattenCompat, ::IntoIter> + inner: FlattenCompat, ::IntoIter>, } #[stable(feature = "rust1", since = "1.0.0")] impl Clone for FlatMap - where ::IntoIter: Clone +where + ::IntoIter: Clone, { - fn clone(&self) -> Self { FlatMap { inner: self.inner.clone() } } + fn clone(&self) -> Self { + FlatMap { + inner: self.inner.clone(), + } + } } #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for FlatMap - where U::IntoIter: fmt::Debug +where + U::IntoIter: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("FlatMap").field("inner", &self.inner).finish() + f.debug_struct("FlatMap") + .field("inner", &self.inner) + .finish() } } #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for FlatMap - where F: FnMut(I::Item) -> U, +where + F: FnMut(I::Item) -> U, { type Item = U::Item; #[inline] - fn next(&mut self) -> Option { self.inner.next() } + fn next(&mut self) -> Option { + self.inner.next() + } #[inline] - fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } #[inline] - fn try_fold(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { self.inner.try_fold(init, fold) } #[inline] fn fold(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { self.inner.fold(init, fold) } @@ -2569,23 +2832,30 @@ impl Iterator for FlatMap #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for FlatMap - where F: FnMut(I::Item) -> U, - U: IntoIterator, - U::IntoIter: DoubleEndedIterator +where + F: FnMut(I::Item) -> U, + U: IntoIterator, + U::IntoIter: DoubleEndedIterator, { #[inline] - fn next_back(&mut self) -> Option { self.inner.next_back() } + fn next_back(&mut self) -> Option { + self.inner.next_back() + } #[inline] - fn try_rfold(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_rfold(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { self.inner.try_rfold(init, fold) } #[inline] fn rfold(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { 
self.inner.rfold(init, fold) } @@ -2593,7 +2863,12 @@ impl DoubleEndedIterator for FlatMap #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for FlatMap - where I: FusedIterator, U: IntoIterator, F: FnMut(I::Item) -> U {} +where + I: FusedIterator, + U: IntoIterator, + F: FnMut(I::Item) -> U, +{ +} /// An iterator that flattens one level of nesting in an iterator of things /// that can be turned into iterators. @@ -2606,51 +2881,73 @@ impl FusedIterator for FlatMap #[must_use = "iterator adaptors are lazy and do nothing unless consumed"] #[stable(feature = "iterator_flatten", since = "1.29.0")] pub struct Flatten -where I::Item: IntoIterator { +where + I::Item: IntoIterator, +{ inner: FlattenCompat::IntoIter>, } #[stable(feature = "iterator_flatten", since = "1.29.0")] impl fmt::Debug for Flatten - where I: Iterator + fmt::Debug, U: Iterator + fmt::Debug, - I::Item: IntoIterator, +where + I: Iterator + fmt::Debug, + U: Iterator + fmt::Debug, + I::Item: IntoIterator, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("Flatten").field("inner", &self.inner).finish() + f.debug_struct("Flatten") + .field("inner", &self.inner) + .finish() } } #[stable(feature = "iterator_flatten", since = "1.29.0")] impl Clone for Flatten - where I: Iterator + Clone, U: Iterator + Clone, - I::Item: IntoIterator, +where + I: Iterator + Clone, + U: Iterator + Clone, + I::Item: IntoIterator, { - fn clone(&self) -> Self { Flatten { inner: self.inner.clone() } } + fn clone(&self) -> Self { + Flatten { + inner: self.inner.clone(), + } + } } #[stable(feature = "iterator_flatten", since = "1.29.0")] impl Iterator for Flatten - where I: Iterator, U: Iterator, - I::Item: IntoIterator +where + I: Iterator, + U: Iterator, + I::Item: IntoIterator, { type Item = U::Item; #[inline] - fn next(&mut self) -> Option { self.inner.next() } + fn next(&mut self) -> Option { + self.inner.next() + } #[inline] - fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } #[inline] - fn try_fold(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { self.inner.try_fold(init, fold) } #[inline] fn fold(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { self.inner.fold(init, fold) } @@ -2658,22 +2955,30 @@ impl Iterator for Flatten #[stable(feature = "iterator_flatten", since = "1.29.0")] impl DoubleEndedIterator for Flatten - where I: DoubleEndedIterator, U: DoubleEndedIterator, - I::Item: IntoIterator +where + I: DoubleEndedIterator, + U: DoubleEndedIterator, + I::Item: IntoIterator, { #[inline] - fn next_back(&mut self) -> Option { self.inner.next_back() } + fn next_back(&mut self) -> Option { + self.inner.next_back() + } #[inline] - fn try_rfold(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_rfold(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { self.inner.try_rfold(init, fold) } #[inline] fn rfold(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { self.inner.rfold(init, fold) } @@ -2681,12 +2986,20 @@ impl DoubleEndedIterator for Flatten #[stable(feature = 
"iterator_flatten", since = "1.29.0")] impl FusedIterator for Flatten - where I: FusedIterator, U: Iterator, - I::Item: IntoIterator {} +where + I: FusedIterator, + U: Iterator, + I::Item: IntoIterator, +{ +} /// Adapts an iterator by flattening it, for use in `flatten()` and `flat_map()`. fn flatten_compat(iter: I) -> FlattenCompat { - FlattenCompat { iter, frontiter: None, backiter: None } + FlattenCompat { + iter, + frontiter: None, + backiter: None, + } } /// Real logic of both `Flatten` and `FlatMap` which simply delegate to @@ -2699,8 +3012,10 @@ struct FlattenCompat { } impl Iterator for FlattenCompat - where I: Iterator, U: Iterator, - I::Item: IntoIterator +where + I: Iterator, + U: Iterator, + I::Item: IntoIterator, { type Item = U::Item; @@ -2708,7 +3023,9 @@ impl Iterator for FlattenCompat fn next(&mut self) -> Option { loop { if let Some(ref mut inner) = self.frontiter { - if let elt@Some(_) = inner.next() { return elt } + if let elt @ Some(_) = inner.next() { + return elt; + } } match self.iter.next() { None => return self.backiter.as_mut().and_then(|it| it.next()), @@ -2719,18 +3036,27 @@ impl Iterator for FlattenCompat #[inline] fn size_hint(&self) -> (usize, Option) { - let (flo, fhi) = self.frontiter.as_ref().map_or((0, Some(0)), |it| it.size_hint()); - let (blo, bhi) = self.backiter.as_ref().map_or((0, Some(0)), |it| it.size_hint()); + let (flo, fhi) = self + .frontiter + .as_ref() + .map_or((0, Some(0)), |it| it.size_hint()); + let (blo, bhi) = self + .backiter + .as_ref() + .map_or((0, Some(0)), |it| it.size_hint()); let lo = flo.saturating_add(blo); match (self.iter.size_hint(), fhi, bhi) { ((0, Some(0)), Some(a), Some(b)) => (lo, a.checked_add(b)), - _ => (lo, None) + _ => (lo, None), } } #[inline] - fn try_fold(&mut self, mut init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, mut init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { if let Some(ref mut front) = self.frontiter { init = front.try_fold(init, &mut fold)?; @@ -2758,9 +3084,11 @@ impl Iterator for FlattenCompat #[inline] fn fold(self, init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { - self.frontiter.into_iter() + self.frontiter + .into_iter() .chain(self.iter.map(IntoIterator::into_iter)) .chain(self.backiter) .fold(init, |acc, iter| iter.fold(acc, &mut fold)) @@ -2768,14 +3096,18 @@ impl Iterator for FlattenCompat } impl DoubleEndedIterator for FlattenCompat - where I: DoubleEndedIterator, U: DoubleEndedIterator, - I::Item: IntoIterator +where + I: DoubleEndedIterator, + U: DoubleEndedIterator, + I::Item: IntoIterator, { #[inline] fn next_back(&mut self) -> Option { loop { if let Some(ref mut inner) = self.backiter { - if let elt@Some(_) = inner.next_back() { return elt } + if let elt @ Some(_) = inner.next_back() { + return elt; + } } match self.iter.next_back() { None => return self.frontiter.as_mut().and_then(|it| it.next_back()), @@ -2785,8 +3117,11 @@ impl DoubleEndedIterator for FlattenCompat } #[inline] - fn try_rfold(&mut self, mut init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_rfold(&mut self, mut init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { if let Some(ref mut back) = self.backiter { init = back.try_rfold(init, &mut fold)?; @@ -2814,9 +3149,11 @@ impl DoubleEndedIterator for 
FlattenCompat #[inline] fn rfold(self, init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { - self.frontiter.into_iter() + self.frontiter + .into_iter() .chain(self.iter.map(IntoIterator::into_iter)) .chain(self.backiter) .rfold(init, |acc, iter| iter.rfold(acc, &mut fold)) @@ -2836,14 +3173,17 @@ impl DoubleEndedIterator for FlattenCompat #[stable(feature = "rust1", since = "1.0.0")] pub struct Fuse { iter: I, - done: bool + done: bool, } #[stable(feature = "fused", since = "1.26.0")] impl FusedIterator for Fuse where I: Iterator {} #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Fuse where I: Iterator { +impl Iterator for Fuse +where + I: Iterator, +{ type Item = ::Item; #[inline] @@ -2896,8 +3236,11 @@ impl Iterator for Fuse where I: Iterator { } #[inline] - default fn try_fold(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + default fn try_fold(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { if self.done { Try::from_ok(init) @@ -2910,7 +3253,8 @@ impl Iterator for Fuse where I: Iterator { #[inline] default fn fold(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { if self.done { init @@ -2921,7 +3265,10 @@ impl Iterator for Fuse where I: Iterator { } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Fuse where I: DoubleEndedIterator { +impl DoubleEndedIterator for Fuse +where + I: DoubleEndedIterator, +{ #[inline] default fn next_back(&mut self) -> Option<::Item> { if self.done { @@ -2934,8 +3281,11 @@ impl DoubleEndedIterator for Fuse where I: DoubleEndedIterator { } #[inline] - default fn try_rfold(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + default fn try_rfold(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { if self.done { Try::from_ok(init) @@ -2948,7 +3298,8 @@ impl DoubleEndedIterator for Fuse where I: DoubleEndedIterator { #[inline] default fn rfold(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { if self.done { init @@ -2959,7 +3310,8 @@ impl DoubleEndedIterator for Fuse where I: DoubleEndedIterator { } unsafe impl TrustedRandomAccess for Fuse - where I: TrustedRandomAccess, +where + I: TrustedRandomAccess, { unsafe fn get_unchecked(&mut self, i: usize) -> I::Item { self.iter.get_unchecked(i) @@ -2971,7 +3323,10 @@ unsafe impl TrustedRandomAccess for Fuse } #[stable(feature = "fused", since = "1.26.0")] -impl Iterator for Fuse where I: FusedIterator { +impl Iterator for Fuse +where + I: FusedIterator, +{ #[inline] fn next(&mut self) -> Option<::Item> { self.iter.next() @@ -2998,15 +3353,19 @@ impl Iterator for Fuse where I: FusedIterator { } #[inline] - fn try_fold(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { self.iter.try_fold(init, fold) } #[inline] fn fold(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { self.iter.fold(init, fold) } @@ -3014,7 +3373,8 @@ impl Iterator for Fuse where I: FusedIterator { 
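To make the `done` flag concrete, here is an illustrative sketch (the `Flaky` iterator is invented for the example) of why `Fuse` exists; the specialized impl for `I: FusedIterator` above skips the flag entirely because such iterators already promise this behaviour:

```rust
// A deliberately ill-behaved iterator: it yields `None` and then starts
// producing items again, which the plain `Iterator` contract permits.
struct Flaky(u32);

impl Iterator for Flaky {
    type Item = u32;
    fn next(&mut self) -> Option<u32> {
        self.0 += 1;
        if self.0 % 2 == 0 {
            None
        } else {
            Some(self.0)
        }
    }
}

fn main() {
    let mut raw = Flaky(0);
    assert_eq!(raw.next(), Some(1));
    assert_eq!(raw.next(), None);
    assert_eq!(raw.next(), Some(3)); // a plain iterator may "resume" after None

    let mut fused = Flaky(0).fuse();
    assert_eq!(fused.next(), Some(1));
    assert_eq!(fused.next(), None);
    assert_eq!(fused.next(), None); // `done` is set, so it stays None
}
```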
#[stable(feature = "fused", since = "1.26.0")] impl DoubleEndedIterator for Fuse - where I: DoubleEndedIterator + FusedIterator +where + I: DoubleEndedIterator + FusedIterator, { #[inline] fn next_back(&mut self) -> Option<::Item> { @@ -3022,23 +3382,29 @@ impl DoubleEndedIterator for Fuse } #[inline] - fn try_rfold(&mut self, init: Acc, fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_rfold(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { self.iter.try_rfold(init, fold) } #[inline] fn rfold(self, init: Acc, fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { self.iter.rfold(init, fold) } } - #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Fuse where I: ExactSizeIterator { +impl ExactSizeIterator for Fuse +where + I: ExactSizeIterator, +{ fn len(&self) -> usize { self.iter.len() } @@ -3067,13 +3433,14 @@ pub struct Inspect { #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for Inspect { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("Inspect") - .field("iter", &self.iter) - .finish() + f.debug_struct("Inspect").field("iter", &self.iter).finish() } } -impl Inspect where F: FnMut(&I::Item) { +impl Inspect +where + F: FnMut(&I::Item), +{ #[inline] fn do_inspect(&mut self, elt: Option) -> Option { if let Some(ref a) = elt { @@ -3085,7 +3452,10 @@ impl Inspect where F: FnMut(&I::Item) { } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Inspect where F: FnMut(&I::Item) { +impl Iterator for Inspect +where + F: FnMut(&I::Item), +{ type Item = I::Item; #[inline] @@ -3100,25 +3470,36 @@ impl Iterator for Inspect where F: FnMut(&I::Item) { } #[inline] - fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_fold(&mut self, init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { let f = &mut self.f; - self.iter.try_fold(init, move |acc, item| { f(&item); fold(acc, item) }) + self.iter.try_fold(init, move |acc, item| { + f(&item); + fold(acc, item) + }) } #[inline] fn fold(self, init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { let mut f = self.f; - self.iter.fold(init, move |acc, item| { f(&item); fold(acc, item) }) + self.iter.fold(init, move |acc, item| { + f(&item); + fold(acc, item) + }) } } #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for Inspect - where F: FnMut(&I::Item), +where + F: FnMut(&I::Item), { #[inline] fn next_back(&mut self) -> Option { @@ -3127,25 +3508,36 @@ impl DoubleEndedIterator for Inspect } #[inline] - fn try_rfold(&mut self, init: Acc, mut fold: Fold) -> R where - Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try + fn try_rfold(&mut self, init: Acc, mut fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, { let f = &mut self.f; - self.iter.try_rfold(init, move |acc, item| { f(&item); fold(acc, item) }) + self.iter.try_rfold(init, move |acc, item| { + f(&item); + fold(acc, item) + }) } #[inline] fn rfold(self, init: Acc, mut fold: Fold) -> Acc - where Fold: FnMut(Acc, Self::Item) -> Acc, + where + Fold: FnMut(Acc, Self::Item) -> Acc, { let mut f = self.f; - self.iter.rfold(init, move |acc, item| { f(&item); fold(acc, item) }) + self.iter.rfold(init, move |acc, item| { 
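For reference, a small usage sketch (not from the patch): `Inspect` runs its closure purely for the side effect and forwards the item unchanged, and the forwarded `fold`/`try_fold` above keep that true for internal iteration as well:

```rust
fn main() {
    let mut seen = Vec::new();
    let sum: i32 = [1, 2, 3]
        .iter()
        .cloned()
        .inspect(|x| seen.push(*x))
        .sum(); // `sum` drives the iterator through `fold`
    assert_eq!(sum, 6);
    assert_eq!(seen, [1, 2, 3]);
}
```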
+ f(&item); + fold(acc, item) + }) } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Inspect - where F: FnMut(&I::Item) +where + F: FnMut(&I::Item), { fn len(&self) -> usize { self.iter.len() @@ -3157,5 +3549,4 @@ impl ExactSizeIterator for Inspect } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Inspect - where F: FnMut(&I::Item) {} +impl FusedIterator for Inspect where F: FnMut(&I::Item) {} diff --git a/src/libcore/iter/range.rs b/src/libcore/iter/range.rs index 66c09a0ddd0fb..3f55f20fed6d0 100644 --- a/src/libcore/iter/range.rs +++ b/src/libcore/iter/range.rs @@ -9,9 +9,11 @@ use super::{FusedIterator, TrustedLen}; /// /// The `steps_between` function provides a way to efficiently compare /// two `Step` objects. -#[unstable(feature = "step_trait", - reason = "likely to be replaced by finer-grained traits", - issue = "42168")] +#[unstable( + feature = "step_trait", + reason = "likely to be replaced by finer-grained traits", + issue = "42168" +)] pub trait Step: Clone + PartialOrd + Sized { /// Returns the number of steps between two step objects. The count is /// inclusive of `start` and exclusive of `end`. @@ -160,7 +162,7 @@ step_impl_unsigned!(usize u8 u16); step_impl_unsigned!(u32); #[cfg(target_pointer_width = "16")] step_impl_no_between!(u32); -step_impl_signed!([isize: usize] [i8: u8] [i16: u16]); +step_impl_signed!([isize: usize][i8: u8][i16: u16]); #[cfg(not(target_pointer_width = "16"))] step_impl_signed!([i32: u32]); #[cfg(target_pointer_width = "16")] @@ -229,7 +231,7 @@ impl Iterator for ops::Range { fn size_hint(&self) -> (usize, Option) { match Step::steps_between(&self.start, &self.end) { Some(hint) => (hint, Some(hint)), - None => (0, None) + None => (0, None), } } @@ -238,7 +240,7 @@ impl Iterator for ops::Range { if let Some(plus_n) = self.start.add_usize(n) { if plus_n < self.end { self.start = plus_n.add_one(); - return Some(plus_n) + return Some(plus_n); } } @@ -368,11 +370,11 @@ impl Iterator for ops::RangeInclusive { Some(Less) => { self.is_empty = Some(false); self.start = plus_n.add_one(); - return Some(plus_n) + return Some(plus_n); } Some(Equal) => { self.is_empty = Some(true); - return Some(plus_n) + return Some(plus_n); } _ => {} } diff --git a/src/libcore/iter/sources.rs b/src/libcore/iter/sources.rs index 2a39089a8a229..599a3e4df54e9 100644 --- a/src/libcore/iter/sources.rs +++ b/src/libcore/iter/sources.rs @@ -12,7 +12,7 @@ use super::{FusedIterator, TrustedLen}; #[derive(Clone, Debug)] #[stable(feature = "rust1", since = "1.0.0")] pub struct Repeat { - element: A + element: A, } #[stable(feature = "rust1", since = "1.0.0")] @@ -20,15 +20,21 @@ impl Iterator for Repeat { type Item = A; #[inline] - fn next(&mut self) -> Option { Some(self.element.clone()) } + fn next(&mut self) -> Option { + Some(self.element.clone()) + } #[inline] - fn size_hint(&self) -> (usize, Option) { (usize::MAX, None) } + fn size_hint(&self) -> (usize, Option) { + (usize::MAX, None) + } } #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for Repeat { #[inline] - fn next_back(&mut self) -> Option { Some(self.element.clone()) } + fn next_back(&mut self) -> Option { + Some(self.element.clone()) + } } #[stable(feature = "fused", since = "1.26.0")] @@ -92,7 +98,7 @@ unsafe impl TrustedLen for Repeat {} #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn repeat(elt: T) -> Repeat { - Repeat{element: elt} + Repeat { element: elt } } /// An iterator that repeats elements of type `A` endlessly by @@ -105,7 
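A usage sketch (not from the patch) of the `Range::nth` and `size_hint` logic reformatted above: `Step::add_usize` lets `nth` jump straight to the requested element rather than stepping one by one:

```rust
fn main() {
    // `size_hint` comes straight from `Step::steps_between`.
    assert_eq!((0..10).size_hint(), (10, Some(10)));

    // `nth` uses `Step::add_usize` to jump directly to the requested element,
    // then moves `start` past it.
    let mut r = 0..10;
    assert_eq!(r.nth(3), Some(3));
    assert_eq!(r, 4..10);

    // Requests past the end exhaust the range.
    assert_eq!(r.nth(100), None);
    assert_eq!(r, 10..10);
}
```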
+111,7 @@ pub fn repeat(elt: T) -> Repeat { #[derive(Copy, Clone, Debug)] #[stable(feature = "iterator_repeat_with", since = "1.28.0")] pub struct RepeatWith { - repeater: F + repeater: F, } #[stable(feature = "iterator_repeat_with", since = "1.28.0")] @@ -113,10 +119,14 @@ impl A> Iterator for RepeatWith { type Item = A; #[inline] - fn next(&mut self) -> Option { Some((self.repeater)()) } + fn next(&mut self) -> Option { + Some((self.repeater)()) + } #[inline] - fn size_hint(&self) -> (usize, Option) { (usize::MAX, None) } + fn size_hint(&self) -> (usize, Option) { + (usize::MAX, None) + } } #[stable(feature = "iterator_repeat_with", since = "1.28.0")] @@ -215,7 +225,7 @@ impl Iterator for Empty { None } - fn size_hint(&self) -> (usize, Option){ + fn size_hint(&self) -> (usize, Option) { (0, Some(0)) } } @@ -285,7 +295,7 @@ pub const fn empty() -> Empty { #[derive(Clone, Debug)] #[stable(feature = "iter_once", since = "1.2.0")] pub struct Once { - inner: ::option::IntoIter + inner: ::option::IntoIter, } #[stable(feature = "iter_once", since = "1.2.0")] @@ -374,7 +384,9 @@ impl FusedIterator for Once {} /// ``` #[stable(feature = "iter_once", since = "1.2.0")] pub fn once(value: T) -> Once { - Once { inner: Some(value).into_iter() } + Once { + inner: Some(value).into_iter(), + } } /// Creates a new iterator where each iteration calls the provided closure @@ -420,7 +432,8 @@ pub fn once(value: T) -> Once { #[inline] #[unstable(feature = "iter_unfold", issue = "55977")] pub fn unfold(initial_state: St, f: F) -> Unfold - where F: FnMut(&mut St) -> Option +where + F: FnMut(&mut St) -> Option, { Unfold { state: initial_state, @@ -443,7 +456,8 @@ pub struct Unfold { #[unstable(feature = "iter_unfold", issue = "55977")] impl Iterator for Unfold - where F: FnMut(&mut St) -> Option +where + F: FnMut(&mut St) -> Option, { type Item = T; @@ -476,15 +490,13 @@ impl fmt::Debug for Unfold { /// ``` #[unstable(feature = "iter_unfold", issue = "55977")] pub fn successors(first: Option, succ: F) -> Successors - where F: FnMut(&T) -> Option +where + F: FnMut(&T) -> Option, { // If this function returned `impl Iterator` // it could be based on `unfold` and not need a dedicated type. // However having a named `Successors` type allows it to be `Clone` when `T` and `F` are. - Successors { - next: first, - succ, - } + Successors { next: first, succ } } /// An new iterator where each successive item is computed based on the preceding one. 
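The `successors` constructor above was still unstable (`iter_unfold`, issue 55977) at the time of this diff; it has since reached stable as `std::iter::successors`. A small usage sketch:

```rust
use std::iter;

fn main() {
    // Each item is produced from the previous one until the closure returns `None`;
    // here: all powers of two that fit in a `u32`.
    let powers: Vec<u32> = iter::successors(Some(1u32), |&x| x.checked_mul(2)).collect();
    assert_eq!(powers.len(), 32);
    assert_eq!(powers[0], 1);
    assert_eq!(*powers.last().unwrap(), 1 << 31);
}
```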
@@ -502,7 +514,8 @@ pub struct Successors { #[unstable(feature = "iter_unfold", issue = "55977")] impl Iterator for Successors - where F: FnMut(&T) -> Option +where + F: FnMut(&T) -> Option, { type Item = T; @@ -525,9 +538,7 @@ impl Iterator for Successors } #[unstable(feature = "iter_unfold", issue = "55977")] -impl FusedIterator for Successors - where F: FnMut(&T) -> Option -{} +impl FusedIterator for Successors where F: FnMut(&T) -> Option {} #[unstable(feature = "iter_unfold", issue = "55977")] impl fmt::Debug for Successors { diff --git a/src/libcore/iter/traits.rs b/src/libcore/iter/traits.rs index e8c6cd8b79c72..cb57959b8c17a 100644 --- a/src/libcore/iter/traits.rs +++ b/src/libcore/iter/traits.rs @@ -1,5 +1,5 @@ -use ops::{Mul, Add, Try}; use num::Wrapping; +use ops::{Add, Mul, Try}; use super::LoopState; @@ -96,9 +96,9 @@ use super::LoopState; /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented( - message="a collection of type `{Self}` cannot be built from an iterator \ - over elements of type `{A}`", - label="a collection of type `{Self}` cannot be built from `std::iter::Iterator`", + message = "a collection of type `{Self}` cannot be built from an iterator \ + over elements of type `{A}`", + label = "a collection of type `{Self}` cannot be built from `std::iter::Iterator`" )] pub trait FromIterator: Sized { /// Creates a value from an iterator. @@ -121,7 +121,7 @@ pub trait FromIterator: Sized { /// assert_eq!(v, vec![5, 5, 5, 5, 5]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - fn from_iter>(iter: T) -> Self; + fn from_iter>(iter: T) -> Self; } /// Conversion into an `Iterator`. @@ -217,7 +217,7 @@ pub trait IntoIterator { /// Which kind of iterator are we turning this into? #[stable(feature = "rust1", since = "1.0.0")] - type IntoIter: Iterator; + type IntoIter: Iterator; /// Creates an iterator from a value. /// @@ -343,7 +343,7 @@ pub trait Extend { /// assert_eq!("abcdef", &message); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - fn extend>(&mut self, iter: T); + fn extend>(&mut self, iter: T); } #[stable(feature = "extend_for_unit", since = "1.28.0")] @@ -468,7 +468,9 @@ pub trait DoubleEndedIterator: Iterator { #[unstable(feature = "iter_nth_back", issue = "56995")] fn nth_back(&mut self, mut n: usize) -> Option { for x in self.rev() { - if n == 0 { return Some(x) } + if n == 0 { + return Some(x); + } n -= 1; } None @@ -512,7 +514,7 @@ pub trait DoubleEndedIterator: Iterator { where Self: Sized, F: FnMut(B, Self::Item) -> R, - R: Try + R: Try, { let mut accum = init; while let Some(x) = self.next_back() { @@ -579,7 +581,8 @@ pub trait DoubleEndedIterator: Iterator { Self: Sized, F: FnMut(B, Self::Item) -> B, { - self.try_rfold(accum, move |acc, x| Ok::(f(acc, x))).unwrap() + self.try_rfold(accum, move |acc, x| Ok::(f(acc, x))) + .unwrap() } /// Searches for an element of an iterator from the back that satisfies a predicate. @@ -629,12 +632,16 @@ pub trait DoubleEndedIterator: Iterator { fn rfind
(&mut self, mut predicate: P) -> Option where Self: Sized, - P: FnMut(&Self::Item) -> bool + P: FnMut(&Self::Item) -> bool, { self.try_rfold((), move |(), x| { - if predicate(&x) { LoopState::Break(x) } - else { LoopState::Continue(()) } - }).break_value() + if predicate(&x) { + LoopState::Break(x) + } else { + LoopState::Continue(()) + } + }) + .break_value() } } @@ -806,7 +813,7 @@ pub trait Sum: Sized { /// Method which takes an iterator and generates `Self` from the elements by /// "summing up" the items. #[stable(feature = "iter_arith_traits", since = "1.12.0")] - fn sum>(iter: I) -> Self; + fn sum>(iter: I) -> Self; } /// Trait to represent types that can be created by multiplying elements of an @@ -825,7 +832,7 @@ pub trait Product: Sized { /// Method which takes an iterator and generates `Self` from the elements by /// multiplying the items. #[stable(feature = "iter_arith_traits", since = "1.12.0")] - fn product>(iter: I) -> Self; + fn product>(iter: I) -> Self; } // N.B., explicitly use Add and Mul here to inherit overflow checks @@ -915,13 +922,15 @@ struct ResultShunt { } impl ResultShunt - where I: Iterator> +where + I: Iterator>, { /// Process the given iterator as if it yielded a `T` instead of a /// `Result`. Any errors will stop the inner iterator and /// the overall result will be an error. pub fn process(iter: I, mut f: F) -> Result - where F: FnMut(&mut Self) -> U + where + F: FnMut(&mut Self) -> U, { let mut shunt = ResultShunt::new(iter); let value = f(shunt.by_ref()); @@ -929,10 +938,7 @@ impl ResultShunt } fn new(iter: I) -> Self { - ResultShunt { - iter, - error: None, - } + ResultShunt { iter, error: None } } /// Consume the adapter and rebuild a `Result` value. This should @@ -947,7 +953,8 @@ impl ResultShunt } impl Iterator for ResultShunt - where I: Iterator> +where + I: Iterator>, { type Item = T; @@ -972,9 +979,10 @@ impl Iterator for ResultShunt } } -#[stable(feature = "iter_arith_traits_result", since="1.16.0")] +#[stable(feature = "iter_arith_traits_result", since = "1.16.0")] impl Sum> for Result - where T: Sum, +where + T: Sum, { /// Takes each element in the `Iterator`: if it is an `Err`, no further /// elements are taken, and the `Err` is returned. Should no `Err` occur, @@ -994,21 +1002,24 @@ impl Sum> for Result /// assert_eq!(res, Ok(3)); /// ``` fn sum(iter: I) -> Result - where I: Iterator>, + where + I: Iterator>, { ResultShunt::process(iter, |i| i.sum()) } } -#[stable(feature = "iter_arith_traits_result", since="1.16.0")] +#[stable(feature = "iter_arith_traits_result", since = "1.16.0")] impl Product> for Result - where T: Product, +where + T: Product, { /// Takes each element in the `Iterator`: if it is an `Err`, no further /// elements are taken, and the `Err` is returned. Should no `Err` occur, /// the product of all elements is returned. 
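A quick sketch (not from the patch) of the back-to-front behaviour that the `rfind` and `rfold` defaults above build on `try_rfold`:

```rust
fn main() {
    let a = [1, 2, 3, 4, 3];

    // `rfind` searches from the back, breaking out of the fold at the first match.
    assert_eq!(a.iter().rfind(|&&x| x == 3), Some(&3));
    assert_eq!(a.iter().rfind(|&&x| x == 7), None);

    // `rfold` is the eager back-to-front fold that the defaults build on `try_rfold`.
    let backwards: String = a.iter().rfold(String::new(), |acc, x| acc + &x.to_string());
    assert_eq!(backwards, "34321");
}
```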
fn product(iter: I) -> Result - where I: Iterator>, + where + I: Iterator>, { ResultShunt::process(iter, |i| i.product()) } @@ -1054,7 +1065,7 @@ impl FusedIterator for &mut I {} /// [`usize::MAX`]: ../../std/usize/constant.MAX.html /// [`.size_hint`]: ../../std/iter/trait.Iterator.html#method.size_hint #[unstable(feature = "trusted_len", issue = "37572")] -pub unsafe trait TrustedLen : Iterator {} +pub unsafe trait TrustedLen: Iterator {} #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for &mut I {} diff --git a/src/libcore/iter_private.rs b/src/libcore/iter_private.rs index 890db47b19700..3c86749ea644f 100644 --- a/src/libcore/iter_private.rs +++ b/src/libcore/iter_private.rs @@ -9,7 +9,7 @@ /// indices (if applicable), and must return a valid reference if index is in /// 0..self.len(). #[doc(hidden)] -pub unsafe trait TrustedRandomAccess : ExactSizeIterator { +pub unsafe trait TrustedRandomAccess: ExactSizeIterator { unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item; /// Returns `true` if getting an iterator element may have /// side effects. Remember to take inner iterators into account. diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs index 5ea765d3585a2..be974d2dc1541 100644 --- a/src/libcore/lib.rs +++ b/src/libcore/lib.rs @@ -49,21 +49,20 @@ // // This cfg won't affect doc tests. #![cfg(not(test))] - #![stable(feature = "core", since = "1.6.0")] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", - html_playground_url = "https://play.rust-lang.org/", - issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/", - test(no_crate_inject, attr(deny(warnings))), - test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))] - +#![doc( + html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/", + html_playground_url = "https://play.rust-lang.org/", + issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/", + test(no_crate_inject, attr(deny(warnings))), + test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))) +)] #![no_core] #![deny(missing_docs)] #![deny(intra_doc_link_resolution_failure)] #![deny(missing_debug_implementations)] - #![feature(allow_internal_unstable)] #![feature(arbitrary_self_types)] #![feature(asm)] @@ -140,22 +139,36 @@ mod int_macros; #[macro_use] mod uint_macros; -#[path = "num/isize.rs"] pub mod isize; -#[path = "num/i8.rs"] pub mod i8; -#[path = "num/i16.rs"] pub mod i16; -#[path = "num/i32.rs"] pub mod i32; -#[path = "num/i64.rs"] pub mod i64; -#[path = "num/i128.rs"] pub mod i128; +#[path = "num/i128.rs"] +pub mod i128; +#[path = "num/i16.rs"] +pub mod i16; +#[path = "num/i32.rs"] +pub mod i32; +#[path = "num/i64.rs"] +pub mod i64; +#[path = "num/i8.rs"] +pub mod i8; +#[path = "num/isize.rs"] +pub mod isize; -#[path = "num/usize.rs"] pub mod usize; -#[path = "num/u8.rs"] pub mod u8; -#[path = "num/u16.rs"] pub mod u16; -#[path = "num/u32.rs"] pub mod u32; -#[path = "num/u64.rs"] pub mod u64; -#[path = "num/u128.rs"] pub mod u128; +#[path = "num/u128.rs"] +pub mod u128; +#[path = "num/u16.rs"] +pub mod u16; +#[path = "num/u32.rs"] +pub mod u32; +#[path = "num/u64.rs"] +pub mod u64; +#[path = "num/u8.rs"] +pub mod u8; +#[path = "num/usize.rs"] +pub mod usize; -#[path = "num/f32.rs"] 
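The `ResultShunt` adapter above is what backs the `Sum`/`Product` impls for `Result`; in use (example mine, not from the diff), the first `Err` stops iteration and is returned:

```rust
fn main() {
    // Summing `Result`s: the first `Err` stops the inner iterator and is returned.
    let ok: Result<i32, &str> = vec![Ok(1), Ok(2), Ok(3)].into_iter().sum();
    assert_eq!(ok, Ok(6));
    let err: Result<i32, &str> = vec![Ok(1), Err("boom"), Ok(3)].into_iter().sum();
    assert_eq!(err, Err("boom"));

    // Same shunting mechanism for products.
    let prod: Result<i32, &str> = vec![Ok(2), Ok(3), Ok(4)].into_iter().product();
    assert_eq!(prod, Ok(24));
}
```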
pub mod f32; -#[path = "num/f64.rs"] pub mod f64; +#[path = "num/f32.rs"] +pub mod f32; +#[path = "num/f64.rs"] +pub mod f64; #[macro_use] pub mod num; @@ -166,42 +179,42 @@ pub mod prelude; /* Core modules for ownership management */ +pub mod hint; pub mod intrinsics; pub mod mem; pub mod ptr; -pub mod hint; /* Core language traits */ -pub mod marker; -pub mod ops; -pub mod cmp; +pub mod borrow; pub mod clone; -pub mod default; +pub mod cmp; pub mod convert; -pub mod borrow; +pub mod default; +pub mod marker; +pub mod ops; /* Core types and methods on primitives */ pub mod any; pub mod array; pub mod ascii; -pub mod sync; pub mod cell; pub mod char; +pub mod ffi; +pub mod iter; +pub mod option; pub mod panic; pub mod panicking; pub mod pin; -pub mod iter; -pub mod option; pub mod raw; pub mod result; -pub mod ffi; +pub mod sync; +pub mod fmt; +pub mod hash; pub mod slice; pub mod str; -pub mod hash; -pub mod fmt; pub mod time; pub mod unicode; @@ -225,17 +238,29 @@ mod unit; // different repository, rust-lang-nursery/stdsimd. That's why the setup here is // a bit wonky. #[allow(unused_macros)] -macro_rules! test_v16 { ($item:item) => {}; } +macro_rules! test_v16 { + ($item:item) => {}; +} #[allow(unused_macros)] -macro_rules! test_v32 { ($item:item) => {}; } +macro_rules! test_v32 { + ($item:item) => {}; +} #[allow(unused_macros)] -macro_rules! test_v64 { ($item:item) => {}; } +macro_rules! test_v64 { + ($item:item) => {}; +} #[allow(unused_macros)] -macro_rules! test_v128 { ($item:item) => {}; } +macro_rules! test_v128 { + ($item:item) => {}; +} #[allow(unused_macros)] -macro_rules! test_v256 { ($item:item) => {}; } +macro_rules! test_v256 { + ($item:item) => {}; +} #[allow(unused_macros)] -macro_rules! test_v512 { ($item:item) => {}; } +macro_rules! test_v512 { + ($item:item) => {}; +} #[allow(unused_macros)] macro_rules! vector_impl { ($([$f:ident, $($args:tt)*]),*) => { $($f!($($args)*);)* } } #[path = "../stdsimd/coresimd/mod.rs"] diff --git a/src/libcore/macros.rs b/src/libcore/macros.rs index 2f350df2f5c18..ef0c1422c0e07 100644 --- a/src/libcore/macros.rs +++ b/src/libcore/macros.rs @@ -291,13 +291,17 @@ macro_rules! debug_assert_ne { #[stable(feature = "rust1", since = "1.0.0")] #[doc(alias = "?")] macro_rules! r#try { - ($expr:expr) => (match $expr { - $crate::result::Result::Ok(val) => val, - $crate::result::Result::Err(err) => { - return $crate::result::Result::Err($crate::convert::From::from(err)) + ($expr:expr) => { + match $expr { + $crate::result::Result::Ok(val) => val, + $crate::result::Result::Err(err) => { + return $crate::result::Result::Err($crate::convert::From::from(err)) + } } - }); - ($expr:expr,) => (r#try!($expr)); + }; + ($expr:expr,) => { + r#try!($expr) + }; } /// Write formatted data into a buffer. @@ -565,8 +569,8 @@ mod builtin { #[stable(feature = "compile_error_macro", since = "1.20.0")] #[rustc_doc_only_macro] macro_rules! compile_error { - ($msg:expr) => ({ /* compiler built-in */ }); - ($msg:expr,) => ({ /* compiler built-in */ }); + ($msg:expr) => {{ /* compiler built-in */ }}; + ($msg:expr,) => {{ /* compiler built-in */ }}; } /// The core macro for formatted string creation & output. @@ -577,8 +581,8 @@ mod builtin { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_doc_only_macro] macro_rules! 
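The `r#try!` expansion reformatted above performs the same unwrap-or-early-return (with a `From::from` error conversion) as the `?` operator; a minimal equivalent in user code (function and argument names invented for the example):

```rust
use std::num::ParseIntError;

// By hand, each `?` below is `match expr { Ok(v) => v, Err(e) => return Err(From::from(e)) }`.
fn parse_two(a: &str, b: &str) -> Result<i32, ParseIntError> {
    let x: i32 = a.parse()?;
    let y: i32 = b.parse()?;
    Ok(x + y)
}

fn main() {
    assert_eq!(parse_two("2", "40"), Ok(42));
    assert!(parse_two("2", "forty").is_err());
}
```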
format_args { - ($fmt:expr) => ({ /* compiler built-in */ }); - ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ }); + ($fmt:expr) => {{ /* compiler built-in */ }}; + ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }}; } /// Inspect an environment variable at compile time. @@ -589,8 +593,8 @@ mod builtin { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_doc_only_macro] macro_rules! env { - ($name:expr) => ({ /* compiler built-in */ }); - ($name:expr,) => ({ /* compiler built-in */ }); + ($name:expr) => {{ /* compiler built-in */ }}; + ($name:expr,) => {{ /* compiler built-in */ }}; } /// Optionally inspect an environment variable at compile time. @@ -601,8 +605,8 @@ mod builtin { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_doc_only_macro] macro_rules! option_env { - ($name:expr) => ({ /* compiler built-in */ }); - ($name:expr,) => ({ /* compiler built-in */ }); + ($name:expr) => {{ /* compiler built-in */ }}; + ($name:expr,) => {{ /* compiler built-in */ }}; } /// Concatenate identifiers into one identifier. @@ -613,8 +617,8 @@ mod builtin { #[unstable(feature = "concat_idents_macro", issue = "29599")] #[rustc_doc_only_macro] macro_rules! concat_idents { - ($($e:ident),+) => ({ /* compiler built-in */ }); - ($($e:ident,)+) => ({ /* compiler built-in */ }); + ($($e:ident),+) => {{ /* compiler built-in */ }}; + ($($e:ident,)+) => {{ /* compiler built-in */ }}; } /// Concatenates literals into a static string slice. @@ -625,8 +629,8 @@ mod builtin { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_doc_only_macro] macro_rules! concat { - ($($e:expr),*) => ({ /* compiler built-in */ }); - ($($e:expr,)*) => ({ /* compiler built-in */ }); + ($($e:expr),*) => {{ /* compiler built-in */ }}; + ($($e:expr,)*) => {{ /* compiler built-in */ }}; } /// A macro which expands to the line number on which it was invoked. @@ -636,7 +640,9 @@ mod builtin { /// [`std::line!`]: ../std/macro.line.html #[stable(feature = "rust1", since = "1.0.0")] #[rustc_doc_only_macro] - macro_rules! line { () => ({ /* compiler built-in */ }) } + macro_rules! line { + () => {{ /* compiler built-in */ }}; + } /// A macro which expands to the column number on which it was invoked. /// @@ -645,7 +651,9 @@ mod builtin { /// [`std::column!`]: ../std/macro.column.html #[stable(feature = "rust1", since = "1.0.0")] #[rustc_doc_only_macro] - macro_rules! column { () => ({ /* compiler built-in */ }) } + macro_rules! column { + () => {{ /* compiler built-in */ }}; + } /// A macro which expands to the file name from which it was invoked. /// @@ -654,7 +662,9 @@ mod builtin { /// [`std::file!`]: ../std/macro.file.html #[stable(feature = "rust1", since = "1.0.0")] #[rustc_doc_only_macro] - macro_rules! file { () => ({ /* compiler built-in */ }) } + macro_rules! file { + () => {{ /* compiler built-in */ }}; + } /// A macro which stringifies its arguments. /// @@ -663,7 +673,9 @@ mod builtin { /// [`std::stringify!`]: ../std/macro.stringify.html #[stable(feature = "rust1", since = "1.0.0")] #[rustc_doc_only_macro] - macro_rules! stringify { ($($t:tt)*) => ({ /* compiler built-in */ }) } + macro_rules! stringify { + ($($t:tt)*) => {{ /* compiler built-in */ }}; + } /// Includes a utf8-encoded file as a string. /// @@ -673,8 +685,8 @@ mod builtin { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_doc_only_macro] macro_rules! 
include_str { - ($file:expr) => ({ /* compiler built-in */ }); - ($file:expr,) => ({ /* compiler built-in */ }); + ($file:expr) => {{ /* compiler built-in */ }}; + ($file:expr,) => {{ /* compiler built-in */ }}; } /// Includes a file as a reference to a byte array. @@ -685,8 +697,8 @@ mod builtin { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_doc_only_macro] macro_rules! include_bytes { - ($file:expr) => ({ /* compiler built-in */ }); - ($file:expr,) => ({ /* compiler built-in */ }); + ($file:expr) => {{ /* compiler built-in */ }}; + ($file:expr,) => {{ /* compiler built-in */ }}; } /// Expands to a string that represents the current module path. @@ -696,7 +708,9 @@ mod builtin { /// [`std::module_path!`]: ../std/macro.module_path.html #[stable(feature = "rust1", since = "1.0.0")] #[rustc_doc_only_macro] - macro_rules! module_path { () => ({ /* compiler built-in */ }) } + macro_rules! module_path { + () => {{ /* compiler built-in */ }}; + } /// Boolean evaluation of configuration flags, at compile-time. /// @@ -705,7 +719,9 @@ mod builtin { /// [`std::cfg!`]: ../std/macro.cfg.html #[stable(feature = "rust1", since = "1.0.0")] #[rustc_doc_only_macro] - macro_rules! cfg { ($($cfg:tt)*) => ({ /* compiler built-in */ }) } + macro_rules! cfg { + ($($cfg:tt)*) => {{ /* compiler built-in */ }}; + } /// Parse a file as an expression or an item according to the context. /// @@ -715,8 +731,8 @@ mod builtin { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_doc_only_macro] macro_rules! include { - ($file:expr) => ({ /* compiler built-in */ }); - ($file:expr,) => ({ /* compiler built-in */ }); + ($file:expr) => {{ /* compiler built-in */ }}; + ($file:expr,) => {{ /* compiler built-in */ }}; } /// Ensure that a boolean expression is `true` at runtime. @@ -727,8 +743,8 @@ mod builtin { #[rustc_doc_only_macro] #[stable(feature = "rust1", since = "1.0.0")] macro_rules! assert { - ($cond:expr) => ({ /* compiler built-in */ }); - ($cond:expr,) => ({ /* compiler built-in */ }); - ($cond:expr, $($arg:tt)+) => ({ /* compiler built-in */ }); + ($cond:expr) => {{ /* compiler built-in */ }}; + ($cond:expr,) => {{ /* compiler built-in */ }}; + ($cond:expr, $($arg:tt)+) => {{ /* compiler built-in */ }}; } } diff --git a/src/libcore/marker.rs b/src/libcore/marker.rs index 53af924376b9d..ac8a03293b556 100644 --- a/src/libcore/marker.rs +++ b/src/libcore/marker.rs @@ -30,17 +30,17 @@ use hash::Hasher; /// [ub]: ../../reference/behavior-considered-undefined.html #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented( - message="`{Self}` cannot be sent between threads safely", - label="`{Self}` cannot be sent between threads safely" + message = "`{Self}` cannot be sent between threads safely", + label = "`{Self}` cannot be sent between threads safely" )] pub unsafe auto trait Send { // empty. } #[stable(feature = "rust1", since = "1.0.0")] -impl !Send for *const T { } +impl !Send for *const T {} #[stable(feature = "rust1", since = "1.0.0")] -impl !Send for *mut T { } +impl !Send for *mut T {} /// Types with a constant size known at compile time. 
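The macro bodies above are stubs because these are compiler built-ins that expand at compile time; the stubs exist only so the macros can be documented. A few illustrative uses on stable Rust:

```rust
fn main() {
    // `concat!` and `stringify!` operate on tokens at compile time.
    assert_eq!(concat!("hello", " ", "world"), "hello world");
    assert_eq!(stringify!(1 + 1), "1 + 1");

    // `file!`, `line!` and `module_path!` expand to information about the call site.
    println!("called from {}:{} in {}", file!(), line!(), module_path!());

    // `cfg!` evaluates a configuration predicate to a `bool`.
    if cfg!(target_pointer_width = "64") {
        println!("compiled for a 64-bit target");
    }
}
```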
/// @@ -82,11 +82,11 @@ impl !Send for *mut T { } #[stable(feature = "rust1", since = "1.0.0")] #[lang = "sized"] #[rustc_on_unimplemented( - on(parent_trait="std::path::Path", label="borrow the `Path` instead"), - message="the size for values of type `{Self}` cannot be known at compilation time", - label="doesn't have a size known at compile-time", - note="to learn more, visit ", + on(parent_trait = "std::path::Path", label = "borrow the `Path` instead"), + message = "the size for values of type `{Self}` cannot be known at compilation time", + label = "doesn't have a size known at compile-time", + note = "to learn more, visit " )] #[fundamental] // for Default, for example, which requires that `[T]: !Default` be evaluatable pub trait Sized { @@ -284,7 +284,7 @@ pub trait Unsize { /// [impls]: #implementors #[stable(feature = "rust1", since = "1.0.0")] #[lang = "copy"] -pub trait Copy : Clone { +pub trait Copy: Clone { // Empty. } @@ -359,8 +359,8 @@ pub trait Copy : Clone { #[stable(feature = "rust1", since = "1.0.0")] #[lang = "sync"] #[rustc_on_unimplemented( - message="`{Self}` cannot be shared between threads safely", - label="`{Self}` cannot be shared between threads safely" + message = "`{Self}` cannot be shared between threads safely", + label = "`{Self}` cannot be shared between threads safely" )] pub unsafe auto trait Sync { // FIXME(estebank): once support to add notes in `rustc_on_unimplemented` @@ -377,61 +377,59 @@ pub unsafe auto trait Sync { } #[stable(feature = "rust1", since = "1.0.0")] -impl !Sync for *const T { } +impl !Sync for *const T {} #[stable(feature = "rust1", since = "1.0.0")] -impl !Sync for *mut T { } +impl !Sync for *mut T {} -macro_rules! impls{ - ($t: ident) => ( +macro_rules! impls { + ($t: ident) => { #[stable(feature = "rust1", since = "1.0.0")] - impl Hash for $t { + impl Hash for $t { #[inline] - fn hash(&self, _: &mut H) { - } + fn hash(&self, _: &mut H) {} } #[stable(feature = "rust1", since = "1.0.0")] - impl cmp::PartialEq for $t { + impl cmp::PartialEq for $t { fn eq(&self, _other: &$t) -> bool { true } } #[stable(feature = "rust1", since = "1.0.0")] - impl cmp::Eq for $t { - } + impl cmp::Eq for $t {} #[stable(feature = "rust1", since = "1.0.0")] - impl cmp::PartialOrd for $t { + impl cmp::PartialOrd for $t { fn partial_cmp(&self, _other: &$t) -> Option { Option::Some(cmp::Ordering::Equal) } } #[stable(feature = "rust1", since = "1.0.0")] - impl cmp::Ord for $t { + impl cmp::Ord for $t { fn cmp(&self, _other: &$t) -> cmp::Ordering { cmp::Ordering::Equal } } #[stable(feature = "rust1", since = "1.0.0")] - impl Copy for $t { } + impl Copy for $t {} #[stable(feature = "rust1", since = "1.0.0")] - impl Clone for $t { + impl Clone for $t { fn clone(&self) -> $t { $t } } #[stable(feature = "rust1", since = "1.0.0")] - impl Default for $t { + impl Default for $t { fn default() -> $t { $t } } - ) + }; } /// Zero-sized type used to mark things that "act like" they own a `T`. @@ -570,7 +568,7 @@ macro_rules! impls{ #[lang = "phantom_data"] #[structural_match] #[stable(feature = "rust1", since = "1.0.0")] -pub struct PhantomData; +pub struct PhantomData; impls! 
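The hand-written `impls!` macro above gives `PhantomData<T>` its `Copy`, `Clone`, `Eq`, `Ord`, `Hash`, and `Default` impls without placing any bound on `T`; a small illustration (the `NotCloneable` type is invented for the example):

```rust
use std::cmp::Ordering;
use std::marker::PhantomData;

#[allow(dead_code)]
struct NotCloneable;

fn main() {
    // `PhantomData<T>` implements these traits for *any* `T`, even one that
    // implements none of them itself.
    let a: PhantomData<NotCloneable> = PhantomData;
    let b = a; // Copy
    assert!(a == b); // PartialEq / Eq
    assert_eq!(a.cmp(&b), Ordering::Equal); // Ord
    let _c: PhantomData<NotCloneable> = Default::default();
}
```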
{ PhantomData } diff --git a/src/libcore/mem.rs b/src/libcore/mem.rs index c024868714cab..653f5dffac92e 100644 --- a/src/libcore/mem.rs +++ b/src/libcore/mem.rs @@ -11,8 +11,8 @@ use fmt; use hash; use intrinsics; use marker::{Copy, PhantomData, Sized}; -use ptr; use ops::{Deref, DerefMut}; +use ptr; #[stable(feature = "rust1", since = "1.0.0")] pub use intrinsics::transmute; @@ -621,7 +621,10 @@ pub unsafe fn zeroed() -> T { /// [copy_no]: ../intrinsics/fn.copy_nonoverlapping.html /// [`Drop`]: ../ops/trait.Drop.html #[inline] -#[rustc_deprecated(since = "2.0.0", reason = "use `mem::MaybeUninit::uninitialized` instead")] +#[rustc_deprecated( + since = "2.0.0", + reason = "use `mem::MaybeUninit::uninitialized` instead" +)] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn uninitialized() -> T { intrinsics::uninit() @@ -799,7 +802,7 @@ pub fn replace(dest: &mut T, mut src: T) -> T { /// [`Copy`]: ../../std/marker/trait.Copy.html #[inline] #[stable(feature = "rust1", since = "1.0.0")] -pub fn drop(_x: T) { } +pub fn drop(_x: T) {} /// Interprets `src` as having type `&U`, and then reads `src` without moving /// the contained value. @@ -888,9 +891,7 @@ impl hash::Hash for Discriminant { #[stable(feature = "discriminant_value", since = "1.21.0")] impl fmt::Debug for Discriminant { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_tuple("Discriminant") - .field(&self.0) - .finish() + fmt.debug_tuple("Discriminant").field(&self.0).finish() } } @@ -920,9 +921,7 @@ impl fmt::Debug for Discriminant { /// ``` #[stable(feature = "discriminant_value", since = "1.21.0")] pub fn discriminant(v: &T) -> Discriminant { - unsafe { - Discriminant(intrinsics::discriminant_value(v), PhantomData) - } + unsafe { Discriminant(intrinsics::discriminant_value(v), PhantomData) } } /// A wrapper to inhibit compiler from automatically calling `T`’s destructor. @@ -1077,7 +1076,9 @@ impl MaybeUninit { #[unstable(feature = "maybe_uninit", issue = "53491")] #[inline(always)] pub const fn new(val: T) -> MaybeUninit { - MaybeUninit { value: ManuallyDrop::new(val) } + MaybeUninit { + value: ManuallyDrop::new(val), + } } /// Create a new `MaybeUninit` in an uninitialized state. diff --git a/src/libcore/num/bignum.rs b/src/libcore/num/bignum.rs index 99a427ba15974..24bcdbf2481f7 100644 --- a/src/libcore/num/bignum.rs +++ b/src/libcore/num/bignum.rs @@ -17,8 +17,8 @@ issue = "0")] #![macro_use] -use mem; use intrinsics; +use mem; /// Arithmetic operations required by bignums. pub trait FullOps: Sized { @@ -36,10 +36,8 @@ pub trait FullOps: Sized { /// Returns `(quo, rem)` such that `borrow * 2^W + self = quo * other + rem` /// and `0 <= rem < other`, where `W` is the number of bits in `Self`. - fn full_div_rem(self, - other: Self, - borrow: Self) - -> (Self /* quotient */, Self /* remainder */); + fn full_div_rem(self, other: Self, borrow: Self) + -> (Self /* quotient */, Self /* remainder */); } macro_rules! impl_full_ops { @@ -96,7 +94,7 @@ impl_full_ops! { const SMALL_POW5: [(u64, usize); 3] = [(125, 3), (15625, 6), (1_220_703_125, 13)]; macro_rules! define_bignum { - ($name:ident: type=$ty:ty, n=$n:expr) => ( + ($name:ident: type=$ty:ty, n=$n:expr) => { /// Stack-allocated arbitrary-precision (up to certain limit) integer. /// /// This is backed by a fixed-size array of given type ("digit"). @@ -113,7 +111,7 @@ macro_rules! define_bignum { size: usize, /// Digits. `[a, b, c, ...]` represents `a + b*2^W + c*2^(2W) + ...` /// where `W` is the number of bits in the digit type. 
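Alongside the `mem.rs` cleanups, a short usage sketch (not from the patch) of two of the touched items, `discriminant` and `replace`:

```rust
use std::mem;

#[allow(dead_code)]
enum Msg {
    Ping,
    Text(String),
}

fn main() {
    // `discriminant` identifies the variant while ignoring the payload.
    assert_eq!(
        mem::discriminant(&Msg::Text("a".into())),
        mem::discriminant(&Msg::Text("b".into()))
    );
    assert_ne!(
        mem::discriminant(&Msg::Ping),
        mem::discriminant(&Msg::Text("c".into()))
    );

    // `replace` swaps the new value in and hands the old one back.
    let mut slot = String::from("old");
    let old = mem::replace(&mut slot, String::from("new"));
    assert_eq!(old, "old");
    assert_eq!(slot, "new");
}
```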
- base: [$ty; $n] + base: [$ty; $n], } impl $name { @@ -121,7 +119,10 @@ macro_rules! define_bignum { pub fn from_small(v: $ty) -> $name { let mut base = [0; $n]; base[0] = v; - $name { size: 1, base: base } + $name { + size: 1, + base: base, + } } /// Makes a bignum from `u64` value. @@ -135,7 +136,10 @@ macro_rules! define_bignum { v >>= mem::size_of::<$ty>() * 8; sz += 1; } - $name { size: sz, base: base } + $name { + size: sz, + base: base, + } } /// Returns the internal digits as a slice `[a, b, c, ...]` such that the numeric @@ -178,7 +182,7 @@ macro_rules! define_bignum { } // This could be optimized with leading_zeros() and bit shifts, but that's // probably not worth the hassle. - let digitbits = mem::size_of::<$ty>()* 8; + let digitbits = mem::size_of::<$ty>() * 8; let mut i = nonzero.len() * digitbits - 1; while self.get_bit(i) == 0 { i -= 1; @@ -270,12 +274,12 @@ macro_rules! define_bignum { let bits = bits % digitbits; assert!(digits < $n); - debug_assert!(self.base[$n-digits..].iter().all(|&v| v == 0)); - debug_assert!(bits == 0 || (self.base[$n-digits-1] >> (digitbits - bits)) == 0); + debug_assert!(self.base[$n - digits..].iter().all(|&v| v == 0)); + debug_assert!(bits == 0 || (self.base[$n - digits - 1] >> (digitbits - bits)) == 0); // shift by `digits * digitbits` bits for i in (0..self.size).rev() { - self.base[i+digits] = self.base[i]; + self.base[i + digits] = self.base[i]; } for i in 0..digits { self.base[i] = 0; @@ -285,14 +289,14 @@ macro_rules! define_bignum { let mut sz = self.size + digits; if bits > 0 { let last = sz; - let overflow = self.base[last-1] >> (digitbits - bits); + let overflow = self.base[last - 1] >> (digitbits - bits); if overflow > 0 { self.base[last] = overflow; sz += 1; } - for i in (digits+1..last).rev() { - self.base[i] = (self.base[i] << bits) | - (self.base[i-1] >> (digitbits - bits)); + for i in (digits + 1..last).rev() { + self.base[i] = + (self.base[i] << bits) | (self.base[i - 1] >> (digitbits - bits)); } self.base[digits] <<= bits; // self.base[..digits] is zero, no need to shift @@ -329,7 +333,6 @@ macro_rules! define_bignum { self } - /// Multiplies itself by a number described by `other[0] + other[1] * 2^W + /// other[2] * 2^(2W) + ...` (where `W` is the number of bits in the digit type) /// and returns its own mutable reference. @@ -340,7 +343,9 @@ macro_rules! define_bignum { let mut retsz = 0; for (i, &a) in aa.iter().enumerate() { - if a == 0 { continue; } + if a == 0 { + continue; + } let mut sz = bb.len(); let mut carry = 0; for (j, &b) in bb.iter().enumerate() { @@ -428,11 +433,12 @@ macro_rules! define_bignum { } impl ::cmp::PartialEq for $name { - fn eq(&self, other: &$name) -> bool { self.base[..] == other.base[..] } + fn eq(&self, other: &$name) -> bool { + self.base[..] == other.base[..] + } } - impl ::cmp::Eq for $name { - } + impl ::cmp::Eq for $name {} impl ::cmp::PartialOrd for $name { fn partial_cmp(&self, other: &$name) -> ::option::Option<::cmp::Ordering> { @@ -452,7 +458,10 @@ macro_rules! define_bignum { impl ::clone::Clone for $name { fn clone(&self) -> $name { - $name { size: self.size, base: self.base } + $name { + size: self.size, + base: self.base, + } } } @@ -460,17 +469,17 @@ macro_rules! 
define_bignum { fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result { use mem; - let sz = if self.size < 1 {1} else {self.size}; + let sz = if self.size < 1 { 1 } else { self.size }; let digitlen = mem::size_of::<$ty>() * 2; - write!(f, "{:#x}", self.base[sz-1])?; - for &v in self.base[..sz-1].iter().rev() { + write!(f, "{:#x}", self.base[sz - 1])?; + for &v in self.base[..sz - 1].iter().rev() { write!(f, "_{:01$x}", v, digitlen)?; } ::result::Result::Ok(()) } } - ) + }; } /// The digit type for `Big32x40`. diff --git a/src/libcore/num/dec2flt/algorithm.rs b/src/libcore/num/dec2flt/algorithm.rs index d56fa9662a994..3f50ddbae9d22 100644 --- a/src/libcore/num/dec2flt/algorithm.rs +++ b/src/libcore/num/dec2flt/algorithm.rs @@ -1,11 +1,11 @@ //! The various algorithms from the paper. use cmp::min; -use cmp::Ordering::{Less, Equal, Greater}; -use num::diy_float::Fp; -use num::dec2flt::table; -use num::dec2flt::rawfp::{self, Unpacked, RawFloat, fp_to_float, next_float, prev_float}; +use cmp::Ordering::{Equal, Greater, Less}; use num::dec2flt::num::{self, Big}; +use num::dec2flt::rawfp::{self, fp_to_float, next_float, prev_float, RawFloat, Unpacked}; +use num::dec2flt::table; +use num::diy_float::Fp; /// Number of significand bits in Fp const P: u32 = 64; @@ -23,9 +23,9 @@ fn power_of_ten(e: i16) -> Fp { // In most architectures, floating point operations have an explicit bit size, therefore the // precision of the computation is determined on a per-operation basis. -#[cfg(any(not(target_arch="x86"), target_feature="sse2"))] +#[cfg(any(not(target_arch = "x86"), target_feature = "sse2"))] mod fpu_precision { - pub fn set_precision() { } + pub fn set_precision() {} } // On x86, the x87 FPU is used for float operations if the SSE/SSE2 extensions are not available. @@ -33,7 +33,7 @@ mod fpu_precision { // round to 80 bits causing double rounding to happen when values are eventually represented as // 32/64 bit float values. To overcome this, the FPU control word can be set so that the // computations are performed in the desired precision. -#[cfg(all(target_arch="x86", not(target_feature="sse2")))] +#[cfg(all(target_arch = "x86", not(target_feature = "sse2")))] mod fpu_precision { use mem::size_of; diff --git a/src/libcore/num/dec2flt/mod.rs b/src/libcore/num/dec2flt/mod.rs index 58b196a6eac3d..d1b4c1eb3eea6 100644 --- a/src/libcore/num/dec2flt/mod.rs +++ b/src/libcore/num/dec2flt/mod.rs @@ -85,16 +85,16 @@ use fmt; use str::FromStr; -use self::parse::{parse_decimal, Decimal, Sign, ParseResult}; use self::num::digits_to_big; +use self::parse::{parse_decimal, Decimal, ParseResult, Sign}; use self::rawfp::RawFloat; mod algorithm; -mod table; mod num; +mod table; // These two have their own tests. -pub mod rawfp; pub mod parse; +pub mod rawfp; macro_rules! from_str_float_impl { ($t:ty) => { @@ -131,7 +131,7 @@ macro_rules! 
from_str_float_impl { dec2flt(src) } } - } + }; } from_str_float_impl!(f32); from_str_float_impl!(f64); @@ -147,7 +147,7 @@ from_str_float_impl!(f64); #[derive(Debug, Clone, PartialEq, Eq)] #[stable(feature = "rust1", since = "1.0.0")] pub struct ParseFloatError { - kind: FloatErrorKind + kind: FloatErrorKind, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -157,10 +157,12 @@ enum FloatErrorKind { } impl ParseFloatError { - #[unstable(feature = "int_error_internals", - reason = "available through Error trait and this method should \ - not be exposed publicly", - issue = "0")] + #[unstable( + feature = "int_error_internals", + reason = "available through Error trait and this method should \ + not be exposed publicly", + issue = "0" + )] #[doc(hidden)] pub fn __description(&self) -> &str { match self.kind { @@ -178,11 +180,15 @@ impl fmt::Display for ParseFloatError { } fn pfe_empty() -> ParseFloatError { - ParseFloatError { kind: FloatErrorKind::Empty } + ParseFloatError { + kind: FloatErrorKind::Empty, + } } fn pfe_invalid() -> ParseFloatError { - ParseFloatError { kind: FloatErrorKind::Invalid } + ParseFloatError { + kind: FloatErrorKind::Invalid, + } } /// Split decimal string into sign and the rest, without inspecting or validating the rest. @@ -198,7 +204,7 @@ fn extract_sign(s: &str) -> (Sign, &str) { /// Convert a decimal string into a floating point number. fn dec2flt(s: &str) -> Result { if s.is_empty() { - return Err(pfe_empty()) + return Err(pfe_empty()); } let (sign, s) = extract_sign(s); let flt = match parse_decimal(s) { @@ -208,8 +214,10 @@ fn dec2flt(s: &str) -> Result { ParseResult::Invalid => match s { "inf" => T::INFINITY, "NaN" => T::NAN, - _ => { return Err(pfe_invalid()); } - } + _ => { + return Err(pfe_invalid()); + } + }, }; match sign { diff --git a/src/libcore/num/dec2flt/num.rs b/src/libcore/num/dec2flt/num.rs index b76c58cc66e6b..d984c2d71b2bf 100644 --- a/src/libcore/num/dec2flt/num.rs +++ b/src/libcore/num/dec2flt/num.rs @@ -2,7 +2,7 @@ // FIXME This module's name is a bit unfortunate, since other modules also import `core::num`. -use cmp::Ordering::{self, Less, Equal, Greater}; +use cmp::Ordering::{self, Equal, Greater, Less}; pub use num::bignum::Big32x40 as Big; @@ -36,7 +36,10 @@ pub fn compare_with_half_ulp(f: &Big, ones_place: usize) -> Ordering { /// 1. using `FromStr` on `&[u8]` requires `from_utf8_unchecked`, which is bad, and /// 2. piecing together the results of `integral.parse()` and `fractional.parse()` is /// more complicated than this entire function. -pub fn from_str_unchecked<'a, T>(bytes: T) -> u64 where T : IntoIterator { +pub fn from_str_unchecked<'a, T>(bytes: T) -> u64 +where + T: IntoIterator, +{ let mut result = 0; for &c in bytes { result = result * 10 + (c - b'0') as u64; @@ -68,7 +71,6 @@ pub fn to_u64(x: &Big) -> u64 { } } - /// Extract a range of bits. /// Index 0 is the least significant bit and the range is half-open as usual. diff --git a/src/libcore/num/dec2flt/parse.rs b/src/libcore/num/dec2flt/parse.rs index 9e075e43303b6..9279e350fc8f7 100644 --- a/src/libcore/num/dec2flt/parse.rs +++ b/src/libcore/num/dec2flt/parse.rs @@ -10,8 +10,8 @@ //! modules rely on to not panic (or overflow) in turn. //! To make matters worse, all that happens in a single pass over the input. //! So, be careful when modifying anything, and double-check with the other modules. 
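End to end, the `dec2flt` plumbing above is what `str::parse` for floats goes through; a few cases that hold for this implementation (and for today's stable parser):

```rust
fn main() {
    // Empty input maps to `FloatErrorKind::Empty`, "inf" and "NaN" are handled as
    // special cases, and anything the decimal parser rejects is `Invalid`.
    assert!("".parse::<f64>().is_err());
    assert_eq!("inf".parse::<f64>(), Ok(f64::INFINITY));
    assert!("NaN".parse::<f64>().unwrap().is_nan());
    assert_eq!("12.5e2".parse::<f64>(), Ok(1250.0));
    assert!("12.5x".parse::<f64>().is_err());
}
```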
+use self::ParseResult::{Invalid, ShortcutToInf, ShortcutToZero, Valid}; use super::num; -use self::ParseResult::{Valid, ShortcutToInf, ShortcutToZero, Invalid}; #[derive(Debug)] pub enum Sign { @@ -30,7 +30,11 @@ pub struct Decimal<'a> { impl<'a> Decimal<'a> { pub fn new(integral: &'a [u8], fractional: &'a [u8], exp: i64) -> Decimal<'a> { - Decimal { integral, fractional, exp } + Decimal { + integral, + fractional, + exp, + } } } diff --git a/src/libcore/num/dec2flt/rawfp.rs b/src/libcore/num/dec2flt/rawfp.rs index 6976bd1a0eefd..4909732e45d10 100644 --- a/src/libcore/num/dec2flt/rawfp.rs +++ b/src/libcore/num/dec2flt/rawfp.rs @@ -17,15 +17,15 @@ //! Many functions in this module only handle normal numbers. The dec2flt routines conservatively //! take the universally-correct slow path (Algorithm M) for very small and very large numbers. //! That algorithm needs only next_float() which does handle subnormals and zeros. -use cmp::Ordering::{Less, Equal, Greater}; +use cmp::Ordering::{Equal, Greater, Less}; use convert::{TryFrom, TryInto}; -use ops::{Add, Mul, Div, Neg}; use fmt::{Debug, LowerExp}; -use num::diy_float::Fp; -use num::FpCategory::{Infinite, Zero, Subnormal, Normal, Nan}; -use num::FpCategory; use num::dec2flt::num::{self, Big}; use num::dec2flt::table; +use num::diy_float::Fp; +use num::FpCategory; +use num::FpCategory::{Infinite, Nan, Normal, Subnormal, Zero}; +use ops::{Add, Div, Mul, Neg}; #[derive(Copy, Clone, Debug)] pub struct Unpacked { @@ -44,13 +44,8 @@ impl Unpacked { /// See the parent module's doc comment for why this is necessary. /// /// Should **never ever** be implemented for other types or be used outside the dec2flt module. -pub trait RawFloat - : Copy - + Debug - + LowerExp - + Mul - + Div - + Neg +pub trait RawFloat: + Copy + Debug + LowerExp + Mul + Div + Neg { const INFINITY: Self; const NAN: Self; @@ -144,7 +139,7 @@ macro_rules! other_constants { const INFINITY: Self = $crate::$type::INFINITY; const NAN: Self = $crate::$type::NAN; const ZERO: Self = 0.0; - } + }; } impl RawFloat for f32 { @@ -188,12 +183,17 @@ impl RawFloat for f32 { table::F32_SHORT_POWERS[e] } - fn classify(self) -> FpCategory { self.classify() } - fn to_bits(self) -> Self::Bits { self.to_bits() } - fn from_bits(v: Self::Bits) -> Self { Self::from_bits(v) } + fn classify(self) -> FpCategory { + self.classify() + } + fn to_bits(self) -> Self::Bits { + self.to_bits() + } + fn from_bits(v: Self::Bits) -> Self { + Self::from_bits(v) + } } - impl RawFloat for f64 { type Bits = u64; @@ -235,9 +235,15 @@ impl RawFloat for f64 { table::F64_SHORT_POWERS[e] } - fn classify(self) -> FpCategory { self.classify() } - fn to_bits(self) -> Self::Bits { self.to_bits() } - fn from_bits(v: Self::Bits) -> Self { Self::from_bits(v) } + fn classify(self) -> FpCategory { + self.classify() + } + fn to_bits(self) -> Self::Bits { + self.to_bits() + } + fn from_bits(v: Self::Bits) -> Self { + Self::from_bits(v) + } } /// Convert an Fp to the closest machine float type. @@ -248,7 +254,7 @@ pub fn fp_to_float(x: Fp) -> T { let e = x.e + 63; if e > T::MAX_EXP { panic!("fp_to_float: exponent {} too large", e) - } else if e > T::MIN_EXP { + } else if e > T::MIN_EXP { encode_normal(round_normal::(x)) } else { panic!("fp_to_float: exponent {} too small", e) @@ -278,14 +284,18 @@ pub fn round_normal(x: Fp) -> Unpacked { /// Inverse of `RawFloat::unpack()` for normalized numbers. /// Panics if the significand or exponent are not valid for normalized numbers. 
pub fn encode_normal(x: Unpacked) -> T { - debug_assert!(T::MIN_SIG <= x.sig && x.sig <= T::MAX_SIG, - "encode_normal: significand not normalized"); + debug_assert!( + T::MIN_SIG <= x.sig && x.sig <= T::MAX_SIG, + "encode_normal: significand not normalized" + ); // Remove the hidden bit let sig_enc = x.sig & !(1 << T::EXPLICIT_SIG_BITS); // Adjust the exponent for exponent bias and mantissa shift let k_enc = x.k + T::MAX_EXP + T::EXPLICIT_SIG_BITS as i16; - debug_assert!(k_enc != 0 && k_enc < T::MAX_ENCODED_EXP, - "encode_normal: exponent out of range"); + debug_assert!( + k_enc != 0 && k_enc < T::MAX_ENCODED_EXP, + "encode_normal: exponent out of range" + ); // Leave sign bit at 0 ("+"), our numbers are all positive let bits = (k_enc as u64) << T::EXPLICIT_SIG_BITS | sig_enc; T::from_bits(bits.try_into().unwrap_or_else(|_| unreachable!())) @@ -293,7 +303,10 @@ pub fn encode_normal(x: Unpacked) -> T { /// Construct a subnormal. A mantissa of 0 is allowed and constructs zero. pub fn encode_subnormal(significand: u64) -> T { - assert!(significand < T::MIN_SIG, "encode_subnormal: not actually subnormal"); + assert!( + significand < T::MIN_SIG, + "encode_subnormal: not actually subnormal" + ); // Encoded exponent is 0, the sign bit is 0, so we just have to reinterpret the bits. T::from_bits(significand.try_into().unwrap_or_else(|_| unreachable!())) } @@ -314,8 +327,11 @@ pub fn big_to_fp(f: &Big) -> Fp { Equal if leading % 2 == 0 => rounded_down, Equal | Greater => match leading.checked_add(1) { Some(f) => Fp { f, e }.normalize(), - None => Fp { f: 1 << 63, e: e + 1 }, - } + None => Fp { + f: 1 << 63, + e: e + 1, + }, + }, } } @@ -354,8 +370,6 @@ pub fn next_float(x: T) -> T { // want, and the mantissa bits become zero. Because of the hidden bit convention, this // too is exactly what we want! // Finally, f64::MAX + 1 = 7eff...f + 1 = 7ff0...0 = f64::INFINITY. 
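The comment above spells out the bit trick that `next_float` relies on; a standalone sketch (not part of the patch) of the same identity using the stable `to_bits`/`from_bits` API:

```rust
fn main() {
    // For positive finite floats, adding 1 to the raw bit pattern yields the next
    // representable value; the carry flows from mantissa into exponent on its own.
    let x = 1.0f64;
    let next = f64::from_bits(x.to_bits() + 1);
    assert!(next > x);
    assert_eq!(next - x, f64::EPSILON);

    // At the top of the range the same increment lands exactly on infinity.
    assert_eq!(f64::from_bits(f64::MAX.to_bits() + 1), f64::INFINITY);
}
```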
- Zero | Subnormal | Normal => { - T::from_bits(x.to_bits() + T::Bits::from(1u8)) - } + Zero | Subnormal | Normal => T::from_bits(x.to_bits() + T::Bits::from(1u8)), } } diff --git a/src/libcore/num/dec2flt/table.rs b/src/libcore/num/dec2flt/table.rs index 345ac830aaa77..600a419090cd3 100644 --- a/src/libcore/num/dec2flt/table.rs +++ b/src/libcore/num/dec2flt/table.rs @@ -4,1268 +4,665 @@ pub const MIN_E: i16 = -305; pub const MAX_E: i16 = 305; -pub const POWERS: ([u64; 611], [i16; 611]) = ([ - 0xe0b62e2929aba83c, - 0x8c71dcd9ba0b4926, - 0xaf8e5410288e1b6f, - 0xdb71e91432b1a24b, - 0x892731ac9faf056f, - 0xab70fe17c79ac6ca, - 0xd64d3d9db981787d, - 0x85f0468293f0eb4e, - 0xa76c582338ed2622, - 0xd1476e2c07286faa, - 0x82cca4db847945ca, - 0xa37fce126597973d, - 0xcc5fc196fefd7d0c, - 0xff77b1fcbebcdc4f, - 0x9faacf3df73609b1, - 0xc795830d75038c1e, - 0xf97ae3d0d2446f25, - 0x9becce62836ac577, - 0xc2e801fb244576d5, - 0xf3a20279ed56d48a, - 0x9845418c345644d7, - 0xbe5691ef416bd60c, - 0xedec366b11c6cb8f, - 0x94b3a202eb1c3f39, - 0xb9e08a83a5e34f08, - 0xe858ad248f5c22ca, - 0x91376c36d99995be, - 0xb58547448ffffb2e, - 0xe2e69915b3fff9f9, - 0x8dd01fad907ffc3c, - 0xb1442798f49ffb4b, - 0xdd95317f31c7fa1d, - 0x8a7d3eef7f1cfc52, - 0xad1c8eab5ee43b67, - 0xd863b256369d4a41, - 0x873e4f75e2224e68, - 0xa90de3535aaae202, - 0xd3515c2831559a83, - 0x8412d9991ed58092, - 0xa5178fff668ae0b6, - 0xce5d73ff402d98e4, - 0x80fa687f881c7f8e, - 0xa139029f6a239f72, - 0xc987434744ac874f, - 0xfbe9141915d7a922, - 0x9d71ac8fada6c9b5, - 0xc4ce17b399107c23, - 0xf6019da07f549b2b, - 0x99c102844f94e0fb, - 0xc0314325637a193a, - 0xf03d93eebc589f88, - 0x96267c7535b763b5, - 0xbbb01b9283253ca3, - 0xea9c227723ee8bcb, - 0x92a1958a7675175f, - 0xb749faed14125d37, - 0xe51c79a85916f485, - 0x8f31cc0937ae58d3, - 0xb2fe3f0b8599ef08, - 0xdfbdcece67006ac9, - 0x8bd6a141006042be, - 0xaecc49914078536d, - 0xda7f5bf590966849, - 0x888f99797a5e012d, - 0xaab37fd7d8f58179, - 0xd5605fcdcf32e1d7, - 0x855c3be0a17fcd26, - 0xa6b34ad8c9dfc070, - 0xd0601d8efc57b08c, - 0x823c12795db6ce57, - 0xa2cb1717b52481ed, - 0xcb7ddcdda26da269, - 0xfe5d54150b090b03, - 0x9efa548d26e5a6e2, - 0xc6b8e9b0709f109a, - 0xf867241c8cc6d4c1, - 0x9b407691d7fc44f8, - 0xc21094364dfb5637, - 0xf294b943e17a2bc4, - 0x979cf3ca6cec5b5b, - 0xbd8430bd08277231, - 0xece53cec4a314ebe, - 0x940f4613ae5ed137, - 0xb913179899f68584, - 0xe757dd7ec07426e5, - 0x9096ea6f3848984f, - 0xb4bca50b065abe63, - 0xe1ebce4dc7f16dfc, - 0x8d3360f09cf6e4bd, - 0xb080392cc4349ded, - 0xdca04777f541c568, - 0x89e42caaf9491b61, - 0xac5d37d5b79b6239, - 0xd77485cb25823ac7, - 0x86a8d39ef77164bd, - 0xa8530886b54dbdec, - 0xd267caa862a12d67, - 0x8380dea93da4bc60, - 0xa46116538d0deb78, - 0xcd795be870516656, - 0x806bd9714632dff6, - 0xa086cfcd97bf97f4, - 0xc8a883c0fdaf7df0, - 0xfad2a4b13d1b5d6c, - 0x9cc3a6eec6311a64, - 0xc3f490aa77bd60fd, - 0xf4f1b4d515acb93c, - 0x991711052d8bf3c5, - 0xbf5cd54678eef0b7, - 0xef340a98172aace5, - 0x9580869f0e7aac0f, - 0xbae0a846d2195713, - 0xe998d258869facd7, - 0x91ff83775423cc06, - 0xb67f6455292cbf08, - 0xe41f3d6a7377eeca, - 0x8e938662882af53e, - 0xb23867fb2a35b28e, - 0xdec681f9f4c31f31, - 0x8b3c113c38f9f37f, - 0xae0b158b4738705f, - 0xd98ddaee19068c76, - 0x87f8a8d4cfa417ca, - 0xa9f6d30a038d1dbc, - 0xd47487cc8470652b, - 0x84c8d4dfd2c63f3b, - 0xa5fb0a17c777cf0a, - 0xcf79cc9db955c2cc, - 0x81ac1fe293d599c0, - 0xa21727db38cb0030, - 0xca9cf1d206fdc03c, - 0xfd442e4688bd304b, - 0x9e4a9cec15763e2f, - 0xc5dd44271ad3cdba, - 0xf7549530e188c129, - 0x9a94dd3e8cf578ba, - 0xc13a148e3032d6e8, - 0xf18899b1bc3f8ca2, - 0x96f5600f15a7b7e5, 
- 0xbcb2b812db11a5de, - 0xebdf661791d60f56, - 0x936b9fcebb25c996, - 0xb84687c269ef3bfb, - 0xe65829b3046b0afa, - 0x8ff71a0fe2c2e6dc, - 0xb3f4e093db73a093, - 0xe0f218b8d25088b8, - 0x8c974f7383725573, - 0xafbd2350644eead0, - 0xdbac6c247d62a584, - 0x894bc396ce5da772, - 0xab9eb47c81f5114f, - 0xd686619ba27255a3, - 0x8613fd0145877586, - 0xa798fc4196e952e7, - 0xd17f3b51fca3a7a1, - 0x82ef85133de648c5, - 0xa3ab66580d5fdaf6, - 0xcc963fee10b7d1b3, - 0xffbbcfe994e5c620, - 0x9fd561f1fd0f9bd4, - 0xc7caba6e7c5382c9, - 0xf9bd690a1b68637b, - 0x9c1661a651213e2d, - 0xc31bfa0fe5698db8, - 0xf3e2f893dec3f126, - 0x986ddb5c6b3a76b8, - 0xbe89523386091466, - 0xee2ba6c0678b597f, - 0x94db483840b717f0, - 0xba121a4650e4ddec, - 0xe896a0d7e51e1566, - 0x915e2486ef32cd60, - 0xb5b5ada8aaff80b8, - 0xe3231912d5bf60e6, - 0x8df5efabc5979c90, - 0xb1736b96b6fd83b4, - 0xddd0467c64bce4a1, - 0x8aa22c0dbef60ee4, - 0xad4ab7112eb3929e, - 0xd89d64d57a607745, - 0x87625f056c7c4a8b, - 0xa93af6c6c79b5d2e, - 0xd389b47879823479, - 0x843610cb4bf160cc, - 0xa54394fe1eedb8ff, - 0xce947a3da6a9273e, - 0x811ccc668829b887, - 0xa163ff802a3426a9, - 0xc9bcff6034c13053, - 0xfc2c3f3841f17c68, - 0x9d9ba7832936edc1, - 0xc5029163f384a931, - 0xf64335bcf065d37d, - 0x99ea0196163fa42e, - 0xc06481fb9bcf8d3a, - 0xf07da27a82c37088, - 0x964e858c91ba2655, - 0xbbe226efb628afeb, - 0xeadab0aba3b2dbe5, - 0x92c8ae6b464fc96f, - 0xb77ada0617e3bbcb, - 0xe55990879ddcaabe, - 0x8f57fa54c2a9eab7, - 0xb32df8e9f3546564, - 0xdff9772470297ebd, - 0x8bfbea76c619ef36, - 0xaefae51477a06b04, - 0xdab99e59958885c5, - 0x88b402f7fd75539b, - 0xaae103b5fcd2a882, - 0xd59944a37c0752a2, - 0x857fcae62d8493a5, - 0xa6dfbd9fb8e5b88f, - 0xd097ad07a71f26b2, - 0x825ecc24c8737830, - 0xa2f67f2dfa90563b, - 0xcbb41ef979346bca, - 0xfea126b7d78186bd, - 0x9f24b832e6b0f436, - 0xc6ede63fa05d3144, - 0xf8a95fcf88747d94, - 0x9b69dbe1b548ce7d, - 0xc24452da229b021c, - 0xf2d56790ab41c2a3, - 0x97c560ba6b0919a6, - 0xbdb6b8e905cb600f, - 0xed246723473e3813, - 0x9436c0760c86e30c, - 0xb94470938fa89bcf, - 0xe7958cb87392c2c3, - 0x90bd77f3483bb9ba, - 0xb4ecd5f01a4aa828, - 0xe2280b6c20dd5232, - 0x8d590723948a535f, - 0xb0af48ec79ace837, - 0xdcdb1b2798182245, - 0x8a08f0f8bf0f156b, - 0xac8b2d36eed2dac6, - 0xd7adf884aa879177, - 0x86ccbb52ea94baeb, - 0xa87fea27a539e9a5, - 0xd29fe4b18e88640f, - 0x83a3eeeef9153e89, - 0xa48ceaaab75a8e2b, - 0xcdb02555653131b6, - 0x808e17555f3ebf12, - 0xa0b19d2ab70e6ed6, - 0xc8de047564d20a8c, - 0xfb158592be068d2f, - 0x9ced737bb6c4183d, - 0xc428d05aa4751e4d, - 0xf53304714d9265e0, - 0x993fe2c6d07b7fac, - 0xbf8fdb78849a5f97, - 0xef73d256a5c0f77d, - 0x95a8637627989aae, - 0xbb127c53b17ec159, - 0xe9d71b689dde71b0, - 0x9226712162ab070e, - 0xb6b00d69bb55c8d1, - 0xe45c10c42a2b3b06, - 0x8eb98a7a9a5b04e3, - 0xb267ed1940f1c61c, - 0xdf01e85f912e37a3, - 0x8b61313bbabce2c6, - 0xae397d8aa96c1b78, - 0xd9c7dced53c72256, - 0x881cea14545c7575, - 0xaa242499697392d3, - 0xd4ad2dbfc3d07788, - 0x84ec3c97da624ab5, - 0xa6274bbdd0fadd62, - 0xcfb11ead453994ba, - 0x81ceb32c4b43fcf5, - 0xa2425ff75e14fc32, - 0xcad2f7f5359a3b3e, - 0xfd87b5f28300ca0e, - 0x9e74d1b791e07e48, - 0xc612062576589ddb, - 0xf79687aed3eec551, - 0x9abe14cd44753b53, - 0xc16d9a0095928a27, - 0xf1c90080baf72cb1, - 0x971da05074da7bef, - 0xbce5086492111aeb, - 0xec1e4a7db69561a5, - 0x9392ee8e921d5d07, - 0xb877aa3236a4b449, - 0xe69594bec44de15b, - 0x901d7cf73ab0acd9, - 0xb424dc35095cd80f, - 0xe12e13424bb40e13, - 0x8cbccc096f5088cc, - 0xafebff0bcb24aaff, - 0xdbe6fecebdedd5bf, - 0x89705f4136b4a597, - 0xabcc77118461cefd, - 0xd6bf94d5e57a42bc, - 0x8637bd05af6c69b6, - 
0xa7c5ac471b478423, - 0xd1b71758e219652c, - 0x83126e978d4fdf3b, - 0xa3d70a3d70a3d70a, - 0xcccccccccccccccd, - 0x8000000000000000, - 0xa000000000000000, - 0xc800000000000000, - 0xfa00000000000000, - 0x9c40000000000000, - 0xc350000000000000, - 0xf424000000000000, - 0x9896800000000000, - 0xbebc200000000000, - 0xee6b280000000000, - 0x9502f90000000000, - 0xba43b74000000000, - 0xe8d4a51000000000, - 0x9184e72a00000000, - 0xb5e620f480000000, - 0xe35fa931a0000000, - 0x8e1bc9bf04000000, - 0xb1a2bc2ec5000000, - 0xde0b6b3a76400000, - 0x8ac7230489e80000, - 0xad78ebc5ac620000, - 0xd8d726b7177a8000, - 0x878678326eac9000, - 0xa968163f0a57b400, - 0xd3c21bcecceda100, - 0x84595161401484a0, - 0xa56fa5b99019a5c8, - 0xcecb8f27f4200f3a, - 0x813f3978f8940984, - 0xa18f07d736b90be5, - 0xc9f2c9cd04674edf, - 0xfc6f7c4045812296, - 0x9dc5ada82b70b59e, - 0xc5371912364ce305, - 0xf684df56c3e01bc7, - 0x9a130b963a6c115c, - 0xc097ce7bc90715b3, - 0xf0bdc21abb48db20, - 0x96769950b50d88f4, - 0xbc143fa4e250eb31, - 0xeb194f8e1ae525fd, - 0x92efd1b8d0cf37be, - 0xb7abc627050305ae, - 0xe596b7b0c643c719, - 0x8f7e32ce7bea5c70, - 0xb35dbf821ae4f38c, - 0xe0352f62a19e306f, - 0x8c213d9da502de45, - 0xaf298d050e4395d7, - 0xdaf3f04651d47b4c, - 0x88d8762bf324cd10, - 0xab0e93b6efee0054, - 0xd5d238a4abe98068, - 0x85a36366eb71f041, - 0xa70c3c40a64e6c52, - 0xd0cf4b50cfe20766, - 0x82818f1281ed44a0, - 0xa321f2d7226895c8, - 0xcbea6f8ceb02bb3a, - 0xfee50b7025c36a08, - 0x9f4f2726179a2245, - 0xc722f0ef9d80aad6, - 0xf8ebad2b84e0d58c, - 0x9b934c3b330c8577, - 0xc2781f49ffcfa6d5, - 0xf316271c7fc3908b, - 0x97edd871cfda3a57, - 0xbde94e8e43d0c8ec, - 0xed63a231d4c4fb27, - 0x945e455f24fb1cf9, - 0xb975d6b6ee39e437, - 0xe7d34c64a9c85d44, - 0x90e40fbeea1d3a4b, - 0xb51d13aea4a488dd, - 0xe264589a4dcdab15, - 0x8d7eb76070a08aed, - 0xb0de65388cc8ada8, - 0xdd15fe86affad912, - 0x8a2dbf142dfcc7ab, - 0xacb92ed9397bf996, - 0xd7e77a8f87daf7fc, - 0x86f0ac99b4e8dafd, - 0xa8acd7c0222311bd, - 0xd2d80db02aabd62c, - 0x83c7088e1aab65db, - 0xa4b8cab1a1563f52, - 0xcde6fd5e09abcf27, - 0x80b05e5ac60b6178, - 0xa0dc75f1778e39d6, - 0xc913936dd571c84c, - 0xfb5878494ace3a5f, - 0x9d174b2dcec0e47b, - 0xc45d1df942711d9a, - 0xf5746577930d6501, - 0x9968bf6abbe85f20, - 0xbfc2ef456ae276e9, - 0xefb3ab16c59b14a3, - 0x95d04aee3b80ece6, - 0xbb445da9ca61281f, - 0xea1575143cf97227, - 0x924d692ca61be758, - 0xb6e0c377cfa2e12e, - 0xe498f455c38b997a, - 0x8edf98b59a373fec, - 0xb2977ee300c50fe7, - 0xdf3d5e9bc0f653e1, - 0x8b865b215899f46d, - 0xae67f1e9aec07188, - 0xda01ee641a708dea, - 0x884134fe908658b2, - 0xaa51823e34a7eedf, - 0xd4e5e2cdc1d1ea96, - 0x850fadc09923329e, - 0xa6539930bf6bff46, - 0xcfe87f7cef46ff17, - 0x81f14fae158c5f6e, - 0xa26da3999aef774a, - 0xcb090c8001ab551c, - 0xfdcb4fa002162a63, - 0x9e9f11c4014dda7e, - 0xc646d63501a1511e, - 0xf7d88bc24209a565, - 0x9ae757596946075f, - 0xc1a12d2fc3978937, - 0xf209787bb47d6b85, - 0x9745eb4d50ce6333, - 0xbd176620a501fc00, - 0xec5d3fa8ce427b00, - 0x93ba47c980e98ce0, - 0xb8a8d9bbe123f018, - 0xe6d3102ad96cec1e, - 0x9043ea1ac7e41393, - 0xb454e4a179dd1877, - 0xe16a1dc9d8545e95, - 0x8ce2529e2734bb1d, - 0xb01ae745b101e9e4, - 0xdc21a1171d42645d, - 0x899504ae72497eba, - 0xabfa45da0edbde69, - 0xd6f8d7509292d603, - 0x865b86925b9bc5c2, - 0xa7f26836f282b733, - 0xd1ef0244af2364ff, - 0x8335616aed761f1f, - 0xa402b9c5a8d3a6e7, - 0xcd036837130890a1, - 0x802221226be55a65, - 0xa02aa96b06deb0fe, - 0xc83553c5c8965d3d, - 0xfa42a8b73abbf48d, - 0x9c69a97284b578d8, - 0xc38413cf25e2d70e, - 0xf46518c2ef5b8cd1, - 0x98bf2f79d5993803, - 0xbeeefb584aff8604, - 0xeeaaba2e5dbf6785, - 
0x952ab45cfa97a0b3, - 0xba756174393d88e0, - 0xe912b9d1478ceb17, - 0x91abb422ccb812ef, - 0xb616a12b7fe617aa, - 0xe39c49765fdf9d95, - 0x8e41ade9fbebc27d, - 0xb1d219647ae6b31c, - 0xde469fbd99a05fe3, - 0x8aec23d680043bee, - 0xada72ccc20054aea, - 0xd910f7ff28069da4, - 0x87aa9aff79042287, - 0xa99541bf57452b28, - 0xd3fa922f2d1675f2, - 0x847c9b5d7c2e09b7, - 0xa59bc234db398c25, - 0xcf02b2c21207ef2f, - 0x8161afb94b44f57d, - 0xa1ba1ba79e1632dc, - 0xca28a291859bbf93, - 0xfcb2cb35e702af78, - 0x9defbf01b061adab, - 0xc56baec21c7a1916, - 0xf6c69a72a3989f5c, - 0x9a3c2087a63f6399, - 0xc0cb28a98fcf3c80, - 0xf0fdf2d3f3c30b9f, - 0x969eb7c47859e744, - 0xbc4665b596706115, - 0xeb57ff22fc0c795a, - 0x9316ff75dd87cbd8, - 0xb7dcbf5354e9bece, - 0xe5d3ef282a242e82, - 0x8fa475791a569d11, - 0xb38d92d760ec4455, - 0xe070f78d3927556b, - 0x8c469ab843b89563, - 0xaf58416654a6babb, - 0xdb2e51bfe9d0696a, - 0x88fcf317f22241e2, - 0xab3c2fddeeaad25b, - 0xd60b3bd56a5586f2, - 0x85c7056562757457, - 0xa738c6bebb12d16d, - 0xd106f86e69d785c8, - 0x82a45b450226b39d, - 0xa34d721642b06084, - 0xcc20ce9bd35c78a5, - 0xff290242c83396ce, - 0x9f79a169bd203e41, - 0xc75809c42c684dd1, - 0xf92e0c3537826146, - 0x9bbcc7a142b17ccc, - 0xc2abf989935ddbfe, - 0xf356f7ebf83552fe, - 0x98165af37b2153df, - 0xbe1bf1b059e9a8d6, - 0xeda2ee1c7064130c, - 0x9485d4d1c63e8be8, - 0xb9a74a0637ce2ee1, - 0xe8111c87c5c1ba9a, - 0x910ab1d4db9914a0, - 0xb54d5e4a127f59c8, - 0xe2a0b5dc971f303a, - 0x8da471a9de737e24, - 0xb10d8e1456105dad, - 0xdd50f1996b947519, - 0x8a5296ffe33cc930, - 0xace73cbfdc0bfb7b, - 0xd8210befd30efa5a, - 0x8714a775e3e95c78, - 0xa8d9d1535ce3b396, - 0xd31045a8341ca07c, - 0x83ea2b892091e44e, - 0xa4e4b66b68b65d61, - 0xce1de40642e3f4b9, - 0x80d2ae83e9ce78f4, - 0xa1075a24e4421731, - 0xc94930ae1d529cfd, - 0xfb9b7cd9a4a7443c, - 0x9d412e0806e88aa6, - 0xc491798a08a2ad4f, - 0xf5b5d7ec8acb58a3, - 0x9991a6f3d6bf1766, - 0xbff610b0cc6edd3f, - 0xeff394dcff8a948f, - 0x95f83d0a1fb69cd9, - 0xbb764c4ca7a44410, - 0xea53df5fd18d5514, - 0x92746b9be2f8552c, - 0xb7118682dbb66a77, - 0xe4d5e82392a40515, - 0x8f05b1163ba6832d, - 0xb2c71d5bca9023f8, - 0xdf78e4b2bd342cf7, - 0x8bab8eefb6409c1a, - 0xae9672aba3d0c321, - 0xda3c0f568cc4f3e9, - 0x8865899617fb1871, - 0xaa7eebfb9df9de8e, - 0xd51ea6fa85785631, - 0x8533285c936b35df, - 0xa67ff273b8460357, - 0xd01fef10a657842c, - 0x8213f56a67f6b29c, - 0xa298f2c501f45f43, - 0xcb3f2f7642717713, - 0xfe0efb53d30dd4d8, - 0x9ec95d1463e8a507, - 0xc67bb4597ce2ce49, - 0xf81aa16fdc1b81db, - 0x9b10a4e5e9913129, - 0xc1d4ce1f63f57d73, - 0xf24a01a73cf2dcd0, - 0x976e41088617ca02, - 0xbd49d14aa79dbc82, - 0xec9c459d51852ba3, - 0x93e1ab8252f33b46, - 0xb8da1662e7b00a17, - 0xe7109bfba19c0c9d, - 0x906a617d450187e2, - 0xb484f9dc9641e9db, - 0xe1a63853bbd26451, - 0x8d07e33455637eb3, - 0xb049dc016abc5e60, - 0xdc5c5301c56b75f7, - 0x89b9b3e11b6329bb, - 0xac2820d9623bf429, - 0xd732290fbacaf134, - 0x867f59a9d4bed6c0, - 0xa81f301449ee8c70, - 0xd226fc195c6a2f8c, - 0x83585d8fd9c25db8, - 0xa42e74f3d032f526, - 0xcd3a1230c43fb26f, - 0x80444b5e7aa7cf85, - 0xa0555e361951c367, - 0xc86ab5c39fa63441, - 0xfa856334878fc151, - 0x9c935e00d4b9d8d2, - 0xc3b8358109e84f07, - 0xf4a642e14c6262c9, - 0x98e7e9cccfbd7dbe, - 0xbf21e44003acdd2d, - 0xeeea5d5004981478, - 0x95527a5202df0ccb, - 0xbaa718e68396cffe, - 0xe950df20247c83fd, - 0x91d28b7416cdd27e, -], [ - -1077, - -1073, - -1070, - -1067, - -1063, - -1060, - -1057, - -1053, - -1050, - -1047, - -1043, - -1040, - -1037, - -1034, - -1030, - -1027, - -1024, - -1020, - -1017, - -1014, - -1010, - -1007, - -1004, - -1000, - -997, - -994, - -990, - -987, - 
-984, - -980, - -977, - -974, - -970, - -967, - -964, - -960, - -957, - -954, - -950, - -947, - -944, - -940, - -937, - -934, - -931, - -927, - -924, - -921, - -917, - -914, - -911, - -907, - -904, - -901, - -897, - -894, - -891, - -887, - -884, - -881, - -877, - -874, - -871, - -867, - -864, - -861, - -857, - -854, - -851, - -847, - -844, - -841, - -838, - -834, - -831, - -828, - -824, - -821, - -818, - -814, - -811, - -808, - -804, - -801, - -798, - -794, - -791, - -788, - -784, - -781, - -778, - -774, - -771, - -768, - -764, - -761, - -758, - -754, - -751, - -748, - -744, - -741, - -738, - -735, - -731, - -728, - -725, - -721, - -718, - -715, - -711, - -708, - -705, - -701, - -698, - -695, - -691, - -688, - -685, - -681, - -678, - -675, - -671, - -668, - -665, - -661, - -658, - -655, - -651, - -648, - -645, - -642, - -638, - -635, - -632, - -628, - -625, - -622, - -618, - -615, - -612, - -608, - -605, - -602, - -598, - -595, - -592, - -588, - -585, - -582, - -578, - -575, - -572, - -568, - -565, - -562, - -558, - -555, - -552, - -549, - -545, - -542, - -539, - -535, - -532, - -529, - -525, - -522, - -519, - -515, - -512, - -509, - -505, - -502, - -499, - -495, - -492, - -489, - -485, - -482, - -479, - -475, - -472, - -469, - -465, - -462, - -459, - -455, - -452, - -449, - -446, - -442, - -439, - -436, - -432, - -429, - -426, - -422, - -419, - -416, - -412, - -409, - -406, - -402, - -399, - -396, - -392, - -389, - -386, - -382, - -379, - -376, - -372, - -369, - -366, - -362, - -359, - -356, - -353, - -349, - -346, - -343, - -339, - -336, - -333, - -329, - -326, - -323, - -319, - -316, - -313, - -309, - -306, - -303, - -299, - -296, - -293, - -289, - -286, - -283, - -279, - -276, - -273, - -269, - -266, - -263, - -259, - -256, - -253, - -250, - -246, - -243, - -240, - -236, - -233, - -230, - -226, - -223, - -220, - -216, - -213, - -210, - -206, - -203, - -200, - -196, - -193, - -190, - -186, - -183, - -180, - -176, - -173, - -170, - -166, - -163, - -160, - -157, - -153, - -150, - -147, - -143, - -140, - -137, - -133, - -130, - -127, - -123, - -120, - -117, - -113, - -110, - -107, - -103, - -100, - -97, - -93, - -90, - -87, - -83, - -80, - -77, - -73, - -70, - -67, - -63, - -60, - -57, - -54, - -50, - -47, - -44, - -40, - -37, - -34, - -30, - -27, - -24, - -20, - -17, - -14, - -10, - -7, - -4, - 0, - 3, - 6, - 10, - 13, - 16, - 20, - 23, - 26, - 30, - 33, - 36, - 39, - 43, - 46, - 49, - 53, - 56, - 59, - 63, - 66, - 69, - 73, - 76, - 79, - 83, - 86, - 89, - 93, - 96, - 99, - 103, - 106, - 109, - 113, - 116, - 119, - 123, - 126, - 129, - 132, - 136, - 139, - 142, - 146, - 149, - 152, - 156, - 159, - 162, - 166, - 169, - 172, - 176, - 179, - 182, - 186, - 189, - 192, - 196, - 199, - 202, - 206, - 209, - 212, - 216, - 219, - 222, - 226, - 229, - 232, - 235, - 239, - 242, - 245, - 249, - 252, - 255, - 259, - 262, - 265, - 269, - 272, - 275, - 279, - 282, - 285, - 289, - 292, - 295, - 299, - 302, - 305, - 309, - 312, - 315, - 319, - 322, - 325, - 328, - 332, - 335, - 338, - 342, - 345, - 348, - 352, - 355, - 358, - 362, - 365, - 368, - 372, - 375, - 378, - 382, - 385, - 388, - 392, - 395, - 398, - 402, - 405, - 408, - 412, - 415, - 418, - 422, - 425, - 428, - 431, - 435, - 438, - 441, - 445, - 448, - 451, - 455, - 458, - 461, - 465, - 468, - 471, - 475, - 478, - 481, - 485, - 488, - 491, - 495, - 498, - 501, - 505, - 508, - 511, - 515, - 518, - 521, - 524, - 528, - 531, - 534, - 538, - 541, - 544, - 548, - 551, - 554, - 558, - 561, - 564, - 568, - 571, - 574, - 578, - 581, - 584, - 588, - 591, 
- 594, - 598, - 601, - 604, - 608, - 611, - 614, - 617, - 621, - 624, - 627, - 631, - 634, - 637, - 641, - 644, - 647, - 651, - 654, - 657, - 661, - 664, - 667, - 671, - 674, - 677, - 681, - 684, - 687, - 691, - 694, - 697, - 701, - 704, - 707, - 711, - 714, - 717, - 720, - 724, - 727, - 730, - 734, - 737, - 740, - 744, - 747, - 750, - 754, - 757, - 760, - 764, - 767, - 770, - 774, - 777, - 780, - 784, - 787, - 790, - 794, - 797, - 800, - 804, - 807, - 810, - 813, - 817, - 820, - 823, - 827, - 830, - 833, - 837, - 840, - 843, - 847, - 850, - 853, - 857, - 860, - 863, - 867, - 870, - 873, - 877, - 880, - 883, - 887, - 890, - 893, - 897, - 900, - 903, - 907, - 910, - 913, - 916, - 920, - 923, - 926, - 930, - 933, - 936, - 940, - 943, - 946, - 950, -]); +pub const POWERS: ([u64; 611], [i16; 611]) = ( + [ + 0xe0b62e2929aba83c, + 0x8c71dcd9ba0b4926, + 0xaf8e5410288e1b6f, + 0xdb71e91432b1a24b, + 0x892731ac9faf056f, + 0xab70fe17c79ac6ca, + 0xd64d3d9db981787d, + 0x85f0468293f0eb4e, + 0xa76c582338ed2622, + 0xd1476e2c07286faa, + 0x82cca4db847945ca, + 0xa37fce126597973d, + 0xcc5fc196fefd7d0c, + 0xff77b1fcbebcdc4f, + 0x9faacf3df73609b1, + 0xc795830d75038c1e, + 0xf97ae3d0d2446f25, + 0x9becce62836ac577, + 0xc2e801fb244576d5, + 0xf3a20279ed56d48a, + 0x9845418c345644d7, + 0xbe5691ef416bd60c, + 0xedec366b11c6cb8f, + 0x94b3a202eb1c3f39, + 0xb9e08a83a5e34f08, + 0xe858ad248f5c22ca, + 0x91376c36d99995be, + 0xb58547448ffffb2e, + 0xe2e69915b3fff9f9, + 0x8dd01fad907ffc3c, + 0xb1442798f49ffb4b, + 0xdd95317f31c7fa1d, + 0x8a7d3eef7f1cfc52, + 0xad1c8eab5ee43b67, + 0xd863b256369d4a41, + 0x873e4f75e2224e68, + 0xa90de3535aaae202, + 0xd3515c2831559a83, + 0x8412d9991ed58092, + 0xa5178fff668ae0b6, + 0xce5d73ff402d98e4, + 0x80fa687f881c7f8e, + 0xa139029f6a239f72, + 0xc987434744ac874f, + 0xfbe9141915d7a922, + 0x9d71ac8fada6c9b5, + 0xc4ce17b399107c23, + 0xf6019da07f549b2b, + 0x99c102844f94e0fb, + 0xc0314325637a193a, + 0xf03d93eebc589f88, + 0x96267c7535b763b5, + 0xbbb01b9283253ca3, + 0xea9c227723ee8bcb, + 0x92a1958a7675175f, + 0xb749faed14125d37, + 0xe51c79a85916f485, + 0x8f31cc0937ae58d3, + 0xb2fe3f0b8599ef08, + 0xdfbdcece67006ac9, + 0x8bd6a141006042be, + 0xaecc49914078536d, + 0xda7f5bf590966849, + 0x888f99797a5e012d, + 0xaab37fd7d8f58179, + 0xd5605fcdcf32e1d7, + 0x855c3be0a17fcd26, + 0xa6b34ad8c9dfc070, + 0xd0601d8efc57b08c, + 0x823c12795db6ce57, + 0xa2cb1717b52481ed, + 0xcb7ddcdda26da269, + 0xfe5d54150b090b03, + 0x9efa548d26e5a6e2, + 0xc6b8e9b0709f109a, + 0xf867241c8cc6d4c1, + 0x9b407691d7fc44f8, + 0xc21094364dfb5637, + 0xf294b943e17a2bc4, + 0x979cf3ca6cec5b5b, + 0xbd8430bd08277231, + 0xece53cec4a314ebe, + 0x940f4613ae5ed137, + 0xb913179899f68584, + 0xe757dd7ec07426e5, + 0x9096ea6f3848984f, + 0xb4bca50b065abe63, + 0xe1ebce4dc7f16dfc, + 0x8d3360f09cf6e4bd, + 0xb080392cc4349ded, + 0xdca04777f541c568, + 0x89e42caaf9491b61, + 0xac5d37d5b79b6239, + 0xd77485cb25823ac7, + 0x86a8d39ef77164bd, + 0xa8530886b54dbdec, + 0xd267caa862a12d67, + 0x8380dea93da4bc60, + 0xa46116538d0deb78, + 0xcd795be870516656, + 0x806bd9714632dff6, + 0xa086cfcd97bf97f4, + 0xc8a883c0fdaf7df0, + 0xfad2a4b13d1b5d6c, + 0x9cc3a6eec6311a64, + 0xc3f490aa77bd60fd, + 0xf4f1b4d515acb93c, + 0x991711052d8bf3c5, + 0xbf5cd54678eef0b7, + 0xef340a98172aace5, + 0x9580869f0e7aac0f, + 0xbae0a846d2195713, + 0xe998d258869facd7, + 0x91ff83775423cc06, + 0xb67f6455292cbf08, + 0xe41f3d6a7377eeca, + 0x8e938662882af53e, + 0xb23867fb2a35b28e, + 0xdec681f9f4c31f31, + 0x8b3c113c38f9f37f, + 0xae0b158b4738705f, + 0xd98ddaee19068c76, + 0x87f8a8d4cfa417ca, + 0xa9f6d30a038d1dbc, + 
0xd47487cc8470652b, + 0x84c8d4dfd2c63f3b, + 0xa5fb0a17c777cf0a, + 0xcf79cc9db955c2cc, + 0x81ac1fe293d599c0, + 0xa21727db38cb0030, + 0xca9cf1d206fdc03c, + 0xfd442e4688bd304b, + 0x9e4a9cec15763e2f, + 0xc5dd44271ad3cdba, + 0xf7549530e188c129, + 0x9a94dd3e8cf578ba, + 0xc13a148e3032d6e8, + 0xf18899b1bc3f8ca2, + 0x96f5600f15a7b7e5, + 0xbcb2b812db11a5de, + 0xebdf661791d60f56, + 0x936b9fcebb25c996, + 0xb84687c269ef3bfb, + 0xe65829b3046b0afa, + 0x8ff71a0fe2c2e6dc, + 0xb3f4e093db73a093, + 0xe0f218b8d25088b8, + 0x8c974f7383725573, + 0xafbd2350644eead0, + 0xdbac6c247d62a584, + 0x894bc396ce5da772, + 0xab9eb47c81f5114f, + 0xd686619ba27255a3, + 0x8613fd0145877586, + 0xa798fc4196e952e7, + 0xd17f3b51fca3a7a1, + 0x82ef85133de648c5, + 0xa3ab66580d5fdaf6, + 0xcc963fee10b7d1b3, + 0xffbbcfe994e5c620, + 0x9fd561f1fd0f9bd4, + 0xc7caba6e7c5382c9, + 0xf9bd690a1b68637b, + 0x9c1661a651213e2d, + 0xc31bfa0fe5698db8, + 0xf3e2f893dec3f126, + 0x986ddb5c6b3a76b8, + 0xbe89523386091466, + 0xee2ba6c0678b597f, + 0x94db483840b717f0, + 0xba121a4650e4ddec, + 0xe896a0d7e51e1566, + 0x915e2486ef32cd60, + 0xb5b5ada8aaff80b8, + 0xe3231912d5bf60e6, + 0x8df5efabc5979c90, + 0xb1736b96b6fd83b4, + 0xddd0467c64bce4a1, + 0x8aa22c0dbef60ee4, + 0xad4ab7112eb3929e, + 0xd89d64d57a607745, + 0x87625f056c7c4a8b, + 0xa93af6c6c79b5d2e, + 0xd389b47879823479, + 0x843610cb4bf160cc, + 0xa54394fe1eedb8ff, + 0xce947a3da6a9273e, + 0x811ccc668829b887, + 0xa163ff802a3426a9, + 0xc9bcff6034c13053, + 0xfc2c3f3841f17c68, + 0x9d9ba7832936edc1, + 0xc5029163f384a931, + 0xf64335bcf065d37d, + 0x99ea0196163fa42e, + 0xc06481fb9bcf8d3a, + 0xf07da27a82c37088, + 0x964e858c91ba2655, + 0xbbe226efb628afeb, + 0xeadab0aba3b2dbe5, + 0x92c8ae6b464fc96f, + 0xb77ada0617e3bbcb, + 0xe55990879ddcaabe, + 0x8f57fa54c2a9eab7, + 0xb32df8e9f3546564, + 0xdff9772470297ebd, + 0x8bfbea76c619ef36, + 0xaefae51477a06b04, + 0xdab99e59958885c5, + 0x88b402f7fd75539b, + 0xaae103b5fcd2a882, + 0xd59944a37c0752a2, + 0x857fcae62d8493a5, + 0xa6dfbd9fb8e5b88f, + 0xd097ad07a71f26b2, + 0x825ecc24c8737830, + 0xa2f67f2dfa90563b, + 0xcbb41ef979346bca, + 0xfea126b7d78186bd, + 0x9f24b832e6b0f436, + 0xc6ede63fa05d3144, + 0xf8a95fcf88747d94, + 0x9b69dbe1b548ce7d, + 0xc24452da229b021c, + 0xf2d56790ab41c2a3, + 0x97c560ba6b0919a6, + 0xbdb6b8e905cb600f, + 0xed246723473e3813, + 0x9436c0760c86e30c, + 0xb94470938fa89bcf, + 0xe7958cb87392c2c3, + 0x90bd77f3483bb9ba, + 0xb4ecd5f01a4aa828, + 0xe2280b6c20dd5232, + 0x8d590723948a535f, + 0xb0af48ec79ace837, + 0xdcdb1b2798182245, + 0x8a08f0f8bf0f156b, + 0xac8b2d36eed2dac6, + 0xd7adf884aa879177, + 0x86ccbb52ea94baeb, + 0xa87fea27a539e9a5, + 0xd29fe4b18e88640f, + 0x83a3eeeef9153e89, + 0xa48ceaaab75a8e2b, + 0xcdb02555653131b6, + 0x808e17555f3ebf12, + 0xa0b19d2ab70e6ed6, + 0xc8de047564d20a8c, + 0xfb158592be068d2f, + 0x9ced737bb6c4183d, + 0xc428d05aa4751e4d, + 0xf53304714d9265e0, + 0x993fe2c6d07b7fac, + 0xbf8fdb78849a5f97, + 0xef73d256a5c0f77d, + 0x95a8637627989aae, + 0xbb127c53b17ec159, + 0xe9d71b689dde71b0, + 0x9226712162ab070e, + 0xb6b00d69bb55c8d1, + 0xe45c10c42a2b3b06, + 0x8eb98a7a9a5b04e3, + 0xb267ed1940f1c61c, + 0xdf01e85f912e37a3, + 0x8b61313bbabce2c6, + 0xae397d8aa96c1b78, + 0xd9c7dced53c72256, + 0x881cea14545c7575, + 0xaa242499697392d3, + 0xd4ad2dbfc3d07788, + 0x84ec3c97da624ab5, + 0xa6274bbdd0fadd62, + 0xcfb11ead453994ba, + 0x81ceb32c4b43fcf5, + 0xa2425ff75e14fc32, + 0xcad2f7f5359a3b3e, + 0xfd87b5f28300ca0e, + 0x9e74d1b791e07e48, + 0xc612062576589ddb, + 0xf79687aed3eec551, + 0x9abe14cd44753b53, + 0xc16d9a0095928a27, + 0xf1c90080baf72cb1, + 0x971da05074da7bef, + 
0xbce5086492111aeb, + 0xec1e4a7db69561a5, + 0x9392ee8e921d5d07, + 0xb877aa3236a4b449, + 0xe69594bec44de15b, + 0x901d7cf73ab0acd9, + 0xb424dc35095cd80f, + 0xe12e13424bb40e13, + 0x8cbccc096f5088cc, + 0xafebff0bcb24aaff, + 0xdbe6fecebdedd5bf, + 0x89705f4136b4a597, + 0xabcc77118461cefd, + 0xd6bf94d5e57a42bc, + 0x8637bd05af6c69b6, + 0xa7c5ac471b478423, + 0xd1b71758e219652c, + 0x83126e978d4fdf3b, + 0xa3d70a3d70a3d70a, + 0xcccccccccccccccd, + 0x8000000000000000, + 0xa000000000000000, + 0xc800000000000000, + 0xfa00000000000000, + 0x9c40000000000000, + 0xc350000000000000, + 0xf424000000000000, + 0x9896800000000000, + 0xbebc200000000000, + 0xee6b280000000000, + 0x9502f90000000000, + 0xba43b74000000000, + 0xe8d4a51000000000, + 0x9184e72a00000000, + 0xb5e620f480000000, + 0xe35fa931a0000000, + 0x8e1bc9bf04000000, + 0xb1a2bc2ec5000000, + 0xde0b6b3a76400000, + 0x8ac7230489e80000, + 0xad78ebc5ac620000, + 0xd8d726b7177a8000, + 0x878678326eac9000, + 0xa968163f0a57b400, + 0xd3c21bcecceda100, + 0x84595161401484a0, + 0xa56fa5b99019a5c8, + 0xcecb8f27f4200f3a, + 0x813f3978f8940984, + 0xa18f07d736b90be5, + 0xc9f2c9cd04674edf, + 0xfc6f7c4045812296, + 0x9dc5ada82b70b59e, + 0xc5371912364ce305, + 0xf684df56c3e01bc7, + 0x9a130b963a6c115c, + 0xc097ce7bc90715b3, + 0xf0bdc21abb48db20, + 0x96769950b50d88f4, + 0xbc143fa4e250eb31, + 0xeb194f8e1ae525fd, + 0x92efd1b8d0cf37be, + 0xb7abc627050305ae, + 0xe596b7b0c643c719, + 0x8f7e32ce7bea5c70, + 0xb35dbf821ae4f38c, + 0xe0352f62a19e306f, + 0x8c213d9da502de45, + 0xaf298d050e4395d7, + 0xdaf3f04651d47b4c, + 0x88d8762bf324cd10, + 0xab0e93b6efee0054, + 0xd5d238a4abe98068, + 0x85a36366eb71f041, + 0xa70c3c40a64e6c52, + 0xd0cf4b50cfe20766, + 0x82818f1281ed44a0, + 0xa321f2d7226895c8, + 0xcbea6f8ceb02bb3a, + 0xfee50b7025c36a08, + 0x9f4f2726179a2245, + 0xc722f0ef9d80aad6, + 0xf8ebad2b84e0d58c, + 0x9b934c3b330c8577, + 0xc2781f49ffcfa6d5, + 0xf316271c7fc3908b, + 0x97edd871cfda3a57, + 0xbde94e8e43d0c8ec, + 0xed63a231d4c4fb27, + 0x945e455f24fb1cf9, + 0xb975d6b6ee39e437, + 0xe7d34c64a9c85d44, + 0x90e40fbeea1d3a4b, + 0xb51d13aea4a488dd, + 0xe264589a4dcdab15, + 0x8d7eb76070a08aed, + 0xb0de65388cc8ada8, + 0xdd15fe86affad912, + 0x8a2dbf142dfcc7ab, + 0xacb92ed9397bf996, + 0xd7e77a8f87daf7fc, + 0x86f0ac99b4e8dafd, + 0xa8acd7c0222311bd, + 0xd2d80db02aabd62c, + 0x83c7088e1aab65db, + 0xa4b8cab1a1563f52, + 0xcde6fd5e09abcf27, + 0x80b05e5ac60b6178, + 0xa0dc75f1778e39d6, + 0xc913936dd571c84c, + 0xfb5878494ace3a5f, + 0x9d174b2dcec0e47b, + 0xc45d1df942711d9a, + 0xf5746577930d6501, + 0x9968bf6abbe85f20, + 0xbfc2ef456ae276e9, + 0xefb3ab16c59b14a3, + 0x95d04aee3b80ece6, + 0xbb445da9ca61281f, + 0xea1575143cf97227, + 0x924d692ca61be758, + 0xb6e0c377cfa2e12e, + 0xe498f455c38b997a, + 0x8edf98b59a373fec, + 0xb2977ee300c50fe7, + 0xdf3d5e9bc0f653e1, + 0x8b865b215899f46d, + 0xae67f1e9aec07188, + 0xda01ee641a708dea, + 0x884134fe908658b2, + 0xaa51823e34a7eedf, + 0xd4e5e2cdc1d1ea96, + 0x850fadc09923329e, + 0xa6539930bf6bff46, + 0xcfe87f7cef46ff17, + 0x81f14fae158c5f6e, + 0xa26da3999aef774a, + 0xcb090c8001ab551c, + 0xfdcb4fa002162a63, + 0x9e9f11c4014dda7e, + 0xc646d63501a1511e, + 0xf7d88bc24209a565, + 0x9ae757596946075f, + 0xc1a12d2fc3978937, + 0xf209787bb47d6b85, + 0x9745eb4d50ce6333, + 0xbd176620a501fc00, + 0xec5d3fa8ce427b00, + 0x93ba47c980e98ce0, + 0xb8a8d9bbe123f018, + 0xe6d3102ad96cec1e, + 0x9043ea1ac7e41393, + 0xb454e4a179dd1877, + 0xe16a1dc9d8545e95, + 0x8ce2529e2734bb1d, + 0xb01ae745b101e9e4, + 0xdc21a1171d42645d, + 0x899504ae72497eba, + 0xabfa45da0edbde69, + 0xd6f8d7509292d603, + 0x865b86925b9bc5c2, + 
0xa7f26836f282b733, + 0xd1ef0244af2364ff, + 0x8335616aed761f1f, + 0xa402b9c5a8d3a6e7, + 0xcd036837130890a1, + 0x802221226be55a65, + 0xa02aa96b06deb0fe, + 0xc83553c5c8965d3d, + 0xfa42a8b73abbf48d, + 0x9c69a97284b578d8, + 0xc38413cf25e2d70e, + 0xf46518c2ef5b8cd1, + 0x98bf2f79d5993803, + 0xbeeefb584aff8604, + 0xeeaaba2e5dbf6785, + 0x952ab45cfa97a0b3, + 0xba756174393d88e0, + 0xe912b9d1478ceb17, + 0x91abb422ccb812ef, + 0xb616a12b7fe617aa, + 0xe39c49765fdf9d95, + 0x8e41ade9fbebc27d, + 0xb1d219647ae6b31c, + 0xde469fbd99a05fe3, + 0x8aec23d680043bee, + 0xada72ccc20054aea, + 0xd910f7ff28069da4, + 0x87aa9aff79042287, + 0xa99541bf57452b28, + 0xd3fa922f2d1675f2, + 0x847c9b5d7c2e09b7, + 0xa59bc234db398c25, + 0xcf02b2c21207ef2f, + 0x8161afb94b44f57d, + 0xa1ba1ba79e1632dc, + 0xca28a291859bbf93, + 0xfcb2cb35e702af78, + 0x9defbf01b061adab, + 0xc56baec21c7a1916, + 0xf6c69a72a3989f5c, + 0x9a3c2087a63f6399, + 0xc0cb28a98fcf3c80, + 0xf0fdf2d3f3c30b9f, + 0x969eb7c47859e744, + 0xbc4665b596706115, + 0xeb57ff22fc0c795a, + 0x9316ff75dd87cbd8, + 0xb7dcbf5354e9bece, + 0xe5d3ef282a242e82, + 0x8fa475791a569d11, + 0xb38d92d760ec4455, + 0xe070f78d3927556b, + 0x8c469ab843b89563, + 0xaf58416654a6babb, + 0xdb2e51bfe9d0696a, + 0x88fcf317f22241e2, + 0xab3c2fddeeaad25b, + 0xd60b3bd56a5586f2, + 0x85c7056562757457, + 0xa738c6bebb12d16d, + 0xd106f86e69d785c8, + 0x82a45b450226b39d, + 0xa34d721642b06084, + 0xcc20ce9bd35c78a5, + 0xff290242c83396ce, + 0x9f79a169bd203e41, + 0xc75809c42c684dd1, + 0xf92e0c3537826146, + 0x9bbcc7a142b17ccc, + 0xc2abf989935ddbfe, + 0xf356f7ebf83552fe, + 0x98165af37b2153df, + 0xbe1bf1b059e9a8d6, + 0xeda2ee1c7064130c, + 0x9485d4d1c63e8be8, + 0xb9a74a0637ce2ee1, + 0xe8111c87c5c1ba9a, + 0x910ab1d4db9914a0, + 0xb54d5e4a127f59c8, + 0xe2a0b5dc971f303a, + 0x8da471a9de737e24, + 0xb10d8e1456105dad, + 0xdd50f1996b947519, + 0x8a5296ffe33cc930, + 0xace73cbfdc0bfb7b, + 0xd8210befd30efa5a, + 0x8714a775e3e95c78, + 0xa8d9d1535ce3b396, + 0xd31045a8341ca07c, + 0x83ea2b892091e44e, + 0xa4e4b66b68b65d61, + 0xce1de40642e3f4b9, + 0x80d2ae83e9ce78f4, + 0xa1075a24e4421731, + 0xc94930ae1d529cfd, + 0xfb9b7cd9a4a7443c, + 0x9d412e0806e88aa6, + 0xc491798a08a2ad4f, + 0xf5b5d7ec8acb58a3, + 0x9991a6f3d6bf1766, + 0xbff610b0cc6edd3f, + 0xeff394dcff8a948f, + 0x95f83d0a1fb69cd9, + 0xbb764c4ca7a44410, + 0xea53df5fd18d5514, + 0x92746b9be2f8552c, + 0xb7118682dbb66a77, + 0xe4d5e82392a40515, + 0x8f05b1163ba6832d, + 0xb2c71d5bca9023f8, + 0xdf78e4b2bd342cf7, + 0x8bab8eefb6409c1a, + 0xae9672aba3d0c321, + 0xda3c0f568cc4f3e9, + 0x8865899617fb1871, + 0xaa7eebfb9df9de8e, + 0xd51ea6fa85785631, + 0x8533285c936b35df, + 0xa67ff273b8460357, + 0xd01fef10a657842c, + 0x8213f56a67f6b29c, + 0xa298f2c501f45f43, + 0xcb3f2f7642717713, + 0xfe0efb53d30dd4d8, + 0x9ec95d1463e8a507, + 0xc67bb4597ce2ce49, + 0xf81aa16fdc1b81db, + 0x9b10a4e5e9913129, + 0xc1d4ce1f63f57d73, + 0xf24a01a73cf2dcd0, + 0x976e41088617ca02, + 0xbd49d14aa79dbc82, + 0xec9c459d51852ba3, + 0x93e1ab8252f33b46, + 0xb8da1662e7b00a17, + 0xe7109bfba19c0c9d, + 0x906a617d450187e2, + 0xb484f9dc9641e9db, + 0xe1a63853bbd26451, + 0x8d07e33455637eb3, + 0xb049dc016abc5e60, + 0xdc5c5301c56b75f7, + 0x89b9b3e11b6329bb, + 0xac2820d9623bf429, + 0xd732290fbacaf134, + 0x867f59a9d4bed6c0, + 0xa81f301449ee8c70, + 0xd226fc195c6a2f8c, + 0x83585d8fd9c25db8, + 0xa42e74f3d032f526, + 0xcd3a1230c43fb26f, + 0x80444b5e7aa7cf85, + 0xa0555e361951c367, + 0xc86ab5c39fa63441, + 0xfa856334878fc151, + 0x9c935e00d4b9d8d2, + 0xc3b8358109e84f07, + 0xf4a642e14c6262c9, + 0x98e7e9cccfbd7dbe, + 0xbf21e44003acdd2d, + 0xeeea5d5004981478, + 
0x95527a5202df0ccb, + 0xbaa718e68396cffe, + 0xe950df20247c83fd, + 0x91d28b7416cdd27e, + ], + [ + -1077, -1073, -1070, -1067, -1063, -1060, -1057, -1053, -1050, -1047, -1043, -1040, -1037, + -1034, -1030, -1027, -1024, -1020, -1017, -1014, -1010, -1007, -1004, -1000, -997, -994, + -990, -987, -984, -980, -977, -974, -970, -967, -964, -960, -957, -954, -950, -947, -944, + -940, -937, -934, -931, -927, -924, -921, -917, -914, -911, -907, -904, -901, -897, -894, + -891, -887, -884, -881, -877, -874, -871, -867, -864, -861, -857, -854, -851, -847, -844, + -841, -838, -834, -831, -828, -824, -821, -818, -814, -811, -808, -804, -801, -798, -794, + -791, -788, -784, -781, -778, -774, -771, -768, -764, -761, -758, -754, -751, -748, -744, + -741, -738, -735, -731, -728, -725, -721, -718, -715, -711, -708, -705, -701, -698, -695, + -691, -688, -685, -681, -678, -675, -671, -668, -665, -661, -658, -655, -651, -648, -645, + -642, -638, -635, -632, -628, -625, -622, -618, -615, -612, -608, -605, -602, -598, -595, + -592, -588, -585, -582, -578, -575, -572, -568, -565, -562, -558, -555, -552, -549, -545, + -542, -539, -535, -532, -529, -525, -522, -519, -515, -512, -509, -505, -502, -499, -495, + -492, -489, -485, -482, -479, -475, -472, -469, -465, -462, -459, -455, -452, -449, -446, + -442, -439, -436, -432, -429, -426, -422, -419, -416, -412, -409, -406, -402, -399, -396, + -392, -389, -386, -382, -379, -376, -372, -369, -366, -362, -359, -356, -353, -349, -346, + -343, -339, -336, -333, -329, -326, -323, -319, -316, -313, -309, -306, -303, -299, -296, + -293, -289, -286, -283, -279, -276, -273, -269, -266, -263, -259, -256, -253, -250, -246, + -243, -240, -236, -233, -230, -226, -223, -220, -216, -213, -210, -206, -203, -200, -196, + -193, -190, -186, -183, -180, -176, -173, -170, -166, -163, -160, -157, -153, -150, -147, + -143, -140, -137, -133, -130, -127, -123, -120, -117, -113, -110, -107, -103, -100, -97, + -93, -90, -87, -83, -80, -77, -73, -70, -67, -63, -60, -57, -54, -50, -47, -44, -40, -37, + -34, -30, -27, -24, -20, -17, -14, -10, -7, -4, 0, 3, 6, 10, 13, 16, 20, 23, 26, 30, 33, + 36, 39, 43, 46, 49, 53, 56, 59, 63, 66, 69, 73, 76, 79, 83, 86, 89, 93, 96, 99, 103, 106, + 109, 113, 116, 119, 123, 126, 129, 132, 136, 139, 142, 146, 149, 152, 156, 159, 162, 166, + 169, 172, 176, 179, 182, 186, 189, 192, 196, 199, 202, 206, 209, 212, 216, 219, 222, 226, + 229, 232, 235, 239, 242, 245, 249, 252, 255, 259, 262, 265, 269, 272, 275, 279, 282, 285, + 289, 292, 295, 299, 302, 305, 309, 312, 315, 319, 322, 325, 328, 332, 335, 338, 342, 345, + 348, 352, 355, 358, 362, 365, 368, 372, 375, 378, 382, 385, 388, 392, 395, 398, 402, 405, + 408, 412, 415, 418, 422, 425, 428, 431, 435, 438, 441, 445, 448, 451, 455, 458, 461, 465, + 468, 471, 475, 478, 481, 485, 488, 491, 495, 498, 501, 505, 508, 511, 515, 518, 521, 524, + 528, 531, 534, 538, 541, 544, 548, 551, 554, 558, 561, 564, 568, 571, 574, 578, 581, 584, + 588, 591, 594, 598, 601, 604, 608, 611, 614, 617, 621, 624, 627, 631, 634, 637, 641, 644, + 647, 651, 654, 657, 661, 664, 667, 671, 674, 677, 681, 684, 687, 691, 694, 697, 701, 704, + 707, 711, 714, 717, 720, 724, 727, 730, 734, 737, 740, 744, 747, 750, 754, 757, 760, 764, + 767, 770, 774, 777, 780, 784, 787, 790, 794, 797, 800, 804, 807, 810, 813, 817, 820, 823, + 827, 830, 833, 837, 840, 843, 847, 850, 853, 857, 860, 863, 867, 870, 873, 877, 880, 883, + 887, 890, 893, 897, 900, 903, 907, 910, 913, 916, 920, 923, 926, 930, 933, 936, 940, 943, + 946, 950, + ], +); -pub const F32_SHORT_POWERS: [f32; 
11] = [ - 1e0, - 1e1, - 1e2, - 1e3, - 1e4, - 1e5, - 1e6, - 1e7, - 1e8, - 1e9, - 1e10, -]; +pub const F32_SHORT_POWERS: [f32; 11] = [1e0, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10]; pub const F64_SHORT_POWERS: [f64; 23] = [ - 1e0, - 1e1, - 1e2, - 1e3, - 1e4, - 1e5, - 1e6, - 1e7, - 1e8, - 1e9, - 1e10, - 1e11, - 1e12, - 1e13, - 1e14, - 1e15, - 1e16, - 1e17, - 1e18, - 1e19, - 1e20, - 1e21, - 1e22, + 1e0, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, + 1e17, 1e18, 1e19, 1e20, 1e21, 1e22, ]; diff --git a/src/libcore/num/f32.rs b/src/libcore/num/f32.rs index d1bd97552024d..c135120aa5e5c 100644 --- a/src/libcore/num/f32.rs +++ b/src/libcore/num/f32.rs @@ -323,7 +323,7 @@ impl f32 { /// /// assert!(abs_difference <= f32::EPSILON); /// ``` - #[stable(feature = "f32_deg_rad_conversions", since="1.7.0")] + #[stable(feature = "f32_deg_rad_conversions", since = "1.7.0")] #[inline] pub fn to_degrees(self) -> f32 { // Use a constant for better precision. @@ -342,7 +342,7 @@ impl f32 { /// /// assert!(abs_difference <= f32::EPSILON); /// ``` - #[stable(feature = "f32_deg_rad_conversions", since="1.7.0")] + #[stable(feature = "f32_deg_rad_conversions", since = "1.7.0")] #[inline] pub fn to_radians(self) -> f32 { let value: f32 = consts::PI; @@ -370,7 +370,11 @@ impl f32 { // Since we do not support sNaN in Rust yet, we do not need to handle them. // FIXME(nagisa): due to https://bugs.llvm.org/show_bug.cgi?id=33303 we canonicalize by // multiplying by 1.0. Should switch to the `canonicalize` when it works. - (if self.is_nan() || self < other { other } else { self }) * 1.0 + (if self.is_nan() || self < other { + other + } else { + self + }) * 1.0 } /// Returns the minimum of the two numbers. @@ -394,7 +398,11 @@ impl f32 { // Since we do not support sNaN in Rust yet, we do not need to handle them. // FIXME(nagisa): due to https://bugs.llvm.org/show_bug.cgi?id=33303 we canonicalize by // multiplying by 1.0. Should switch to the `canonicalize` when it works. - (if other.is_nan() || self < other { self } else { other }) * 1.0 + (if other.is_nan() || self < other { + self + } else { + other + }) * 1.0 } /// Raw transmutation to `u32`. diff --git a/src/libcore/num/f64.rs b/src/libcore/num/f64.rs index 8ada5b6756c38..68947321b9969 100644 --- a/src/libcore/num/f64.rs +++ b/src/libcore/num/f64.rs @@ -383,7 +383,11 @@ impl f64 { // Since we do not support sNaN in Rust yet, we do not need to handle them. // FIXME(nagisa): due to https://bugs.llvm.org/show_bug.cgi?id=33303 we canonicalize by // multiplying by 1.0. Should switch to the `canonicalize` when it works. - (if self.is_nan() || self < other { other } else { self }) * 1.0 + (if self.is_nan() || self < other { + other + } else { + self + }) * 1.0 } /// Returns the minimum of the two numbers. @@ -407,7 +411,11 @@ impl f64 { // Since we do not support sNaN in Rust yet, we do not need to handle them. // FIXME(nagisa): due to https://bugs.llvm.org/show_bug.cgi?id=33303 we canonicalize by // multiplying by 1.0. Should switch to the `canonicalize` when it works. - (if other.is_nan() || self < other { self } else { other }) * 1.0 + (if other.is_nan() || self < other { + self + } else { + other + }) * 1.0 } /// Raw transmutation to `u64`. diff --git a/src/libcore/num/flt2dec/decoder.rs b/src/libcore/num/flt2dec/decoder.rs index a3bf783976bbd..6151e473fd7f6 100644 --- a/src/libcore/num/flt2dec/decoder.rs +++ b/src/libcore/num/flt2dec/decoder.rs @@ -1,8 +1,8 @@ //! 
Decodes a floating-point value into individual parts and error ranges. -use {f32, f64}; -use num::FpCategory; use num::dec2flt::rawfp::RawFloat; +use num::FpCategory; +use {f32, f64}; /// Decoded unsigned finite value, such that: /// @@ -47,11 +47,15 @@ pub trait DecodableFloat: RawFloat + Copy { } impl DecodableFloat for f32 { - fn min_pos_norm_value() -> Self { f32::MIN_POSITIVE } + fn min_pos_norm_value() -> Self { + f32::MIN_POSITIVE + } } impl DecodableFloat for f64 { - fn min_pos_norm_value() -> Self { f64::MIN_POSITIVE } + fn min_pos_norm_value() -> Self { + f64::MIN_POSITIVE + } } /// Returns a sign (true when negative) and `FullDecoded` value @@ -67,23 +71,37 @@ pub fn decode(v: T) -> (/*negative?*/ bool, FullDecoded) { // neighbors: (mant - 2, exp) -- (mant, exp) -- (mant + 2, exp) // Float::integer_decode always preserves the exponent, // so the mantissa is scaled for subnormals. - FullDecoded::Finite(Decoded { mant, minus: 1, plus: 1, - exp, inclusive: even }) + FullDecoded::Finite(Decoded { + mant, + minus: 1, + plus: 1, + exp, + inclusive: even, + }) } FpCategory::Normal => { let minnorm = ::min_pos_norm_value().integer_decode(); if mant == minnorm.0 { // neighbors: (maxmant, exp - 1) -- (minnormmant, exp) -- (minnormmant + 1, exp) // where maxmant = minnormmant * 2 - 1 - FullDecoded::Finite(Decoded { mant: mant << 2, minus: 1, plus: 2, - exp: exp - 2, inclusive: even }) + FullDecoded::Finite(Decoded { + mant: mant << 2, + minus: 1, + plus: 2, + exp: exp - 2, + inclusive: even, + }) } else { // neighbors: (mant - 1, exp) -- (mant, exp) -- (mant + 1, exp) - FullDecoded::Finite(Decoded { mant: mant << 1, minus: 1, plus: 1, - exp: exp - 1, inclusive: even }) + FullDecoded::Finite(Decoded { + mant: mant << 1, + minus: 1, + plus: 1, + exp: exp - 1, + inclusive: even, + }) } } }; (sign < 0, decoded) } - diff --git a/src/libcore/num/flt2dec/mod.rs b/src/libcore/num/flt2dec/mod.rs index f9b46a12f7fef..1a791da638770 100644 --- a/src/libcore/num/flt2dec/mod.rs +++ b/src/libcore/num/flt2dec/mod.rs @@ -120,11 +120,11 @@ functions. reason = "internal routines only exposed for testing", issue = "0")] +pub use self::decoder::{decode, DecodableFloat, Decoded, FullDecoded}; use i16; -pub use self::decoder::{decode, DecodableFloat, FullDecoded, Decoded}; -pub mod estimator; pub mod decoder; +pub mod estimator; /// Digit-generation algorithms. 
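A standalone sketch (not part of the patch) of the `DecodableFloat` boundary reformatted above: `min_pos_norm_value()` simply forwards to `MIN_POSITIVE`, and anything strictly below it is subnormal, which `decode` handles in its `FpCategory::Zero | Subnormal` arm. The digit-generation strategies follow below.

use std::num::FpCategory;

fn main() {
    // The documented values of the smallest positive *normal* floats.
    assert_eq!(f32::MIN_POSITIVE, 1.17549435e-38_f32);
    assert_eq!(f64::MIN_POSITIVE, 2.2250738585072014e-308_f64);
    // Halving the smallest normal lands in the subnormal range.
    assert_eq!((f64::MIN_POSITIVE / 2.0).classify(), FpCategory::Subnormal);
}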
pub mod strategy { @@ -144,17 +144,24 @@ pub const MAX_SIG_DIGITS: usize = 17; #[doc(hidden)] pub fn round_up(d: &mut [u8], n: usize) -> Option { match d[..n].iter().rposition(|&c| c != b'9') { - Some(i) => { // d[i+1..n] is all nines + Some(i) => { + // d[i+1..n] is all nines d[i] += 1; - for j in i+1..n { d[j] = b'0'; } + for j in i + 1..n { + d[j] = b'0'; + } None } - None if n > 0 => { // 999..999 rounds to 1000..000 with an increased exponent + None if n > 0 => { + // 999..999 rounds to 1000..000 with an increased exponent d[0] = b'1'; - for j in 1..n { d[j] = b'0'; } + for j in 1..n { + d[j] = b'0'; + } Some(b'0') } - None => { // an empty buffer rounds up (a bit strange but reasonable) + None => { + // an empty buffer rounds up (a bit strange but reasonable) Some(b'1') } } @@ -176,8 +183,23 @@ impl<'a> Part<'a> { pub fn len(&self) -> usize { match *self { Part::Zero(nzeroes) => nzeroes, - Part::Num(v) => if v < 1_000 { if v < 10 { 1 } else if v < 100 { 2 } else { 3 } } - else { if v < 10_000 { 4 } else { 5 } }, + Part::Num(v) => { + if v < 1_000 { + if v < 10 { + 1 + } else if v < 100 { + 2 + } else { + 3 + } + } else { + if v < 10_000 { + 4 + } else { + 5 + } + } + } Part::Copy(buf) => buf.len(), } } @@ -190,7 +212,9 @@ impl<'a> Part<'a> { if out.len() >= len { match *self { Part::Zero(nzeroes) => { - for c in &mut out[..nzeroes] { *c = b'0'; } + for c in &mut out[..nzeroes] { + *c = b'0'; + } } Part::Num(mut v) => { for c in out[..len].iter_mut().rev() { @@ -234,14 +258,20 @@ impl<'a> Formatted<'a> { /// Returns the number of written bytes, or `None` if the buffer is not enough. /// (It may still leave partially written bytes in the buffer; do not rely on that.) pub fn write(&self, out: &mut [u8]) -> Option { - if out.len() < self.sign.len() { return None; } + if out.len() < self.sign.len() { + return None; + } out[..self.sign.len()].copy_from_slice(self.sign); let mut written = self.sign.len(); for part in self.parts { match part.write(&mut out[written..]) { - Some(len) => { written += len; } - None => { return None; } + Some(len) => { + written += len; + } + None => { + return None; + } } } Some(written) @@ -256,8 +286,12 @@ impl<'a> Formatted<'a> { /// it will be ignored and full digits will be printed. It is only used to print /// additional zeroes after rendered digits. Thus `frac_digits` of 0 means that /// it will only print given digits and nothing else. -fn digits_to_dec_str<'a>(buf: &'a [u8], exp: i16, frac_digits: usize, - parts: &'a mut [Part<'a>]) -> &'a [Part<'a>] { +fn digits_to_dec_str<'a>( + buf: &'a [u8], + exp: i16, + frac_digits: usize, + parts: &'a mut [Part<'a>], +) -> &'a [Part<'a>] { assert!(!buf.is_empty()); assert!(buf[0] > b'0'); assert!(parts.len() >= 4); @@ -324,8 +358,13 @@ fn digits_to_dec_str<'a>(buf: &'a [u8], exp: i16, frac_digits: usize, /// it will be ignored and full digits will be printed. It is only used to print /// additional zeroes after rendered digits. Thus `min_digits` of 0 means that /// it will only print given digits and nothing else. 
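Before the exponential-notation counterpart below, a worked illustration of `round_up` from earlier in this hunk (not part of the patch): the function body is restated verbatim so the sketch is self-contained, and only `main` is new.

fn round_up(d: &mut [u8], n: usize) -> Option<u8> {
    match d[..n].iter().rposition(|&c| c != b'9') {
        Some(i) => {
            // d[i+1..n] is all nines: bump d[i] and clear the trailing nines.
            d[i] += 1;
            for j in i + 1..n {
                d[j] = b'0';
            }
            None
        }
        None if n > 0 => {
            // 999..999 becomes 100..000 and the caller bumps the exponent.
            d[0] = b'1';
            for j in 1..n {
                d[j] = b'0';
            }
            Some(b'0')
        }
        None => Some(b'1'),
    }
}

fn main() {
    let mut a = *b"1299";
    assert_eq!(round_up(&mut a, 4), None);
    assert_eq!(&a, b"1300");

    let mut b = *b"999";
    assert_eq!(round_up(&mut b, 3), Some(b'0'));
    assert_eq!(&b, b"100"); // the exponent increment happens at the call site
}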
-fn digits_to_exp_str<'a>(buf: &'a [u8], exp: i16, min_ndigits: usize, upper: bool, - parts: &'a mut [Part<'a>]) -> &'a [Part<'a>] { +fn digits_to_exp_str<'a>( + buf: &'a [u8], + exp: i16, + min_ndigits: usize, + upper: bool, + parts: &'a mut [Part<'a>], +) -> &'a [Part<'a>] { assert!(!buf.is_empty()); assert!(buf[0] > b'0'); assert!(parts.len() >= 6); @@ -361,11 +400,11 @@ fn digits_to_exp_str<'a>(buf: &'a [u8], exp: i16, min_ndigits: usize, upper: boo #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum Sign { /// Prints `-` only for the negative non-zero values. - Minus, // -inf -1 0 0 1 inf nan + Minus, // -inf -1 0 0 1 inf nan /// Prints `-` only for any negative values (including the negative zero). - MinusRaw, // -inf -1 -0 0 1 inf nan + MinusRaw, // -inf -1 -0 0 1 inf nan /// Prints `-` for the negative non-zero values, or `+` otherwise. - MinusPlus, // -inf -1 +0 +0 +1 +inf nan + MinusPlus, // -inf -1 +0 +0 +1 +inf nan /// Prints `-` for any negative values (including the negative zero), or `+` otherwise. MinusPlusRaw, // -inf -1 -0 +0 +1 +inf nan } @@ -376,11 +415,35 @@ fn determine_sign(sign: Sign, decoded: &FullDecoded, negative: bool) -> &'static match (*decoded, sign) { (FullDecoded::Nan, _) => b"", (FullDecoded::Zero, Sign::Minus) => b"", - (FullDecoded::Zero, Sign::MinusRaw) => if negative { b"-" } else { b"" }, + (FullDecoded::Zero, Sign::MinusRaw) => { + if negative { + b"-" + } else { + b"" + } + } (FullDecoded::Zero, Sign::MinusPlus) => b"+", - (FullDecoded::Zero, Sign::MinusPlusRaw) => if negative { b"-" } else { b"+" }, - (_, Sign::Minus) | (_, Sign::MinusRaw) => if negative { b"-" } else { b"" }, - (_, Sign::MinusPlus) | (_, Sign::MinusPlusRaw) => if negative { b"-" } else { b"+" }, + (FullDecoded::Zero, Sign::MinusPlusRaw) => { + if negative { + b"-" + } else { + b"+" + } + } + (_, Sign::Minus) | (_, Sign::MinusRaw) => { + if negative { + b"-" + } else { + b"" + } + } + (_, Sign::MinusPlus) | (_, Sign::MinusPlusRaw) => { + if negative { + b"-" + } else { + b"+" + } + } } } @@ -402,10 +465,19 @@ fn determine_sign(sign: Sign, decoded: &FullDecoded, negative: bool) -> &'static /// The byte buffer should be at least `MAX_SIG_DIGITS` bytes long. /// There should be at least 4 parts available, due to the worst case like /// `[+][0.][0000][2][0000]` with `frac_digits = 10`. 
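Before `to_shortest_str` itself, a stand-in sketch of the sign policies above (not part of the patch). The real `Sign` and `FullDecoded` types are private to core, so `sign_prefix` and its `is_zero` flag are invented here for illustration, and the NaN case, which always yields an empty sign, is omitted; the four variants only disagree on zeros.

enum Sign {
    Minus,
    MinusRaw,
    MinusPlus,
    MinusPlusRaw,
}

fn sign_prefix(sign: Sign, is_zero: bool, negative: bool) -> &'static str {
    match (is_zero, sign) {
        (true, Sign::Minus) => "",
        (true, Sign::MinusRaw) => if negative { "-" } else { "" },
        (true, Sign::MinusPlus) => "+",
        (true, Sign::MinusPlusRaw) => if negative { "-" } else { "+" },
        (false, Sign::Minus) | (false, Sign::MinusRaw) => if negative { "-" } else { "" },
        (false, Sign::MinusPlus) | (false, Sign::MinusPlusRaw) => {
            if negative { "-" } else { "+" }
        }
    }
}

fn main() {
    // Negative zero is where the four policies differ.
    assert_eq!(sign_prefix(Sign::Minus, true, true), "");
    assert_eq!(sign_prefix(Sign::MinusRaw, true, true), "-");
    assert_eq!(sign_prefix(Sign::MinusPlus, true, true), "+");
    assert_eq!(sign_prefix(Sign::MinusPlusRaw, true, true), "-");
}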
-pub fn to_shortest_str<'a, T, F>(mut format_shortest: F, v: T, - sign: Sign, frac_digits: usize, _upper: bool, - buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a> - where T: DecodableFloat, F: FnMut(&Decoded, &mut [u8]) -> (usize, i16) { +pub fn to_shortest_str<'a, T, F>( + mut format_shortest: F, + v: T, + sign: Sign, + frac_digits: usize, + _upper: bool, + buf: &'a mut [u8], + parts: &'a mut [Part<'a>], +) -> Formatted<'a> +where + T: DecodableFloat, + F: FnMut(&Decoded, &mut [u8]) -> (usize, i16), +{ assert!(parts.len() >= 4); assert!(buf.len() >= MAX_SIG_DIGITS); @@ -414,26 +486,41 @@ pub fn to_shortest_str<'a, T, F>(mut format_shortest: F, v: T, match full_decoded { FullDecoded::Nan => { parts[0] = Part::Copy(b"NaN"); - Formatted { sign, parts: &parts[..1] } + Formatted { + sign, + parts: &parts[..1], + } } FullDecoded::Infinite => { parts[0] = Part::Copy(b"inf"); - Formatted { sign, parts: &parts[..1] } + Formatted { + sign, + parts: &parts[..1], + } } FullDecoded::Zero => { - if frac_digits > 0 { // [0.][0000] + if frac_digits > 0 { + // [0.][0000] parts[0] = Part::Copy(b"0."); parts[1] = Part::Zero(frac_digits); - Formatted { sign, parts: &parts[..2] } + Formatted { + sign, + parts: &parts[..2], + } } else { parts[0] = Part::Copy(b"0"); - Formatted { sign, parts: &parts[..1] } + Formatted { + sign, + parts: &parts[..1], + } } } FullDecoded::Finite(ref decoded) => { let (len, exp) = format_shortest(decoded, buf); - Formatted { sign, - parts: digits_to_dec_str(&buf[..len], exp, frac_digits, parts) } + Formatted { + sign, + parts: digits_to_dec_str(&buf[..len], exp, frac_digits, parts), + } } } } @@ -457,10 +544,19 @@ pub fn to_shortest_str<'a, T, F>(mut format_shortest: F, v: T, /// The byte buffer should be at least `MAX_SIG_DIGITS` bytes long. /// There should be at least 6 parts available, due to the worst case like /// `[+][1][.][2345][e][-][6]`. -pub fn to_shortest_exp_str<'a, T, F>(mut format_shortest: F, v: T, - sign: Sign, dec_bounds: (i16, i16), upper: bool, - buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a> - where T: DecodableFloat, F: FnMut(&Decoded, &mut [u8]) -> (usize, i16) { +pub fn to_shortest_exp_str<'a, T, F>( + mut format_shortest: F, + v: T, + sign: Sign, + dec_bounds: (i16, i16), + upper: bool, + buf: &'a mut [u8], + parts: &'a mut [Part<'a>], +) -> Formatted<'a> +where + T: DecodableFloat, + F: FnMut(&Decoded, &mut [u8]) -> (usize, i16), +{ assert!(parts.len() >= 6); assert!(buf.len() >= MAX_SIG_DIGITS); assert!(dec_bounds.0 <= dec_bounds.1); @@ -470,11 +566,17 @@ pub fn to_shortest_exp_str<'a, T, F>(mut format_shortest: F, v: T, match full_decoded { FullDecoded::Nan => { parts[0] = Part::Copy(b"NaN"); - Formatted { sign, parts: &parts[..1] } + Formatted { + sign, + parts: &parts[..1], + } } FullDecoded::Infinite => { parts[0] = Part::Copy(b"inf"); - Formatted { sign, parts: &parts[..1] } + Formatted { + sign, + parts: &parts[..1], + } } FullDecoded::Zero => { parts[0] = if dec_bounds.0 <= 0 && 0 < dec_bounds.1 { @@ -482,7 +584,10 @@ pub fn to_shortest_exp_str<'a, T, F>(mut format_shortest: F, v: T, } else { Part::Copy(if upper { b"0E0" } else { b"0e0" }) }; - Formatted { sign, parts: &parts[..1] } + Formatted { + sign, + parts: &parts[..1], + } } FullDecoded::Finite(ref decoded) => { let (len, exp) = format_shortest(decoded, buf); @@ -536,10 +641,19 @@ fn estimate_max_buf_len(exp: i16) -> usize { /// (The tipping point for `f64` is about 800, so 1000 bytes should be enough.) 
/// There should be at least 6 parts available, due to the worst case like /// `[+][1][.][2345][e][-][6]`. -pub fn to_exact_exp_str<'a, T, F>(mut format_exact: F, v: T, - sign: Sign, ndigits: usize, upper: bool, - buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a> - where T: DecodableFloat, F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16) { +pub fn to_exact_exp_str<'a, T, F>( + mut format_exact: F, + v: T, + sign: Sign, + ndigits: usize, + upper: bool, + buf: &'a mut [u8], + parts: &'a mut [Part<'a>], +) -> Formatted<'a> +where + T: DecodableFloat, + F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16), +{ assert!(parts.len() >= 6); assert!(ndigits > 0); @@ -548,21 +662,34 @@ pub fn to_exact_exp_str<'a, T, F>(mut format_exact: F, v: T, match full_decoded { FullDecoded::Nan => { parts[0] = Part::Copy(b"NaN"); - Formatted { sign, parts: &parts[..1] } + Formatted { + sign, + parts: &parts[..1], + } } FullDecoded::Infinite => { parts[0] = Part::Copy(b"inf"); - Formatted { sign, parts: &parts[..1] } + Formatted { + sign, + parts: &parts[..1], + } } FullDecoded::Zero => { - if ndigits > 1 { // [0.][0000][e0] + if ndigits > 1 { + // [0.][0000][e0] parts[0] = Part::Copy(b"0."); parts[1] = Part::Zero(ndigits - 1); parts[2] = Part::Copy(if upper { b"E0" } else { b"e0" }); - Formatted { sign, parts: &parts[..3] } + Formatted { + sign, + parts: &parts[..3], + } } else { parts[0] = Part::Copy(if upper { b"0E0" } else { b"0e0" }); - Formatted { sign, parts: &parts[..1] } + Formatted { + sign, + parts: &parts[..1], + } } } FullDecoded::Finite(ref decoded) => { @@ -571,8 +698,10 @@ pub fn to_exact_exp_str<'a, T, F>(mut format_exact: F, v: T, let trunc = if ndigits < maxlen { ndigits } else { maxlen }; let (len, exp) = format_exact(decoded, &mut buf[..trunc], i16::MIN); - Formatted { sign, - parts: digits_to_exp_str(&buf[..len], exp, ndigits, upper, parts) } + Formatted { + sign, + parts: digits_to_exp_str(&buf[..len], exp, ndigits, upper, parts), + } } } } @@ -592,10 +721,19 @@ pub fn to_exact_exp_str<'a, T, F>(mut format_exact: F, v: T, /// (The tipping point for `f64` is about 800, and 1000 bytes should be enough.) /// There should be at least 4 parts available, due to the worst case like /// `[+][0.][0000][2][0000]` with `frac_digits = 10`. 
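Before `to_exact_fixed_str` below, a quick map from these four entry points to ordinary format strings (a standalone sketch, not part of the patch; the pairing reflects how core::fmt dispatches into flt2dec: shortest digits for plain `{}`/`{:e}`, exact digits when a precision is given).

fn main() {
    // Shortest round-trip digits: to_shortest_str / to_shortest_exp_str.
    assert_eq!(format!("{}", 0.3_f64), "0.3");
    assert_eq!(format!("{:e}", 1250.0_f64), "1.25e3");
    // Caller-chosen precision: to_exact_fixed_str / to_exact_exp_str.
    assert_eq!(format!("{:.3}", 1.0_f64 / 3.0), "0.333");
    assert_eq!(format!("{:.2e}", 12345.0_f64), "1.23e4");
}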
-pub fn to_exact_fixed_str<'a, T, F>(mut format_exact: F, v: T, - sign: Sign, frac_digits: usize, _upper: bool, - buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a> - where T: DecodableFloat, F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16) { +pub fn to_exact_fixed_str<'a, T, F>( + mut format_exact: F, + v: T, + sign: Sign, + frac_digits: usize, + _upper: bool, + buf: &'a mut [u8], + parts: &'a mut [Part<'a>], +) -> Formatted<'a> +where + T: DecodableFloat, + F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16), +{ assert!(parts.len() >= 4); let (negative, full_decoded) = decode(v); @@ -603,20 +741,33 @@ pub fn to_exact_fixed_str<'a, T, F>(mut format_exact: F, v: T, match full_decoded { FullDecoded::Nan => { parts[0] = Part::Copy(b"NaN"); - Formatted { sign, parts: &parts[..1] } + Formatted { + sign, + parts: &parts[..1], + } } FullDecoded::Infinite => { parts[0] = Part::Copy(b"inf"); - Formatted { sign, parts: &parts[..1] } + Formatted { + sign, + parts: &parts[..1], + } } FullDecoded::Zero => { - if frac_digits > 0 { // [0.][0000] + if frac_digits > 0 { + // [0.][0000] parts[0] = Part::Copy(b"0."); parts[1] = Part::Zero(frac_digits); - Formatted { sign, parts: &parts[..2] } + Formatted { + sign, + parts: &parts[..2], + } } else { parts[0] = Part::Copy(b"0"); - Formatted { sign, parts: &parts[..1] } + Formatted { + sign, + parts: &parts[..1], + } } } FullDecoded::Finite(ref decoded) => { @@ -626,24 +777,37 @@ pub fn to_exact_fixed_str<'a, T, F>(mut format_exact: F, v: T, // it *is* possible that `frac_digits` is ridiculously large. // `format_exact` will end rendering digits much earlier in this case, // because we are strictly limited by `maxlen`. - let limit = if frac_digits < 0x8000 { -(frac_digits as i16) } else { i16::MIN }; + let limit = if frac_digits < 0x8000 { + -(frac_digits as i16) + } else { + i16::MIN + }; let (len, exp) = format_exact(decoded, &mut buf[..maxlen], limit); if exp <= limit { // the restriction couldn't been met, so this should render like zero no matter // `exp` was. this does not include the case that the restriction has been met // only after the final rounding-up; it's a regular case with `exp = limit + 1`. 
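// ---- illustrative aside, not part of the patch ------------------------------
// The `frac_digits < 0x8000` clamp a few lines above exists because the
// requested precision is a usize while `limit` is an i16: precisions of
// 0x8000 (32768) or more would not survive negation into an i16, so they are
// pinned to i16::MIN, which effectively disables the lower cutoff. A
// hypothetical mirror of that clamp:
fn limit_for(frac_digits: usize) -> i16 {
    if frac_digits < 0x8000 {
        -(frac_digits as i16)
    } else {
        i16::MIN
    }
}

fn main() {
    assert_eq!(limit_for(3), -3);
    assert_eq!(limit_for(0x8000), i16::MIN);
}
// -----------------------------------------------------------------------------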
debug_assert_eq!(len, 0); - if frac_digits > 0 { // [0.][0000] + if frac_digits > 0 { + // [0.][0000] parts[0] = Part::Copy(b"0."); parts[1] = Part::Zero(frac_digits); - Formatted { sign, parts: &parts[..2] } + Formatted { + sign, + parts: &parts[..2], + } } else { parts[0] = Part::Copy(b"0"); - Formatted { sign, parts: &parts[..1] } + Formatted { + sign, + parts: &parts[..1], + } } } else { - Formatted { sign, - parts: digits_to_dec_str(&buf[..len], exp, frac_digits, parts) } + Formatted { + sign, + parts: digits_to_dec_str(&buf[..len], exp, frac_digits, parts), + } } } } diff --git a/src/libcore/num/flt2dec/strategy/dragon.rs b/src/libcore/num/flt2dec/strategy/dragon.rs index 582fe22f85406..cc14a557a19b8 100644 --- a/src/libcore/num/flt2dec/strategy/dragon.rs +++ b/src/libcore/num/flt2dec/strategy/dragon.rs @@ -6,38 +6,58 @@ use cmp::Ordering; -use num::flt2dec::{Decoded, MAX_SIG_DIGITS, round_up}; -use num::flt2dec::estimator::estimate_scaling_factor; -use num::bignum::Digit32 as Digit; use num::bignum::Big32x40 as Big; +use num::bignum::Digit32 as Digit; +use num::flt2dec::estimator::estimate_scaling_factor; +use num::flt2dec::{round_up, Decoded, MAX_SIG_DIGITS}; -static POW10: [Digit; 10] = [1, 10, 100, 1000, 10000, 100000, - 1000000, 10000000, 100000000, 1000000000]; -static TWOPOW10: [Digit; 10] = [2, 20, 200, 2000, 20000, 200000, - 2000000, 20000000, 200000000, 2000000000]; +static POW10: [Digit; 10] = [ + 1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000, +]; +static TWOPOW10: [Digit; 10] = [ + 2, 20, 200, 2000, 20000, 200000, 2000000, 20000000, 200000000, 2000000000, +]; // precalculated arrays of `Digit`s for 10^(2^n) static POW10TO16: [Digit; 2] = [0x6fc10000, 0x2386f2]; static POW10TO32: [Digit; 4] = [0, 0x85acef81, 0x2d6d415b, 0x4ee]; -static POW10TO64: [Digit; 7] = [0, 0, 0xbf6a1f01, 0x6e38ed64, 0xdaa797ed, 0xe93ff9f4, 0x184f03]; -static POW10TO128: [Digit; 14] = - [0, 0, 0, 0, 0x2e953e01, 0x3df9909, 0xf1538fd, 0x2374e42f, 0xd3cff5ec, 0xc404dc08, - 0xbccdb0da, 0xa6337f19, 0xe91f2603, 0x24e]; -static POW10TO256: [Digit; 27] = - [0, 0, 0, 0, 0, 0, 0, 0, 0x982e7c01, 0xbed3875b, 0xd8d99f72, 0x12152f87, 0x6bde50c6, - 0xcf4a6e70, 0xd595d80f, 0x26b2716e, 0xadc666b0, 0x1d153624, 0x3c42d35a, 0x63ff540e, - 0xcc5573c0, 0x65f9ef17, 0x55bc28f2, 0x80dcc7f7, 0xf46eeddc, 0x5fdcefce, 0x553f7]; +static POW10TO64: [Digit; 7] = [ + 0, 0, 0xbf6a1f01, 0x6e38ed64, 0xdaa797ed, 0xe93ff9f4, 0x184f03, +]; +static POW10TO128: [Digit; 14] = [ + 0, 0, 0, 0, 0x2e953e01, 0x3df9909, 0xf1538fd, 0x2374e42f, 0xd3cff5ec, 0xc404dc08, 0xbccdb0da, + 0xa6337f19, 0xe91f2603, 0x24e, +]; +static POW10TO256: [Digit; 27] = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0x982e7c01, 0xbed3875b, 0xd8d99f72, 0x12152f87, 0x6bde50c6, 0xcf4a6e70, + 0xd595d80f, 0x26b2716e, 0xadc666b0, 0x1d153624, 0x3c42d35a, 0x63ff540e, 0xcc5573c0, 0x65f9ef17, + 0x55bc28f2, 0x80dcc7f7, 0xf46eeddc, 0x5fdcefce, 0x553f7, +]; #[doc(hidden)] pub fn mul_pow10(x: &mut Big, n: usize) -> &mut Big { debug_assert!(n < 512); - if n & 7 != 0 { x.mul_small(POW10[n & 7]); } - if n & 8 != 0 { x.mul_small(POW10[8]); } - if n & 16 != 0 { x.mul_digits(&POW10TO16); } - if n & 32 != 0 { x.mul_digits(&POW10TO32); } - if n & 64 != 0 { x.mul_digits(&POW10TO64); } - if n & 128 != 0 { x.mul_digits(&POW10TO128); } - if n & 256 != 0 { x.mul_digits(&POW10TO256); } + if n & 7 != 0 { + x.mul_small(POW10[n & 7]); + } + if n & 8 != 0 { + x.mul_small(POW10[8]); + } + if n & 16 != 0 { + x.mul_digits(&POW10TO16); + } + if n & 32 != 0 { + x.mul_digits(&POW10TO32); + } + if n 
& 64 != 0 { + x.mul_digits(&POW10TO64); + } + if n & 128 != 0 { + x.mul_digits(&POW10TO128); + } + if n & 256 != 0 { + x.mul_digits(&POW10TO256); + } x } @@ -52,13 +72,30 @@ fn div_2pow10(x: &mut Big, mut n: usize) -> &mut Big { } // only usable when `x < 16 * scale`; `scaleN` should be `scale.mul_small(N)` -fn div_rem_upto_16<'a>(x: &'a mut Big, scale: &Big, - scale2: &Big, scale4: &Big, scale8: &Big) -> (u8, &'a mut Big) { +fn div_rem_upto_16<'a>( + x: &'a mut Big, + scale: &Big, + scale2: &Big, + scale4: &Big, + scale8: &Big, +) -> (u8, &'a mut Big) { let mut d = 0; - if *x >= *scale8 { x.sub(scale8); d += 8; } - if *x >= *scale4 { x.sub(scale4); d += 4; } - if *x >= *scale2 { x.sub(scale2); d += 2; } - if *x >= *scale { x.sub(scale); d += 1; } + if *x >= *scale8 { + x.sub(scale8); + d += 8; + } + if *x >= *scale4 { + x.sub(scale4); + d += 4; + } + if *x >= *scale2 { + x.sub(scale2); + d += 2; + } + if *x >= *scale { + x.sub(scale); + d += 1; + } debug_assert!(*x < *scale); (d, x) } @@ -85,7 +122,11 @@ pub fn format_shortest(d: &Decoded, buf: &mut [u8]) -> (/*#digits*/ usize, /*exp assert!(buf.len() >= MAX_SIG_DIGITS); // `a.cmp(&b) < rounding` is `if d.inclusive {a <= b} else {a < b}` - let rounding = if d.inclusive {Ordering::Greater} else {Ordering::Equal}; + let rounding = if d.inclusive { + Ordering::Greater + } else { + Ordering::Equal + }; // estimate `k_0` from original inputs satisfying `10^(k_0-1) < high <= 10^(k_0+1)`. // the tight bound `k` satisfying `10^(k-1) < high <= 10^k` is calculated later. @@ -132,9 +173,12 @@ pub fn format_shortest(d: &Decoded, buf: &mut [u8]) -> (/*#digits*/ usize, /*exp } // cache `(2, 4, 8) * scale` for digit generation. - let mut scale2 = scale.clone(); scale2.mul_pow2(1); - let mut scale4 = scale.clone(); scale4.mul_pow2(2); - let mut scale8 = scale.clone(); scale8.mul_pow2(3); + let mut scale2 = scale.clone(); + scale2.mul_pow2(1); + let mut scale4 = scale.clone(); + scale4.mul_pow2(2); + let mut scale8 = scale.clone(); + scale8.mul_pow2(3); let mut down; let mut up; @@ -186,7 +230,9 @@ pub fn format_shortest(d: &Decoded, buf: &mut [u8]) -> (/*#digits*/ usize, /*exp // - keep generating otherwise. down = mant.cmp(&minus) < rounding; up = scale.cmp(mant.clone().add(&plus)) < rounding; - if down || up { break; } // we have the shortest representation, proceed to the rounding + if down || up { + break; + } // we have the shortest representation, proceed to the rounding // restore the invariants. // this makes the algorithm always terminating: `minus` and `plus` always increases, @@ -269,22 +315,40 @@ pub fn format_exact(d: &Decoded, buf: &mut [u8], limit: i16) -> (/*#digits*/ usi if len > 0 { // cache `(2, 4, 8) * scale` for digit generation. // (this can be expensive, so do not calculate them when the buffer is empty.) - let mut scale2 = scale.clone(); scale2.mul_pow2(1); - let mut scale4 = scale.clone(); scale4.mul_pow2(2); - let mut scale8 = scale.clone(); scale8.mul_pow2(3); + let mut scale2 = scale.clone(); + scale2.mul_pow2(1); + let mut scale4 = scale.clone(); + scale4.mul_pow2(2); + let mut scale8 = scale.clone(); + scale8.mul_pow2(3); for i in 0..len { - if mant.is_zero() { // following digits are all zeroes, we stop here + if mant.is_zero() { + // following digits are all zeroes, we stop here // do *not* try to perform rounding! rather, fill remaining digits. 
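// Aside: the `scale8`/`scale4`/`scale2`/`scale` chain in `div_rem_upto_16` above (and
// repeated inline in this `format_exact` loop) is a four-step short division: it peels one
// decimal digit (0..=9) off the mantissa by conditional subtraction instead of a general
// bignum divide. A standalone sketch of the same idea on plain `u64` values; the helper
// name `next_digit` is hypothetical, and the real code operates on `Big32x40` bignums:
fn next_digit(mant: &mut u64, scale: u64) -> u8 {
    // assumes the digit invariant `mant < 10 * scale` and that `8 * scale` fits in a u64
    debug_assert!(*mant < 10 * scale);
    let mut d = 0u8;
    for shift in (0u32..4).rev() {
        // try 8*scale, 4*scale, 2*scale, then scale
        let step = scale << shift;
        if *mant >= step {
            *mant -= step;
            d += 1u8 << shift;
        }
    }
    debug_assert!(*mant < scale); // the remainder feeds the next digit
    d
}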
- for c in &mut buf[i..len] { *c = b'0'; } + for c in &mut buf[i..len] { + *c = b'0'; + } return (len, k); } let mut d = 0; - if mant >= scale8 { mant.sub(&scale8); d += 8; } - if mant >= scale4 { mant.sub(&scale4); d += 4; } - if mant >= scale2 { mant.sub(&scale2); d += 2; } - if mant >= scale { mant.sub(&scale); d += 1; } + if mant >= scale8 { + mant.sub(&scale8); + d += 8; + } + if mant >= scale4 { + mant.sub(&scale4); + d += 4; + } + if mant >= scale2 { + mant.sub(&scale2); + d += 2; + } + if mant >= scale { + mant.sub(&scale); + d += 1; + } debug_assert!(mant < scale); debug_assert!(d < 10); buf[i] = b'0' + d; @@ -296,8 +360,9 @@ pub fn format_exact(d: &Decoded, buf: &mut [u8], limit: i16) -> (/*#digits*/ usi // if the following digits are exactly 5000..., check the prior digit and try to // round to even (i.e., avoid rounding up when the prior digit is even). let order = mant.cmp(scale.mul_small(5)); - if order == Ordering::Greater || (order == Ordering::Equal && - (len == 0 || buf[len-1] & 1 == 1)) { + if order == Ordering::Greater + || (order == Ordering::Equal && (len == 0 || buf[len - 1] & 1 == 1)) + { // if rounding up changes the length, the exponent should also change. // but we've been requested a fixed number of digits, so do not alter the buffer... if let Some(c) = round_up(buf, len) { diff --git a/src/libcore/num/flt2dec/strategy/grisu.rs b/src/libcore/num/flt2dec/strategy/grisu.rs index aa21fcffa5c61..c3c20a8c8b1dd 100644 --- a/src/libcore/num/flt2dec/strategy/grisu.rs +++ b/src/libcore/num/flt2dec/strategy/grisu.rs @@ -6,12 +6,13 @@ //! accurately with integers. SIGPLAN Not. 45, 6 (June 2010), 233-243. use num::diy_float::Fp; -use num::flt2dec::{Decoded, MAX_SIG_DIGITS, round_up}; - +use num::flt2dec::{round_up, Decoded, MAX_SIG_DIGITS}; // see the comments in `format_shortest_opt` for the rationale. 
-#[doc(hidden)] pub const ALPHA: i16 = -60; -#[doc(hidden)] pub const GAMMA: i16 = -32; +#[doc(hidden)] +pub const ALPHA: i16 = -60; +#[doc(hidden)] +pub const GAMMA: i16 = -32; /* # the following Python code generates this table: @@ -24,92 +25,95 @@ for i in xrange(-308, 333, 8): */ #[doc(hidden)] -pub static CACHED_POW10: [(u64, i16, i16); 81] = [ // (f, e, k) +pub static CACHED_POW10: [(u64, i16, i16); 81] = [ + // (f, e, k) (0xe61acf033d1a45df, -1087, -308), (0xab70fe17c79ac6ca, -1060, -300), (0xff77b1fcbebcdc4f, -1034, -292), (0xbe5691ef416bd60c, -1007, -284), - (0x8dd01fad907ffc3c, -980, -276), - (0xd3515c2831559a83, -954, -268), - (0x9d71ac8fada6c9b5, -927, -260), - (0xea9c227723ee8bcb, -901, -252), - (0xaecc49914078536d, -874, -244), - (0x823c12795db6ce57, -847, -236), - (0xc21094364dfb5637, -821, -228), - (0x9096ea6f3848984f, -794, -220), - (0xd77485cb25823ac7, -768, -212), - (0xa086cfcd97bf97f4, -741, -204), - (0xef340a98172aace5, -715, -196), - (0xb23867fb2a35b28e, -688, -188), - (0x84c8d4dfd2c63f3b, -661, -180), - (0xc5dd44271ad3cdba, -635, -172), - (0x936b9fcebb25c996, -608, -164), - (0xdbac6c247d62a584, -582, -156), - (0xa3ab66580d5fdaf6, -555, -148), - (0xf3e2f893dec3f126, -529, -140), - (0xb5b5ada8aaff80b8, -502, -132), - (0x87625f056c7c4a8b, -475, -124), - (0xc9bcff6034c13053, -449, -116), - (0x964e858c91ba2655, -422, -108), - (0xdff9772470297ebd, -396, -100), - (0xa6dfbd9fb8e5b88f, -369, -92), - (0xf8a95fcf88747d94, -343, -84), - (0xb94470938fa89bcf, -316, -76), - (0x8a08f0f8bf0f156b, -289, -68), - (0xcdb02555653131b6, -263, -60), - (0x993fe2c6d07b7fac, -236, -52), - (0xe45c10c42a2b3b06, -210, -44), - (0xaa242499697392d3, -183, -36), - (0xfd87b5f28300ca0e, -157, -28), - (0xbce5086492111aeb, -130, -20), - (0x8cbccc096f5088cc, -103, -12), - (0xd1b71758e219652c, -77, -4), - (0x9c40000000000000, -50, 4), - (0xe8d4a51000000000, -24, 12), - (0xad78ebc5ac620000, 3, 20), - (0x813f3978f8940984, 30, 28), - (0xc097ce7bc90715b3, 56, 36), - (0x8f7e32ce7bea5c70, 83, 44), - (0xd5d238a4abe98068, 109, 52), - (0x9f4f2726179a2245, 136, 60), - (0xed63a231d4c4fb27, 162, 68), - (0xb0de65388cc8ada8, 189, 76), - (0x83c7088e1aab65db, 216, 84), - (0xc45d1df942711d9a, 242, 92), - (0x924d692ca61be758, 269, 100), - (0xda01ee641a708dea, 295, 108), - (0xa26da3999aef774a, 322, 116), - (0xf209787bb47d6b85, 348, 124), - (0xb454e4a179dd1877, 375, 132), - (0x865b86925b9bc5c2, 402, 140), - (0xc83553c5c8965d3d, 428, 148), - (0x952ab45cfa97a0b3, 455, 156), - (0xde469fbd99a05fe3, 481, 164), - (0xa59bc234db398c25, 508, 172), - (0xf6c69a72a3989f5c, 534, 180), - (0xb7dcbf5354e9bece, 561, 188), - (0x88fcf317f22241e2, 588, 196), - (0xcc20ce9bd35c78a5, 614, 204), - (0x98165af37b2153df, 641, 212), - (0xe2a0b5dc971f303a, 667, 220), - (0xa8d9d1535ce3b396, 694, 228), - (0xfb9b7cd9a4a7443c, 720, 236), - (0xbb764c4ca7a44410, 747, 244), - (0x8bab8eefb6409c1a, 774, 252), - (0xd01fef10a657842c, 800, 260), - (0x9b10a4e5e9913129, 827, 268), - (0xe7109bfba19c0c9d, 853, 276), - (0xac2820d9623bf429, 880, 284), - (0x80444b5e7aa7cf85, 907, 292), - (0xbf21e44003acdd2d, 933, 300), - (0x8e679c2f5e44ff8f, 960, 308), - (0xd433179d9c8cb841, 986, 316), - (0x9e19db92b4e31ba9, 1013, 324), - (0xeb96bf6ebadf77d9, 1039, 332), + (0x8dd01fad907ffc3c, -980, -276), + (0xd3515c2831559a83, -954, -268), + (0x9d71ac8fada6c9b5, -927, -260), + (0xea9c227723ee8bcb, -901, -252), + (0xaecc49914078536d, -874, -244), + (0x823c12795db6ce57, -847, -236), + (0xc21094364dfb5637, -821, -228), + (0x9096ea6f3848984f, -794, -220), + (0xd77485cb25823ac7, -768, -212), 
+ (0xa086cfcd97bf97f4, -741, -204), + (0xef340a98172aace5, -715, -196), + (0xb23867fb2a35b28e, -688, -188), + (0x84c8d4dfd2c63f3b, -661, -180), + (0xc5dd44271ad3cdba, -635, -172), + (0x936b9fcebb25c996, -608, -164), + (0xdbac6c247d62a584, -582, -156), + (0xa3ab66580d5fdaf6, -555, -148), + (0xf3e2f893dec3f126, -529, -140), + (0xb5b5ada8aaff80b8, -502, -132), + (0x87625f056c7c4a8b, -475, -124), + (0xc9bcff6034c13053, -449, -116), + (0x964e858c91ba2655, -422, -108), + (0xdff9772470297ebd, -396, -100), + (0xa6dfbd9fb8e5b88f, -369, -92), + (0xf8a95fcf88747d94, -343, -84), + (0xb94470938fa89bcf, -316, -76), + (0x8a08f0f8bf0f156b, -289, -68), + (0xcdb02555653131b6, -263, -60), + (0x993fe2c6d07b7fac, -236, -52), + (0xe45c10c42a2b3b06, -210, -44), + (0xaa242499697392d3, -183, -36), + (0xfd87b5f28300ca0e, -157, -28), + (0xbce5086492111aeb, -130, -20), + (0x8cbccc096f5088cc, -103, -12), + (0xd1b71758e219652c, -77, -4), + (0x9c40000000000000, -50, 4), + (0xe8d4a51000000000, -24, 12), + (0xad78ebc5ac620000, 3, 20), + (0x813f3978f8940984, 30, 28), + (0xc097ce7bc90715b3, 56, 36), + (0x8f7e32ce7bea5c70, 83, 44), + (0xd5d238a4abe98068, 109, 52), + (0x9f4f2726179a2245, 136, 60), + (0xed63a231d4c4fb27, 162, 68), + (0xb0de65388cc8ada8, 189, 76), + (0x83c7088e1aab65db, 216, 84), + (0xc45d1df942711d9a, 242, 92), + (0x924d692ca61be758, 269, 100), + (0xda01ee641a708dea, 295, 108), + (0xa26da3999aef774a, 322, 116), + (0xf209787bb47d6b85, 348, 124), + (0xb454e4a179dd1877, 375, 132), + (0x865b86925b9bc5c2, 402, 140), + (0xc83553c5c8965d3d, 428, 148), + (0x952ab45cfa97a0b3, 455, 156), + (0xde469fbd99a05fe3, 481, 164), + (0xa59bc234db398c25, 508, 172), + (0xf6c69a72a3989f5c, 534, 180), + (0xb7dcbf5354e9bece, 561, 188), + (0x88fcf317f22241e2, 588, 196), + (0xcc20ce9bd35c78a5, 614, 204), + (0x98165af37b2153df, 641, 212), + (0xe2a0b5dc971f303a, 667, 220), + (0xa8d9d1535ce3b396, 694, 228), + (0xfb9b7cd9a4a7443c, 720, 236), + (0xbb764c4ca7a44410, 747, 244), + (0x8bab8eefb6409c1a, 774, 252), + (0xd01fef10a657842c, 800, 260), + (0x9b10a4e5e9913129, 827, 268), + (0xe7109bfba19c0c9d, 853, 276), + (0xac2820d9623bf429, 880, 284), + (0x80444b5e7aa7cf85, 907, 292), + (0xbf21e44003acdd2d, 933, 300), + (0x8e679c2f5e44ff8f, 960, 308), + (0xd433179d9c8cb841, 986, 316), + (0x9e19db92b4e31ba9, 1013, 324), + (0xeb96bf6ebadf77d9, 1039, 332), ]; -#[doc(hidden)] pub const CACHED_POW10_FIRST_E: i16 = -1087; -#[doc(hidden)] pub const CACHED_POW10_LAST_E: i16 = 1039; +#[doc(hidden)] +pub const CACHED_POW10_FIRST_E: i16 = -1087; +#[doc(hidden)] +pub const CACHED_POW10_LAST_E: i16 = 1039; #[doc(hidden)] pub fn cached_power(alpha: i16, gamma: i16) -> (i16, Fp) { @@ -128,30 +132,59 @@ pub fn max_pow10_no_more_than(x: u32) -> (u8, u32) { debug_assert!(x > 0); const X9: u32 = 10_0000_0000; - const X8: u32 = 1_0000_0000; - const X7: u32 = 1000_0000; - const X6: u32 = 100_0000; - const X5: u32 = 10_0000; - const X4: u32 = 1_0000; - const X3: u32 = 1000; - const X2: u32 = 100; - const X1: u32 = 10; + const X8: u32 = 1_0000_0000; + const X7: u32 = 1000_0000; + const X6: u32 = 100_0000; + const X5: u32 = 10_0000; + const X4: u32 = 1_0000; + const X3: u32 = 1000; + const X2: u32 = 100; + const X1: u32 = 10; if x < X4 { - if x < X2 { if x < X1 {(0, 1)} else {(1, X1)} } - else { if x < X3 {(2, X2)} else {(3, X3)} } + if x < X2 { + if x < X1 { + (0, 1) + } else { + (1, X1) + } + } else { + if x < X3 { + (2, X2) + } else { + (3, X3) + } + } } else { - if x < X6 { if x < X5 {(4, X4)} else {(5, X5)} } - else if x < X8 { if x < X7 {(6, X6)} else {(7, X7)} } - 
else { if x < X9 {(8, X8)} else {(9, X9)} } + if x < X6 { + if x < X5 { + (4, X4) + } else { + (5, X5) + } + } else if x < X8 { + if x < X7 { + (6, X6) + } else { + (7, X7) + } + } else { + if x < X9 { + (8, X8) + } else { + (9, X9) + } + } } } /// The shortest mode implementation for Grisu. /// /// It returns `None` when it would return an inexact representation otherwise. -pub fn format_shortest_opt(d: &Decoded, - buf: &mut [u8]) -> Option<(/*#digits*/ usize, /*exp*/ i16)> { +pub fn format_shortest_opt( + d: &Decoded, + buf: &mut [u8], +) -> Option<(/*#digits*/ usize, /*exp*/ i16)> { assert!(d.mant > 0); assert!(d.minus > 0); assert!(d.plus > 0); @@ -161,9 +194,21 @@ pub fn format_shortest_opt(d: &Decoded, assert!(d.mant + d.plus < (1 << 61)); // we need at least three bits of additional precision // start with the normalized values with the shared exponent - let plus = Fp { f: d.mant + d.plus, e: d.exp }.normalize(); - let minus = Fp { f: d.mant - d.minus, e: d.exp }.normalize_to(plus.e); - let v = Fp { f: d.mant, e: d.exp }.normalize_to(plus.e); + let plus = Fp { + f: d.mant + d.plus, + e: d.exp, + } + .normalize(); + let minus = Fp { + f: d.mant - d.minus, + e: d.exp, + } + .normalize_to(plus.e); + let v = Fp { + f: d.mant, + e: d.exp, + } + .normalize_to(plus.e); // find any `cached = 10^minusk` such that `ALPHA <= minusk + plus.e + 64 <= GAMMA`. // since `plus` is normalized, this means `2^(62 + ALPHA) <= plus * cached < 2^(64 + GAMMA)`; @@ -208,8 +253,8 @@ pub fn format_shortest_opt(d: &Decoded, // we start with the correct repr within the unsafe region, and try to find the closest repr // to `v` which is also within the safe region. if we can't, we give up. let plus1 = plus.f + 1; -// let plus0 = plus.f - 1; // only for explanation -// let minus0 = minus.f + 1; // only for explanation + // let plus0 = plus.f - 1; // only for explanation + // let minus0 = minus.f + 1; // only for explanation let minus1 = minus.f - 1; let e = -plus.e as usize; // shared exponent @@ -235,14 +280,15 @@ pub fn format_shortest_opt(d: &Decoded, // (e.g., `x` = 32000, `y` = 32777; `kappa` = 2 since `y mod 10^3 = 777 < y - x = 777`.) // the algorithm relies on the later verification phase to exclude `y`. let delta1 = plus1 - minus1; -// let delta1int = (delta1 >> e) as usize; // only for explanation + // let delta1int = (delta1 >> e) as usize; // only for explanation let delta1frac = delta1 & ((1 << e) - 1); // render integral parts, while checking for the accuracy at each step. let mut kappa = max_kappa as i16; let mut ten_kappa = max_ten_kappa; // 10^kappa let mut remainder = plus1int; // digits yet to be rendered - loop { // we always have at least one digit to render, as `plus1 >= 10^kappa` + loop { + // we always have at least one digit to render, as `plus1 >= 10^kappa` // invariants: // - `delta1int <= remainder < 10^(kappa+1)` // - `plus1int = d[0..n-1] * 10^(kappa+1) + remainder` @@ -259,7 +305,15 @@ pub fn format_shortest_opt(d: &Decoded, if plus1rem < delta1 { // `plus1 % 10^kappa < delta1 = plus1 - minus1`; we've found the correct `kappa`. let ten_kappa = (ten_kappa as u64) << e; // scale 10^kappa back to the shared exponent - return round_and_weed(&mut buf[..i], exp, plus1rem, delta1, plus1 - v.f, ten_kappa, 1); + return round_and_weed( + &mut buf[..i], + exp, + plus1rem, + delta1, + plus1 - v.f, + ten_kappa, + 1, + ); } // break the loop when we have rendered all integral digits. 
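// Aside: `max_pow10_no_more_than` above is a branch-minimised lookup of the largest power
// of ten not exceeding `x`; it seeds `kappa`/`ten_kappa` for the digit loop. An
// obviously-correct (if slower) sketch of the same function, useful for cross-checking the
// reformatted nest of `if`s; illustrative only, not the libcore implementation:
fn max_pow10_no_more_than(x: u32) -> (u8, u32) {
    debug_assert!(x > 0);
    let mut k = 0u8;
    let mut pow = 1u32;
    while let Some(next) = pow.checked_mul(10) {
        if next > x {
            break;
        }
        pow = next;
        k += 1;
    }
    (k, pow) // e.g. x = 32777 gives (4, 10_000)
}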
@@ -281,7 +335,8 @@ pub fn format_shortest_opt(d: &Decoded, let mut remainder = plus1frac; let mut threshold = delta1frac; let mut ulp = 1; - loop { // the next digit should be significant as we've tested that before breaking out + loop { + // the next digit should be significant as we've tested that before breaking out // invariants, where `m = max_kappa + 1` (# of digits in the integral part): // - `remainder < 2^e` // - `plus1frac * 10^(n-m) = d[m..n-1] * 2^e + remainder` @@ -300,8 +355,15 @@ pub fn format_shortest_opt(d: &Decoded, if r < threshold { let ten_kappa = 1 << e; // implicit divisor - return round_and_weed(&mut buf[..i], exp, r, threshold, - (plus1 - v.f) * ulp, ten_kappa, ulp); + return round_and_weed( + &mut buf[..i], + exp, + r, + threshold, + (plus1 - v.f) * ulp, + ten_kappa, + ulp, + ); } // restore invariants @@ -325,8 +387,15 @@ pub fn format_shortest_opt(d: &Decoded, // - `plus1v = (plus1 - v) * k` (and also, `threshold > plus1v` from prior invariants) // - `ten_kappa = 10^kappa * k` // - `ulp = 2^-e * k` - fn round_and_weed(buf: &mut [u8], exp: i16, remainder: u64, threshold: u64, plus1v: u64, - ten_kappa: u64, ulp: u64) -> Option<(usize, i16)> { + fn round_and_weed( + buf: &mut [u8], + exp: i16, + remainder: u64, + threshold: u64, + plus1v: u64, + ten_kappa: u64, + ulp: u64, + ) -> Option<(usize, i16)> { assert!(!buf.is_empty()); // produce two approximations to `v` (actually `plus1 - v`) within 1.5 ulps. @@ -381,10 +450,11 @@ pub fn format_shortest_opt(d: &Decoded, // // consequently, we should stop when `TC1 || TC2 || (TC3a && TC3b)`. the following is // equal to its inverse, `!TC1 && !TC2 && (!TC3a || !TC3b)`. - while plus1w < plus1v_up && - threshold - plus1w >= ten_kappa && - (plus1w + ten_kappa < plus1v_up || - plus1v_up - plus1w >= plus1w + ten_kappa - plus1v_up) { + while plus1w < plus1v_up + && threshold - plus1w >= ten_kappa + && (plus1w + ten_kappa < plus1v_up + || plus1v_up - plus1w >= plus1w + ten_kappa - plus1v_up) + { *last -= 1; debug_assert!(*last > b'0'); // the shortest repr cannot end with `0` plus1w += ten_kappa; @@ -395,10 +465,11 @@ pub fn format_shortest_opt(d: &Decoded, // // this is simply same to the terminating conditions for `v + 1 ulp`, with all `plus1v_up` // replaced by `plus1v_down` instead. overflow analysis equally holds. - if plus1w < plus1v_down && - threshold - plus1w >= ten_kappa && - (plus1w + ten_kappa < plus1v_down || - plus1v_down - plus1w >= plus1w + ten_kappa - plus1v_down) { + if plus1w < plus1v_down + && threshold - plus1w >= ten_kappa + && (plus1w + ten_kappa < plus1v_down + || plus1v_down - plus1w >= plus1w + ten_kappa - plus1v_down) + { return None; } @@ -428,14 +499,21 @@ pub fn format_shortest(d: &Decoded, buf: &mut [u8]) -> (/*#digits*/ usize, /*exp /// The exact and fixed mode implementation for Grisu. /// /// It returns `None` when it would return an inexact representation otherwise. -pub fn format_exact_opt(d: &Decoded, buf: &mut [u8], limit: i16) - -> Option<(/*#digits*/ usize, /*exp*/ i16)> { +pub fn format_exact_opt( + d: &Decoded, + buf: &mut [u8], + limit: i16, +) -> Option<(/*#digits*/ usize, /*exp*/ i16)> { assert!(d.mant > 0); assert!(d.mant < (1 << 61)); // we need at least three bits of additional precision assert!(!buf.is_empty()); // normalize and scale `v`. 
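// Aside: `Fp::normalize` (used just below) shifts the significand left until its top bit is
// set and lowers the exponent by the same amount, so the value `f * 2^e` is unchanged while
// all 64 bits of `f` carry precision. A minimal sketch of that operation; an assumed shape
// for illustration, not the `num::diy_float` source:
fn normalize(mut f: u64, mut e: i16) -> (u64, i16) {
    debug_assert!(f > 0); // `leading_zeros` of 0 would shift the value away entirely
    let shift = f.leading_zeros();
    f <<= shift;
    e -= shift as i16;
    (f, e) // now f >= 1 << 63
}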
- let v = Fp { f: d.mant, e: d.exp }.normalize(); + let v = Fp { + f: d.mant, + e: d.exp, + } + .normalize(); let (minusk, cached) = cached_power(ALPHA - v.e - 64, GAMMA - v.e - 64); let v = v.mul(&cached); @@ -476,7 +554,15 @@ pub fn format_exact_opt(d: &Decoded, buf: &mut [u8], limit: i16) // thus we are being sloppy here and widen the error range by a factor of 10. // this will increase the false negative rate, but only very, *very* slightly; // it can only matter noticeably when the mantissa is bigger than 60 bits. - return possibly_round(buf, 0, exp, limit, v.f / 10, (max_ten_kappa as u64) << e, err << e); + return possibly_round( + buf, + 0, + exp, + limit, + v.f / 10, + (max_ten_kappa as u64) << e, + err << e, + ); } else if ((exp as i32 - limit as i32) as usize) < buf.len() { (exp - limit) as usize } else { @@ -489,7 +575,8 @@ pub fn format_exact_opt(d: &Decoded, buf: &mut [u8], limit: i16) let mut kappa = max_kappa as i16; let mut ten_kappa = max_ten_kappa; // 10^kappa let mut remainder = vint; // digits yet to be rendered - loop { // we always have at least one digit to render + loop { + // we always have at least one digit to render // invariants: // - `remainder < 10^(kappa+1)` // - `vint = d[0..n-1] * 10^(kappa+1) + remainder` @@ -505,7 +592,15 @@ pub fn format_exact_opt(d: &Decoded, buf: &mut [u8], limit: i16) // is the buffer full? run the rounding pass with the remainder. if i == len { let vrem = ((r as u64) << e) + vfrac; // == (v % 10^kappa) * 2^e - return possibly_round(buf, len, exp, limit, vrem, (ten_kappa as u64) << e, err << e); + return possibly_round( + buf, + len, + exp, + limit, + vrem, + (ten_kappa as u64) << e, + err << e, + ); } // break the loop when we have rendered all integral digits. @@ -575,8 +670,15 @@ pub fn format_exact_opt(d: &Decoded, buf: &mut [u8], limit: i16) // - `remainder = (v % 10^kappa) * k` // - `ten_kappa = 10^kappa * k` // - `ulp = 2^-e * k` - fn possibly_round(buf: &mut [u8], mut len: usize, mut exp: i16, limit: i16, - remainder: u64, ten_kappa: u64, ulp: u64) -> Option<(usize, i16)> { + fn possibly_round( + buf: &mut [u8], + mut len: usize, + mut exp: i16, + limit: i16, + remainder: u64, + ten_kappa: u64, + ulp: u64, + ) -> Option<(usize, i16)> { debug_assert!(remainder < ten_kappa); // 10^kappa @@ -593,7 +695,9 @@ pub fn format_exact_opt(d: &Decoded, buf: &mut [u8], limit: i16) // // error is too large that there are at least three possible representations // between `v - 1 ulp` and `v + 1 ulp`. we cannot determine which one is correct. - if ulp >= ten_kappa { return None; } + if ulp >= ten_kappa { + return None; + } // 10^kappa // :<------->: @@ -607,7 +711,9 @@ pub fn format_exact_opt(d: &Decoded, buf: &mut [u8], limit: i16) // in fact, 1/2 ulp is enough to introduce two possible representations. // (remember that we need a unique representation for both `v - 1 ulp` and `v + 1 ulp`.) // this won't overflow, as `ulp < ten_kappa` from the first check. - if ten_kappa - ulp <= ulp { return None; } + if ten_kappa - ulp <= ulp { + return None; + } // remainder // :<->| : diff --git a/src/libcore/num/mod.rs b/src/libcore/num/mod.rs index 97bf582df5a8c..0a17cd880774e 100644 --- a/src/libcore/num/mod.rs +++ b/src/libcore/num/mod.rs @@ -135,8 +135,7 @@ nonzero_integers! 
{ #[stable(feature = "rust1", since = "1.0.0")] #[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Hash)] #[repr(transparent)] -pub struct Wrapping(#[stable(feature = "rust1", since = "1.0.0")] - pub T); +pub struct Wrapping(#[stable(feature = "rust1", since = "1.0.0")] pub T); #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for Wrapping { @@ -181,10 +180,10 @@ impl fmt::UpperHex for Wrapping { } // All these modules are technically private and only exposed for coretests: -pub mod flt2dec; -pub mod dec2flt; pub mod bignum; +pub mod dec2flt; pub mod diy_float; +pub mod flt2dec; mod wrapping; @@ -2086,39 +2085,39 @@ fn read_be_", stringify!($SelfT), "(input: &mut &[u8]) -> ", stringify!($SelfT), #[lang = "i8"] impl i8 { int_impl! { i8, i8, u8, 8, -128, 127, "", "", 2, "-0x7e", "0xa", "0x12", "0x12", "0x48", - "[0x12]", "[0x12]" } + "[0x12]", "[0x12]" } } #[lang = "i16"] impl i16 { int_impl! { i16, i16, u16, 16, -32768, 32767, "", "", 4, "-0x5ffd", "0x3a", "0x1234", "0x3412", - "0x2c48", "[0x34, 0x12]", "[0x12, 0x34]" } + "0x2c48", "[0x34, 0x12]", "[0x12, 0x34]" } } #[lang = "i32"] impl i32 { int_impl! { i32, i32, u32, 32, -2147483648, 2147483647, "", "", 8, "0x10000b3", "0xb301", - "0x12345678", "0x78563412", "0x1e6a2c48", "[0x78, 0x56, 0x34, 0x12]", - "[0x12, 0x34, 0x56, 0x78]" } + "0x12345678", "0x78563412", "0x1e6a2c48", "[0x78, 0x56, 0x34, 0x12]", + "[0x12, 0x34, 0x56, 0x78]" } } #[lang = "i64"] impl i64 { int_impl! { i64, i64, u64, 64, -9223372036854775808, 9223372036854775807, "", "", 12, - "0xaa00000000006e1", "0x6e10aa", "0x1234567890123456", "0x5634129078563412", - "0x6a2c48091e6a2c48", "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]", - "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]" } + "0xaa00000000006e1", "0x6e10aa", "0x1234567890123456", "0x5634129078563412", + "0x6a2c48091e6a2c48", "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]", + "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]" } } #[lang = "i128"] impl i128 { int_impl! { i128, i128, u128, 128, -170141183460469231731687303715884105728, - 170141183460469231731687303715884105727, "", "", 16, - "0x13f40000000000000000000000004f76", "0x4f7613f4", "0x12345678901234567890123456789012", - "0x12907856341290785634129078563412", "0x48091e6a2c48091e6a2c48091e6a2c48", - "[0x12, 0x90, 0x78, 0x56, 0x34, 0x12, 0x90, 0x78, \ - 0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]", - "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, \ + 170141183460469231731687303715884105727, "", "", 16, + "0x13f40000000000000000000000004f76", "0x4f7613f4", "0x12345678901234567890123456789012", + "0x12907856341290785634129078563412", "0x48091e6a2c48091e6a2c48091e6a2c48", + "[0x12, 0x90, 0x78, 0x56, 0x34, 0x12, 0x90, 0x78, \ + 0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]", + "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, \ 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12]" } } @@ -2126,24 +2125,24 @@ impl i128 { #[lang = "isize"] impl isize { int_impl! { isize, i16, u16, 16, -32768, 32767, "", "", 4, "-0x5ffd", "0x3a", "0x1234", - "0x3412", "0x2c48", "[0x34, 0x12]", "[0x12, 0x34]" } + "0x3412", "0x2c48", "[0x34, 0x12]", "[0x12, 0x34]" } } #[cfg(target_pointer_width = "32")] #[lang = "isize"] impl isize { int_impl! 
{ isize, i32, u32, 32, -2147483648, 2147483647, "", "", 8, "0x10000b3", "0xb301", - "0x12345678", "0x78563412", "0x1e6a2c48", "[0x78, 0x56, 0x34, 0x12]", - "[0x12, 0x34, 0x56, 0x78]" } + "0x12345678", "0x78563412", "0x1e6a2c48", "[0x78, 0x56, 0x34, 0x12]", + "[0x12, 0x34, 0x56, 0x78]" } } #[cfg(target_pointer_width = "64")] #[lang = "isize"] impl isize { int_impl! { isize, i64, u64, 64, -9223372036854775808, 9223372036854775807, "", "", - 12, "0xaa00000000006e1", "0x6e10aa", "0x1234567890123456", "0x5634129078563412", - "0x6a2c48091e6a2c48", "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]", - "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]" } + 12, "0xaa00000000006e1", "0x6e10aa", "0x1234567890123456", "0x5634129078563412", + "0x6a2c48091e6a2c48", "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]", + "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]" } } // `Int` + `UnsignedInt` implemented for unsigned integers @@ -3849,8 +3848,7 @@ fn read_be_", stringify!($SelfT), "(input: &mut &[u8]) -> ", stringify!($SelfT), #[lang = "u8"] impl u8 { uint_impl! { u8, u8, 8, 255, "", "", 2, "0x82", "0xa", "0x12", "0x12", "0x48", "[0x12]", - "[0x12]" } - + "[0x12]" } /// Checks if the value is within the ASCII range. /// @@ -4012,10 +4010,12 @@ impl u8 { #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")] #[inline] pub fn is_ascii_alphabetic(&self) -> bool { - if *self >= 0x80 { return false; } + if *self >= 0x80 { + return false; + } match ASCII_CHARACTER_CLASS[*self as usize] { L | Lx | U | Ux => true, - _ => false + _ => false, } } @@ -4048,10 +4048,12 @@ impl u8 { #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")] #[inline] pub fn is_ascii_uppercase(&self) -> bool { - if *self >= 0x80 { return false } + if *self >= 0x80 { + return false; + } match ASCII_CHARACTER_CLASS[*self as usize] { U | Ux => true, - _ => false + _ => false, } } @@ -4084,10 +4086,12 @@ impl u8 { #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")] #[inline] pub fn is_ascii_lowercase(&self) -> bool { - if *self >= 0x80 { return false } + if *self >= 0x80 { + return false; + } match ASCII_CHARACTER_CLASS[*self as usize] { L | Lx => true, - _ => false + _ => false, } } @@ -4123,10 +4127,12 @@ impl u8 { #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")] #[inline] pub fn is_ascii_alphanumeric(&self) -> bool { - if *self >= 0x80 { return false } + if *self >= 0x80 { + return false; + } match ASCII_CHARACTER_CLASS[*self as usize] { D | L | Lx | U | Ux => true, - _ => false + _ => false, } } @@ -4159,10 +4165,12 @@ impl u8 { #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")] #[inline] pub fn is_ascii_digit(&self) -> bool { - if *self >= 0x80 { return false } + if *self >= 0x80 { + return false; + } match ASCII_CHARACTER_CLASS[*self as usize] { D => true, - _ => false + _ => false, } } @@ -4198,10 +4206,12 @@ impl u8 { #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")] #[inline] pub fn is_ascii_hexdigit(&self) -> bool { - if *self >= 0x80 { return false } + if *self >= 0x80 { + return false; + } match ASCII_CHARACTER_CLASS[*self as usize] { D | Lx | Ux => true, - _ => false + _ => false, } } @@ -4238,10 +4248,12 @@ impl u8 { #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")] #[inline] pub fn is_ascii_punctuation(&self) -> bool { - if *self >= 0x80 { return false } + if *self >= 0x80 { + return false; + } match ASCII_CHARACTER_CLASS[*self as usize] { P => true, - _ => false + _ => false, } } @@ -4274,10 +4286,12 @@ impl u8 { 
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")] #[inline] pub fn is_ascii_graphic(&self) -> bool { - if *self >= 0x80 { return false; } + if *self >= 0x80 { + return false; + } match ASCII_CHARACTER_CLASS[*self as usize] { Ux | U | Lx | L | D | P => true, - _ => false + _ => false, } } @@ -4327,10 +4341,12 @@ impl u8 { #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")] #[inline] pub fn is_ascii_whitespace(&self) -> bool { - if *self >= 0x80 { return false; } + if *self >= 0x80 { + return false; + } match ASCII_CHARACTER_CLASS[*self as usize] { Cw | W => true, - _ => false + _ => false, } } @@ -4365,10 +4381,12 @@ impl u8 { #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")] #[inline] pub fn is_ascii_control(&self) -> bool { - if *self >= 0x80 { return false; } + if *self >= 0x80 { + return false; + } match ASCII_CHARACTER_CLASS[*self as usize] { C | Cw => true, - _ => false + _ => false, } } } @@ -4376,31 +4394,31 @@ impl u8 { #[lang = "u16"] impl u16 { uint_impl! { u16, u16, 16, 65535, "", "", 4, "0xa003", "0x3a", "0x1234", "0x3412", "0x2c48", - "[0x34, 0x12]", "[0x12, 0x34]" } + "[0x34, 0x12]", "[0x12, 0x34]" } } #[lang = "u32"] impl u32 { uint_impl! { u32, u32, 32, 4294967295, "", "", 8, "0x10000b3", "0xb301", "0x12345678", - "0x78563412", "0x1e6a2c48", "[0x78, 0x56, 0x34, 0x12]", "[0x12, 0x34, 0x56, 0x78]" } + "0x78563412", "0x1e6a2c48", "[0x78, 0x56, 0x34, 0x12]", "[0x12, 0x34, 0x56, 0x78]" } } #[lang = "u64"] impl u64 { uint_impl! { u64, u64, 64, 18446744073709551615, "", "", 12, "0xaa00000000006e1", "0x6e10aa", - "0x1234567890123456", "0x5634129078563412", "0x6a2c48091e6a2c48", - "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]", - "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]" } + "0x1234567890123456", "0x5634129078563412", "0x6a2c48091e6a2c48", + "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]", + "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]" } } #[lang = "u128"] impl u128 { uint_impl! { u128, u128, 128, 340282366920938463463374607431768211455, "", "", 16, - "0x13f40000000000000000000000004f76", "0x4f7613f4", "0x12345678901234567890123456789012", - "0x12907856341290785634129078563412", "0x48091e6a2c48091e6a2c48091e6a2c48", - "[0x12, 0x90, 0x78, 0x56, 0x34, 0x12, 0x90, 0x78, \ - 0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]", - "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, \ + "0x13f40000000000000000000000004f76", "0x4f7613f4", "0x12345678901234567890123456789012", + "0x12907856341290785634129078563412", "0x48091e6a2c48091e6a2c48091e6a2c48", + "[0x12, 0x90, 0x78, 0x56, 0x34, 0x12, 0x90, 0x78, \ + 0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]", + "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, \ 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12]" } } @@ -4408,22 +4426,22 @@ impl u128 { #[lang = "usize"] impl usize { uint_impl! { usize, u16, 16, 65536, "", "", 4, "0xa003", "0x3a", "0x1234", "0x3412", "0x2c48", - "[0x34, 0x12]", "[0x12, 0x34]" } + "[0x34, 0x12]", "[0x12, 0x34]" } } #[cfg(target_pointer_width = "32")] #[lang = "usize"] impl usize { uint_impl! { usize, u32, 32, 4294967295, "", "", 8, "0x10000b3", "0xb301", "0x12345678", - "0x78563412", "0x1e6a2c48", "[0x78, 0x56, 0x34, 0x12]", "[0x12, 0x34, 0x56, 0x78]" } + "0x78563412", "0x1e6a2c48", "[0x78, 0x56, 0x34, 0x12]", "[0x12, 0x34, 0x56, 0x78]" } } #[cfg(target_pointer_width = "64")] #[lang = "usize"] impl usize { uint_impl! 
{ usize, u64, 64, 18446744073709551615, "", "", 12, "0xaa00000000006e1", "0x6e10aa", - "0x1234567890123456", "0x5634129078563412", "0x6a2c48091e6a2c48", - "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]", - "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]" } + "0x1234567890123456", "0x5634129078563412", "0x6a2c48091e6a2c48", + "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]", + "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]" } } /// A classification of floating point numbers. @@ -4495,10 +4513,12 @@ from_str_radix_int_impl! { isize i8 i16 i32 i64 i128 usize u8 u16 u32 u64 u128 } pub struct TryFromIntError(()); impl TryFromIntError { - #[unstable(feature = "int_error_internals", - reason = "available through Error trait and this method should \ - not be exposed publicly", - issue = "0")] + #[unstable( + feature = "int_error_internals", + reason = "available through Error trait and this method should \ + not be exposed publicly", + issue = "0" + )] #[doc(hidden)] pub fn __description(&self) -> &str { "out of range integral type conversion attempted" @@ -4745,9 +4765,11 @@ fn from_str_radix(src: &str, radix: u32) -> Result= 2 && radix <= 36, - "from_str_radix_int: must lie in the range `[2, 36]` - found {}", - radix); + assert!( + radix >= 2 && radix <= 36, + "from_str_radix_int: must lie in the range `[2, 36]` - found {}", + radix + ); if src.is_empty() { return Err(PIE { kind: Empty }); @@ -4828,10 +4850,12 @@ pub struct ParseIntError { } /// Enum to store the various types of errors that can cause parsing an integer to fail. -#[unstable(feature = "int_error_matching", - reason = "it can be useful to match errors when making error messages \ - for integer parsing", - issue = "22639")] +#[unstable( + feature = "int_error_matching", + reason = "it can be useful to match errors when making error messages \ + for integer parsing", + issue = "22639" +)] #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum IntErrorKind { @@ -4852,17 +4876,21 @@ pub enum IntErrorKind { impl ParseIntError { /// Outputs the detailed cause of parsing an integer failing. - #[unstable(feature = "int_error_matching", - reason = "it can be useful to match errors when making error messages \ - for integer parsing", - issue = "22639")] + #[unstable( + feature = "int_error_matching", + reason = "it can be useful to match errors when making error messages \ + for integer parsing", + issue = "22639" + )] pub fn kind(&self) -> &IntErrorKind { &self.kind } - #[unstable(feature = "int_error_internals", - reason = "available through Error trait and this method should \ - not be exposed publicly", - issue = "0")] + #[unstable( + feature = "int_error_internals", + reason = "available through Error trait and this method should \ + not be exposed publicly", + issue = "0" + )] #[doc(hidden)] pub fn __description(&self) -> &str { match self.kind { @@ -4990,7 +5018,6 @@ impl_from! { i16, isize, #[stable(feature = "lossless_iusize_conv", since = "1.2 // https://www.cl.cam.ac.uk/research/security/ctsrd/pdfs/20171017a-cheri-poster.pdf // http://www.csl.sri.com/users/neumann/2012resolve-cheri.pdf - // Note: integers can only be represented with full precision in a float if // they fit in the significand, which is 24 bits in f32 and 53 bits in f64. // Lossy float conversions are not implemented at this time. @@ -5013,81 +5040,43 @@ impl_from! { u32, f64, #[stable(feature = "lossless_float_conv", since = "1.6.0" impl_from! 
{ f32, f64, #[stable(feature = "lossless_float_conv", since = "1.6.0")] } static ASCII_LOWERCASE_MAP: [u8; 256] = [ - 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, - 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, - 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, - 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, - b' ', b'!', b'"', b'#', b'$', b'%', b'&', b'\'', - b'(', b')', b'*', b'+', b',', b'-', b'.', b'/', - b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', - b'8', b'9', b':', b';', b'<', b'=', b'>', b'?', - b'@', - - b'a', b'b', b'c', b'd', b'e', b'f', b'g', - b'h', b'i', b'j', b'k', b'l', b'm', b'n', b'o', - b'p', b'q', b'r', b's', b't', b'u', b'v', b'w', - b'x', b'y', b'z', - - b'[', b'\\', b']', b'^', b'_', - b'`', b'a', b'b', b'c', b'd', b'e', b'f', b'g', - b'h', b'i', b'j', b'k', b'l', b'm', b'n', b'o', - b'p', b'q', b'r', b's', b't', b'u', b'v', b'w', - b'x', b'y', b'z', b'{', b'|', b'}', b'~', 0x7f, - 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, - 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, - 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, - 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f, - 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, - 0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf, - 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, - 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf, - 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7, - 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf, - 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, - 0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf, - 0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, - 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef, - 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, - 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff, + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, + b' ', b'!', b'"', b'#', b'$', b'%', b'&', b'\'', b'(', b')', b'*', b'+', b',', b'-', b'.', + b'/', b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', b':', b';', b'<', b'=', b'>', + b'?', b'@', b'a', b'b', b'c', b'd', b'e', b'f', b'g', b'h', b'i', b'j', b'k', b'l', b'm', b'n', + b'o', b'p', b'q', b'r', b's', b't', b'u', b'v', b'w', b'x', b'y', b'z', b'[', b'\\', b']', + b'^', b'_', b'`', b'a', b'b', b'c', b'd', b'e', b'f', b'g', b'h', b'i', b'j', b'k', b'l', b'm', + b'n', b'o', b'p', b'q', b'r', b's', b't', b'u', b'v', b'w', b'x', b'y', b'z', b'{', b'|', b'}', + b'~', 0x7f, 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, + 0x8e, 0x8f, 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, + 0x9e, 0x9f, 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, + 0xae, 0xaf, 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, + 0xbe, 0xbf, 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, + 0xce, 0xcf, 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, + 0xde, 0xdf, 0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, + 0xee, 0xef, 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, + 0xfe, 0xff, ]; static ASCII_UPPERCASE_MAP: [u8; 256] = [ - 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, - 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, - 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, - 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, - b' ', 
b'!', b'"', b'#', b'$', b'%', b'&', b'\'', - b'(', b')', b'*', b'+', b',', b'-', b'.', b'/', - b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', - b'8', b'9', b':', b';', b'<', b'=', b'>', b'?', - b'@', b'A', b'B', b'C', b'D', b'E', b'F', b'G', - b'H', b'I', b'J', b'K', b'L', b'M', b'N', b'O', - b'P', b'Q', b'R', b'S', b'T', b'U', b'V', b'W', - b'X', b'Y', b'Z', b'[', b'\\', b']', b'^', b'_', - b'`', - - b'A', b'B', b'C', b'D', b'E', b'F', b'G', - b'H', b'I', b'J', b'K', b'L', b'M', b'N', b'O', - b'P', b'Q', b'R', b'S', b'T', b'U', b'V', b'W', - b'X', b'Y', b'Z', - - b'{', b'|', b'}', b'~', 0x7f, - 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, - 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, - 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, - 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f, - 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, - 0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf, - 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, - 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf, - 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7, - 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf, - 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, - 0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf, - 0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, - 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef, - 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, - 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff, + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, + b' ', b'!', b'"', b'#', b'$', b'%', b'&', b'\'', b'(', b')', b'*', b'+', b',', b'-', b'.', + b'/', b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', b':', b';', b'<', b'=', b'>', + b'?', b'@', b'A', b'B', b'C', b'D', b'E', b'F', b'G', b'H', b'I', b'J', b'K', b'L', b'M', b'N', + b'O', b'P', b'Q', b'R', b'S', b'T', b'U', b'V', b'W', b'X', b'Y', b'Z', b'[', b'\\', b']', + b'^', b'_', b'`', b'A', b'B', b'C', b'D', b'E', b'F', b'G', b'H', b'I', b'J', b'K', b'L', b'M', + b'N', b'O', b'P', b'Q', b'R', b'S', b'T', b'U', b'V', b'W', b'X', b'Y', b'Z', b'{', b'|', b'}', + b'~', 0x7f, 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, + 0x8e, 0x8f, 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, + 0x9e, 0x9f, 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, + 0xae, 0xaf, 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, + 0xbe, 0xbf, 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, + 0xce, 0xcf, 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, + 0xde, 0xdf, 0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, + 0xee, 0xef, 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, + 0xfe, 0xff, ]; enum AsciiCharacterClass { @@ -5104,13 +5093,13 @@ enum AsciiCharacterClass { use self::AsciiCharacterClass::*; static ASCII_CHARACTER_CLASS: [AsciiCharacterClass; 128] = [ -// _0 _1 _2 _3 _4 _5 _6 _7 _8 _9 _a _b _c _d _e _f - C, C, C, C, C, C, C, C, C, Cw,Cw,C, Cw,Cw,C, C, // 0_ + // _0 _1 _2 _3 _4 _5 _6 _7 _8 _9 _a _b _c _d _e _f + C, C, C, C, C, C, C, C, C, Cw, Cw, C, Cw, Cw, C, C, // 0_ C, C, C, C, C, C, C, C, C, C, C, C, C, C, C, C, // 1_ W, P, P, P, P, P, P, P, P, P, P, P, P, P, P, P, // 2_ D, D, D, D, D, D, D, D, D, D, P, P, P, P, P, P, // 3_ - P, 
Ux,Ux,Ux,Ux,Ux,Ux,U, U, U, U, U, U, U, U, U, // 4_ + P, Ux, Ux, Ux, Ux, Ux, Ux, U, U, U, U, U, U, U, U, U, // 4_ U, U, U, U, U, U, U, U, U, U, U, P, P, P, P, P, // 5_ - P, Lx,Lx,Lx,Lx,Lx,Lx,L, L, L, L, L, L, L, L, L, // 6_ + P, Lx, Lx, Lx, Lx, Lx, Lx, L, L, L, L, L, L, L, L, L, // 6_ L, L, L, L, L, L, L, L, L, L, L, P, P, P, P, C, // 7_ ]; diff --git a/src/libcore/num/wrapping.rs b/src/libcore/num/wrapping.rs index 50e189c9e3640..863a33966d875 100644 --- a/src/libcore/num/wrapping.rs +++ b/src/libcore/num/wrapping.rs @@ -4,7 +4,7 @@ use ops::*; #[allow(unused_macros)] macro_rules! sh_impl_signed { - ($t:ident, $f:ident) => ( + ($t:ident, $f:ident) => { #[stable(feature = "rust1", since = "1.0.0")] impl Shl<$f> for Wrapping<$t> { type Output = Wrapping<$t>; @@ -12,9 +12,15 @@ macro_rules! sh_impl_signed { #[inline] fn shl(self, other: $f) -> Wrapping<$t> { if other < 0 { - Wrapping(self.0.wrapping_shr((-other & self::shift_max::$t as $f) as u32)) + Wrapping( + self.0 + .wrapping_shr((-other & self::shift_max::$t as $f) as u32), + ) } else { - Wrapping(self.0.wrapping_shl((other & self::shift_max::$t as $f) as u32)) + Wrapping( + self.0 + .wrapping_shl((other & self::shift_max::$t as $f) as u32), + ) } } } @@ -35,9 +41,15 @@ macro_rules! sh_impl_signed { #[inline] fn shr(self, other: $f) -> Wrapping<$t> { if other < 0 { - Wrapping(self.0.wrapping_shl((-other & self::shift_max::$t as $f) as u32)) + Wrapping( + self.0 + .wrapping_shl((-other & self::shift_max::$t as $f) as u32), + ) } else { - Wrapping(self.0.wrapping_shr((other & self::shift_max::$t as $f) as u32)) + Wrapping( + self.0 + .wrapping_shr((other & self::shift_max::$t as $f) as u32), + ) } } } @@ -50,18 +62,21 @@ macro_rules! sh_impl_signed { } } forward_ref_op_assign! { impl ShrAssign, shr_assign for Wrapping<$t>, $f } - ) + }; } macro_rules! sh_impl_unsigned { - ($t:ident, $f:ident) => ( + ($t:ident, $f:ident) => { #[stable(feature = "rust1", since = "1.0.0")] impl Shl<$f> for Wrapping<$t> { type Output = Wrapping<$t>; #[inline] fn shl(self, other: $f) -> Wrapping<$t> { - Wrapping(self.0.wrapping_shl((other & self::shift_max::$t as $f) as u32)) + Wrapping( + self.0 + .wrapping_shl((other & self::shift_max::$t as $f) as u32), + ) } } @@ -80,7 +95,10 @@ macro_rules! sh_impl_unsigned { #[inline] fn shr(self, other: $f) -> Wrapping<$t> { - Wrapping(self.0.wrapping_shr((other & self::shift_max::$t as $f) as u32)) + Wrapping( + self.0 + .wrapping_shr((other & self::shift_max::$t as $f) as u32), + ) } } @@ -92,7 +110,7 @@ macro_rules! sh_impl_unsigned { } } forward_ref_op_assign! 
{ impl ShrAssign, shr_assign for Wrapping<$t>, $f } - ) + }; } // FIXME (#23545): uncomment the remaining impls diff --git a/src/libcore/ops/arith.rs b/src/libcore/ops/arith.rs index 7d8bf18d33a01..6cff1f116638f 100644 --- a/src/libcore/ops/arith.rs +++ b/src/libcore/ops/arith.rs @@ -67,18 +67,18 @@ #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented( on( - all(_Self="{integer}", RHS="{float}"), - message="cannot add a float to an integer", + all(_Self = "{integer}", RHS = "{float}"), + message = "cannot add a float to an integer", ), on( - all(_Self="{float}", RHS="{integer}"), - message="cannot add an integer to a float", + all(_Self = "{float}", RHS = "{integer}"), + message = "cannot add an integer to a float", ), - message="cannot add `{RHS}` to `{Self}`", - label="no implementation for `{Self} + {RHS}`", + message = "cannot add `{RHS}` to `{Self}`", + label = "no implementation for `{Self} + {RHS}`" )] #[doc(alias = "+")] -pub trait Add { +pub trait Add { /// The resulting type after applying the `+` operator. #[stable(feature = "rust1", since = "1.0.0")] type Output; @@ -173,10 +173,12 @@ add_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } /// ``` #[lang = "sub"] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented(message="cannot subtract `{RHS}` from `{Self}`", - label="no implementation for `{Self} - {RHS}`")] +#[rustc_on_unimplemented( + message = "cannot subtract `{RHS}` from `{Self}`", + label = "no implementation for `{Self} - {RHS}`" +)] #[doc(alias = "-")] -pub trait Sub { +pub trait Sub { /// The resulting type after applying the `-` operator. #[stable(feature = "rust1", since = "1.0.0")] type Output; @@ -293,10 +295,12 @@ sub_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } /// ``` #[lang = "mul"] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented(message="cannot multiply `{RHS}` to `{Self}`", - label="no implementation for `{Self} * {RHS}`")] +#[rustc_on_unimplemented( + message = "cannot multiply `{RHS}` to `{Self}`", + label = "no implementation for `{Self} * {RHS}`" +)] #[doc(alias = "*")] -pub trait Mul { +pub trait Mul { /// The resulting type after applying the `*` operator. #[stable(feature = "rust1", since = "1.0.0")] type Output; @@ -417,10 +421,12 @@ mul_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } /// ``` #[lang = "div"] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented(message="cannot divide `{Self}` by `{RHS}`", - label="no implementation for `{Self} / {RHS}`")] +#[rustc_on_unimplemented( + message = "cannot divide `{Self}` by `{RHS}`", + label = "no implementation for `{Self} / {RHS}`" +)] #[doc(alias = "/")] -pub trait Div { +pub trait Div { /// The resulting type after applying the `/` operator. #[stable(feature = "rust1", since = "1.0.0")] type Output; @@ -502,10 +508,12 @@ div_impl_float! { f32 f64 } /// ``` #[lang = "rem"] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented(message="cannot mod `{Self}` by `{RHS}`", - label="no implementation for `{Self} % {RHS}`")] +#[rustc_on_unimplemented( + message = "cannot mod `{Self}` by `{RHS}`", + label = "no implementation for `{Self} % {RHS}`" +)] #[doc(alias = "%")] -pub trait Rem { +pub trait Rem { /// The resulting type after applying the `%` operator. #[stable(feature = "rust1", since = "1.0.0")] type Output = Self; @@ -534,7 +542,6 @@ macro_rules! rem_impl_integer { rem_impl_integer! 
{ usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 } - macro_rules! rem_impl_float { ($($t:ty)*) => ($( #[stable(feature = "rust1", since = "1.0.0")] @@ -601,8 +608,6 @@ pub trait Neg { fn neg(self) -> Self::Output; } - - macro_rules! neg_impl_core { ($id:ident => $body:expr, $($t:ty)*) => ($( #[stable(feature = "rust1", since = "1.0.0")] @@ -664,11 +669,13 @@ neg_impl_numeric! { isize i8 i16 i32 i64 i128 f32 f64 } /// ``` #[lang = "add_assign"] #[stable(feature = "op_assign_traits", since = "1.8.0")] -#[rustc_on_unimplemented(message="cannot add-assign `{Rhs}` to `{Self}`", - label="no implementation for `{Self} += {Rhs}`")] +#[rustc_on_unimplemented( + message = "cannot add-assign `{Rhs}` to `{Self}`", + label = "no implementation for `{Self} += {Rhs}`" +)] #[doc(alias = "+")] #[doc(alias = "+=")] -pub trait AddAssign { +pub trait AddAssign { /// Performs the `+=` operation. #[stable(feature = "op_assign_traits", since = "1.8.0")] fn add_assign(&mut self, rhs: Rhs); @@ -720,11 +727,13 @@ add_assign_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } /// ``` #[lang = "sub_assign"] #[stable(feature = "op_assign_traits", since = "1.8.0")] -#[rustc_on_unimplemented(message="cannot subtract-assign `{Rhs}` from `{Self}`", - label="no implementation for `{Self} -= {Rhs}`")] +#[rustc_on_unimplemented( + message = "cannot subtract-assign `{Rhs}` from `{Self}`", + label = "no implementation for `{Self} -= {Rhs}`" +)] #[doc(alias = "-")] #[doc(alias = "-=")] -pub trait SubAssign { +pub trait SubAssign { /// Performs the `-=` operation. #[stable(feature = "op_assign_traits", since = "1.8.0")] fn sub_assign(&mut self, rhs: Rhs); @@ -767,11 +776,13 @@ sub_assign_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } /// ``` #[lang = "mul_assign"] #[stable(feature = "op_assign_traits", since = "1.8.0")] -#[rustc_on_unimplemented(message="cannot multiply-assign `{Rhs}` to `{Self}`", - label="no implementation for `{Self} *= {Rhs}`")] +#[rustc_on_unimplemented( + message = "cannot multiply-assign `{Rhs}` to `{Self}`", + label = "no implementation for `{Self} *= {Rhs}`" +)] #[doc(alias = "*")] #[doc(alias = "*=")] -pub trait MulAssign { +pub trait MulAssign { /// Performs the `*=` operation. #[stable(feature = "op_assign_traits", since = "1.8.0")] fn mul_assign(&mut self, rhs: Rhs); @@ -814,11 +825,13 @@ mul_assign_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } /// ``` #[lang = "div_assign"] #[stable(feature = "op_assign_traits", since = "1.8.0")] -#[rustc_on_unimplemented(message="cannot divide-assign `{Self}` by `{Rhs}`", - label="no implementation for `{Self} /= {Rhs}`")] +#[rustc_on_unimplemented( + message = "cannot divide-assign `{Self}` by `{Rhs}`", + label = "no implementation for `{Self} /= {Rhs}`" +)] #[doc(alias = "/")] #[doc(alias = "/=")] -pub trait DivAssign { +pub trait DivAssign { /// Performs the `/=` operation. #[stable(feature = "op_assign_traits", since = "1.8.0")] fn div_assign(&mut self, rhs: Rhs); @@ -864,11 +877,13 @@ div_assign_impl! 
{ usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } /// ``` #[lang = "rem_assign"] #[stable(feature = "op_assign_traits", since = "1.8.0")] -#[rustc_on_unimplemented(message="cannot mod-assign `{Self}` by `{Rhs}``", - label="no implementation for `{Self} %= {Rhs}`")] +#[rustc_on_unimplemented( + message = "cannot mod-assign `{Self}` by `{Rhs}``", + label = "no implementation for `{Self} %= {Rhs}`" +)] #[doc(alias = "%")] #[doc(alias = "%=")] -pub trait RemAssign { +pub trait RemAssign { /// Performs the `%=` operation. #[stable(feature = "op_assign_traits", since = "1.8.0")] fn rem_assign(&mut self, rhs: Rhs); diff --git a/src/libcore/ops/bit.rs b/src/libcore/ops/bit.rs index 2c9bf248633c4..2cccf1c95faa3 100644 --- a/src/libcore/ops/bit.rs +++ b/src/libcore/ops/bit.rs @@ -112,9 +112,11 @@ not_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 } #[lang = "bitand"] #[doc(alias = "&")] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented(message="no implementation for `{Self} & {RHS}`", - label="no implementation for `{Self} & {RHS}`")] -pub trait BitAnd { +#[rustc_on_unimplemented( + message = "no implementation for `{Self} & {RHS}`", + label = "no implementation for `{Self} & {RHS}`" +)] +pub trait BitAnd { /// The resulting type after applying the `&` operator. #[stable(feature = "rust1", since = "1.0.0")] type Output; @@ -196,9 +198,11 @@ bitand_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 } #[lang = "bitor"] #[doc(alias = "|")] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented(message="no implementation for `{Self} | {RHS}`", - label="no implementation for `{Self} | {RHS}`")] -pub trait BitOr { +#[rustc_on_unimplemented( + message = "no implementation for `{Self} | {RHS}`", + label = "no implementation for `{Self} | {RHS}`" +)] +pub trait BitOr { /// The resulting type after applying the `|` operator. #[stable(feature = "rust1", since = "1.0.0")] type Output; @@ -283,9 +287,11 @@ bitor_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 } #[lang = "bitxor"] #[doc(alias = "^")] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented(message="no implementation for `{Self} ^ {RHS}`", - label="no implementation for `{Self} ^ {RHS}`")] -pub trait BitXor { +#[rustc_on_unimplemented( + message = "no implementation for `{Self} ^ {RHS}`", + label = "no implementation for `{Self} ^ {RHS}`" +)] +pub trait BitXor { /// The resulting type after applying the `^` operator. #[stable(feature = "rust1", since = "1.0.0")] type Output; @@ -371,9 +377,11 @@ bitxor_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 } #[lang = "shl"] #[doc(alias = "<<")] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented(message="no implementation for `{Self} << {RHS}`", - label="no implementation for `{Self} << {RHS}`")] -pub trait Shl { +#[rustc_on_unimplemented( + message = "no implementation for `{Self} << {RHS}`", + label = "no implementation for `{Self} << {RHS}`" +)] +pub trait Shl { /// The resulting type after applying the `<<` operator. #[stable(feature = "rust1", since = "1.0.0")] type Output; @@ -385,7 +393,7 @@ pub trait Shl { } macro_rules! shl_impl { - ($t:ty, $f:ty) => ( + ($t:ty, $f:ty) => { #[stable(feature = "rust1", since = "1.0.0")] impl Shl<$f> for $t { type Output = $t; @@ -398,7 +406,7 @@ macro_rules! shl_impl { } forward_ref_binop! { impl Shl, shl for $t, $f } - ) + }; } macro_rules! shl_impl_all { @@ -480,9 +488,11 @@ shl_impl_all! 
{ u8 u16 u32 u64 u128 usize i8 i16 i32 i64 isize i128 } #[lang = "shr"] #[doc(alias = ">>")] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented(message="no implementation for `{Self} >> {RHS}`", - label="no implementation for `{Self} >> {RHS}`")] -pub trait Shr { +#[rustc_on_unimplemented( + message = "no implementation for `{Self} >> {RHS}`", + label = "no implementation for `{Self} >> {RHS}`" +)] +pub trait Shr { /// The resulting type after applying the `>>` operator. #[stable(feature = "rust1", since = "1.0.0")] type Output; @@ -494,7 +504,7 @@ pub trait Shr { } macro_rules! shr_impl { - ($t:ty, $f:ty) => ( + ($t:ty, $f:ty) => { #[stable(feature = "rust1", since = "1.0.0")] impl Shr<$f> for $t { type Output = $t; @@ -507,7 +517,7 @@ macro_rules! shr_impl { } forward_ref_binop! { impl Shr, shr for $t, $f } - ) + }; } macro_rules! shr_impl_all { @@ -596,9 +606,11 @@ shr_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize } #[lang = "bitand_assign"] #[doc(alias = "&=")] #[stable(feature = "op_assign_traits", since = "1.8.0")] -#[rustc_on_unimplemented(message="no implementation for `{Self} &= {Rhs}`", - label="no implementation for `{Self} &= {Rhs}`")] -pub trait BitAndAssign { +#[rustc_on_unimplemented( + message = "no implementation for `{Self} &= {Rhs}`", + label = "no implementation for `{Self} &= {Rhs}`" +)] +pub trait BitAndAssign { /// Performs the `&=` operation. #[stable(feature = "op_assign_traits", since = "1.8.0")] fn bitand_assign(&mut self, rhs: Rhs); @@ -645,9 +657,11 @@ bitand_assign_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 } #[lang = "bitor_assign"] #[doc(alias = "|=")] #[stable(feature = "op_assign_traits", since = "1.8.0")] -#[rustc_on_unimplemented(message="no implementation for `{Self} |= {Rhs}`", - label="no implementation for `{Self} |= {Rhs}`")] -pub trait BitOrAssign { +#[rustc_on_unimplemented( + message = "no implementation for `{Self} |= {Rhs}`", + label = "no implementation for `{Self} |= {Rhs}`" +)] +pub trait BitOrAssign { /// Performs the `|=` operation. #[stable(feature = "op_assign_traits", since = "1.8.0")] fn bitor_assign(&mut self, rhs: Rhs); @@ -694,9 +708,11 @@ bitor_assign_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 } #[lang = "bitxor_assign"] #[doc(alias = "^=")] #[stable(feature = "op_assign_traits", since = "1.8.0")] -#[rustc_on_unimplemented(message="no implementation for `{Self} ^= {Rhs}`", - label="no implementation for `{Self} ^= {Rhs}`")] -pub trait BitXorAssign { +#[rustc_on_unimplemented( + message = "no implementation for `{Self} ^= {Rhs}`", + label = "no implementation for `{Self} ^= {Rhs}`" +)] +pub trait BitXorAssign { /// Performs the `^=` operation. #[stable(feature = "op_assign_traits", since = "1.8.0")] fn bitxor_assign(&mut self, rhs: Rhs); @@ -741,16 +757,18 @@ bitxor_assign_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 } #[lang = "shl_assign"] #[doc(alias = "<<=")] #[stable(feature = "op_assign_traits", since = "1.8.0")] -#[rustc_on_unimplemented(message="no implementation for `{Self} <<= {Rhs}`", - label="no implementation for `{Self} <<= {Rhs}`")] -pub trait ShlAssign { +#[rustc_on_unimplemented( + message = "no implementation for `{Self} <<= {Rhs}`", + label = "no implementation for `{Self} <<= {Rhs}`" +)] +pub trait ShlAssign { /// Performs the `<<=` operation. #[stable(feature = "op_assign_traits", since = "1.8.0")] fn shl_assign(&mut self, rhs: Rhs); } macro_rules! 
shl_assign_impl { - ($t:ty, $f:ty) => ( + ($t:ty, $f:ty) => { #[stable(feature = "op_assign_traits", since = "1.8.0")] impl ShlAssign<$f> for $t { #[inline] @@ -761,7 +779,7 @@ macro_rules! shl_assign_impl { } forward_ref_op_assign! { impl ShlAssign, shl_assign for $t, $f } - ) + }; } macro_rules! shl_assign_impl_all { @@ -809,16 +827,18 @@ shl_assign_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize } #[lang = "shr_assign"] #[doc(alias = ">>=")] #[stable(feature = "op_assign_traits", since = "1.8.0")] -#[rustc_on_unimplemented(message="no implementation for `{Self} >>= {Rhs}`", - label="no implementation for `{Self} >>= {Rhs}`")] -pub trait ShrAssign { +#[rustc_on_unimplemented( + message = "no implementation for `{Self} >>= {Rhs}`", + label = "no implementation for `{Self} >>= {Rhs}`" +)] +pub trait ShrAssign { /// Performs the `>>=` operation. #[stable(feature = "op_assign_traits", since = "1.8.0")] fn shr_assign(&mut self, rhs: Rhs); } macro_rules! shr_assign_impl { - ($t:ty, $f:ty) => ( + ($t:ty, $f:ty) => { #[stable(feature = "op_assign_traits", since = "1.8.0")] impl ShrAssign<$f> for $t { #[inline] @@ -829,7 +849,7 @@ macro_rules! shr_assign_impl { } forward_ref_op_assign! { impl ShrAssign, shr_assign for $t, $f } - ) + }; } macro_rules! shr_assign_impl_all { diff --git a/src/libcore/ops/deref.rs b/src/libcore/ops/deref.rs index f2187bd66869e..cd123c577e249 100644 --- a/src/libcore/ops/deref.rs +++ b/src/libcore/ops/deref.rs @@ -76,14 +76,18 @@ pub trait Deref { impl Deref for &T { type Target = T; - fn deref(&self) -> &T { *self } + fn deref(&self) -> &T { + *self + } } #[stable(feature = "rust1", since = "1.0.0")] impl Deref for &mut T { type Target = T; - fn deref(&self) -> &T { *self } + fn deref(&self) -> &T { + *self + } } /// Used for mutable dereferencing operations, like in `*v = 1;`. @@ -165,7 +169,9 @@ pub trait DerefMut: Deref { #[stable(feature = "rust1", since = "1.0.0")] impl DerefMut for &mut T { - fn deref_mut(&mut self) -> &mut T { *self } + fn deref_mut(&mut self) -> &mut T { + *self + } } /// Indicates that a struct can be used as a method receiver, without the diff --git a/src/libcore/ops/function.rs b/src/libcore/ops/function.rs index ec2e53412a0a5..444bf09713168 100644 --- a/src/libcore/ops/function.rs +++ b/src/libcore/ops/function.rs @@ -57,13 +57,16 @@ #[stable(feature = "rust1", since = "1.0.0")] #[rustc_paren_sugar] #[rustc_on_unimplemented( - on(Args="()", note="wrap the `{Self}` in a closure with no arguments: `|| {{ /* code */ }}"), - message="expected a `{Fn}<{Args}>` closure, found `{Self}`", - label="expected an `Fn<{Args}>` closure, found `{Self}`", + on( + Args = "()", + note = "wrap the `{Self}` in a closure with no arguments: `|| {{ /* code */ }}" + ), + message = "expected a `{Fn}<{Args}>` closure, found `{Self}`", + label = "expected an `Fn<{Args}>` closure, found `{Self}`" )] #[fundamental] // so that regex can rely that `&str: !FnMut` #[must_use] -pub trait Fn : FnMut { +pub trait Fn: FnMut { /// Performs the call operation. 
#[unstable(feature = "fn_traits", issue = "29625")] extern "rust-call" fn call(&self, args: Args) -> Self::Output; @@ -136,13 +139,16 @@ pub trait Fn : FnMut { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_paren_sugar] #[rustc_on_unimplemented( - on(Args="()", note="wrap the `{Self}` in a closure with no arguments: `|| {{ /* code */ }}"), - message="expected a `{FnMut}<{Args}>` closure, found `{Self}`", - label="expected an `FnMut<{Args}>` closure, found `{Self}`", + on( + Args = "()", + note = "wrap the `{Self}` in a closure with no arguments: `|| {{ /* code */ }}" + ), + message = "expected a `{FnMut}<{Args}>` closure, found `{Self}`", + label = "expected an `FnMut<{Args}>` closure, found `{Self}`" )] #[fundamental] // so that regex can rely that `&str: !FnMut` #[must_use] -pub trait FnMut : FnOnce { +pub trait FnMut: FnOnce { /// Performs the call operation. #[unstable(feature = "fn_traits", issue = "29625")] extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output; @@ -215,9 +221,12 @@ pub trait FnMut : FnOnce { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_paren_sugar] #[rustc_on_unimplemented( - on(Args="()", note="wrap the `{Self}` in a closure with no arguments: `|| {{ /* code */ }}"), - message="expected a `{FnOnce}<{Args}>` closure, found `{Self}`", - label="expected an `FnOnce<{Args}>` closure, found `{Self}`", + on( + Args = "()", + note = "wrap the `{Self}` in a closure with no arguments: `|| {{ /* code */ }}" + ), + message = "expected a `{FnOnce}<{Args}>` closure, found `{Self}`", + label = "expected an `FnOnce<{Args}>` closure, found `{Self}`" )] #[fundamental] // so that regex can rely that `&str: !FnMut` #[must_use] @@ -233,8 +242,9 @@ pub trait FnOnce { mod impls { #[stable(feature = "rust1", since = "1.0.0")] - impl Fn for &F - where F : Fn + impl Fn for &F + where + F: Fn, { extern "rust-call" fn call(&self, args: A) -> F::Output { (**self).call(args) @@ -242,8 +252,9 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl FnMut for &F - where F : Fn + impl FnMut for &F + where + F: Fn, { extern "rust-call" fn call_mut(&mut self, args: A) -> F::Output { (**self).call(args) @@ -251,8 +262,9 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl FnOnce for &F - where F : Fn + impl FnOnce for &F + where + F: Fn, { type Output = F::Output; @@ -262,8 +274,9 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl FnMut for &mut F - where F : FnMut + impl FnMut for &mut F + where + F: FnMut, { extern "rust-call" fn call_mut(&mut self, args: A) -> F::Output { (*self).call_mut(args) @@ -271,8 +284,9 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl FnOnce for &mut F - where F : FnMut + impl FnOnce for &mut F + where + F: FnMut, { type Output = F::Output; extern "rust-call" fn call_once(self, args: A) -> F::Output { diff --git a/src/libcore/ops/generator.rs b/src/libcore/ops/generator.rs index 1542cd6397e78..ec11a0c462d2e 100644 --- a/src/libcore/ops/generator.rs +++ b/src/libcore/ops/generator.rs @@ -115,7 +115,8 @@ pub trait Generator { #[unstable(feature = "generator_trait", issue = "43122")] impl Generator for &mut T - where T: Generator + ?Sized +where + T: Generator + ?Sized, { type Yield = T::Yield; type Return = T::Return; diff --git a/src/libcore/ops/index.rs b/src/libcore/ops/index.rs index 4f55c68ecd4ae..508477aa58e6e 100644 --- a/src/libcore/ops/index.rs +++ b/src/libcore/ops/index.rs @@ -51,8 +51,8 @@ /// ``` #[lang = "index"] #[rustc_on_unimplemented( - message="the type 
`{Self}` cannot be indexed by `{Idx}`", - label="`{Self}` cannot be indexed by `{Idx}`", + message = "the type `{Self}` cannot be indexed by `{Idx}`", + label = "`{Self}` cannot be indexed by `{Idx}`" )] #[stable(feature = "rust1", since = "1.0.0")] #[doc(alias = "]")] @@ -141,8 +141,8 @@ pub trait Index { /// ``` #[lang = "index_mut"] #[rustc_on_unimplemented( - message="the type `{Self}` cannot be mutably indexed by `{Idx}`", - label="`{Self}` cannot be mutably indexed by `{Idx}`", + message = "the type `{Self}` cannot be mutably indexed by `{Idx}`", + label = "`{Self}` cannot be mutably indexed by `{Idx}`" )] #[stable(feature = "rust1", since = "1.0.0")] #[doc(alias = "[")] diff --git a/src/libcore/ops/mod.rs b/src/libcore/ops/mod.rs index 0ca64f28ff309..9e5f94efe83f3 100644 --- a/src/libcore/ops/mod.rs +++ b/src/libcore/ops/mod.rs @@ -156,12 +156,12 @@ mod try; mod unsize; #[stable(feature = "rust1", since = "1.0.0")] -pub use self::arith::{Add, Sub, Mul, Div, Rem, Neg}; +pub use self::arith::{Add, Div, Mul, Neg, Rem, Sub}; #[stable(feature = "op_assign_traits", since = "1.8.0")] -pub use self::arith::{AddAssign, SubAssign, MulAssign, DivAssign, RemAssign}; +pub use self::arith::{AddAssign, DivAssign, MulAssign, RemAssign, SubAssign}; #[stable(feature = "rust1", since = "1.0.0")] -pub use self::bit::{Not, BitAnd, BitOr, BitXor, Shl, Shr}; +pub use self::bit::{BitAnd, BitOr, BitXor, Not, Shl, Shr}; #[stable(feature = "op_assign_traits", since = "1.8.0")] pub use self::bit::{BitAndAssign, BitOrAssign, BitXorAssign, ShlAssign, ShrAssign}; @@ -184,7 +184,7 @@ pub use self::index::{Index, IndexMut}; pub use self::range::{Range, RangeFrom, RangeFull, RangeTo}; #[stable(feature = "inclusive_range", since = "1.26.0")] -pub use self::range::{RangeInclusive, RangeToInclusive, RangeBounds, Bound}; +pub use self::range::{Bound, RangeBounds, RangeInclusive, RangeToInclusive}; #[unstable(feature = "try_trait", issue = "42327")] pub use self::try::Try; diff --git a/src/libcore/ops/range.rs b/src/libcore/ops/range.rs index 815a4cfeed88e..805cce7a69ba6 100644 --- a/src/libcore/ops/range.rs +++ b/src/libcore/ops/range.rs @@ -67,7 +67,7 @@ impl fmt::Debug for RangeFull { /// assert_eq!(arr[1..3], [ 'b', 'c' ]); // Range /// ``` #[doc(alias = "..")] -#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186 +#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186 #[stable(feature = "rust1", since = "1.0.0")] pub struct Range { /// The lower bound of the range (inclusive). @@ -108,7 +108,11 @@ impl> Range { /// assert!(!(0.0..f32::NAN).contains(&0.5)); /// assert!(!(f32::NAN..1.0).contains(&0.5)); /// ``` - #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")] + #[unstable( + feature = "range_contains", + reason = "recently added as per RFC", + issue = "32311" + )] pub fn contains(&self, item: &U) -> bool where Idx: PartialOrd, @@ -169,7 +173,7 @@ impl> Range { /// /// [`Iterator`]: ../iter/trait.IntoIterator.html #[doc(alias = "..")] -#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186 +#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186 #[stable(feature = "rust1", since = "1.0.0")] pub struct RangeFrom { /// The lower bound of the range (inclusive). 
@@ -202,7 +206,11 @@ impl> RangeFrom { /// assert!(!(0.0..).contains(&f32::NAN)); /// assert!(!(f32::NAN..).contains(&0.5)); /// ``` - #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")] + #[unstable( + feature = "range_contains", + reason = "recently added as per RFC", + issue = "32311" + )] pub fn contains(&self, item: &U) -> bool where Idx: PartialOrd, @@ -284,7 +292,11 @@ impl> RangeTo { /// assert!(!(..1.0).contains(&f32::NAN)); /// assert!(!(..f32::NAN).contains(&0.5)); /// ``` - #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")] + #[unstable( + feature = "range_contains", + reason = "recently added as per RFC", + issue = "32311" + )] pub fn contains(&self, item: &U) -> bool where Idx: PartialOrd, @@ -317,7 +329,7 @@ impl> RangeTo { /// assert_eq!(arr[1..=2], [ 1,2 ]); // RangeInclusive /// ``` #[doc(alias = "..=")] -#[derive(Clone)] // not Copy -- see #27186 +#[derive(Clone)] // not Copy -- see #27186 #[stable(feature = "inclusive_range", since = "1.26.0")] pub struct RangeInclusive { pub(crate) start: Idx, @@ -351,7 +363,8 @@ impl RangeInclusiveEquality for T { impl PartialEq for RangeInclusive { #[inline] fn eq(&self, other: &Self) -> bool { - self.start == other.start && self.end == other.end + self.start == other.start + && self.end == other.end && RangeInclusiveEquality::canonicalized_is_empty(self) == RangeInclusiveEquality::canonicalized_is_empty(other) } @@ -383,7 +396,11 @@ impl RangeInclusive { #[inline] #[rustc_promotable] pub const fn new(start: Idx, end: Idx) -> Self { - Self { start, end, is_empty: None } + Self { + start, + end, + is_empty: None, + } } /// Returns the lower bound of the range (inclusive). @@ -482,7 +499,11 @@ impl> RangeInclusive { /// assert!(!(0.0..=f32::NAN).contains(&0.0)); /// assert!(!(f32::NAN..=1.0).contains(&1.0)); /// ``` - #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")] + #[unstable( + feature = "range_contains", + reason = "recently added as per RFC", + issue = "32311" + )] pub fn contains(&self, item: &U) -> bool where Idx: PartialOrd, @@ -591,7 +612,11 @@ impl fmt::Debug for RangeToInclusive { } } -#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")] +#[unstable( + feature = "range_contains", + reason = "recently added as per RFC", + issue = "32311" +)] impl> RangeToInclusive { /// Returns `true` if `item` is contained in the range. /// @@ -610,7 +635,11 @@ impl> RangeToInclusive { /// assert!(!(..=1.0).contains(&f32::NAN)); /// assert!(!(..=f32::NAN).contains(&0.5)); /// ``` - #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")] + #[unstable( + feature = "range_contains", + reason = "recently added as per RFC", + issue = "32311" + )] pub fn contains(&self, item: &U) -> bool where Idx: PartialOrd, @@ -712,7 +741,6 @@ pub trait RangeBounds { #[stable(feature = "collections_range", since = "1.28.0")] fn end_bound(&self) -> Bound<&T>; - /// Returns `true` if `item` is contained in the range. 
/// /// # Examples @@ -729,7 +757,11 @@ pub trait RangeBounds { /// assert!(!(0.0..1.0).contains(&f32::NAN)); /// assert!(!(0.0..f32::NAN).contains(&0.5)); /// assert!(!(f32::NAN..1.0).contains(&0.5)); - #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")] + #[unstable( + feature = "range_contains", + reason = "recently added as per RFC", + issue = "32311" + )] fn contains(&self, item: &U) -> bool where T: PartialOrd, @@ -739,9 +771,7 @@ pub trait RangeBounds { Included(ref start) => *start <= item, Excluded(ref start) => *start < item, Unbounded => true, - }) - && - (match self.end_bound() { + }) && (match self.end_bound() { Included(ref end) => item <= *end, Excluded(ref end) => item < *end, Unbounded => true, @@ -817,7 +847,7 @@ impl RangeBounds for (Bound, Bound) { match *self { (Included(ref start), _) => Included(start), (Excluded(ref start), _) => Excluded(start), - (Unbounded, _) => Unbounded, + (Unbounded, _) => Unbounded, } } @@ -825,7 +855,7 @@ impl RangeBounds for (Bound, Bound) { match *self { (_, Included(ref end)) => Included(end), (_, Excluded(ref end)) => Excluded(end), - (_, Unbounded) => Unbounded, + (_, Unbounded) => Unbounded, } } } diff --git a/src/libcore/ops/try.rs b/src/libcore/ops/try.rs index 380bd12131cf6..9e4c53ccc7258 100644 --- a/src/libcore/ops/try.rs +++ b/src/libcore/ops/try.rs @@ -6,17 +6,22 @@ /// creating a new instance from a success or failure value. #[unstable(feature = "try_trait", issue = "42327")] #[rustc_on_unimplemented( - on(all( - any(from_method="from_error", from_method="from_ok"), - from_desugaring="?"), - message="the `?` operator can only be used in a \ - function that returns `Result` or `Option` \ - (or another type that implements `{Try}`)", - label="cannot use the `?` operator in a function that returns `{Self}`"), - on(all(from_method="into_result", from_desugaring="?"), - message="the `?` operator can only be applied to values \ - that implement `{Try}`", - label="the `?` operator cannot be applied to type `{Self}`") + on( + all( + any(from_method = "from_error", from_method = "from_ok"), + from_desugaring = "?" 
+ ), + message = "the `?` operator can only be used in a \ + function that returns `Result` or `Option` \ + (or another type that implements `{Try}`)", + label = "cannot use the `?` operator in a function that returns `{Self}`" + ), + on( + all(from_method = "into_result", from_desugaring = "?"), + message = "the `?` operator can only be applied to values \ + that implement `{Try}`", + label = "the `?` operator cannot be applied to type `{Self}`" + ) )] #[doc(alias = "?")] pub trait Try { diff --git a/src/libcore/ops/unsize.rs b/src/libcore/ops/unsize.rs index bd95ddf060ee4..d6ad134f3ea29 100644 --- a/src/libcore/ops/unsize.rs +++ b/src/libcore/ops/unsize.rs @@ -39,35 +39,34 @@ pub trait CoerceUnsized { // &mut T -> &mut U #[unstable(feature = "coerce_unsized", issue = "27732")] -impl<'a, T: ?Sized+Unsize, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {} +impl<'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {} // &mut T -> &U #[unstable(feature = "coerce_unsized", issue = "27732")] -impl<'a, 'b: 'a, T: ?Sized+Unsize, U: ?Sized> CoerceUnsized<&'a U> for &'b mut T {} +impl<'a, 'b: 'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<&'a U> for &'b mut T {} // &mut T -> *mut U #[unstable(feature = "coerce_unsized", issue = "27732")] -impl<'a, T: ?Sized+Unsize, U: ?Sized> CoerceUnsized<*mut U> for &'a mut T {} +impl<'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<*mut U> for &'a mut T {} // &mut T -> *const U #[unstable(feature = "coerce_unsized", issue = "27732")] -impl<'a, T: ?Sized+Unsize, U: ?Sized> CoerceUnsized<*const U> for &'a mut T {} +impl<'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<*const U> for &'a mut T {} // &T -> &U #[unstable(feature = "coerce_unsized", issue = "27732")] -impl<'a, 'b: 'a, T: ?Sized+Unsize, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} +impl<'a, 'b: 'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} // &T -> *const U #[unstable(feature = "coerce_unsized", issue = "27732")] -impl<'a, T: ?Sized+Unsize, U: ?Sized> CoerceUnsized<*const U> for &'a T {} +impl<'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<*const U> for &'a T {} // *mut T -> *mut U #[unstable(feature = "coerce_unsized", issue = "27732")] -impl, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} +impl, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} // *mut T -> *const U #[unstable(feature = "coerce_unsized", issue = "27732")] -impl, U: ?Sized> CoerceUnsized<*const U> for *mut T {} +impl, U: ?Sized> CoerceUnsized<*const U> for *mut T {} // *const T -> *const U #[unstable(feature = "coerce_unsized", issue = "27732")] -impl, U: ?Sized> CoerceUnsized<*const U> for *const T {} - +impl, U: ?Sized> CoerceUnsized<*const U> for *const T {} /// This is used for object safety, to check that a method's receiver type can be dispatched on. 
/// @@ -90,14 +89,13 @@ pub trait DispatchFromDyn { // &T -> &U #[unstable(feature = "dispatch_from_dyn", issue = "0")] -impl<'a, T: ?Sized+Unsize, U: ?Sized> DispatchFromDyn<&'a U> for &'a T {} +impl<'a, T: ?Sized + Unsize, U: ?Sized> DispatchFromDyn<&'a U> for &'a T {} // &mut T -> &mut U #[unstable(feature = "dispatch_from_dyn", issue = "0")] -impl<'a, T: ?Sized+Unsize, U: ?Sized> DispatchFromDyn<&'a mut U> for &'a mut T {} +impl<'a, T: ?Sized + Unsize, U: ?Sized> DispatchFromDyn<&'a mut U> for &'a mut T {} // *const T -> *const U #[unstable(feature = "dispatch_from_dyn", issue = "0")] -impl, U: ?Sized> DispatchFromDyn<*const U> for *const T {} +impl, U: ?Sized> DispatchFromDyn<*const U> for *const T {} // *mut T -> *mut U #[unstable(feature = "dispatch_from_dyn", issue = "0")] -impl, U: ?Sized> DispatchFromDyn<*mut U> for *mut T {} - +impl, U: ?Sized> DispatchFromDyn<*mut U> for *mut T {} diff --git a/src/libcore/option.rs b/src/libcore/option.rs index 6d4f4be1a1782..5e802bea1d203 100644 --- a/src/libcore/option.rs +++ b/src/libcore/option.rs @@ -136,8 +136,11 @@ #![stable(feature = "rust1", since = "1.0.0")] use iter::{FromIterator, FusedIterator, TrustedLen}; -use {hint, mem, ops::{self, Deref}}; use pin::Pin; +use { + hint, mem, + ops::{self, Deref}, +}; // Note that this is not a lang item per se, but it has a hidden dependency on // `Iterator`, which is one. The compiler assumes that the `next` method of @@ -260,14 +263,11 @@ impl Option { } } - /// Converts from `Pin<&Option>` to `Option>` #[inline] #[stable(feature = "pin", since = "1.33.0")] pub fn as_pin_ref<'a>(self: Pin<&'a Option>) -> Option> { - unsafe { - Pin::get_ref(self).as_ref().map(|x| Pin::new_unchecked(x)) - } + unsafe { Pin::get_ref(self).as_ref().map(|x| Pin::new_unchecked(x)) } } /// Converts from `Pin<&mut Option>` to `Option>` @@ -275,7 +275,9 @@ impl Option { #[stable(feature = "pin", since = "1.33.0")] pub fn as_pin_mut<'a>(self: Pin<&'a mut Option>) -> Option> { unsafe { - Pin::get_unchecked_mut(self).as_mut().map(|x| Pin::new_unchecked(x)) + Pin::get_unchecked_mut(self) + .as_mut() + .map(|x| Pin::new_unchecked(x)) } } @@ -537,7 +539,9 @@ impl Option { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn iter(&self) -> Iter { - Iter { inner: Item { opt: self.as_ref() } } + Iter { + inner: Item { opt: self.as_ref() }, + } } /// Returns a mutable iterator over the possibly contained value. @@ -558,7 +562,9 @@ impl Option { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn iter_mut(&mut self) -> IterMut { - IterMut { inner: Item { opt: self.as_mut() } } + IterMut { + inner: Item { opt: self.as_mut() }, + } } ///////////////////////////////////////////////////////////////////////// @@ -651,7 +657,7 @@ impl Option { pub fn filter bool>(self, predicate: P) -> Self { if let Some(x) = self { if predicate(&x) { - return Some(x) + return Some(x); } } None @@ -1048,7 +1054,9 @@ fn expect_failed(msg: &str) -> ! { impl Default for Option { /// Returns [`None`][Option::None]. 
#[inline] - fn default() -> Option { None } + fn default() -> Option { + None + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1071,7 +1079,9 @@ impl IntoIterator for Option { /// ``` #[inline] fn into_iter(self) -> IntoIter { - IntoIter { inner: Item { opt: self } } + IntoIter { + inner: Item { opt: self }, + } } } @@ -1122,7 +1132,7 @@ impl<'a, T> From<&'a mut Option> for Option<&'a mut T> { #[derive(Clone, Debug)] struct Item { - opt: Option + opt: Option, } impl Iterator for Item { @@ -1164,22 +1174,30 @@ unsafe impl TrustedLen for Item {} /// [`Option::iter`]: enum.Option.html#method.iter #[stable(feature = "rust1", since = "1.0.0")] #[derive(Debug)] -pub struct Iter<'a, A: 'a> { inner: Item<&'a A> } +pub struct Iter<'a, A: 'a> { + inner: Item<&'a A>, +} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, A> Iterator for Iter<'a, A> { type Item = &'a A; #[inline] - fn next(&mut self) -> Option<&'a A> { self.inner.next() } + fn next(&mut self) -> Option<&'a A> { + self.inner.next() + } #[inline] - fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, A> DoubleEndedIterator for Iter<'a, A> { #[inline] - fn next_back(&mut self) -> Option<&'a A> { self.inner.next_back() } + fn next_back(&mut self) -> Option<&'a A> { + self.inner.next_back() + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1195,7 +1213,9 @@ unsafe impl TrustedLen for Iter<'_, A> {} impl Clone for Iter<'_, A> { #[inline] fn clone(&self) -> Self { - Iter { inner: self.inner.clone() } + Iter { + inner: self.inner.clone(), + } } } @@ -1210,22 +1230,30 @@ impl Clone for Iter<'_, A> { /// [`Option::iter_mut`]: enum.Option.html#method.iter_mut #[stable(feature = "rust1", since = "1.0.0")] #[derive(Debug)] -pub struct IterMut<'a, A: 'a> { inner: Item<&'a mut A> } +pub struct IterMut<'a, A: 'a> { + inner: Item<&'a mut A>, +} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, A> Iterator for IterMut<'a, A> { type Item = &'a mut A; #[inline] - fn next(&mut self) -> Option<&'a mut A> { self.inner.next() } + fn next(&mut self) -> Option<&'a mut A> { + self.inner.next() + } #[inline] - fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, A> DoubleEndedIterator for IterMut<'a, A> { #[inline] - fn next_back(&mut self) -> Option<&'a mut A> { self.inner.next_back() } + fn next_back(&mut self) -> Option<&'a mut A> { + self.inner.next_back() + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1247,22 +1275,30 @@ unsafe impl TrustedLen for IterMut<'_, A> {} /// [`Option::into_iter`]: enum.Option.html#method.into_iter #[derive(Clone, Debug)] #[stable(feature = "rust1", since = "1.0.0")] -pub struct IntoIter { inner: Item } +pub struct IntoIter { + inner: Item, +} #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for IntoIter { type Item = A; #[inline] - fn next(&mut self) -> Option { self.inner.next() } + fn next(&mut self) -> Option { + self.inner.next() + } #[inline] - fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } } #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for IntoIter { #[inline] - fn next_back(&mut self) -> Option { self.inner.next_back() } + fn next_back(&mut self) -> Option { + self.inner.next_back() + } } 
#[stable(feature = "rust1", since = "1.0.0")] @@ -1323,7 +1359,7 @@ impl> FromIterator> for Option { /// /// [`Iterator`]: ../iter/trait.Iterator.html #[inline] - fn from_iter>>(iter: I) -> Option { + fn from_iter>>(iter: I) -> Option { // FIXME(#11084): This could be replaced with Iterator::scan when this // performance bug is closed. @@ -1332,7 +1368,7 @@ impl> FromIterator> for Option { found_none: bool, } - impl>> Iterator for Adapter { + impl>> Iterator for Adapter { type Item = T; #[inline] @@ -1358,7 +1394,10 @@ impl> FromIterator> for Option { } } - let mut adapter = Adapter { iter: iter.into_iter(), found_none: false }; + let mut adapter = Adapter { + iter: iter.into_iter(), + found_none: false, + }; let v: V = FromIterator::from_iter(adapter.by_ref()); if adapter.found_none { diff --git a/src/libcore/panic.rs b/src/libcore/panic.rs index 1abc0a18a9cc9..8b777c524d605 100644 --- a/src/libcore/panic.rs +++ b/src/libcore/panic.rs @@ -39,17 +39,24 @@ pub struct PanicInfo<'a> { } impl<'a> PanicInfo<'a> { - #![unstable(feature = "panic_internals", - reason = "internal details of the implementation of the `panic!` \ - and related macros", - issue = "0")] + #![unstable( + feature = "panic_internals", + reason = "internal details of the implementation of the `panic!` \ + and related macros", + issue = "0" + )] #[doc(hidden)] #[inline] - pub fn internal_constructor(message: Option<&'a fmt::Arguments<'a>>, - location: Location<'a>) - -> Self { + pub fn internal_constructor( + message: Option<&'a fmt::Arguments<'a>>, + location: Location<'a>, + ) -> Self { struct NoPayload; - PanicInfo { payload: &NoPayload, location, message } + PanicInfo { + payload: &NoPayload, + location, + message, + } } #[doc(hidden)] @@ -171,10 +178,12 @@ pub struct Location<'a> { } impl<'a> Location<'a> { - #![unstable(feature = "panic_internals", - reason = "internal details of the implementation of the `panic!` \ - and related macros", - issue = "0")] + #![unstable( + feature = "panic_internals", + reason = "internal details of the implementation of the `panic!` \ + and related macros", + issue = "0" + )] #[doc(hidden)] pub fn internal_constructor(file: &'a str, line: u32, col: u32) -> Self { Location { file, line, col } diff --git a/src/libcore/panicking.rs b/src/libcore/panicking.rs index d9cdb2a2b8a9f..4702e50fb2f06 100644 --- a/src/libcore/panicking.rs +++ b/src/libcore/panicking.rs @@ -32,7 +32,7 @@ use panic::{Location, PanicInfo}; #[cold] // never inline unless panic_immediate_abort to avoid code // bloat at the call sites as much as possible -#[cfg_attr(not(feature="panic_immediate_abort"),inline(never))] +#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))] #[lang = "panic"] pub fn panic(expr_file_line_col: &(&'static str, &'static str, u32, u32)) -> ! { if cfg!(feature = "panic_immediate_abort") { @@ -50,21 +50,25 @@ pub fn panic(expr_file_line_col: &(&'static str, &'static str, u32, u32)) -> ! { } #[cold] -#[cfg_attr(not(feature="panic_immediate_abort"),inline(never))] +#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))] #[lang = "panic_bounds_check"] -fn panic_bounds_check(file_line_col: &(&'static str, u32, u32), - index: usize, len: usize) -> ! { +fn panic_bounds_check(file_line_col: &(&'static str, u32, u32), index: usize, len: usize) -> ! 
{ if cfg!(feature = "panic_immediate_abort") { unsafe { super::intrinsics::abort() } } - panic_fmt(format_args!("index out of bounds: the len is {} but the index is {}", - len, index), file_line_col) + panic_fmt( + format_args!( + "index out of bounds: the len is {} but the index is {}", + len, index + ), + file_line_col, + ) } #[cold] -#[cfg_attr(not(feature="panic_immediate_abort"),inline(never))] -#[cfg_attr( feature="panic_immediate_abort" ,inline)] +#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))] +#[cfg_attr(feature = "panic_immediate_abort", inline)] pub fn panic_fmt(fmt: fmt::Arguments, file_line_col: &(&'static str, u32, u32)) -> ! { if cfg!(feature = "panic_immediate_abort") { unsafe { super::intrinsics::abort() } diff --git a/src/libcore/pin.rs b/src/libcore/pin.rs index e31ac691e3a3c..cf13ac2d8cdf3 100644 --- a/src/libcore/pin.rs +++ b/src/libcore/pin.rs @@ -99,7 +99,7 @@ use fmt; use marker::{Sized, Unpin}; -use ops::{Deref, DerefMut, Receiver, CoerceUnsized, DispatchFromDyn}; +use ops::{CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Receiver}; /// A pinned pointer. /// @@ -196,7 +196,8 @@ impl<'a, T: ?Sized> Pin<&'a T> { /// because it is one of the fields of that value), and also that you do /// not move out of the argument you receive to the interior function. #[stable(feature = "pin", since = "1.33.0")] - pub unsafe fn map_unchecked(self: Pin<&'a T>, func: F) -> Pin<&'a U> where + pub unsafe fn map_unchecked(self: Pin<&'a T>, func: F) -> Pin<&'a U> + where F: FnOnce(&T) -> &U, { let pointer = &*self.pointer; @@ -223,7 +224,9 @@ impl<'a, T: ?Sized> Pin<&'a mut T> { #[stable(feature = "pin", since = "1.33.0")] #[inline(always)] pub fn into_ref(self: Pin<&'a mut T>) -> Pin<&'a T> { - Pin { pointer: self.pointer } + Pin { + pointer: self.pointer, + } } /// Get a mutable reference to the data inside of this `Pin`. @@ -238,7 +241,8 @@ impl<'a, T: ?Sized> Pin<&'a mut T> { #[stable(feature = "pin", since = "1.33.0")] #[inline(always)] pub fn get_mut(self: Pin<&'a mut T>) -> &'a mut T - where T: Unpin, + where + T: Unpin, { self.pointer } @@ -271,7 +275,8 @@ impl<'a, T: ?Sized> Pin<&'a mut T> { /// because it is one of the fields of that value), and also that you do /// not move out of the argument you receive to the interior function. #[stable(feature = "pin", since = "1.33.0")] - pub unsafe fn map_unchecked_mut(self: Pin<&'a mut T>, func: F) -> Pin<&'a mut U> where + pub unsafe fn map_unchecked_mut(self: Pin<&'a mut T>, func: F) -> Pin<&'a mut U> + where F: FnOnce(&mut T) -> &mut U, { let pointer = Pin::get_unchecked_mut(self); @@ -291,7 +296,7 @@ impl Deref for Pin
<P>
{ #[stable(feature = "pin", since = "1.33.0")] impl DerefMut for Pin
<P>
where - P::Target: Unpin + P::Target: Unpin, { fn deref_mut(&mut self) -> &mut P::Target { Pin::get_mut(Pin::as_mut(self)) @@ -328,13 +333,7 @@ impl fmt::Pointer for Pin
<P>
{ // for other reasons, though, so we just need to take care not to allow such // impls to land in std. #[stable(feature = "pin", since = "1.33.0")] -impl CoerceUnsized> for Pin
<P>
-where - P: CoerceUnsized, -{} +impl CoerceUnsized> for Pin
<P>
where P: CoerceUnsized {} #[stable(feature = "pin", since = "1.33.0")] -impl<'a, P, U> DispatchFromDyn> for Pin
<P>
-where - P: DispatchFromDyn, -{} +impl<'a, P, U> DispatchFromDyn> for Pin
<P>
where P: DispatchFromDyn {} diff --git a/src/libcore/prelude/v1.rs b/src/libcore/prelude/v1.rs index b53494edbf401..f3f62cd87d3e2 100644 --- a/src/libcore/prelude/v1.rs +++ b/src/libcore/prelude/v1.rs @@ -25,22 +25,22 @@ pub use mem::drop; pub use clone::Clone; #[stable(feature = "core_prelude", since = "1.4.0")] #[doc(no_inline)] -pub use cmp::{PartialEq, PartialOrd, Eq, Ord}; +pub use cmp::{Eq, Ord, PartialEq, PartialOrd}; #[stable(feature = "core_prelude", since = "1.4.0")] #[doc(no_inline)] -pub use convert::{AsRef, AsMut, Into, From}; +pub use convert::{AsMut, AsRef, From, Into}; #[stable(feature = "core_prelude", since = "1.4.0")] #[doc(no_inline)] pub use default::Default; #[stable(feature = "core_prelude", since = "1.4.0")] #[doc(no_inline)] -pub use iter::{Iterator, Extend, IntoIterator}; +pub use iter::{DoubleEndedIterator, ExactSizeIterator}; #[stable(feature = "core_prelude", since = "1.4.0")] #[doc(no_inline)] -pub use iter::{DoubleEndedIterator, ExactSizeIterator}; +pub use iter::{Extend, IntoIterator, Iterator}; #[stable(feature = "core_prelude", since = "1.4.0")] #[doc(no_inline)] -pub use option::Option::{self, Some, None}; +pub use option::Option::{self, None, Some}; #[stable(feature = "core_prelude", since = "1.4.0")] #[doc(no_inline)] -pub use result::Result::{self, Ok, Err}; +pub use result::Result::{self, Err, Ok}; diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs index 55a72d7a9a94d..7006bf7171e69 100644 --- a/src/libcore/ptr.rs +++ b/src/libcore/ptr.rs @@ -64,14 +64,14 @@ #![stable(feature = "rust1", since = "1.0.0")] use convert::From; -use intrinsics; -use ops::{CoerceUnsized, DispatchFromDyn}; use fmt; use hash; +use intrinsics; use marker::{PhantomData, Unsize}; use mem::{self, MaybeUninit}; +use ops::{CoerceUnsized, DispatchFromDyn}; -use cmp::Ordering::{self, Less, Equal, Greater}; +use cmp::Ordering::{self, Equal, Greater, Less}; #[stable(feature = "rust1", since = "1.0.0")] pub use intrinsics::copy_nonoverlapping; @@ -209,7 +209,9 @@ unsafe fn real_drop_in_place(to_drop: &mut T) { #[inline] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] -pub const fn null() -> *const T { 0 as *const T } +pub const fn null() -> *const T { + 0 as *const T +} /// Creates a null mutable raw pointer. /// @@ -224,7 +226,9 @@ pub const fn null() -> *const T { 0 as *const T } #[inline] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_promotable] -pub const fn null_mut() -> *mut T { 0 as *mut T } +pub const fn null_mut() -> *mut T { + 0 as *mut T +} /// Swaps the values at two mutable locations of the same type, without /// deinitializing either. @@ -373,9 +377,10 @@ unsafe fn swap_nonoverlapping_bytes(x: *mut u8, y: *mut u8, len: usize) { // // FIXME repr(simd) broken on emscripten and redox // It's also broken on big-endian powerpc64 and s390x. 
#42778 - #[cfg_attr(not(any(target_os = "emscripten", target_os = "redox", - target_endian = "big")), - repr(simd))] + #[cfg_attr( + not(any(target_os = "emscripten", target_os = "redox", target_endian = "big")), + repr(simd) + )] struct Block(u64, u64, u64, u64); struct UnalignedBlock(u64, u64, u64, u64); @@ -639,9 +644,11 @@ pub unsafe fn read(src: *const T) -> T { #[stable(feature = "ptr_unaligned", since = "1.17.0")] pub unsafe fn read_unaligned(src: *const T) -> T { let mut tmp = MaybeUninit::::uninitialized(); - copy_nonoverlapping(src as *const u8, - tmp.as_mut_ptr() as *mut u8, - mem::size_of::()); + copy_nonoverlapping( + src as *const u8, + tmp.as_mut_ptr() as *mut u8, + mem::size_of::(), + ); tmp.into_inner() } @@ -798,9 +805,11 @@ pub unsafe fn write(dst: *mut T, src: T) { #[inline] #[stable(feature = "ptr_unaligned", since = "1.17.0")] pub unsafe fn write_unaligned(dst: *mut T, src: T) { - copy_nonoverlapping(&src as *const T as *const u8, - dst as *mut u8, - mem::size_of::()); + copy_nonoverlapping( + &src as *const T as *const u8, + dst as *mut u8, + mem::size_of::(), + ); mem::forget(src); } @@ -1072,7 +1081,10 @@ impl *const T { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub unsafe fn offset(self, count: isize) -> *const T where T: Sized { + pub unsafe fn offset(self, count: isize) -> *const T + where + T: Sized, + { intrinsics::offset(self, count) } @@ -1116,10 +1128,11 @@ impl *const T { /// ``` #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")] #[inline] - pub fn wrapping_offset(self, count: isize) -> *const T where T: Sized { - unsafe { - intrinsics::arith_offset(self, count) - } + pub fn wrapping_offset(self, count: isize) -> *const T + where + T: Sized, + { + unsafe { intrinsics::arith_offset(self, count) } } /// Calculates the distance between two pointers. 
The returned value is in @@ -1185,7 +1198,10 @@ impl *const T { /// ``` #[unstable(feature = "ptr_offset_from", issue = "41079")] #[inline] - pub unsafe fn offset_from(self, origin: *const T) -> isize where T: Sized { + pub unsafe fn offset_from(self, origin: *const T) -> isize + where + T: Sized, + { let pointee_size = mem::size_of::(); assert!(0 < pointee_size && pointee_size <= isize::max_value() as usize); @@ -1232,7 +1248,10 @@ impl *const T { /// ``` #[unstable(feature = "ptr_wrapping_offset_from", issue = "41079")] #[inline] - pub fn wrapping_offset_from(self, origin: *const T) -> isize where T: Sized { + pub fn wrapping_offset_from(self, origin: *const T) -> isize + where + T: Sized, + { let pointee_size = mem::size_of::(); assert!(0 < pointee_size && pointee_size <= isize::max_value() as usize); @@ -1291,7 +1310,8 @@ impl *const T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn add(self, count: usize) -> Self - where T: Sized, + where + T: Sized, { self.offset(count as isize) } @@ -1348,7 +1368,8 @@ impl *const T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn sub(self, count: usize) -> Self - where T: Sized, + where + T: Sized, { self.offset((count as isize).wrapping_neg()) } @@ -1389,7 +1410,8 @@ impl *const T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub fn wrapping_add(self, count: usize) -> Self - where T: Sized, + where + T: Sized, { self.wrapping_offset(count as isize) } @@ -1430,7 +1452,8 @@ impl *const T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub fn wrapping_sub(self, count: usize) -> Self - where T: Sized, + where + T: Sized, { self.wrapping_offset((count as isize).wrapping_neg()) } @@ -1444,7 +1467,8 @@ impl *const T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn read(self) -> T - where T: Sized, + where + T: Sized, { read(self) } @@ -1462,7 +1486,8 @@ impl *const T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn read_volatile(self) -> T - where T: Sized, + where + T: Sized, { read_volatile(self) } @@ -1478,7 +1503,8 @@ impl *const T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn read_unaligned(self) -> T - where T: Sized, + where + T: Sized, { read_unaligned(self) } @@ -1494,7 +1520,8 @@ impl *const T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn copy_to(self, dest: *mut T, count: usize) - where T: Sized, + where + T: Sized, { copy(self, dest, count) } @@ -1510,7 +1537,8 @@ impl *const T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize) - where T: Sized, + where + T: Sized, { copy_nonoverlapping(self, dest, count) } @@ -1554,17 +1582,17 @@ impl *const T { /// # } } /// ``` #[unstable(feature = "align_offset", issue = "44488")] - pub fn align_offset(self, align: usize) -> usize where T: Sized { + pub fn align_offset(self, align: usize) -> usize + where + T: Sized, + { if !align.is_power_of_two() { panic!("align_offset: align is not a power-of-two"); } - unsafe { - align_offset(self, align) - } + unsafe { align_offset(self, align) } } } - #[lang = "mut_ptr"] impl *mut T { /// Returns `true` if the pointer is null. 
@@ -1692,7 +1720,10 @@ impl *mut T { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub unsafe fn offset(self, count: isize) -> *mut T where T: Sized { + pub unsafe fn offset(self, count: isize) -> *mut T + where + T: Sized, + { intrinsics::offset(self, count) as *mut T } @@ -1735,10 +1766,11 @@ impl *mut T { /// ``` #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")] #[inline] - pub fn wrapping_offset(self, count: isize) -> *mut T where T: Sized { - unsafe { - intrinsics::arith_offset(self, count) as *mut T - } + pub fn wrapping_offset(self, count: isize) -> *mut T + where + T: Sized, + { + unsafe { intrinsics::arith_offset(self, count) as *mut T } } /// Returns `None` if the pointer is null, or else returns a mutable @@ -1834,7 +1866,10 @@ impl *mut T { /// ``` #[unstable(feature = "ptr_offset_from", issue = "41079")] #[inline] - pub unsafe fn offset_from(self, origin: *const T) -> isize where T: Sized { + pub unsafe fn offset_from(self, origin: *const T) -> isize + where + T: Sized, + { (self as *const T).offset_from(origin) } @@ -1874,7 +1909,10 @@ impl *mut T { /// ``` #[unstable(feature = "ptr_wrapping_offset_from", issue = "41079")] #[inline] - pub fn wrapping_offset_from(self, origin: *const T) -> isize where T: Sized { + pub fn wrapping_offset_from(self, origin: *const T) -> isize + where + T: Sized, + { (self as *const T).wrapping_offset_from(origin) } @@ -1929,7 +1967,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn add(self, count: usize) -> Self - where T: Sized, + where + T: Sized, { self.offset(count as isize) } @@ -1986,7 +2025,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn sub(self, count: usize) -> Self - where T: Sized, + where + T: Sized, { self.offset((count as isize).wrapping_neg()) } @@ -2027,7 +2067,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub fn wrapping_add(self, count: usize) -> Self - where T: Sized, + where + T: Sized, { self.wrapping_offset(count as isize) } @@ -2068,7 +2109,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub fn wrapping_sub(self, count: usize) -> Self - where T: Sized, + where + T: Sized, { self.wrapping_offset((count as isize).wrapping_neg()) } @@ -2082,7 +2124,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn read(self) -> T - where T: Sized, + where + T: Sized, { read(self) } @@ -2100,7 +2143,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn read_volatile(self) -> T - where T: Sized, + where + T: Sized, { read_volatile(self) } @@ -2116,7 +2160,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn read_unaligned(self) -> T - where T: Sized, + where + T: Sized, { read_unaligned(self) } @@ -2132,7 +2177,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn copy_to(self, dest: *mut T, count: usize) - where T: Sized, + where + T: Sized, { copy(self, dest, count) } @@ -2148,7 +2194,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize) - where T: Sized, + where + T: Sized, { copy_nonoverlapping(self, dest, count) } @@ -2164,7 +2211,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn copy_from(self, 
src: *const T, count: usize) - where T: Sized, + where + T: Sized, { copy(src, self, count) } @@ -2180,7 +2228,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize) - where T: Sized, + where + T: Sized, { copy_nonoverlapping(src, self, count) } @@ -2205,7 +2254,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn write(self, val: T) - where T: Sized, + where + T: Sized, { write(self, val) } @@ -2219,7 +2269,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn write_bytes(self, val: u8, count: usize) - where T: Sized, + where + T: Sized, { write_bytes(self, val, count) } @@ -2237,7 +2288,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn write_volatile(self, val: T) - where T: Sized, + where + T: Sized, { write_volatile(self, val) } @@ -2253,7 +2305,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn write_unaligned(self, val: T) - where T: Sized, + where + T: Sized, { write_unaligned(self, val) } @@ -2267,7 +2320,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn replace(self, src: T) -> T - where T: Sized, + where + T: Sized, { replace(self, src) } @@ -2282,7 +2336,8 @@ impl *mut T { #[stable(feature = "pointer_methods", since = "1.26.0")] #[inline] pub unsafe fn swap(self, with: *mut T) - where T: Sized, + where + T: Sized, { swap(self, with) } @@ -2326,13 +2381,14 @@ impl *mut T { /// # } } /// ``` #[unstable(feature = "align_offset", issue = "44488")] - pub fn align_offset(self, align: usize) -> usize where T: Sized { + pub fn align_offset(self, align: usize) -> usize + where + T: Sized, + { if !align.is_power_of_two() { panic!("align_offset: align is not a power-of-two"); } - unsafe { - align_offset(self, align) - } + unsafe { align_offset(self, align) } } } @@ -2350,7 +2406,7 @@ impl *mut T { /// than trying to adapt this to accommodate that change. /// /// Any questions go to @nagisa. -#[lang="align_offset"] +#[lang = "align_offset"] pub(crate) unsafe fn align_offset(p: *const T, a: usize) -> usize { /// Calculate multiplicative modular inverse of `x` modulo `m`. /// @@ -2390,9 +2446,8 @@ pub(crate) unsafe fn align_offset(p: *const T, a: usize) -> usize { // uses e.g., subtraction `mod n`. It is entirely fine to do them `mod // usize::max_value()` instead, because we take the result `mod n` at the end // anyway. 
- inverse = inverse.wrapping_mul( - 2usize.wrapping_sub(x.wrapping_mul(inverse)) - ) & (going_mod - 1); + inverse = inverse.wrapping_mul(2usize.wrapping_sub(x.wrapping_mul(inverse))) + & (going_mod - 1); if going_mod > m { return inverse & (m - 1); } @@ -2451,13 +2506,13 @@ pub(crate) unsafe fn align_offset(p: *const T, a: usize) -> usize { usize::max_value() } - - // Equality for pointers #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for *const T { #[inline] - fn eq(&self, other: &*const T) -> bool { *self == *other } + fn eq(&self, other: &*const T) -> bool { + *self == *other + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -2466,7 +2521,9 @@ impl Eq for *const T {} #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for *mut T { #[inline] - fn eq(&self, other: &*mut T) -> bool { *self == *other } + fn eq(&self, other: &*mut T) -> bool { + *self == *other + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -2609,7 +2666,7 @@ macro_rules! fnptr_impls_args { }; } -fnptr_impls_args! { } +fnptr_impls_args! {} fnptr_impls_args! { A } fnptr_impls_args! { A, B } fnptr_impls_args! { A, B, C } @@ -2646,16 +2703,24 @@ impl PartialOrd for *const T { } #[inline] - fn lt(&self, other: &*const T) -> bool { *self < *other } + fn lt(&self, other: &*const T) -> bool { + *self < *other + } #[inline] - fn le(&self, other: &*const T) -> bool { *self <= *other } + fn le(&self, other: &*const T) -> bool { + *self <= *other + } #[inline] - fn gt(&self, other: &*const T) -> bool { *self > *other } + fn gt(&self, other: &*const T) -> bool { + *self > *other + } #[inline] - fn ge(&self, other: &*const T) -> bool { *self >= *other } + fn ge(&self, other: &*const T) -> bool { + *self >= *other + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -2680,16 +2745,24 @@ impl PartialOrd for *mut T { } #[inline] - fn lt(&self, other: &*mut T) -> bool { *self < *other } + fn lt(&self, other: &*mut T) -> bool { + *self < *other + } #[inline] - fn le(&self, other: &*mut T) -> bool { *self <= *other } + fn le(&self, other: &*mut T) -> bool { + *self <= *other + } #[inline] - fn gt(&self, other: &*mut T) -> bool { *self > *other } + fn gt(&self, other: &*mut T) -> bool { + *self > *other + } #[inline] - fn ge(&self, other: &*mut T) -> bool { *self >= *other } + fn ge(&self, other: &*mut T) -> bool { + *self >= *other + } } /// A wrapper around a raw non-null `*mut T` that indicates that the possessor @@ -2712,9 +2785,12 @@ impl PartialOrd for *mut T { /// /// Unlike `*mut T`, `Unique` is covariant over `T`. This should always be correct /// for any type which upholds Unique's aliasing requirements. -#[unstable(feature = "ptr_internals", issue = "0", - reason = "use NonNull instead and consider PhantomData \ - (if you also use #[may_dangle]), Send, and/or Sync")] +#[unstable( + feature = "ptr_internals", + issue = "0", + reason = "use NonNull instead and consider PhantomData \ + (if you also use #[may_dangle]), Send, and/or Sync" +)] #[doc(hidden)] #[repr(transparent)] #[rustc_layout_scalar_valid_range_start(1)] @@ -2740,14 +2816,14 @@ impl fmt::Debug for Unique { /// unenforced by the type system; the abstraction using the /// `Unique` must enforce it. #[unstable(feature = "ptr_internals", issue = "0")] -unsafe impl Send for Unique { } +unsafe impl Send for Unique {} /// `Unique` pointers are `Sync` if `T` is `Sync` because the data they /// reference is unaliased. Note that this aliasing invariant is /// unenforced by the type system; the abstraction using the /// `Unique` must enforce it. 
#[unstable(feature = "ptr_internals", issue = "0")] -unsafe impl Sync for Unique { } +unsafe impl Sync for Unique {} #[unstable(feature = "ptr_internals", issue = "0")] impl Unique { @@ -2762,9 +2838,7 @@ impl Unique { /// some other means. // FIXME: rename to dangling() to match NonNull? pub const fn empty() -> Self { - unsafe { - Unique::new_unchecked(mem::align_of::() as *mut T) - } + unsafe { Unique::new_unchecked(mem::align_of::() as *mut T) } } } @@ -2776,13 +2850,21 @@ impl Unique { /// /// `ptr` must be non-null. pub const unsafe fn new_unchecked(ptr: *mut T) -> Self { - Unique { pointer: ptr as _, _marker: PhantomData } + Unique { + pointer: ptr as _, + _marker: PhantomData, + } } /// Creates a new `Unique` if `ptr` is non-null. pub fn new(ptr: *mut T) -> Option { if !ptr.is_null() { - Some(unsafe { Unique { pointer: ptr as _, _marker: PhantomData } }) + Some(unsafe { + Unique { + pointer: ptr as _, + _marker: PhantomData, + } + }) } else { None } @@ -2820,13 +2902,13 @@ impl Clone for Unique { } #[unstable(feature = "ptr_internals", issue = "0")] -impl Copy for Unique { } +impl Copy for Unique {} #[unstable(feature = "ptr_internals", issue = "0")] -impl CoerceUnsized> for Unique where T: Unsize { } +impl CoerceUnsized> for Unique where T: Unsize {} #[unstable(feature = "ptr_internals", issue = "0")] -impl DispatchFromDyn> for Unique where T: Unsize { } +impl DispatchFromDyn> for Unique where T: Unsize {} #[unstable(feature = "ptr_internals", issue = "0")] impl fmt::Pointer for Unique { @@ -2838,21 +2920,36 @@ impl fmt::Pointer for Unique { #[unstable(feature = "ptr_internals", issue = "0")] impl<'a, T: ?Sized> From<&'a mut T> for Unique { fn from(reference: &'a mut T) -> Self { - unsafe { Unique { pointer: reference as *mut T, _marker: PhantomData } } + unsafe { + Unique { + pointer: reference as *mut T, + _marker: PhantomData, + } + } } } #[unstable(feature = "ptr_internals", issue = "0")] impl<'a, T: ?Sized> From<&'a T> for Unique { fn from(reference: &'a T) -> Self { - unsafe { Unique { pointer: reference as *const T, _marker: PhantomData } } + unsafe { + Unique { + pointer: reference as *const T, + _marker: PhantomData, + } + } } } #[unstable(feature = "ptr_internals", issue = "0")] impl<'a, T: ?Sized> From> for Unique { fn from(p: NonNull) -> Self { - unsafe { Unique { pointer: p.pointer, _marker: PhantomData } } + unsafe { + Unique { + pointer: p.pointer, + _marker: PhantomData, + } + } } } @@ -2883,12 +2980,12 @@ pub struct NonNull { /// `NonNull` pointers are not `Send` because the data they reference may be aliased. // N.B., this impl is unnecessary, but should provide better error messages. #[stable(feature = "nonnull", since = "1.25.0")] -impl !Send for NonNull { } +impl !Send for NonNull {} /// `NonNull` pointers are not `Sync` because the data they reference may be aliased. // N.B., this impl is unnecessary, but should provide better error messages. #[stable(feature = "nonnull", since = "1.25.0")] -impl !Sync for NonNull { } +impl !Sync for NonNull {} impl NonNull { /// Creates a new `NonNull` that is dangling, but well-aligned. 
@@ -2966,9 +3063,7 @@ impl NonNull { #[stable(feature = "nonnull_cast", since = "1.27.0")] #[inline] pub fn cast(self) -> NonNull { - unsafe { - NonNull::new_unchecked(self.as_ptr() as *mut U) - } + unsafe { NonNull::new_unchecked(self.as_ptr() as *mut U) } } } @@ -2980,13 +3075,13 @@ impl Clone for NonNull { } #[stable(feature = "nonnull", since = "1.25.0")] -impl Copy for NonNull { } +impl Copy for NonNull {} #[unstable(feature = "coerce_unsized", issue = "27732")] -impl CoerceUnsized> for NonNull where T: Unsize { } +impl CoerceUnsized> for NonNull where T: Unsize {} #[unstable(feature = "dispatch_from_dyn", issue = "0")] -impl DispatchFromDyn> for NonNull where T: Unsize { } +impl DispatchFromDyn> for NonNull where T: Unsize {} #[stable(feature = "nonnull", since = "1.25.0")] impl fmt::Debug for NonNull { @@ -3041,7 +3136,11 @@ impl hash::Hash for NonNull { impl From> for NonNull { #[inline] fn from(unique: Unique) -> Self { - unsafe { NonNull { pointer: unique.pointer } } + unsafe { + NonNull { + pointer: unique.pointer, + } + } } } @@ -3049,7 +3148,11 @@ impl From> for NonNull { impl<'a, T: ?Sized> From<&'a mut T> for NonNull { #[inline] fn from(reference: &'a mut T) -> Self { - unsafe { NonNull { pointer: reference as *mut T } } + unsafe { + NonNull { + pointer: reference as *mut T, + } + } } } @@ -3057,6 +3160,10 @@ impl<'a, T: ?Sized> From<&'a mut T> for NonNull { impl<'a, T: ?Sized> From<&'a T> for NonNull { #[inline] fn from(reference: &'a T) -> Self { - unsafe { NonNull { pointer: reference as *const T } } + unsafe { + NonNull { + pointer: reference as *const T, + } + } } } diff --git a/src/libcore/result.rs b/src/libcore/result.rs index b10d767efb24e..c3b9f97d20100 100644 --- a/src/libcore/result.rs +++ b/src/libcore/result.rs @@ -282,7 +282,7 @@ impl Result { pub fn is_ok(&self) -> bool { match *self { Ok(_) => true, - Err(_) => false + Err(_) => false, } } @@ -333,7 +333,7 @@ impl Result { #[stable(feature = "rust1", since = "1.0.0")] pub fn ok(self) -> Option { match self { - Ok(x) => Some(x), + Ok(x) => Some(x), Err(_) => None, } } @@ -360,7 +360,7 @@ impl Result { #[stable(feature = "rust1", since = "1.0.0")] pub fn err(self) -> Option { match self { - Ok(_) => None, + Ok(_) => None, Err(x) => Some(x), } } @@ -453,10 +453,10 @@ impl Result { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - pub fn map U>(self, op: F) -> Result { + pub fn map U>(self, op: F) -> Result { match self { Ok(t) => Ok(op(t)), - Err(e) => Err(e) + Err(e) => Err(e), } } @@ -514,10 +514,10 @@ impl Result { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - pub fn map_err F>(self, op: O) -> Result { + pub fn map_err F>(self, op: O) -> Result { match self { Ok(t) => Ok(t), - Err(e) => Err(op(e)) + Err(e) => Err(op(e)), } } @@ -543,7 +543,9 @@ impl Result { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn iter(&self) -> Iter { - Iter { inner: self.as_ref().ok() } + Iter { + inner: self.as_ref().ok(), + } } /// Returns a mutable iterator over the possibly contained value. 
@@ -568,7 +570,9 @@ impl Result { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn iter_mut(&mut self) -> IterMut { - IterMut { inner: self.as_mut().ok() } + IterMut { + inner: self.as_mut().ok(), + } } //////////////////////////////////////////////////////////////////////// @@ -736,7 +740,7 @@ impl Result { pub fn unwrap_or(self, optb: T) -> T { match self { Ok(t) => t, - Err(_) => optb + Err(_) => optb, } } @@ -761,7 +765,7 @@ impl Result { pub fn unwrap_or_else T>(self, op: F) -> T { match self { Ok(t) => t, - Err(e) => op(e) + Err(e) => op(e), } } } @@ -944,8 +948,7 @@ impl Result { /// Leaves the original Result in-place, creating a new one with a reference /// to the original one, additionally coercing the `Err` arm of the Result via /// `Deref`. - pub fn deref_err(&self) -> Result<&T, &E::Target> - { + pub fn deref_err(&self) -> Result<&T, &E::Target> { self.as_ref().map_err(|e| e.deref()) } } @@ -957,8 +960,7 @@ impl Result { /// Leaves the original Result in-place, creating a new one with a reference /// to the original one, additionally coercing both the `Ok` and `Err` arms /// of the Result via `Deref`. - pub fn deref(&self) -> Result<&T::Target, &E::Target> - { + pub fn deref(&self) -> Result<&T::Target, &E::Target> { self.as_ref().map(|t| t.deref()).map_err(|e| e.deref()) } } @@ -1066,17 +1068,21 @@ impl<'a, T, E> IntoIterator for &'a mut Result { /// [`Result::iter`]: enum.Result.html#method.iter #[derive(Debug)] #[stable(feature = "rust1", since = "1.0.0")] -pub struct Iter<'a, T: 'a> { inner: Option<&'a T> } +pub struct Iter<'a, T: 'a> { + inner: Option<&'a T>, +} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for Iter<'a, T> { type Item = &'a T; #[inline] - fn next(&mut self) -> Option<&'a T> { self.inner.take() } + fn next(&mut self) -> Option<&'a T> { + self.inner.take() + } #[inline] fn size_hint(&self) -> (usize, Option) { - let n = if self.inner.is_some() {1} else {0}; + let n = if self.inner.is_some() { 1 } else { 0 }; (n, Some(n)) } } @@ -1084,7 +1090,9 @@ impl<'a, T> Iterator for Iter<'a, T> { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for Iter<'a, T> { #[inline] - fn next_back(&mut self) -> Option<&'a T> { self.inner.take() } + fn next_back(&mut self) -> Option<&'a T> { + self.inner.take() + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1099,7 +1107,9 @@ unsafe impl TrustedLen for Iter<'_, A> {} #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Iter<'_, T> { #[inline] - fn clone(&self) -> Self { Iter { inner: self.inner } } + fn clone(&self) -> Self { + Iter { inner: self.inner } + } } /// An iterator over a mutable reference to the [`Ok`] variant of a [`Result`]. 
@@ -1111,17 +1121,21 @@ impl Clone for Iter<'_, T> { /// [`Result::iter_mut`]: enum.Result.html#method.iter_mut #[derive(Debug)] #[stable(feature = "rust1", since = "1.0.0")] -pub struct IterMut<'a, T: 'a> { inner: Option<&'a mut T> } +pub struct IterMut<'a, T: 'a> { + inner: Option<&'a mut T>, +} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for IterMut<'a, T> { type Item = &'a mut T; #[inline] - fn next(&mut self) -> Option<&'a mut T> { self.inner.take() } + fn next(&mut self) -> Option<&'a mut T> { + self.inner.take() + } #[inline] fn size_hint(&self) -> (usize, Option) { - let n = if self.inner.is_some() {1} else {0}; + let n = if self.inner.is_some() { 1 } else { 0 }; (n, Some(n)) } } @@ -1129,7 +1143,9 @@ impl<'a, T> Iterator for IterMut<'a, T> { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for IterMut<'a, T> { #[inline] - fn next_back(&mut self) -> Option<&'a mut T> { self.inner.take() } + fn next_back(&mut self) -> Option<&'a mut T> { + self.inner.take() + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1154,17 +1170,21 @@ unsafe impl TrustedLen for IterMut<'_, A> {} /// [`IntoIterator`]: ../iter/trait.IntoIterator.html #[derive(Clone, Debug)] #[stable(feature = "rust1", since = "1.0.0")] -pub struct IntoIter { inner: Option } +pub struct IntoIter { + inner: Option, +} #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for IntoIter { type Item = T; #[inline] - fn next(&mut self) -> Option { self.inner.take() } + fn next(&mut self) -> Option { + self.inner.take() + } #[inline] fn size_hint(&self) -> (usize, Option) { - let n = if self.inner.is_some() {1} else {0}; + let n = if self.inner.is_some() { 1 } else { 0 }; (n, Some(n)) } } @@ -1172,7 +1192,9 @@ impl Iterator for IntoIter { #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for IntoIter { #[inline] - fn next_back(&mut self) -> Option { self.inner.take() } + fn next_back(&mut self) -> Option { + self.inner.take() + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1205,7 +1227,7 @@ impl> FromIterator> for Result { /// assert!(res == Ok(vec![2, 3])); /// ``` #[inline] - fn from_iter>>(iter: I) -> Result { + fn from_iter>>(iter: I) -> Result { // FIXME(#11084): This could be replaced with Iterator::scan when this // performance bug is closed. @@ -1214,7 +1236,7 @@ impl> FromIterator> for Result { err: Option, } - impl>> Iterator for Adapter { + impl>> Iterator for Adapter { type Item = T; #[inline] @@ -1235,7 +1257,10 @@ impl> FromIterator> for Result { } } - let mut adapter = Adapter { iter: iter.into_iter(), err: None }; + let mut adapter = Adapter { + iter: iter.into_iter(), + err: None, + }; let v: V = FromIterator::from_iter(adapter.by_ref()); match adapter.err { @@ -1246,7 +1271,7 @@ impl> FromIterator> for Result { } #[unstable(feature = "try_trait", issue = "42327")] -impl ops::Try for Result { +impl ops::Try for Result { type Ok = T; type Error = E; diff --git a/src/libcore/slice/memchr.rs b/src/libcore/slice/memchr.rs index 0f1bc41dd7fff..ea9ed728be113 100644 --- a/src/libcore/slice/memchr.rs +++ b/src/libcore/slice/memchr.rs @@ -78,7 +78,10 @@ pub fn memchr(x: u8, text: &[u8]) -> Option { } // find the byte after the point the body loop stopped - text[offset..].iter().position(|elt| *elt == x).map(|i| offset + i) + text[offset..] + .iter() + .position(|elt| *elt == x) + .map(|i| offset + i) } /// Return the last index matching the byte `x` in `text`. 
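// A safe sketch of what the reformatted tail expression in `memchr` above
// computes: once the word-at-a-time loop stops at `offset`, the remaining
// bytes are scanned linearly and any hit is re-based onto the whole slice.
// `memchr_tail` is a hypothetical helper, shown only for illustration.
fn memchr_tail(x: u8, text: &[u8], offset: usize) -> Option<usize> {
    text[offset..]
        .iter()
        .position(|&b| b == x)
        .map(|i| offset + i)
}

fn memchr_tail_sketch() {
    assert_eq!(memchr_tail(b'c', b"abcabc", 3), Some(5));
    assert_eq!(memchr_tail(b'z', b"abcabc", 0), None);
}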
diff --git a/src/libcore/slice/mod.rs b/src/libcore/slice/mod.rs index 7fdc2acb8cc92..868963219f2a6 100644 --- a/src/libcore/slice/mod.rs +++ b/src/libcore/slice/mod.rs @@ -20,24 +20,27 @@ // * The `raw` and `bytes` submodules. // * Boilerplate trait implementations. -use cmp::Ordering::{self, Less, Equal, Greater}; use cmp; +use cmp::Ordering::{self, Equal, Greater, Less}; use fmt; use intrinsics::assume; use isize; use iter::*; -use ops::{FnMut, Try, self}; +use iter_private::TrustedRandomAccess; +use marker::{self, Copy, Send, Sized, Sync}; +use mem; +use ops::{self, FnMut, Try}; use option::Option; use option::Option::{None, Some}; -use result::Result; -use result::Result::{Ok, Err}; use ptr; -use mem; -use marker::{Copy, Send, Sync, Sized, self}; -use iter_private::TrustedRandomAccess; +use result::Result; +use result::Result::{Err, Ok}; -#[unstable(feature = "slice_internals", issue = "0", - reason = "exposed from core to be reused in std; use the memchr crate")] +#[unstable( + feature = "slice_internals", + issue = "0", + reason = "exposed from core to be reused in std; use the memchr crate" +)] /// Pure rust memchr implementation, taken from rust-memchr pub mod memchr; @@ -76,9 +79,7 @@ impl [T] { #[inline] #[rustc_const_unstable(feature = "const_slice_len")] pub const fn len(&self) -> usize { - unsafe { - Repr { rust: self }.raw.len - } + unsafe { Repr { rust: self }.raw.len } } /// Returns `true` if the slice has a length of 0. @@ -146,7 +147,11 @@ impl [T] { #[stable(feature = "slice_splits", since = "1.5.0")] #[inline] pub fn split_first(&self) -> Option<(&T, &[T])> { - if self.is_empty() { None } else { Some((&self[0], &self[1..])) } + if self.is_empty() { + None + } else { + Some((&self[0], &self[1..])) + } } /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty. @@ -166,7 +171,9 @@ impl [T] { #[stable(feature = "slice_splits", since = "1.5.0")] #[inline] pub fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> { - if self.is_empty() { None } else { + if self.is_empty() { + None + } else { let split = self.split_at_mut(1); Some((&mut split.0[0], split.1)) } @@ -188,7 +195,11 @@ impl [T] { #[inline] pub fn split_last(&self) -> Option<(&T, &[T])> { let len = self.len(); - if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) } + if len == 0 { + None + } else { + Some((&self[len - 1], &self[..(len - 1)])) + } } /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty. @@ -209,11 +220,12 @@ impl [T] { #[inline] pub fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> { let len = self.len(); - if len == 0 { None } else { + if len == 0 { + None + } else { let split = self.split_at_mut(len - 1); Some((&mut split.1[0], split.0)) } - } /// Returns the last element of the slice, or `None` if it is empty. 
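// A short usage sketch for the `split_first` / `split_last` accessors
// reformatted above (standard stable slice API; the data is an arbitrary
// example).
fn split_ends_sketch() {
    let xs = [10, 20, 30];

    // Head plus tail, or `None` for an empty slice.
    assert_eq!(xs.split_first(), Some((&10, &[20, 30][..])));
    // Last element plus everything before it.
    assert_eq!(xs.split_last(), Some((&30, &[10, 20][..])));

    let empty: [i32; 0] = [];
    assert_eq!(empty.split_first(), None);
}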
@@ -273,7 +285,8 @@ impl [T] { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn get(&self, index: I) -> Option<&I::Output> - where I: SliceIndex + where + I: SliceIndex, { index.get(self) } @@ -296,7 +309,8 @@ impl [T] { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn get_mut(&mut self, index: I) -> Option<&mut I::Output> - where I: SliceIndex + where + I: SliceIndex, { index.get_mut(self) } @@ -321,7 +335,8 @@ impl [T] { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub unsafe fn get_unchecked(&self, index: I) -> &I::Output - where I: SliceIndex + where + I: SliceIndex, { index.get_unchecked(self) } @@ -348,7 +363,8 @@ impl [T] { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub unsafe fn get_unchecked_mut(&mut self, index: I) -> &mut I::Output - where I: SliceIndex + where + I: SliceIndex, { index.get_unchecked_mut(self) } @@ -464,8 +480,7 @@ impl [T] { // will be, when the length is odd -- so there's no way of emitting // pre- and postludes to use fully-aligned SIMD in the middle.) - let fast_unaligned = - cfg!(any(target_arch = "x86", target_arch = "x86_64")); + let fast_unaligned = cfg!(any(target_arch = "x86", target_arch = "x86_64")); if fast_unaligned && mem::size_of::() == 1 { // Use the llvm.bswap intrinsic to reverse u8s in a usize @@ -539,7 +554,7 @@ impl [T] { Iter { ptr, end, - _marker: marker::PhantomData + _marker: marker::PhantomData, } } } @@ -571,7 +586,7 @@ impl [T] { IterMut { ptr, end, - _marker: marker::PhantomData + _marker: marker::PhantomData, } } } @@ -640,7 +655,10 @@ impl [T] { #[inline] pub fn chunks(&self, chunk_size: usize) -> Chunks { assert!(chunk_size != 0); - Chunks { v: self, chunk_size } + Chunks { + v: self, + chunk_size, + } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the @@ -678,7 +696,10 @@ impl [T] { #[inline] pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut { assert!(chunk_size != 0); - ChunksMut { v: self, chunk_size } + ChunksMut { + v: self, + chunk_size, + } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the @@ -718,7 +739,11 @@ impl [T] { let rem = self.len() % chunk_size; let len = self.len() - rem; let (fst, snd) = self.split_at(len); - ChunksExact { v: fst, rem: snd, chunk_size } + ChunksExact { + v: fst, + rem: snd, + chunk_size, + } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the @@ -763,7 +788,11 @@ impl [T] { let rem = self.len() % chunk_size; let len = self.len() - rem; let (fst, snd) = self.split_at_mut(len); - ChunksExactMut { v: fst, rem: snd, chunk_size } + ChunksExactMut { + v: fst, + rem: snd, + chunk_size, + } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end @@ -797,7 +826,10 @@ impl [T] { #[inline] pub fn rchunks(&self, chunk_size: usize) -> RChunks { assert!(chunk_size != 0); - RChunks { v: self, chunk_size } + RChunks { + v: self, + chunk_size, + } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end @@ -835,7 +867,10 @@ impl [T] { #[inline] pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut { assert!(chunk_size != 0); - RChunksMut { v: self, chunk_size } + RChunksMut { + v: self, + chunk_size, + } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the @@ -876,7 +911,11 @@ impl [T] { assert!(chunk_size != 0); let rem = self.len() % chunk_size; let (fst, snd) = self.split_at(rem); - RChunksExact 
{ v: snd, rem: fst, chunk_size } + RChunksExact { + v: snd, + rem: fst, + chunk_size, + } } /// Returns an iterator over `chunk_size` elements of the slice at a time, starting at the end @@ -921,7 +960,11 @@ impl [T] { assert!(chunk_size != 0); let rem = self.len() % chunk_size; let (fst, snd) = self.split_at_mut(rem); - RChunksExactMut { v: snd, rem: fst, chunk_size } + RChunksExactMut { + v: snd, + rem: fst, + chunk_size, + } } /// Divides one slice into two at an index. @@ -996,8 +1039,10 @@ impl [T] { unsafe { assert!(mid <= len); - (from_raw_parts_mut(ptr, mid), - from_raw_parts_mut(ptr.add(mid), len - mid)) + ( + from_raw_parts_mut(ptr, mid), + from_raw_parts_mut(ptr.add(mid), len - mid), + ) } } @@ -1044,12 +1089,13 @@ impl [T] { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn split(&self, pred: F) -> Split - where F: FnMut(&T) -> bool + where + F: FnMut(&T) -> bool, { Split { v: self, pred, - finished: false + finished: false, } } @@ -1069,9 +1115,14 @@ impl [T] { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn split_mut(&mut self, pred: F) -> SplitMut - where F: FnMut(&T) -> bool + where + F: FnMut(&T) -> bool, { - SplitMut { v: self, pred, finished: false } + SplitMut { + v: self, + pred, + finished: false, + } } /// Returns an iterator over subslices separated by elements that match @@ -1104,9 +1155,12 @@ impl [T] { #[stable(feature = "slice_rsplit", since = "1.27.0")] #[inline] pub fn rsplit(&self, pred: F) -> RSplit - where F: FnMut(&T) -> bool + where + F: FnMut(&T) -> bool, { - RSplit { inner: self.split(pred) } + RSplit { + inner: self.split(pred), + } } /// Returns an iterator over mutable subslices separated by elements that @@ -1129,9 +1183,12 @@ impl [T] { #[stable(feature = "slice_rsplit", since = "1.27.0")] #[inline] pub fn rsplit_mut(&mut self, pred: F) -> RSplitMut - where F: FnMut(&T) -> bool + where + F: FnMut(&T) -> bool, { - RSplitMut { inner: self.split_mut(pred) } + RSplitMut { + inner: self.split_mut(pred), + } } /// Returns an iterator over subslices separated by elements that match @@ -1156,13 +1213,14 @@ impl [T] { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn splitn(&self, n: usize, pred: F) -> SplitN - where F: FnMut(&T) -> bool + where + F: FnMut(&T) -> bool, { SplitN { inner: GenericSplitN { iter: self.split(pred), - count: n - } + count: n, + }, } } @@ -1186,13 +1244,14 @@ impl [T] { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn splitn_mut(&mut self, n: usize, pred: F) -> SplitNMut - where F: FnMut(&T) -> bool + where + F: FnMut(&T) -> bool, { SplitNMut { inner: GenericSplitN { iter: self.split_mut(pred), - count: n - } + count: n, + }, } } @@ -1219,13 +1278,14 @@ impl [T] { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn rsplitn(&self, n: usize, pred: F) -> RSplitN - where F: FnMut(&T) -> bool + where + F: FnMut(&T) -> bool, { RSplitN { inner: GenericSplitN { iter: self.rsplit(pred), - count: n - } + count: n, + }, } } @@ -1250,13 +1310,14 @@ impl [T] { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn rsplitn_mut(&mut self, n: usize, pred: F) -> RSplitNMut - where F: FnMut(&T) -> bool + where + F: FnMut(&T) -> bool, { RSplitNMut { inner: GenericSplitN { iter: self.rsplit_mut(pred), - count: n - } + count: n, + }, } } @@ -1271,7 +1332,8 @@ impl [T] { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn contains(&self, x: &T) -> bool - where T: PartialEq + where + T: PartialEq, { x.slice_contains(self) } @@ -1298,7 +1360,8 @@ impl [T] { /// ``` 
#[stable(feature = "rust1", since = "1.0.0")] pub fn starts_with(&self, needle: &[T]) -> bool - where T: PartialEq + where + T: PartialEq, { let n = needle.len(); self.len() >= n && needle == &self[..n] @@ -1326,10 +1389,11 @@ impl [T] { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn ends_with(&self, needle: &[T]) -> bool - where T: PartialEq + where + T: PartialEq, { let (m, n) = (self.len(), needle.len()); - m >= n && needle == &self[m-n..] + m >= n && needle == &self[m - n..] } /// Binary searches this sorted slice for a given element. @@ -1357,7 +1421,8 @@ impl [T] { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn binary_search(&self, x: &T) -> Result - where T: Ord + where + T: Ord, { self.binary_search_by(|p| p.cmp(x)) } @@ -1397,7 +1462,8 @@ impl [T] { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result - where F: FnMut(&'a T) -> Ordering + where + F: FnMut(&'a T) -> Ordering, { let s = self; let mut size = s.len(); @@ -1417,8 +1483,11 @@ impl [T] { } // base is always in [0, size) because base <= mid. let cmp = f(unsafe { s.get_unchecked(base) }); - if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) } - + if cmp == Equal { + Ok(base) + } else { + Err(base + (cmp == Less) as usize) + } } /// Binary searches this sorted slice with a key extraction function. @@ -1455,8 +1524,9 @@ impl [T] { #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")] #[inline] pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result - where F: FnMut(&'a T) -> B, - B: Ord + where + F: FnMut(&'a T) -> B, + B: Ord, { self.binary_search_by(|k| f(k).cmp(b)) } @@ -1490,7 +1560,8 @@ impl [T] { #[stable(feature = "sort_unstable", since = "1.20.0")] #[inline] pub fn sort_unstable(&mut self) - where T: Ord + where + T: Ord, { sort::quicksort(self, |a, b| a.lt(b)); } @@ -1544,7 +1615,8 @@ impl [T] { #[stable(feature = "sort_unstable", since = "1.20.0")] #[inline] pub fn sort_unstable_by(&mut self, mut compare: F) - where F: FnMut(&T, &T) -> Ordering + where + F: FnMut(&T, &T) -> Ordering, { sort::quicksort(self, |a, b| compare(a, b) == Ordering::Less); } @@ -1577,7 +1649,9 @@ impl [T] { #[stable(feature = "sort_unstable", since = "1.20.0")] #[inline] pub fn sort_unstable_by_key(&mut self, mut f: F) - where F: FnMut(&T) -> K, K: Ord + where + F: FnMut(&T) -> K, + K: Ord, { sort::quicksort(self, |a, b| f(a).lt(&f(b))); } @@ -1605,7 +1679,8 @@ impl [T] { #[unstable(feature = "slice_partition_dedup", issue = "54279")] #[inline] pub fn partition_dedup(&mut self) -> (&mut [T], &mut [T]) - where T: PartialEq + where + T: PartialEq, { self.partition_dedup_by(|a, b| a == b) } @@ -1638,7 +1713,8 @@ impl [T] { #[unstable(feature = "slice_partition_dedup", issue = "54279")] #[inline] pub fn partition_dedup_by(&mut self, mut same_bucket: F) -> (&mut [T], &mut [T]) - where F: FnMut(&mut T, &mut T) -> bool + where + F: FnMut(&mut T, &mut T) -> bool, { // Although we have a mutable reference to `self`, we cannot make // *arbitrary* changes. 
The `same_bucket` calls could panic, so we @@ -1699,7 +1775,7 @@ impl [T] { let len = self.len(); if len <= 1 { - return (self, &mut []) + return (self, &mut []); } let ptr = self.as_mut_ptr(); @@ -1748,8 +1824,9 @@ impl [T] { #[unstable(feature = "slice_partition_dedup", issue = "54279")] #[inline] pub fn partition_dedup_by_key(&mut self, mut key: F) -> (&mut [T], &mut [T]) - where F: FnMut(&mut T) -> K, - K: PartialEq, + where + F: FnMut(&mut T) -> K, + K: PartialEq, { self.partition_dedup_by(|a, b| key(a) == key(b)) } @@ -1783,7 +1860,7 @@ impl [T] { /// let mut a = ['a', 'b', 'c', 'd', 'e', 'f']; /// a[1..5].rotate_left(1); /// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']); - /// ``` + /// ``` #[stable(feature = "slice_rotate", since = "1.26.0")] pub fn rotate_left(&mut self, mid: usize) { assert!(mid <= self.len()); @@ -1892,9 +1969,14 @@ impl [T] { /// [`copy_from_slice`]: #method.copy_from_slice /// [`split_at_mut`]: #method.split_at_mut #[stable(feature = "clone_from_slice", since = "1.7.0")] - pub fn clone_from_slice(&mut self, src: &[T]) where T: Clone { - assert!(self.len() == src.len(), - "destination and source slices have different lengths"); + pub fn clone_from_slice(&mut self, src: &[T]) + where + T: Clone, + { + assert!( + self.len() == src.len(), + "destination and source slices have different lengths" + ); // NOTE: We need to explicitly slice them to the same length // for bounds checking to be elided, and the optimizer will // generate memcpy for simple cases (for example T = u8). @@ -1903,7 +1985,6 @@ impl [T] { for i in 0..len { self[i].clone_from(&src[i]); } - } /// Copies all elements from `src` into `self`, using a memcpy. @@ -1961,12 +2042,17 @@ impl [T] { /// [`clone_from_slice`]: #method.clone_from_slice /// [`split_at_mut`]: #method.split_at_mut #[stable(feature = "copy_from_slice", since = "1.9.0")] - pub fn copy_from_slice(&mut self, src: &[T]) where T: Copy { - assert_eq!(self.len(), src.len(), - "destination and source slices have different lengths"); + pub fn copy_from_slice(&mut self, src: &[T]) + where + T: Copy, + { + assert_eq!( + self.len(), + src.len(), + "destination and source slices have different lengths" + ); unsafe { - ptr::copy_nonoverlapping( - src.as_ptr(), self.as_mut_ptr(), self.len()); + ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr(), self.len()); } } @@ -2076,11 +2162,12 @@ impl [T] { /// [`split_at_mut`]: #method.split_at_mut #[stable(feature = "swap_with_slice", since = "1.27.0")] pub fn swap_with_slice(&mut self, other: &mut [T]) { - assert!(self.len() == other.len(), - "destination and source slices have different lengths"); + assert!( + self.len() == other.len(), + "destination and source slices have different lengths" + ); unsafe { - ptr::swap_nonoverlapping( - self.as_mut_ptr(), other.as_mut_ptr(), self.len()); + ptr::swap_nonoverlapping(self.as_mut_ptr(), other.as_mut_ptr(), self.len()); } } @@ -2110,8 +2197,12 @@ impl [T] { // We should still make this `const fn` (and revert to recursive algorithm if we do) // because relying on llvm to consteval all this is… well, it makes me uncomfortable. 
let (ctz_a, mut ctz_b) = unsafe { - if a == 0 { return b; } - if b == 0 { return a; } + if a == 0 { + return b; + } + if b == 0 { + return a; + } (::intrinsics::cttz_nonzero(a), ::intrinsics::cttz_nonzero(b)) }; let k = ctz_a.min(ctz_b); @@ -2191,9 +2282,11 @@ impl [T] { let (left, rest) = self.split_at(offset); // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay let (us_len, ts_len) = rest.align_to_offsets::(); - (left, - from_raw_parts(rest.as_ptr() as *const U, us_len), - from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len)) + ( + left, + from_raw_parts(rest.as_ptr() as *const U, us_len), + from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len), + ) } } @@ -2245,9 +2338,11 @@ impl [T] { // now `rest` is definitely aligned, so `from_raw_parts_mut` below is okay let (us_len, ts_len) = rest.align_to_offsets::(); let mut_ptr = rest.as_mut_ptr(); - (left, - from_raw_parts_mut(mut_ptr as *mut U, us_len), - from_raw_parts_mut(mut_ptr.add(rest.len() - ts_len), ts_len)) + ( + left, + from_raw_parts_mut(mut_ptr as *mut U, us_len), + from_raw_parts_mut(mut_ptr.add(rest.len() - ts_len), ts_len), + ) } } } @@ -2269,10 +2364,11 @@ impl [u8] { #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")] #[inline] pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool { - self.len() == other.len() && - self.iter().zip(other).all(|(a, b)| { - a.eq_ignore_ascii_case(b) - }) + self.len() == other.len() + && self + .iter() + .zip(other) + .all(|(a, b)| a.eq_ignore_ascii_case(b)) } /// Converts this slice to its ASCII upper case equivalent in-place. @@ -2308,13 +2404,13 @@ impl [u8] { byte.make_ascii_lowercase(); } } - } #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"] impl ops::Index for [T] - where I: SliceIndex<[T]> +where + I: SliceIndex<[T]>, { type Output = I::Output; @@ -2327,7 +2423,8 @@ impl ops::Index for [T] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"] impl ops::IndexMut for [T] - where I: SliceIndex<[T]> +where + I: SliceIndex<[T]>, { #[inline] fn index_mut(&mut self, index: I) -> &mut I::Output { @@ -2420,9 +2517,7 @@ impl SliceIndex<[T]> for usize { #[inline] fn get(self, slice: &[T]) -> Option<&T> { if self < slice.len() { - unsafe { - Some(self.get_unchecked(slice)) - } + unsafe { Some(self.get_unchecked(slice)) } } else { None } @@ -2431,9 +2526,7 @@ impl SliceIndex<[T]> for usize { #[inline] fn get_mut(self, slice: &mut [T]) -> Option<&mut T> { if self < slice.len() { - unsafe { - Some(self.get_unchecked_mut(slice)) - } + unsafe { Some(self.get_unchecked_mut(slice)) } } else { None } @@ -2463,7 +2556,7 @@ impl SliceIndex<[T]> for usize { } #[stable(feature = "slice_get_slice_impls", since = "1.15.0")] -impl SliceIndex<[T]> for ops::Range { +impl SliceIndex<[T]> for ops::Range { type Output = [T]; #[inline] @@ -2471,9 +2564,7 @@ impl SliceIndex<[T]> for ops::Range { if self.start > self.end || self.end > slice.len() { None } else { - unsafe { - Some(self.get_unchecked(slice)) - } + unsafe { Some(self.get_unchecked(slice)) } } } @@ -2482,9 +2573,7 @@ impl SliceIndex<[T]> for ops::Range { if self.start > self.end || self.end > slice.len() { None } else { - unsafe { - Some(self.get_unchecked_mut(slice)) - } + unsafe { Some(self.get_unchecked_mut(slice)) } } } @@ -2505,9 +2594,7 @@ impl SliceIndex<[T]> for ops::Range { } else if self.end > slice.len() { 
slice_index_len_fail(self.end, slice.len()); } - unsafe { - self.get_unchecked(slice) - } + unsafe { self.get_unchecked(slice) } } #[inline] @@ -2517,9 +2604,7 @@ impl SliceIndex<[T]> for ops::Range { } else if self.end > slice.len() { slice_index_len_fail(self.end, slice.len()); } - unsafe { - self.get_unchecked_mut(slice) - } + unsafe { self.get_unchecked_mut(slice) } } } @@ -2628,21 +2713,26 @@ impl SliceIndex<[T]> for ops::RangeFull { } } - #[stable(feature = "inclusive_range", since = "1.26.0")] impl SliceIndex<[T]> for ops::RangeInclusive { type Output = [T]; #[inline] fn get(self, slice: &[T]) -> Option<&[T]> { - if *self.end() == usize::max_value() { None } - else { (*self.start()..self.end() + 1).get(slice) } + if *self.end() == usize::max_value() { + None + } else { + (*self.start()..self.end() + 1).get(slice) + } } #[inline] fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { - if *self.end() == usize::max_value() { None } - else { (*self.start()..self.end() + 1).get_mut(slice) } + if *self.end() == usize::max_value() { + None + } else { + (*self.start()..self.end() + 1).get_mut(slice) + } } #[inline] @@ -2657,13 +2747,17 @@ impl SliceIndex<[T]> for ops::RangeInclusive { #[inline] fn index(self, slice: &[T]) -> &[T] { - if *self.end() == usize::max_value() { slice_index_overflow_fail(); } + if *self.end() == usize::max_value() { + slice_index_overflow_fail(); + } (*self.start()..self.end() + 1).index(slice) } #[inline] fn index_mut(self, slice: &mut [T]) -> &mut [T] { - if *self.end() == usize::max_value() { slice_index_overflow_fail(); } + if *self.end() == usize::max_value() { + slice_index_overflow_fail(); + } (*self.start()..self.end() + 1).index_mut(slice) } } @@ -2710,13 +2804,17 @@ impl SliceIndex<[T]> for ops::RangeToInclusive { #[stable(feature = "rust1", since = "1.0.0")] impl Default for &[T] { /// Creates an empty slice. - fn default() -> Self { &[] } + fn default() -> Self { + &[] + } } #[stable(feature = "mut_slice_default", since = "1.5.0")] impl Default for &mut [T] { /// Creates a mutable empty slice. - fn default() -> Self { &mut [] } + fn default() -> Self { + &mut [] + } } // @@ -2753,7 +2851,9 @@ fn size_from_ptr(_: *const T) -> usize { macro_rules! is_empty { // The way we encode the length of a ZST iterator, this works both for ZST // and non-ZST. - ($self: ident) => {$self.ptr == $self.end} + ($self: ident) => { + $self.ptr == $self.end + }; } // To get rid of some bounds checks (see `position`), we compute the length in a somewhat // unexpected way. (Tested by `codegen/slice-position-bounds-check`.) @@ -2768,7 +2868,7 @@ macro_rules! len { // Using division instead of `offset_from` helps LLVM remove bounds checks diff / size } - }} + }}; } // The shared definition of the `Iter` and `IterMut` iterators @@ -3037,17 +3137,15 @@ macro_rules! iterator { pub struct Iter<'a, T: 'a> { ptr: *const T, end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that - // ptr == end is a quick test for the Iterator being empty, that works - // for both ZST and non-ZST. + // ptr == end is a quick test for the Iterator being empty, that works + // for both ZST and non-ZST. 
_marker: marker::PhantomData<&'a T>, } #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for Iter<'_, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("Iter") - .field(&self.as_slice()) - .finish() + f.debug_tuple("Iter").field(&self.as_slice()).finish() } } @@ -3087,11 +3185,17 @@ impl<'a, T> Iter<'a, T> { } } -iterator!{struct Iter -> *const T, &'a T, const, /* no mut */} +iterator! {struct Iter -> *const T, &'a T, const, /* no mut */} #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Iter<'_, T> { - fn clone(&self) -> Self { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } } + fn clone(&self) -> Self { + Iter { + ptr: self.ptr, + end: self.end, + _marker: self._marker, + } + } } #[stable(feature = "slice_iter_as_ref", since = "1.13.0")] @@ -3129,17 +3233,15 @@ impl AsRef<[T]> for Iter<'_, T> { pub struct IterMut<'a, T: 'a> { ptr: *mut T, end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that - // ptr == end is a quick test for the Iterator being empty, that works - // for both ZST and non-ZST. + // ptr == end is a quick test for the Iterator being empty, that works + // for both ZST and non-ZST. _marker: marker::PhantomData<&'a mut T>, } #[stable(feature = "core_impl_debug", since = "1.9.0")] impl fmt::Debug for IterMut<'_, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("IterMut") - .field(&self.make_slice()) - .finish() + f.debug_tuple("IterMut").field(&self.make_slice()).finish() } } @@ -3188,7 +3290,7 @@ impl<'a, T> IterMut<'a, T> { } } -iterator!{struct IterMut -> *mut T, &'a mut T, mut, mut} +iterator! {struct IterMut -> *mut T, &'a mut T, mut, mut} /// An internal abstraction over the splitting iterators, so that /// splitn, splitn_mut etc can be implemented once. 
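// A short usage sketch for the split-family slice iterators whose reformatted
// definitions follow below (standard stable slice API; the data is an
// arbitrary example).
fn split_family_sketch() {
    let xs = [1, 0, 2, 3, 0, 4];

    // `split` yields the subslices between elements matching the predicate.
    let parts: Vec<&[i32]> = xs.split(|&n| n == 0).collect();
    assert_eq!(parts, vec![&[1][..], &[2, 3][..], &[4][..]]);

    // `splitn` stops after at most `n` pieces; the final piece keeps the rest,
    // which is the behavior the `GenericSplitN` counter below provides.
    let first_two: Vec<&[i32]> = xs.splitn(2, |&n| n == 0).collect();
    assert_eq!(first_two, vec![&[1][..], &[2, 3, 0, 4][..]]);
}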
@@ -3207,14 +3309,20 @@ trait SplitIter: DoubleEndedIterator { /// [`split`]: ../../std/primitive.slice.html#method.split /// [slices]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] -pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool { +pub struct Split<'a, T: 'a, P> +where + P: FnMut(&T) -> bool, +{ v: &'a [T], pred: P, - finished: bool + finished: bool, } #[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for Split<'_, T, P> where P: FnMut(&T) -> bool { +impl fmt::Debug for Split<'_, T, P> +where + P: FnMut(&T) -> bool, +{ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("Split") .field("v", &self.v) @@ -3225,7 +3333,10 @@ impl fmt::Debug for Split<'_, T, P> where P: FnMut(&T) -> bool // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for Split<'_, T, P> where P: Clone + FnMut(&T) -> bool { +impl Clone for Split<'_, T, P> +where + P: Clone + FnMut(&T) -> bool, +{ fn clone(&self) -> Self { Split { v: self.v, @@ -3236,12 +3347,17 @@ impl Clone for Split<'_, T, P> where P: Clone + FnMut(&T) -> bool { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool { +impl<'a, T, P> Iterator for Split<'a, T, P> +where + P: FnMut(&T) -> bool, +{ type Item = &'a [T]; #[inline] fn next(&mut self) -> Option<&'a [T]> { - if self.finished { return None; } + if self.finished { + return None; + } match self.v.iter().position(|x| (self.pred)(x)) { None => self.finish(), @@ -3264,10 +3380,15 @@ impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool { +impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> +where + P: FnMut(&T) -> bool, +{ #[inline] fn next_back(&mut self) -> Option<&'a [T]> { - if self.finished { return None; } + if self.finished { + return None; + } match self.v.iter().rposition(|x| (self.pred)(x)) { None => self.finish(), @@ -3280,10 +3401,18 @@ impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> boo } } -impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool { +impl<'a, T, P> SplitIter for Split<'a, T, P> +where + P: FnMut(&T) -> bool, +{ #[inline] fn finish(&mut self) -> Option<&'a [T]> { - if self.finished { None } else { self.finished = true; Some(self.v) } + if self.finished { + None + } else { + self.finished = true; + Some(self.v) + } } } @@ -3298,14 +3427,20 @@ impl FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {} /// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut /// [slices]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] -pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool { +pub struct SplitMut<'a, T: 'a, P> +where + P: FnMut(&T) -> bool, +{ v: &'a mut [T], pred: P, - finished: bool + finished: bool, } #[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for SplitMut<'_, T, P> where P: FnMut(&T) -> bool { +impl fmt::Debug for SplitMut<'_, T, P> +where + P: FnMut(&T) -> bool, +{ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("SplitMut") .field("v", &self.v) @@ -3314,7 +3449,10 @@ impl fmt::Debug for SplitMut<'_, T, P> where P: FnMut(&T) -> b } } -impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool { +impl<'a, T, P> SplitIter for SplitMut<'a, T, P> +where + P: 
FnMut(&T) -> bool, +{ #[inline] fn finish(&mut self) -> Option<&'a mut [T]> { if self.finished { @@ -3327,14 +3465,20 @@ impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool { +impl<'a, T, P> Iterator for SplitMut<'a, T, P> +where + P: FnMut(&T) -> bool, +{ type Item = &'a mut [T]; #[inline] fn next(&mut self) -> Option<&'a mut [T]> { - if self.finished { return None; } + if self.finished { + return None; + } - let idx_opt = { // work around borrowck limitations + let idx_opt = { + // work around borrowck limitations let pred = &mut self.pred; self.v.iter().position(|x| (*pred)(x)) }; @@ -3362,14 +3506,18 @@ impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where +impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> +where P: FnMut(&T) -> bool, { #[inline] fn next_back(&mut self) -> Option<&'a mut [T]> { - if self.finished { return None; } + if self.finished { + return None; + } - let idx_opt = { // work around borrowck limitations + let idx_opt = { + // work around borrowck limitations let pred = &mut self.pred; self.v.iter().rposition(|x| (*pred)(x)) }; @@ -3397,12 +3545,18 @@ impl FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {} /// [slices]: ../../std/primitive.slice.html #[stable(feature = "slice_rsplit", since = "1.27.0")] #[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`? -pub struct RSplit<'a, T:'a, P> where P: FnMut(&T) -> bool { - inner: Split<'a, T, P> +pub struct RSplit<'a, T: 'a, P> +where + P: FnMut(&T) -> bool, +{ + inner: Split<'a, T, P>, } #[stable(feature = "slice_rsplit", since = "1.27.0")] -impl fmt::Debug for RSplit<'_, T, P> where P: FnMut(&T) -> bool { +impl fmt::Debug for RSplit<'_, T, P> +where + P: FnMut(&T) -> bool, +{ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("RSplit") .field("v", &self.inner.v) @@ -3412,7 +3566,10 @@ impl fmt::Debug for RSplit<'_, T, P> where P: FnMut(&T) -> boo } #[stable(feature = "slice_rsplit", since = "1.27.0")] -impl<'a, T, P> Iterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool { +impl<'a, T, P> Iterator for RSplit<'a, T, P> +where + P: FnMut(&T) -> bool, +{ type Item = &'a [T]; #[inline] @@ -3427,7 +3584,10 @@ impl<'a, T, P> Iterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool { } #[stable(feature = "slice_rsplit", since = "1.27.0")] -impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool { +impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P> +where + P: FnMut(&T) -> bool, +{ #[inline] fn next_back(&mut self) -> Option<&'a [T]> { self.inner.next() @@ -3435,7 +3595,10 @@ impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bo } #[stable(feature = "slice_rsplit", since = "1.27.0")] -impl<'a, T, P> SplitIter for RSplit<'a, T, P> where P: FnMut(&T) -> bool { +impl<'a, T, P> SplitIter for RSplit<'a, T, P> +where + P: FnMut(&T) -> bool, +{ #[inline] fn finish(&mut self) -> Option<&'a [T]> { self.inner.finish() @@ -3453,12 +3616,18 @@ impl FusedIterator for RSplit<'_, T, P> where P: FnMut(&T) -> bool {} /// [`rsplit_mut`]: ../../std/primitive.slice.html#method.rsplit_mut /// [slices]: ../../std/primitive.slice.html #[stable(feature = "slice_rsplit", since = "1.27.0")] -pub struct RSplitMut<'a, T:'a, P> where P: FnMut(&T) -> 
bool { - inner: SplitMut<'a, T, P> +pub struct RSplitMut<'a, T: 'a, P> +where + P: FnMut(&T) -> bool, +{ + inner: SplitMut<'a, T, P>, } #[stable(feature = "slice_rsplit", since = "1.27.0")] -impl fmt::Debug for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool { +impl fmt::Debug for RSplitMut<'_, T, P> +where + P: FnMut(&T) -> bool, +{ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("RSplitMut") .field("v", &self.inner.v) @@ -3468,7 +3637,10 @@ impl fmt::Debug for RSplitMut<'_, T, P> where P: FnMut(&T) -> } #[stable(feature = "slice_rsplit", since = "1.27.0")] -impl<'a, T, P> SplitIter for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool { +impl<'a, T, P> SplitIter for RSplitMut<'a, T, P> +where + P: FnMut(&T) -> bool, +{ #[inline] fn finish(&mut self) -> Option<&'a mut [T]> { self.inner.finish() @@ -3476,7 +3648,10 @@ impl<'a, T, P> SplitIter for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool { } #[stable(feature = "slice_rsplit", since = "1.27.0")] -impl<'a, T, P> Iterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool { +impl<'a, T, P> Iterator for RSplitMut<'a, T, P> +where + P: FnMut(&T) -> bool, +{ type Item = &'a mut [T]; #[inline] @@ -3491,7 +3666,8 @@ impl<'a, T, P> Iterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool { } #[stable(feature = "slice_rsplit", since = "1.27.0")] -impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P> where +impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P> +where P: FnMut(&T) -> bool, { #[inline] @@ -3512,15 +3688,21 @@ struct GenericSplitN { count: usize, } -impl> Iterator for GenericSplitN { +impl> Iterator for GenericSplitN { type Item = T; #[inline] fn next(&mut self) -> Option { match self.count { 0 => None, - 1 => { self.count -= 1; self.iter.finish() } - _ => { self.count -= 1; self.iter.next() } + 1 => { + self.count -= 1; + self.iter.finish() + } + _ => { + self.count -= 1; + self.iter.next() + } } } @@ -3539,12 +3721,18 @@ impl> Iterator for GenericSplitN { /// [`splitn`]: ../../std/primitive.slice.html#method.splitn /// [slices]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] -pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool { - inner: GenericSplitN> +pub struct SplitN<'a, T: 'a, P> +where + P: FnMut(&T) -> bool, +{ + inner: GenericSplitN>, } #[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for SplitN<'_, T, P> where P: FnMut(&T) -> bool { +impl fmt::Debug for SplitN<'_, T, P> +where + P: FnMut(&T) -> bool, +{ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("SplitN") .field("inner", &self.inner) @@ -3561,12 +3749,18 @@ impl fmt::Debug for SplitN<'_, T, P> where P: FnMut(&T) -> boo /// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn /// [slices]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] -pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool { - inner: GenericSplitN> +pub struct RSplitN<'a, T: 'a, P> +where + P: FnMut(&T) -> bool, +{ + inner: GenericSplitN>, } #[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for RSplitN<'_, T, P> where P: FnMut(&T) -> bool { +impl fmt::Debug for RSplitN<'_, T, P> +where + P: FnMut(&T) -> bool, +{ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("RSplitN") .field("inner", &self.inner) @@ -3582,12 +3776,18 @@ impl fmt::Debug for RSplitN<'_, T, P> where P: FnMut(&T) -> bo /// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut /// [slices]: ../../std/primitive.slice.html 
#[stable(feature = "rust1", since = "1.0.0")] -pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool { - inner: GenericSplitN> +pub struct SplitNMut<'a, T: 'a, P> +where + P: FnMut(&T) -> bool, +{ + inner: GenericSplitN>, } #[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for SplitNMut<'_, T, P> where P: FnMut(&T) -> bool { +impl fmt::Debug for SplitNMut<'_, T, P> +where + P: FnMut(&T) -> bool, +{ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("SplitNMut") .field("inner", &self.inner) @@ -3604,12 +3804,18 @@ impl fmt::Debug for SplitNMut<'_, T, P> where P: FnMut(&T) -> /// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut /// [slices]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] -pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool { - inner: GenericSplitN> +pub struct RSplitNMut<'a, T: 'a, P> +where + P: FnMut(&T) -> bool, +{ + inner: GenericSplitN>, } #[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for RSplitNMut<'_, T, P> where P: FnMut(&T) -> bool { +impl fmt::Debug for RSplitNMut<'_, T, P> +where + P: FnMut(&T) -> bool, +{ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("RSplitNMut") .field("inner", &self.inner) @@ -3620,8 +3826,9 @@ impl fmt::Debug for RSplitNMut<'_, T, P> where P: FnMut(&T) -> macro_rules! forward_iterator { ($name:ident: $elem:ident, $iter_of:ty) => { #[stable(feature = "rust1", since = "1.0.0")] - impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where - P: FnMut(&T) -> bool + impl<'a, $elem, P> Iterator for $name<'a, $elem, P> + where + P: FnMut(&T) -> bool, { type Item = $iter_of; @@ -3637,9 +3844,8 @@ macro_rules! forward_iterator { } #[stable(feature = "fused", since = "1.26.0")] - impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P> - where P: FnMut(&T) -> bool {} - } + impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P> where P: FnMut(&T) -> bool {} + }; } forward_iterator! { SplitN: T, &'a [T] } @@ -3655,9 +3861,9 @@ forward_iterator! 
{ RSplitNMut: T, &'a mut [T] } /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rust1", since = "1.0.0")] -pub struct Windows<'a, T:'a> { +pub struct Windows<'a, T: 'a> { v: &'a [T], - size: usize + size: usize, } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` @@ -3709,7 +3915,7 @@ impl<'a, T> Iterator for Windows<'a, T> { None } else { let nth = &self.v[n..end]; - self.v = &self.v[n+1..]; + self.v = &self.v[n + 1..]; Some(nth) } } @@ -3732,8 +3938,8 @@ impl<'a, T> DoubleEndedIterator for Windows<'a, T> { if self.size > self.v.len() { None } else { - let ret = Some(&self.v[self.v.len()-self.size..]); - self.v = &self.v[..self.v.len()-1]; + let ret = Some(&self.v[self.v.len() - self.size..]); + self.v = &self.v[..self.v.len() - 1]; ret } } @@ -3753,7 +3959,9 @@ unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> { unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T] { from_raw_parts(self.v.as_ptr().add(i), self.size) } - fn may_have_side_effect() -> bool { false } + fn may_have_side_effect() -> bool { + false + } } /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a @@ -3768,9 +3976,9 @@ unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> { /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rust1", since = "1.0.0")] -pub struct Chunks<'a, T:'a> { +pub struct Chunks<'a, T: 'a> { v: &'a [T], - chunk_size: usize + chunk_size: usize, } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` @@ -3807,7 +4015,7 @@ impl<'a, T> Iterator for Chunks<'a, T> { } else { let n = self.v.len() / self.chunk_size; let rem = self.v.len() % self.chunk_size; - let n = if rem > 0 { n+1 } else { n }; + let n = if rem > 0 { n + 1 } else { n }; (n, Some(n)) } } @@ -3853,7 +4061,11 @@ impl<'a, T> DoubleEndedIterator for Chunks<'a, T> { None } else { let remainder = self.v.len() % self.chunk_size; - let chunksz = if remainder != 0 { remainder } else { self.chunk_size }; + let chunksz = if remainder != 0 { + remainder + } else { + self.chunk_size + }; let (fst, snd) = self.v.split_at(self.v.len() - chunksz); self.v = fst; Some(snd) @@ -3880,7 +4092,9 @@ unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> { }; from_raw_parts(self.v.as_ptr().add(start), end - start) } - fn may_have_side_effect() -> bool { false } + fn may_have_side_effect() -> bool { + false + } } /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size` @@ -3895,9 +4109,9 @@ unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> { /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rust1", since = "1.0.0")] -pub struct ChunksMut<'a, T:'a> { +pub struct ChunksMut<'a, T: 'a> { v: &'a mut [T], - chunk_size: usize + chunk_size: usize, } #[stable(feature = "rust1", since = "1.0.0")] @@ -3947,7 +4161,7 @@ impl<'a, T> Iterator for ChunksMut<'a, T> { }; let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(end); - let (_, nth) = head.split_at_mut(start); + let (_, nth) = head.split_at_mut(start); self.v = tail; Some(nth) } @@ -3972,7 +4186,11 @@ impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> { None } else { let remainder = self.v.len() % self.chunk_size; - let sz = if remainder != 0 { remainder } else { self.chunk_size }; + let sz = if remainder != 0 { + remainder + } else { + self.chunk_size + }; let tmp = mem::replace(&mut self.v, &mut []); let tmp_len = tmp.len(); let (head, tail) = tmp.split_at_mut(tmp_len - sz); @@ -4001,7 +4219,9 @@ unsafe 
impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> { }; from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start) } - fn may_have_side_effect() -> bool { false } + fn may_have_side_effect() -> bool { + false + } } /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a @@ -4018,10 +4238,10 @@ unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> { /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "chunks_exact", since = "1.31.0")] -pub struct ChunksExact<'a, T:'a> { +pub struct ChunksExact<'a, T: 'a> { v: &'a [T], rem: &'a [T], - chunk_size: usize + chunk_size: usize, } impl<'a, T> ChunksExact<'a, T> { @@ -4125,7 +4345,9 @@ unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> { let start = i * self.chunk_size; from_raw_parts(self.v.as_ptr().add(start), self.chunk_size) } - fn may_have_side_effect() -> bool { false } + fn may_have_side_effect() -> bool { + false + } } /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size` @@ -4142,10 +4364,10 @@ unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> { /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "chunks_exact", since = "1.31.0")] -pub struct ChunksExactMut<'a, T:'a> { +pub struct ChunksExactMut<'a, T: 'a> { v: &'a mut [T], rem: &'a mut [T], - chunk_size: usize + chunk_size: usize, } impl<'a, T> ChunksExactMut<'a, T> { @@ -4241,7 +4463,9 @@ unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> { let start = i * self.chunk_size; from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size) } - fn may_have_side_effect() -> bool { false } + fn may_have_side_effect() -> bool { + false + } } /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a @@ -4256,9 +4480,9 @@ unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> { /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rchunks", since = "1.31.0")] -pub struct RChunks<'a, T:'a> { +pub struct RChunks<'a, T: 'a> { v: &'a [T], - chunk_size: usize + chunk_size: usize, } // FIXME(#26925) Remove in favor of `#[derive(Clone)]` @@ -4295,7 +4519,7 @@ impl<'a, T> Iterator for RChunks<'a, T> { } else { let n = self.v.len() / self.chunk_size; let rem = self.v.len() % self.chunk_size; - let n = if rem > 0 { n+1 } else { n }; + let n = if rem > 0 { n + 1 } else { n }; (n, Some(n)) } } @@ -4344,7 +4568,11 @@ impl<'a, T> DoubleEndedIterator for RChunks<'a, T> { None } else { let remainder = self.v.len() % self.chunk_size; - let chunksz = if remainder != 0 { remainder } else { self.chunk_size }; + let chunksz = if remainder != 0 { + remainder + } else { + self.chunk_size + }; let (fst, snd) = self.v.split_at(chunksz); self.v = snd; Some(fst) @@ -4372,7 +4600,9 @@ unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> { }; from_raw_parts(self.v.as_ptr().add(start), end - start) } - fn may_have_side_effect() -> bool { false } + fn may_have_side_effect() -> bool { + false + } } /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size` @@ -4387,9 +4617,9 @@ unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> { /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rchunks", since = "1.31.0")] -pub struct RChunksMut<'a, T:'a> { +pub struct RChunksMut<'a, T: 'a> { v: &'a mut [T], - chunk_size: usize + chunk_size: usize, } #[stable(feature = "rchunks", since = "1.31.0")] @@ -4468,7 +4698,11 @@ impl<'a, T> DoubleEndedIterator 
for RChunksMut<'a, T> { None } else { let remainder = self.v.len() % self.chunk_size; - let sz = if remainder != 0 { remainder } else { self.chunk_size }; + let sz = if remainder != 0 { + remainder + } else { + self.chunk_size + }; let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(sz); self.v = tail; @@ -4497,7 +4731,9 @@ unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> { }; from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start) } - fn may_have_side_effect() -> bool { false } + fn may_have_side_effect() -> bool { + false + } } /// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a @@ -4514,10 +4750,10 @@ unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> { /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rchunks", since = "1.31.0")] -pub struct RChunksExact<'a, T:'a> { +pub struct RChunksExact<'a, T: 'a> { v: &'a [T], rem: &'a [T], - chunk_size: usize + chunk_size: usize, } impl<'a, T> RChunksExact<'a, T> { @@ -4622,7 +4858,9 @@ unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> { let start = end - self.chunk_size; from_raw_parts(self.v.as_ptr().add(start), self.chunk_size) } - fn may_have_side_effect() -> bool { false } + fn may_have_side_effect() -> bool { + false + } } /// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size` @@ -4639,10 +4877,10 @@ unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> { /// [slices]: ../../std/primitive.slice.html #[derive(Debug)] #[stable(feature = "rchunks", since = "1.31.0")] -pub struct RChunksExactMut<'a, T:'a> { +pub struct RChunksExactMut<'a, T: 'a> { v: &'a mut [T], rem: &'a mut [T], - chunk_size: usize + chunk_size: usize, } impl<'a, T> RChunksExactMut<'a, T> { @@ -4740,7 +4978,9 @@ unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> { let start = end - self.chunk_size; from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size) } - fn may_have_side_effect() -> bool { false } + fn may_have_side_effect() -> bool { + false + } } // @@ -4791,10 +5031,18 @@ unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] { - debug_assert!(data as usize % mem::align_of::() == 0, "attempt to create unaligned slice"); - debug_assert!(mem::size_of::().saturating_mul(len) <= isize::MAX as usize, - "attempt to create slice covering half the address space"); - Repr { raw: FatPtr { data, len } }.rust + debug_assert!( + data as usize % mem::align_of::() == 0, + "attempt to create unaligned slice" + ); + debug_assert!( + mem::size_of::().saturating_mul(len) <= isize::MAX as usize, + "attempt to create slice covering half the address space" + ); + Repr { + raw: FatPtr { data, len }, + } + .rust } /// Performs the same functionality as [`from_raw_parts`], except that a @@ -4812,33 +5060,42 @@ pub unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] { - debug_assert!(data as usize % mem::align_of::() == 0, "attempt to create unaligned slice"); - debug_assert!(mem::size_of::().saturating_mul(len) <= isize::MAX as usize, - "attempt to create slice covering half the address space"); - Repr { raw: FatPtr { data, len } }.rust_mut + debug_assert!( + data as usize % mem::align_of::() == 0, + 
"attempt to create unaligned slice" + ); + debug_assert!( + mem::size_of::().saturating_mul(len) <= isize::MAX as usize, + "attempt to create slice covering half the address space" + ); + Repr { + raw: FatPtr { data, len }, + } + .rust_mut } /// Converts a reference to T into a slice of length 1 (without copying). #[stable(feature = "from_ref", since = "1.28.0")] pub fn from_ref(s: &T) -> &[T] { - unsafe { - from_raw_parts(s, 1) - } + unsafe { from_raw_parts(s, 1) } } /// Converts a reference to T into a slice of length 1 (without copying). #[stable(feature = "from_ref", since = "1.28.0")] pub fn from_mut(s: &mut T) -> &mut [T] { - unsafe { - from_raw_parts_mut(s, 1) - } + unsafe { from_raw_parts_mut(s, 1) } } // This function is public only because there is no other way to unit test heapsort. -#[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "0")] +#[unstable( + feature = "sort_internals", + reason = "internal to sort module", + issue = "0" +)] #[doc(hidden)] pub fn heapsort(v: &mut [T], mut is_less: F) - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { sort::heapsort(v, &mut is_less); } @@ -4847,7 +5104,7 @@ pub fn heapsort(v: &mut [T], mut is_less: F) // Comparison traits // -extern { +extern "C" { /// Calls implementation provided memcmp. /// /// Interprets the data as u8. @@ -4859,7 +5116,10 @@ extern { } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq<[B]> for [A] where A: PartialEq { +impl PartialEq<[B]> for [A] +where + A: PartialEq, +{ fn eq(&self, other: &[B]) -> bool { SlicePartialEq::equal(self, other) } @@ -4893,12 +5153,15 @@ impl PartialOrd for [T] { trait SlicePartialEq { fn equal(&self, other: &[B]) -> bool; - fn not_equal(&self, other: &[B]) -> bool { !self.equal(other) } + fn not_equal(&self, other: &[B]) -> bool { + !self.equal(other) + } } // Generic slice equality impl SlicePartialEq for [A] - where A: PartialEq +where + A: PartialEq, { default fn equal(&self, other: &[B]) -> bool { if self.len() != other.len() { @@ -4917,7 +5180,8 @@ impl SlicePartialEq for [A] // Use memcmp for bytewise equality when the types allow impl SlicePartialEq for [A] - where A: PartialEq + BytewiseEquality +where + A: PartialEq + BytewiseEquality, { fn equal(&self, other: &[A]) -> bool { if self.len() != other.len() { @@ -4928,8 +5192,11 @@ impl SlicePartialEq for [A] } unsafe { let size = mem::size_of_val(self); - memcmp(self.as_ptr() as *const u8, - other.as_ptr() as *const u8, size) == 0 + memcmp( + self.as_ptr() as *const u8, + other.as_ptr() as *const u8, + size, + ) == 0 } } } @@ -4941,7 +5208,8 @@ trait SlicePartialOrd { } impl SlicePartialOrd for [A] - where A: PartialOrd +where + A: PartialOrd, { default fn partial_compare(&self, other: &[A]) -> Option { let l = cmp::min(self.len(), other.len()); @@ -4963,7 +5231,8 @@ impl SlicePartialOrd for [A] } impl SlicePartialOrd for [A] - where A: Ord +where + A: Ord, { default fn partial_compare(&self, other: &[A]) -> Option { Some(SliceOrd::compare(self, other)) @@ -4977,7 +5246,8 @@ trait SliceOrd { } impl SliceOrd for [A] - where A: Ord +where + A: Ord, { default fn compare(&self, other: &[A]) -> Ordering { let l = cmp::min(self.len(), other.len()); @@ -5004,8 +5274,11 @@ impl SliceOrd for [u8] { #[inline] fn compare(&self, other: &[u8]) -> Ordering { let order = unsafe { - memcmp(self.as_ptr(), other.as_ptr(), - cmp::min(self.len(), other.len())) + memcmp( + self.as_ptr(), + other.as_ptr(), + cmp::min(self.len(), other.len()), + ) }; if order == 0 { 
self.len().cmp(&other.len()) @@ -5020,7 +5293,7 @@ impl SliceOrd for [u8] { #[doc(hidden)] /// Trait implemented for types that can be compared for equality using /// their bytewise representation -trait BytewiseEquality { } +trait BytewiseEquality {} macro_rules! impl_marker_for { ($traitname:ident, $($ty:ty)*) => { @@ -5038,7 +5311,9 @@ unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> { unsafe fn get_unchecked(&mut self, i: usize) -> &'a T { &*self.ptr.add(i) } - fn may_have_side_effect() -> bool { false } + fn may_have_side_effect() -> bool { + false + } } #[doc(hidden)] @@ -5046,14 +5321,19 @@ unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> { unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T { &mut *self.ptr.add(i) } - fn may_have_side_effect() -> bool { false } + fn may_have_side_effect() -> bool { + false + } } trait SliceContains: Sized { fn slice_contains(&self, x: &[Self]) -> bool; } -impl SliceContains for T where T: PartialEq { +impl SliceContains for T +where + T: PartialEq, +{ default fn slice_contains(&self, x: &[Self]) -> bool { x.iter().any(|y| *y == *self) } diff --git a/src/libcore/slice/rotate.rs b/src/libcore/slice/rotate.rs index 52677713f5ac4..41ad575629cd0 100644 --- a/src/libcore/slice/rotate.rs +++ b/src/libcore/slice/rotate.rs @@ -60,10 +60,7 @@ pub unsafe fn ptr_rotate(mut left: usize, mid: *mut T, mut right: usize) { break; } - ptr::swap_nonoverlapping( - mid.sub(left), - mid.add(right - delta), - delta); + ptr::swap_nonoverlapping(mid.sub(left), mid.add(right - delta), delta); if left <= right { right -= delta; @@ -80,8 +77,7 @@ pub unsafe fn ptr_rotate(mut left: usize, mid: *mut T, mut right: usize) { ptr::copy_nonoverlapping(mid.sub(left), buf, left); ptr::copy(mid, mid.sub(left), right); ptr::copy_nonoverlapping(buf, dim, left); - } - else { + } else { ptr::copy_nonoverlapping(mid, buf, right); ptr::copy(mid.sub(left), dim, left); ptr::copy_nonoverlapping(buf, mid.sub(left), right); diff --git a/src/libcore/slice/sort.rs b/src/libcore/slice/sort.rs index dd9b49fb7a002..586740a2a731b 100644 --- a/src/libcore/slice/sort.rs +++ b/src/libcore/slice/sort.rs @@ -18,13 +18,16 @@ struct CopyOnDrop { impl Drop for CopyOnDrop { fn drop(&mut self) { - unsafe { ptr::copy_nonoverlapping(self.src, self.dest, 1); } + unsafe { + ptr::copy_nonoverlapping(self.src, self.dest, 1); + } } } /// Shifts the first element to the right until it encounters a greater or equal element. fn shift_head(v: &mut [T], is_less: &mut F) - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { let len = v.len(); unsafe { @@ -56,7 +59,8 @@ fn shift_head(v: &mut [T], is_less: &mut F) /// Shifts the last element to the left until it encounters a smaller or equal element. fn shift_tail(v: &mut [T], is_less: &mut F) - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { let len = v.len(); unsafe { @@ -72,7 +76,7 @@ fn shift_tail(v: &mut [T], is_less: &mut F) }; ptr::copy_nonoverlapping(v.get_unchecked(len - 2), v.get_unchecked_mut(len - 1), 1); - for i in (0..len-2).rev() { + for i in (0..len - 2).rev() { if !is_less(&*tmp, v.get_unchecked(i)) { break; } @@ -91,7 +95,8 @@ fn shift_tail(v: &mut [T], is_less: &mut F) /// Returns `true` if the slice is sorted at the end. This function is `O(n)` worst-case. #[cold] fn partial_insertion_sort(v: &mut [T], is_less: &mut F) -> bool - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { // Maximum number of adjacent out-of-order pairs that will get shifted. 
const MAX_STEPS: usize = 5; @@ -134,17 +139,19 @@ fn partial_insertion_sort(v: &mut [T], is_less: &mut F) -> bool /// Sorts a slice using insertion sort, which is `O(n^2)` worst-case. fn insertion_sort(v: &mut [T], is_less: &mut F) - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { for i in 1..v.len() { - shift_tail(&mut v[..i+1], is_less); + shift_tail(&mut v[..i + 1], is_less); } } /// Sorts `v` using heapsort, which guarantees `O(n log n)` worst-case. #[cold] pub fn heapsort(v: &mut [T], is_less: &mut F) - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { // This binary heap respects the invariant `parent >= child`. let mut sift_down = |v: &mut [T], mut node| { @@ -172,12 +179,12 @@ pub fn heapsort(v: &mut [T], is_less: &mut F) }; // Build the heap in linear time. - for i in (0 .. v.len() / 2).rev() { + for i in (0..v.len() / 2).rev() { sift_down(v, i); } // Pop maximal elements from the heap. - for i in (1 .. v.len()).rev() { + for i in (1..v.len()).rev() { v.swap(0, i); sift_down(&mut v[..i], 0); } @@ -193,7 +200,8 @@ pub fn heapsort(v: &mut [T], is_less: &mut F) /// /// [pdf]: http://drops.dagstuhl.de/opus/volltexte/2016/6389/pdf/LIPIcs-ESA-2016-38.pdf fn partition_in_blocks(v: &mut [T], pivot: &T, is_less: &mut F) -> usize - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { // Number of elements in a typical block. const BLOCK: usize = 128; @@ -296,8 +304,16 @@ fn partition_in_blocks(v: &mut [T], pivot: &T, is_less: &mut F) -> usize let count = cmp::min(width(start_l, end_l), width(start_r, end_r)); if count > 0 { - macro_rules! left { () => { l.offset(*start_l as isize) } } - macro_rules! right { () => { r.offset(-(*start_r as isize) - 1) } } + macro_rules! left { + () => { + l.offset(*start_l as isize) + }; + } + macro_rules! right { + () => { + r.offset(-(*start_r as isize) - 1) + }; + } // Instead of swapping one pair at the time, it is more efficient to perform a cyclic // permutation. This is not strictly equivalent to swapping, but produces a similar @@ -377,7 +393,8 @@ fn partition_in_blocks(v: &mut [T], pivot: &T, is_less: &mut F) -> usize /// 1. Number of elements smaller than `v[pivot]`. /// 2. True if `v` was already partitioned. fn partition(v: &mut [T], pivot: usize, is_less: &mut F) -> (usize, bool) - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { let (mid, was_partitioned) = { // Place the pivot at the beginning of slice. @@ -409,7 +426,10 @@ fn partition(v: &mut [T], pivot: usize, is_less: &mut F) -> (usize, bool) } } - (l + partition_in_blocks(&mut v[l..r], pivot, is_less), l >= r) + ( + l + partition_in_blocks(&mut v[l..r], pivot, is_less), + l >= r, + ) // `_pivot_guard` goes out of scope and writes the pivot (which is a stack-allocated // variable) back into the slice where it originally was. This step is critical in ensuring @@ -427,7 +447,8 @@ fn partition(v: &mut [T], pivot: usize, is_less: &mut F) -> (usize, bool) /// Returns the number of elements equal to the pivot. It is assumed that `v` does not contain /// elements smaller than the pivot. fn partition_equal(v: &mut [T], pivot: usize, is_less: &mut F) -> usize - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { // Place the pivot at the beginning of slice. v.swap(0, pivot); @@ -526,7 +547,8 @@ fn break_patterns(v: &mut [T]) { /// /// Elements in `v` might be reordered in the process. 
fn choose_pivot(v: &mut [T], is_less: &mut F) -> (usize, bool) - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { // Minimum length to choose the median-of-medians method. // Shorter slices use the simple median-of-three method. @@ -594,7 +616,8 @@ fn choose_pivot(v: &mut [T], is_less: &mut F) -> (usize, bool) /// `limit` is the number of allowed imbalanced partitions before switching to `heapsort`. If zero, /// this function will immediately switch to heapsort. fn recurse<'a, T, F>(mut v: &'a mut [T], is_less: &mut F, mut pred: Option<&'a T>, mut limit: usize) - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { // Slices of up to this length get sorted using insertion sort. const MAX_INSERTION: usize = 20; @@ -648,7 +671,7 @@ fn recurse<'a, T, F>(mut v: &'a mut [T], is_less: &mut F, mut pred: Option<&'a T let mid = partition_equal(v, pivot, is_less); // Continue sorting elements greater than the pivot. - v = &mut {v}[mid..]; + v = &mut { v }[mid..]; continue; } } @@ -659,7 +682,7 @@ fn recurse<'a, T, F>(mut v: &'a mut [T], is_less: &mut F, mut pred: Option<&'a T was_partitioned = was_p; // Split the slice into `left`, `pivot`, and `right`. - let (left, right) = {v}.split_at_mut(mid); + let (left, right) = { v }.split_at_mut(mid); let (pivot, right) = right.split_at_mut(1); let pivot = &pivot[0]; @@ -679,7 +702,8 @@ fn recurse<'a, T, F>(mut v: &'a mut [T], is_less: &mut F, mut pred: Option<&'a T /// Sorts `v` using pattern-defeating quicksort, which is `O(n log n)` worst-case. pub fn quicksort(v: &mut [T], mut is_less: F) - where F: FnMut(&T, &T) -> bool +where + F: FnMut(&T, &T) -> bool, { // Sorting has no meaningful behavior on zero-sized types. if mem::size_of::() == 0 { diff --git a/src/libcore/str/lossy.rs b/src/libcore/str/lossy.rs index b3e8527c4ae0b..eee138177ae49 100644 --- a/src/libcore/str/lossy.rs +++ b/src/libcore/str/lossy.rs @@ -1,13 +1,13 @@ use char; -use str as core_str; use fmt; use fmt::Write; use mem; +use str as core_str; /// Lossy UTF-8 string. #[unstable(feature = "str_internals", issue = "0")] pub struct Utf8Lossy { - bytes: [u8] + bytes: [u8], } impl Utf8Lossy { @@ -20,11 +20,12 @@ impl Utf8Lossy { } pub fn chunks(&self) -> Utf8LossyChunksIter { - Utf8LossyChunksIter { source: &self.bytes } + Utf8LossyChunksIter { + source: &self.bytes, + } } } - /// Iterator over lossy UTF-8 string #[unstable(feature = "str_internals", issue = "0")] #[allow(missing_debug_implementations)] @@ -68,16 +69,18 @@ impl<'a> Iterator for Utf8LossyChunksIter<'a> { } else { let w = core_str::utf8_char_width(byte); - macro_rules! error { () => ({ - unsafe { - let r = Utf8LossyChunk { - valid: core_str::from_utf8_unchecked(&self.source[0..i_]), - broken: &self.source[i_..i], - }; - self.source = &self.source[i..]; - return Some(r); - } - })} + macro_rules! 
error { + () => {{ + unsafe { + let r = Utf8LossyChunk { + valid: core_str::from_utf8_unchecked(&self.source[0..i_]), + broken: &self.source[i_..i], + }; + self.source = &self.source[i..]; + return Some(r); + } + }}; + } match w { 2 => { @@ -88,10 +91,10 @@ impl<'a> Iterator for Utf8LossyChunksIter<'a> { } 3 => { match (byte, safe_get(self.source, i)) { - (0xE0, 0xA0 ..= 0xBF) => (), - (0xE1 ..= 0xEC, 0x80 ..= 0xBF) => (), - (0xED, 0x80 ..= 0x9F) => (), - (0xEE ..= 0xEF, 0x80 ..= 0xBF) => (), + (0xE0, 0xA0..=0xBF) => (), + (0xE1..=0xEC, 0x80..=0xBF) => (), + (0xED, 0x80..=0x9F) => (), + (0xEE..=0xEF, 0x80..=0xBF) => (), _ => { error!(); } @@ -104,9 +107,9 @@ impl<'a> Iterator for Utf8LossyChunksIter<'a> { } 4 => { match (byte, safe_get(self.source, i)) { - (0xF0, 0x90 ..= 0xBF) => (), - (0xF1 ..= 0xF3, 0x80 ..= 0xBF) => (), - (0xF4, 0x80 ..= 0x8F) => (), + (0xF0, 0x90..=0xBF) => (), + (0xF1..=0xF3, 0x80..=0xBF) => (), + (0xF4, 0x80..=0x8F) => (), _ => { error!(); } @@ -137,13 +140,12 @@ impl<'a> Iterator for Utf8LossyChunksIter<'a> { } } - impl fmt::Display for Utf8Lossy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // If we're the empty string then our iterator won't actually yield // anything, so perform the formatting manually if self.bytes.len() == 0 { - return "".fmt(f) + return "".fmt(f); } for Utf8LossyChunk { valid, broken } in self.chunks() { @@ -152,7 +154,7 @@ impl fmt::Display for Utf8Lossy { // respect various formatting flags if possible. if valid.len() == self.bytes.len() { assert!(broken.is_empty()); - return valid.fmt(f) + return valid.fmt(f); } f.write_str(valid)?; @@ -169,7 +171,6 @@ impl fmt::Debug for Utf8Lossy { f.write_char('"')?; for Utf8LossyChunk { valid, broken } in self.chunks() { - // Valid part. // Here we partially parse UTF-8 again which is suboptimal. 
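
[Editor's note: illustrative sketch, not part of the patch. The unstable Utf8Lossy type reformatted above walks its input as (valid, broken) chunks, and the Display impl emits U+FFFD for each broken chunk. The stable std wrapper String::from_utf8_lossy exposes the same behaviour; the helper fn name below is arbitrary.]

fn lossy_example() {
    // 0x80 is a lone continuation byte, so it is not valid UTF-8 on its own.
    let bytes = [b'f', b'o', 0x80, b'o'];
    let s = String::from_utf8_lossy(&bytes);
    // The broken byte is rendered as the replacement character U+FFFD.
    assert_eq!(s, "fo\u{FFFD}o");
}
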
{ diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs index 689d456d41246..9e66998e73a14 100644 --- a/src/libcore/str/mod.rs +++ b/src/libcore/str/mod.rs @@ -5,14 +5,14 @@ #![stable(feature = "rust1", since = "1.0.0")] use self::pattern::Pattern; -use self::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher}; +use self::pattern::{DoubleEndedSearcher, ReverseSearcher, Searcher}; use char; use fmt; -use iter::{Map, Cloned, FusedIterator, TrustedLen, Filter}; +use iter::{Cloned, Filter, FusedIterator, Map, TrustedLen}; use iter_private::TrustedRandomAccess; -use slice::{self, SliceIndex, Split as SliceSplit}; use mem; +use slice::{self, SliceIndex, Split as SliceSplit}; pub mod pattern; @@ -124,9 +124,9 @@ impl FromStr for bool { #[inline] fn from_str(s: &str) -> Result { match s { - "true" => Ok(true), + "true" => Ok(true), "false" => Ok(false), - _ => Err(ParseBoolError { _priv: () }), + _ => Err(ParseBoolError { _priv: () }), } } } @@ -136,7 +136,9 @@ impl FromStr for bool { /// [`from_str`]: ../../std/primitive.bool.html#method.from_str #[derive(Debug, Clone, PartialEq, Eq)] #[stable(feature = "rust1", since = "1.0.0")] -pub struct ParseBoolError { _priv: () } +pub struct ParseBoolError { + _priv: (), +} #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Display for ParseBoolError { @@ -221,7 +223,9 @@ impl Utf8Error { /// assert_eq!(1, error.valid_up_to()); /// ``` #[stable(feature = "utf8_error", since = "1.5.0")] - pub fn valid_up_to(&self) -> usize { self.valid_up_to } + pub fn valid_up_to(&self) -> usize { + self.valid_up_to + } /// Provide more information about the failure: /// @@ -435,10 +439,17 @@ pub unsafe fn from_utf8_unchecked_mut(v: &mut [u8]) -> &mut str { impl fmt::Display for Utf8Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Some(error_len) = self.error_len { - write!(f, "invalid utf-8 sequence of {} bytes from index {}", - error_len, self.valid_up_to) + write!( + f, + "invalid utf-8 sequence of {} bytes from index {}", + error_len, self.valid_up_to + ) } else { - write!(f, "incomplete utf-8 byte sequence from index {}", self.valid_up_to) + write!( + f, + "incomplete utf-8 byte sequence from index {}", + self.valid_up_to + ) } } } @@ -459,23 +470,29 @@ Section: Iterators #[derive(Clone, Debug)] #[stable(feature = "rust1", since = "1.0.0")] pub struct Chars<'a> { - iter: slice::Iter<'a, u8> + iter: slice::Iter<'a, u8>, } /// Returns the initial codepoint accumulator for the first byte. /// The first byte is special, only want bottom 5 bits for width 2, 4 bits /// for width 3, and 3 bits for width 4. #[inline] -fn utf8_first_byte(byte: u8, width: u32) -> u32 { (byte & (0x7F >> width)) as u32 } +fn utf8_first_byte(byte: u8, width: u32) -> u32 { + (byte & (0x7F >> width)) as u32 +} /// Returns the value of `ch` updated with continuation byte `byte`. #[inline] -fn utf8_acc_cont_byte(ch: u32, byte: u8) -> u32 { (ch << 6) | (byte & CONT_MASK) as u32 } +fn utf8_acc_cont_byte(ch: u32, byte: u8) -> u32 { + (ch << 6) | (byte & CONT_MASK) as u32 +} /// Checks whether the byte is a UTF-8 continuation byte (i.e., starts with the /// bits `10`). 
#[inline] -fn utf8_is_cont_byte(byte: u8) -> bool { (byte & !CONT_MASK) == TAG_CONT_U8 } +fn utf8_is_cont_byte(byte: u8) -> bool { + (byte & !CONT_MASK) == TAG_CONT_U8 +} #[inline] fn unwrap_or_0(opt: Option<&u8>) -> u8 { @@ -493,7 +510,7 @@ pub fn next_code_point<'a, I: Iterator>(bytes: &mut I) -> Option< // Decode UTF-8 let x = *bytes.next()?; if x < 128 { - return Some(x as u32) + return Some(x as u32); } // Multibyte case follows @@ -523,7 +540,8 @@ pub fn next_code_point<'a, I: Iterator>(bytes: &mut I) -> Option< /// UTF-8-like encoding). #[inline] fn next_code_point_reverse<'a, I>(bytes: &mut I) -> Option - where I: DoubleEndedIterator, +where + I: DoubleEndedIterator, { // Decode UTF-8 let w = match *bytes.next_back()? { @@ -559,9 +577,7 @@ impl<'a> Iterator for Chars<'a> { fn next(&mut self) -> Option { next_code_point(&mut self.iter).map(|ch| { // str invariant says `ch` is a valid Unicode Scalar Value - unsafe { - char::from_u32_unchecked(ch) - } + unsafe { char::from_u32_unchecked(ch) } }) } @@ -598,9 +614,7 @@ impl<'a> DoubleEndedIterator for Chars<'a> { fn next_back(&mut self) -> Option { next_code_point_reverse(&mut self.iter).map(|ch| { // str invariant says `ch` is a valid Unicode Scalar Value - unsafe { - char::from_u32_unchecked(ch) - } + unsafe { char::from_u32_unchecked(ch) } }) } } @@ -751,32 +765,41 @@ impl Iterator for Bytes<'_> { } #[inline] - fn all(&mut self, f: F) -> bool where F: FnMut(Self::Item) -> bool { + fn all(&mut self, f: F) -> bool + where + F: FnMut(Self::Item) -> bool, + { self.0.all(f) } #[inline] - fn any(&mut self, f: F) -> bool where F: FnMut(Self::Item) -> bool { + fn any(&mut self, f: F) -> bool + where + F: FnMut(Self::Item) -> bool, + { self.0.any(f) } #[inline] - fn find

<P>(&mut self, predicate: P) -> Option<Self::Item> where - P: FnMut(&Self::Item) -> bool + fn find
<P>(&mut self, predicate: P) -> Option<Self::Item> + where + P: FnMut(&Self::Item) -> bool, { self.0.find(predicate) } #[inline] - fn position
<P>(&mut self, predicate: P) -> Option<usize> where - P: FnMut(Self::Item) -> bool + fn position
<P>(&mut self, predicate: P) -> Option<usize> + where + P: FnMut(Self::Item) -> bool, { self.0.position(predicate) } #[inline] - fn rposition
<P>(&mut self, predicate: P) -> Option<usize> where - P: FnMut(Self::Item) -> bool + fn rposition
<P>(&mut self, predicate: P) -> Option<usize> + where + P: FnMut(Self::Item) -> bool, { self.0.rposition(predicate) } @@ -790,8 +813,9 @@ impl DoubleEndedIterator for Bytes<'_> { } #[inline] - fn rfind
<P>(&mut self, predicate: P) -> Option<Self::Item> where - P: FnMut(&Self::Item) -> bool + fn rfind
<P>
(&mut self, predicate: P) -> Option + where + P: FnMut(&Self::Item) -> bool, { self.0.rfind(predicate) } @@ -821,7 +845,9 @@ unsafe impl<'a> TrustedRandomAccess for Bytes<'a> { unsafe fn get_unchecked(&mut self, i: usize) -> u8 { self.0.get_unchecked(i) } - fn may_have_side_effect() -> bool { false } + fn may_have_side_effect() -> bool { + false + } } /// This macro generates a Clone impl for string pattern API @@ -829,14 +855,15 @@ unsafe impl<'a> TrustedRandomAccess for Bytes<'a> { macro_rules! derive_pattern_clone { (clone $t:ident with |$s:ident| $e:expr) => { impl<'a, P: Pattern<'a>> Clone for $t<'a, P> - where P::Searcher: Clone + where + P::Searcher: Clone, { fn clone(&self) -> Self { let $s = self; $e } } - } + }; } /// This macro generates two public iterator structs @@ -1013,7 +1040,7 @@ macro_rules! generate_pattern_iterators { } => {} } -derive_pattern_clone!{ +derive_pattern_clone! { clone SplitInternal with |s| SplitInternal { matcher: s.matcher.clone(), ..*s } } @@ -1026,7 +1053,10 @@ struct SplitInternal<'a, P: Pattern<'a>> { finished: bool, } -impl<'a, P: Pattern<'a>> fmt::Debug for SplitInternal<'a, P> where P::Searcher: fmt::Debug { +impl<'a, P: Pattern<'a>> fmt::Debug for SplitInternal<'a, P> +where + P::Searcher: fmt::Debug, +{ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("SplitInternal") .field("start", &self.start) @@ -1054,7 +1084,9 @@ impl<'a, P: Pattern<'a>> SplitInternal<'a, P> { #[inline] fn next(&mut self) -> Option<&'a str> { - if self.finished { return None } + if self.finished { + return None; + } let haystack = self.matcher.haystack(); match self.matcher.next_match() { @@ -1069,15 +1101,22 @@ impl<'a, P: Pattern<'a>> SplitInternal<'a, P> { #[inline] fn next_back(&mut self) -> Option<&'a str> - where P::Searcher: ReverseSearcher<'a> + where + P::Searcher: ReverseSearcher<'a>, { - if self.finished { return None } + if self.finished { + return None; + } if !self.allow_trailing_empty { self.allow_trailing_empty = true; match self.next_back() { Some(elt) if !elt.is_empty() => return Some(elt), - _ => if self.finished { return None } + _ => { + if self.finished { + return None; + } + } } } @@ -1132,7 +1171,7 @@ generate_pattern_iterators! { delegate double ended; } -derive_pattern_clone!{ +derive_pattern_clone! { clone SplitNInternal with |s| SplitNInternal { iter: s.iter.clone(), ..*s } } @@ -1143,7 +1182,10 @@ struct SplitNInternal<'a, P: Pattern<'a>> { count: usize, } -impl<'a, P: Pattern<'a>> fmt::Debug for SplitNInternal<'a, P> where P::Searcher: fmt::Debug { +impl<'a, P: Pattern<'a>> fmt::Debug for SplitNInternal<'a, P> +where + P::Searcher: fmt::Debug, +{ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("SplitNInternal") .field("iter", &self.iter) @@ -1157,19 +1199,32 @@ impl<'a, P: Pattern<'a>> SplitNInternal<'a, P> { fn next(&mut self) -> Option<&'a str> { match self.count { 0 => None, - 1 => { self.count = 0; self.iter.get_end() } - _ => { self.count -= 1; self.iter.next() } + 1 => { + self.count = 0; + self.iter.get_end() + } + _ => { + self.count -= 1; + self.iter.next() + } } } #[inline] fn next_back(&mut self) -> Option<&'a str> - where P::Searcher: ReverseSearcher<'a> + where + P::Searcher: ReverseSearcher<'a>, { match self.count { 0 => None, - 1 => { self.count = 0; self.iter.get_end() } - _ => { self.count -= 1; self.iter.next_back() } + 1 => { + self.count = 0; + self.iter.get_end() + } + _ => { + self.count -= 1; + self.iter.next_back() + } } } } @@ -1192,14 +1247,17 @@ generate_pattern_iterators! 
{ delegate single ended; } -derive_pattern_clone!{ +derive_pattern_clone! { clone MatchIndicesInternal with |s| MatchIndicesInternal(s.0.clone()) } struct MatchIndicesInternal<'a, P: Pattern<'a>>(P::Searcher); -impl<'a, P: Pattern<'a>> fmt::Debug for MatchIndicesInternal<'a, P> where P::Searcher: fmt::Debug { +impl<'a, P: Pattern<'a>> fmt::Debug for MatchIndicesInternal<'a, P> +where + P::Searcher: fmt::Debug, +{ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("MatchIndicesInternal") .field(&self.0) @@ -1210,18 +1268,19 @@ impl<'a, P: Pattern<'a>> fmt::Debug for MatchIndicesInternal<'a, P> where P::Sea impl<'a, P: Pattern<'a>> MatchIndicesInternal<'a, P> { #[inline] fn next(&mut self) -> Option<(usize, &'a str)> { - self.0.next_match().map(|(start, end)| unsafe { - (start, self.0.haystack().get_unchecked(start..end)) - }) + self.0 + .next_match() + .map(|(start, end)| unsafe { (start, self.0.haystack().get_unchecked(start..end)) }) } #[inline] fn next_back(&mut self) -> Option<(usize, &'a str)> - where P::Searcher: ReverseSearcher<'a> + where + P::Searcher: ReverseSearcher<'a>, { - self.0.next_match_back().map(|(start, end)| unsafe { - (start, self.0.haystack().get_unchecked(start..end)) - }) + self.0 + .next_match_back() + .map(|(start, end)| unsafe { (start, self.0.haystack().get_unchecked(start..end)) }) } } @@ -1243,18 +1302,19 @@ generate_pattern_iterators! { delegate double ended; } -derive_pattern_clone!{ +derive_pattern_clone! { clone MatchesInternal with |s| MatchesInternal(s.0.clone()) } struct MatchesInternal<'a, P: Pattern<'a>>(P::Searcher); -impl<'a, P: Pattern<'a>> fmt::Debug for MatchesInternal<'a, P> where P::Searcher: fmt::Debug { +impl<'a, P: Pattern<'a>> fmt::Debug for MatchesInternal<'a, P> +where + P::Searcher: fmt::Debug, +{ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("MatchesInternal") - .field(&self.0) - .finish() + f.debug_tuple("MatchesInternal").field(&self.0).finish() } } @@ -1269,7 +1329,8 @@ impl<'a, P: Pattern<'a>> MatchesInternal<'a, P> { #[inline] fn next_back(&mut self) -> Option<&'a str> - where P::Searcher: ReverseSearcher<'a> + where + P::Searcher: ReverseSearcher<'a>, { self.0.next_match_back().map(|(a, b)| unsafe { // Indices are known to be on utf8 boundaries @@ -1350,8 +1411,11 @@ impl<'a> Fn<(&'a str,)> for LinesAnyMap { #[inline] extern "rust-call" fn call(&self, (line,): (&'a str,)) -> &'a str { let l = line.len(); - if l > 0 && line.as_bytes()[l - 1] == b'\r' { &line[0 .. l - 1] } - else { line } + if l > 0 && line.as_bytes()[l - 1] == b'\r' { + &line[0..l - 1] + } else { + line + } } } @@ -1422,7 +1486,11 @@ fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> { let usize_bytes = mem::size_of::(); let ascii_block_size = 2 * usize_bytes; - let blocks_end = if len >= ascii_block_size { len - ascii_block_size + 1 } else { 0 }; + let blocks_end = if len >= ascii_block_size { + len - ascii_block_size + 1 + } else { + 0 + }; while index < len { let old_offset = index; @@ -1431,18 +1499,20 @@ fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> { return Err(Utf8Error { valid_up_to: old_offset, error_len: $error_len, - }) - } + }); + }; } - macro_rules! next { () => {{ - index += 1; - // we needed data, but there was none: error! - if index >= len { - err!(None) - } - v[index] - }}} + macro_rules! next { + () => {{ + index += 1; + // we needed data, but there was none: error! 
+ if index >= len { + err!(None) + } + v[index] + }}; + } let first = v[index]; if first >= 128 { @@ -1466,16 +1536,18 @@ fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> { // UTF8-4 = %xF0 %x90-BF 2( UTF8-tail ) / %xF1-F3 3( UTF8-tail ) / // %xF4 %x80-8F 2( UTF8-tail ) match w { - 2 => if next!() & !CONT_MASK != TAG_CONT_U8 { - err!(Some(1)) - }, + 2 => { + if next!() & !CONT_MASK != TAG_CONT_U8 { + err!(Some(1)) + } + } 3 => { match (first, next!()) { - (0xE0 , 0xA0 ..= 0xBF) | - (0xE1 ..= 0xEC, 0x80 ..= 0xBF) | - (0xED , 0x80 ..= 0x9F) | - (0xEE ..= 0xEF, 0x80 ..= 0xBF) => {} - _ => err!(Some(1)) + (0xE0, 0xA0..=0xBF) + | (0xE1..=0xEC, 0x80..=0xBF) + | (0xED, 0x80..=0x9F) + | (0xEE..=0xEF, 0x80..=0xBF) => {} + _ => err!(Some(1)), } if next!() & !CONT_MASK != TAG_CONT_U8 { err!(Some(2)) @@ -1483,10 +1555,8 @@ fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> { } 4 => { match (first, next!()) { - (0xF0 , 0x90 ..= 0xBF) | - (0xF1 ..= 0xF3, 0x80 ..= 0xBF) | - (0xF4 , 0x80 ..= 0x8F) => {} - _ => err!(Some(1)) + (0xF0, 0x90..=0xBF) | (0xF1..=0xF3, 0x80..=0xBF) | (0xF4, 0x80..=0x8F) => {} + _ => err!(Some(1)), } if next!() & !CONT_MASK != TAG_CONT_U8 { err!(Some(2)) @@ -1495,7 +1565,7 @@ fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> { err!(Some(3)) } } - _ => err!(Some(1)) + _ => err!(Some(1)), } index += 1; } else { @@ -1535,22 +1605,22 @@ fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> { // https://tools.ietf.org/html/rfc3629 static UTF8_CHAR_WIDTH: [u8; 256] = [ -1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, -1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x1F -1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, -1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x3F -1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, -1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x5F -1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, -1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x7F -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, // 0x9F -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, // 0xBF -0,0,2,2,2,2,2,2,2,2,2,2,2,2,2,2, -2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, // 0xDF -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, // 0xEF -4,4,4,4,4,0,0,0,0,0,0,0,0,0,0,0, // 0xFF + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, // 0x1F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, // 0x3F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, // 0x5F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, // 0x7F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, // 0x9F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, // 0xBF + 0, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, // 0xDF + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // 0xEF + 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0xFF ]; /// Given a first byte, determines how many bytes are in this UTF-8 character. 
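
[Editor's note: illustrative sketch, not part of the patch. The validation loop and UTF8_CHAR_WIDTH table reformatted above back `str::from_utf8`; a caller observes the outcome through `Utf8Error::valid_up_to` and `error_len`. The helper fn name is arbitrary.]

fn utf8_error_example() {
    // "ab" then 0xE2 0x28: 0xE2 starts a 3-byte sequence, but 0x28 is not a valid
    // continuation byte, so validation stops after the 2 good bytes.
    let bytes = [b'a', b'b', 0xE2, 0x28, b'c'];
    let err = std::str::from_utf8(&bytes).unwrap_err();
    assert_eq!(err.valid_up_to(), 2);
    // `error_len` reports how many bytes to skip (here just the lone 0xE2).
    assert_eq!(err.error_len(), Some(1));
}
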
@@ -1596,7 +1666,9 @@ mod traits { self.as_bytes() == other.as_bytes() } #[inline] - fn ne(&self, other: &str) -> bool { !(*self).eq(other) } + fn ne(&self, other: &str) -> bool { + !(*self).eq(other) + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1845,9 +1917,10 @@ mod traits { type Output = str; #[inline] fn get(self, slice: &str) -> Option<&Self::Output> { - if self.start <= self.end && - slice.is_char_boundary(self.start) && - slice.is_char_boundary(self.end) { + if self.start <= self.end + && slice.is_char_boundary(self.start) + && slice.is_char_boundary(self.end) + { Some(unsafe { self.get_unchecked(slice) }) } else { None @@ -1855,9 +1928,10 @@ mod traits { } #[inline] fn get_mut(self, slice: &mut str) -> Option<&mut Self::Output> { - if self.start <= self.end && - slice.is_char_boundary(self.start) && - slice.is_char_boundary(self.end) { + if self.start <= self.end + && slice.is_char_boundary(self.start) + && slice.is_char_boundary(self.end) + { Some(unsafe { self.get_unchecked_mut(slice) }) } else { None @@ -1878,15 +1952,17 @@ mod traits { #[inline] fn index(self, slice: &str) -> &Self::Output { let (start, end) = (self.start, self.end); - self.get(slice).unwrap_or_else(|| super::slice_error_fail(slice, start, end)) + self.get(slice) + .unwrap_or_else(|| super::slice_error_fail(slice, start, end)) } #[inline] fn index_mut(self, slice: &mut str) -> &mut Self::Output { // is_char_boundary checks that the index is in [0, .len()] // cannot reuse `get` as above, because of NLL trouble - if self.start <= self.end && - slice.is_char_boundary(self.start) && - slice.is_char_boundary(self.end) { + if self.start <= self.end + && slice.is_char_boundary(self.start) + && slice.is_char_boundary(self.end) + { unsafe { self.get_unchecked_mut(slice) } } else { super::slice_error_fail(slice, self.start, self.end) @@ -1926,7 +2002,8 @@ mod traits { #[inline] fn index(self, slice: &str) -> &Self::Output { let end = self.end; - self.get(slice).unwrap_or_else(|| super::slice_error_fail(slice, 0, end)) + self.get(slice) + .unwrap_or_else(|| super::slice_error_fail(slice, 0, end)) } #[inline] fn index_mut(self, slice: &mut str) -> &mut Self::Output { @@ -1973,7 +2050,8 @@ mod traits { #[inline] fn index(self, slice: &str) -> &Self::Output { let (start, end) = (self.start, slice.len()); - self.get(slice).unwrap_or_else(|| super::slice_error_fail(slice, start, end)) + self.get(slice) + .unwrap_or_else(|| super::slice_error_fail(slice, start, end)) } #[inline] fn index_mut(self, slice: &mut str) -> &mut Self::Output { @@ -1991,66 +2069,84 @@ mod traits { type Output = str; #[inline] fn get(self, slice: &str) -> Option<&Self::Output> { - if *self.end() == usize::max_value() { None } - else { (*self.start()..self.end()+1).get(slice) } + if *self.end() == usize::max_value() { + None + } else { + (*self.start()..self.end() + 1).get(slice) + } } #[inline] fn get_mut(self, slice: &mut str) -> Option<&mut Self::Output> { - if *self.end() == usize::max_value() { None } - else { (*self.start()..self.end()+1).get_mut(slice) } + if *self.end() == usize::max_value() { + None + } else { + (*self.start()..self.end() + 1).get_mut(slice) + } } #[inline] unsafe fn get_unchecked(self, slice: &str) -> &Self::Output { - (*self.start()..self.end()+1).get_unchecked(slice) + (*self.start()..self.end() + 1).get_unchecked(slice) } #[inline] unsafe fn get_unchecked_mut(self, slice: &mut str) -> &mut Self::Output { - (*self.start()..self.end()+1).get_unchecked_mut(slice) + (*self.start()..self.end() + 
1).get_unchecked_mut(slice) } #[inline] fn index(self, slice: &str) -> &Self::Output { - if *self.end() == usize::max_value() { str_index_overflow_fail(); } - (*self.start()..self.end()+1).index(slice) + if *self.end() == usize::max_value() { + str_index_overflow_fail(); + } + (*self.start()..self.end() + 1).index(slice) } #[inline] fn index_mut(self, slice: &mut str) -> &mut Self::Output { - if *self.end() == usize::max_value() { str_index_overflow_fail(); } - (*self.start()..self.end()+1).index_mut(slice) + if *self.end() == usize::max_value() { + str_index_overflow_fail(); + } + (*self.start()..self.end() + 1).index_mut(slice) } } - - #[stable(feature = "inclusive_range", since = "1.26.0")] impl SliceIndex for ops::RangeToInclusive { type Output = str; #[inline] fn get(self, slice: &str) -> Option<&Self::Output> { - if self.end == usize::max_value() { None } - else { (..self.end+1).get(slice) } + if self.end == usize::max_value() { + None + } else { + (..self.end + 1).get(slice) + } } #[inline] fn get_mut(self, slice: &mut str) -> Option<&mut Self::Output> { - if self.end == usize::max_value() { None } - else { (..self.end+1).get_mut(slice) } + if self.end == usize::max_value() { + None + } else { + (..self.end + 1).get_mut(slice) + } } #[inline] unsafe fn get_unchecked(self, slice: &str) -> &Self::Output { - (..self.end+1).get_unchecked(slice) + (..self.end + 1).get_unchecked(slice) } #[inline] unsafe fn get_unchecked_mut(self, slice: &mut str) -> &mut Self::Output { - (..self.end+1).get_unchecked_mut(slice) + (..self.end + 1).get_unchecked_mut(slice) } #[inline] fn index(self, slice: &str) -> &Self::Output { - if self.end == usize::max_value() { str_index_overflow_fail(); } - (..self.end+1).index(slice) + if self.end == usize::max_value() { + str_index_overflow_fail(); + } + (..self.end + 1).index(slice) } #[inline] fn index_mut(self, slice: &mut str) -> &mut Self::Output { - if self.end == usize::max_value() { str_index_overflow_fail(); } - (..self.end+1).index_mut(slice) + if self.end == usize::max_value() { + str_index_overflow_fail(); + } + (..self.end + 1).index_mut(slice) } } } @@ -2078,15 +2174,28 @@ fn slice_error_fail(s: &str, begin: usize, end: usize) -> ! { // 1. out of bounds if begin > s.len() || end > s.len() { let oob_index = if begin > s.len() { begin } else { end }; - panic!("byte index {} is out of bounds of `{}`{}", oob_index, s_trunc, ellipsis); + panic!( + "byte index {} is out of bounds of `{}`{}", + oob_index, s_trunc, ellipsis + ); } // 2. begin <= end - assert!(begin <= end, "begin <= end ({} <= {}) when slicing `{}`{}", - begin, end, s_trunc, ellipsis); + assert!( + begin <= end, + "begin <= end ({} <= {}) when slicing `{}`{}", + begin, + end, + s_trunc, + ellipsis + ); // 3. character boundary - let index = if !s.is_char_boundary(begin) { begin } else { end }; + let index = if !s.is_char_boundary(begin) { + begin + } else { + end + }; // find the character let mut char_start = index; while !s.is_char_boundary(char_start) { @@ -2094,9 +2203,11 @@ fn slice_error_fail(s: &str, begin: usize, end: usize) -> ! { } // `char_start` must be less than len and a char boundary let ch = s[char_start..].chars().next().unwrap(); - let char_range = char_start .. 
char_start + ch.len_utf8(); - panic!("byte index {} is not a char boundary; it is inside {:?} (bytes {:?}) of `{}`{}", - index, ch, char_range, s_trunc, ellipsis); + let char_range = char_start..char_start + ch.len_utf8(); + panic!( + "byte index {} is not a char boundary; it is inside {:?} (bytes {:?}) of `{}`{}", + index, ch, char_range, s_trunc, ellipsis + ); } #[lang = "str"] @@ -2175,7 +2286,9 @@ impl str { // 0 and len are always ok. // Test for 0 explicitly so that it can optimize out the check // easily and skip reading string data for that case. - if index == 0 || index == self.len() { return true; } + if index == 0 || index == self.len() { + return true; + } match self.as_bytes().get(index) { None => false, // This is bit magic equivalent to: b < 128 || b >= 192 @@ -2198,7 +2311,7 @@ impl str { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline(always)] - #[rustc_const_unstable(feature="const_str_as_bytes")] + #[rustc_const_unstable(feature = "const_str_as_bytes")] pub const fn as_bytes(&self) -> &[u8] { union Slices<'a> { str: &'a str, @@ -2468,7 +2581,10 @@ impl str { /// * `begin` and `end` must be byte positions within the string slice. /// * `begin` and `end` must lie on UTF-8 sequence boundaries. #[stable(feature = "str_slice_mut", since = "1.5.0")] - #[rustc_deprecated(since = "1.29.0", reason = "use `get_unchecked_mut(begin..end)` instead")] + #[rustc_deprecated( + since = "1.29.0", + reason = "use `get_unchecked_mut(begin..end)` instead" + )] #[inline] pub unsafe fn slice_mut_unchecked(&mut self, begin: usize, end: usize) -> &mut str { (begin..end).get_unchecked_mut(self) @@ -2510,8 +2626,10 @@ impl str { // is_char_boundary checks that the index is in [0, .len()] if self.is_char_boundary(mid) { unsafe { - (self.get_unchecked(0..mid), - self.get_unchecked(mid..self.len())) + ( + self.get_unchecked(0..mid), + self.get_unchecked(mid..self.len()), + ) } } else { slice_error_fail(self, 0, mid) @@ -2557,11 +2675,10 @@ impl str { let len = self.len(); let ptr = self.as_ptr() as *mut u8; unsafe { - (from_utf8_unchecked_mut(slice::from_raw_parts_mut(ptr, mid)), - from_utf8_unchecked_mut(slice::from_raw_parts_mut( - ptr.add(mid), - len - mid - ))) + ( + from_utf8_unchecked_mut(slice::from_raw_parts_mut(ptr, mid)), + from_utf8_unchecked_mut(slice::from_raw_parts_mut(ptr.add(mid), len - mid)), + ) } } else { slice_error_fail(self, 0, mid) @@ -2615,7 +2732,9 @@ impl str { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn chars(&self) -> Chars { - Chars{iter: self.as_bytes().iter()} + Chars { + iter: self.as_bytes().iter(), + } } /// Returns an iterator over the [`char`]s of a string slice, and their @@ -2670,7 +2789,10 @@ impl str { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn char_indices(&self) -> CharIndices { - CharIndices { front_offset: 0, iter: self.chars() } + CharIndices { + front_offset: 0, + iter: self.chars(), + } } /// An iterator over the bytes of a string slice. @@ -2738,7 +2860,9 @@ impl str { #[stable(feature = "split_whitespace", since = "1.1.0")] #[inline] pub fn split_whitespace(&self) -> SplitWhitespace { - SplitWhitespace { inner: self.split(IsWhitespace).filter(IsNotEmpty) } + SplitWhitespace { + inner: self.split(IsWhitespace).filter(IsNotEmpty), + } } /// Split a string slice by ASCII whitespace. 
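
[Editor's note: illustrative sketch, not part of the patch. The char-boundary checks reformatted above are what make checked slicing (`get`) return None, and panicking slicing report the offending character's byte range, when an index falls inside a multi-byte character. The helper fn name is arbitrary.]

fn char_boundary_example() {
    let s = "héllo"; // 'é' occupies bytes 1..3
    assert!(s.is_char_boundary(1));
    assert!(!s.is_char_boundary(2));
    // Checked slicing refuses a non-boundary index instead of panicking.
    assert_eq!(s.get(0..2), None);
    assert_eq!(s.get(0..3), Some("hé"));
}
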
@@ -2855,7 +2979,10 @@ impl str { /// ``` #[stable(feature = "encode_utf16", since = "1.8.0")] pub fn encode_utf16(&self) -> EncodeUtf16 { - EncodeUtf16 { chars: self.chars(), extra: 0 } + EncodeUtf16 { + chars: self.chars(), + extra: 0, + } } /// Returns `true` if the given pattern matches a sub-slice of @@ -2916,7 +3043,8 @@ impl str { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn ends_with<'a, P: Pattern<'a>>(&'a self, pat: P) -> bool - where P::Searcher: ReverseSearcher<'a> + where + P::Searcher: ReverseSearcher<'a>, { pat.is_suffix_of(self) } @@ -3009,7 +3137,8 @@ impl str { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn rfind<'a, P: Pattern<'a>>(&'a self, pat: P) -> Option - where P::Searcher: ReverseSearcher<'a> + where + P::Searcher: ReverseSearcher<'a>, { pat.into_searcher(self).next_match_back().map(|(i, _)| i) } @@ -3176,7 +3305,8 @@ impl str { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn rsplit<'a, P: Pattern<'a>>(&'a self, pat: P) -> RSplit<'a, P> - where P::Searcher: ReverseSearcher<'a> + where + P::Searcher: ReverseSearcher<'a>, { RSplit(self.split(pat).0) } @@ -3267,7 +3397,8 @@ impl str { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn rsplit_terminator<'a, P: Pattern<'a>>(&'a self, pat: P) -> RSplitTerminator<'a, P> - where P::Searcher: ReverseSearcher<'a> + where + P::Searcher: ReverseSearcher<'a>, { RSplitTerminator(self.split_terminator(pat).0) } @@ -3367,7 +3498,8 @@ impl str { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn rsplitn<'a, P: Pattern<'a>>(&'a self, n: usize, pat: P) -> RSplitN<'a, P> - where P::Searcher: ReverseSearcher<'a> + where + P::Searcher: ReverseSearcher<'a>, { RSplitN(self.splitn(n, pat).0) } @@ -3440,7 +3572,8 @@ impl str { #[stable(feature = "str_matches", since = "1.2.0")] #[inline] pub fn rmatches<'a, P: Pattern<'a>>(&'a self, pat: P) -> RMatches<'a, P> - where P::Searcher: ReverseSearcher<'a> + where + P::Searcher: ReverseSearcher<'a>, { RMatches(self.matches(pat).0) } @@ -3525,7 +3658,8 @@ impl str { #[stable(feature = "str_match_indices", since = "1.5.0")] #[inline] pub fn rmatch_indices<'a, P: Pattern<'a>>(&'a self, pat: P) -> RMatchIndices<'a, P> - where P::Searcher: ReverseSearcher<'a> + where + P::Searcher: ReverseSearcher<'a>, { RMatchIndices(self.match_indices(pat).0) } @@ -3718,7 +3852,8 @@ impl str { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn trim_matches<'a, P: Pattern<'a>>(&'a self, pat: P) -> &'a str - where P::Searcher: DoubleEndedSearcher<'a> + where + P::Searcher: DoubleEndedSearcher<'a>, { let mut i = 0; let mut j = 0; @@ -3806,7 +3941,8 @@ impl str { /// ``` #[stable(feature = "trim_direction", since = "1.30.0")] pub fn trim_end_matches<'a, P: Pattern<'a>>(&'a self, pat: P) -> &'a str - where P::Searcher: ReverseSearcher<'a> + where + P::Searcher: ReverseSearcher<'a>, { let mut j = 0; let mut matcher = pat.into_searcher(self); @@ -3886,7 +4022,8 @@ impl str { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_deprecated(reason = "superseded by `trim_end_matches`", since = "1.33.0")] pub fn trim_right_matches<'a, P: Pattern<'a>>(&'a self, pat: P) -> &'a str - where P::Searcher: ReverseSearcher<'a> + where + P::Searcher: ReverseSearcher<'a>, { self.trim_end_matches(pat) } @@ -4021,13 +4158,17 @@ impl AsRef<[u8]> for str { #[stable(feature = "rust1", since = "1.0.0")] impl Default for &str { /// Creates an empty str - fn default() -> Self { "" } + fn default() -> Self { + "" + } } #[stable(feature = "default_mut_str", since = 
"1.28.0")] impl Default for &mut str { /// Creates an empty mutable str - fn default() -> Self { unsafe { from_utf8_unchecked_mut(&mut []) } } + fn default() -> Self { + unsafe { from_utf8_unchecked_mut(&mut []) } + } } /// An iterator over the non-whitespace substrings of a string, @@ -4061,18 +4202,18 @@ pub struct SplitAsciiWhitespace<'a> { #[derive(Clone)] struct IsWhitespace; -impl FnOnce<(char, )> for IsWhitespace { +impl FnOnce<(char,)> for IsWhitespace { type Output = bool; #[inline] - extern "rust-call" fn call_once(mut self, arg: (char, )) -> bool { + extern "rust-call" fn call_once(mut self, arg: (char,)) -> bool { self.call_mut(arg) } } -impl FnMut<(char, )> for IsWhitespace { +impl FnMut<(char,)> for IsWhitespace { #[inline] - extern "rust-call" fn call_mut(&mut self, arg: (char, )) -> bool { + extern "rust-call" fn call_mut(&mut self, arg: (char,)) -> bool { arg.0.is_whitespace() } } @@ -4080,18 +4221,18 @@ impl FnMut<(char, )> for IsWhitespace { #[derive(Clone)] struct IsAsciiWhitespace; -impl<'a> FnOnce<(&'a u8, )> for IsAsciiWhitespace { +impl<'a> FnOnce<(&'a u8,)> for IsAsciiWhitespace { type Output = bool; #[inline] - extern "rust-call" fn call_once(mut self, arg: (&u8, )) -> bool { + extern "rust-call" fn call_once(mut self, arg: (&u8,)) -> bool { self.call_mut(arg) } } -impl<'a> FnMut<(&'a u8, )> for IsAsciiWhitespace { +impl<'a> FnMut<(&'a u8,)> for IsAsciiWhitespace { #[inline] - extern "rust-call" fn call_mut(&mut self, arg: (&u8, )) -> bool { + extern "rust-call" fn call_mut(&mut self, arg: (&u8,)) -> bool { arg.0.is_ascii_whitespace() } } @@ -4099,34 +4240,34 @@ impl<'a> FnMut<(&'a u8, )> for IsAsciiWhitespace { #[derive(Clone)] struct IsNotEmpty; -impl<'a, 'b> FnOnce<(&'a &'b str, )> for IsNotEmpty { +impl<'a, 'b> FnOnce<(&'a &'b str,)> for IsNotEmpty { type Output = bool; #[inline] - extern "rust-call" fn call_once(mut self, arg: (&'a &'b str, )) -> bool { + extern "rust-call" fn call_once(mut self, arg: (&'a &'b str,)) -> bool { self.call_mut(arg) } } -impl<'a, 'b> FnMut<(&'a &'b str, )> for IsNotEmpty { +impl<'a, 'b> FnMut<(&'a &'b str,)> for IsNotEmpty { #[inline] - extern "rust-call" fn call_mut(&mut self, arg: (&'a &'b str, )) -> bool { + extern "rust-call" fn call_mut(&mut self, arg: (&'a &'b str,)) -> bool { !arg.0.is_empty() } } -impl<'a, 'b> FnOnce<(&'a &'b [u8], )> for IsNotEmpty { +impl<'a, 'b> FnOnce<(&'a &'b [u8],)> for IsNotEmpty { type Output = bool; #[inline] - extern "rust-call" fn call_once(mut self, arg: (&'a &'b [u8], )) -> bool { + extern "rust-call" fn call_once(mut self, arg: (&'a &'b [u8],)) -> bool { self.call_mut(arg) } } -impl<'a, 'b> FnMut<(&'a &'b [u8], )> for IsNotEmpty { +impl<'a, 'b> FnMut<(&'a &'b [u8],)> for IsNotEmpty { #[inline] - extern "rust-call" fn call_mut(&mut self, arg: (&'a &'b [u8], )) -> bool { + extern "rust-call" fn call_mut(&mut self, arg: (&'a &'b [u8],)) -> bool { !arg.0.is_empty() } } @@ -4134,23 +4275,22 @@ impl<'a, 'b> FnMut<(&'a &'b [u8], )> for IsNotEmpty { #[derive(Clone)] struct UnsafeBytesToStr; -impl<'a> FnOnce<(&'a [u8], )> for UnsafeBytesToStr { +impl<'a> FnOnce<(&'a [u8],)> for UnsafeBytesToStr { type Output = &'a str; #[inline] - extern "rust-call" fn call_once(mut self, arg: (&'a [u8], )) -> &'a str { + extern "rust-call" fn call_once(mut self, arg: (&'a [u8],)) -> &'a str { self.call_mut(arg) } } -impl<'a> FnMut<(&'a [u8], )> for UnsafeBytesToStr { +impl<'a> FnMut<(&'a [u8],)> for UnsafeBytesToStr { #[inline] - extern "rust-call" fn call_mut(&mut self, arg: (&'a [u8], )) -> &'a str { + extern 
"rust-call" fn call_mut(&mut self, arg: (&'a [u8],)) -> &'a str { unsafe { from_utf8_unchecked(arg.0) } } } - #[stable(feature = "split_whitespace", since = "1.1.0")] impl<'a> Iterator for SplitWhitespace<'a> { type Item = &'a str; diff --git a/src/libcore/str/pattern.rs b/src/libcore/str/pattern.rs index b4eae4d1bb742..b855434ba3bde 100644 --- a/src/libcore/str/pattern.rs +++ b/src/libcore/str/pattern.rs @@ -51,7 +51,8 @@ pub trait Pattern<'a>: Sized { /// Checks whether the pattern matches at the back of the haystack #[inline] fn is_suffix_of(self, haystack: &'a str) -> bool - where Self::Searcher: ReverseSearcher<'a> + where + Self::Searcher: ReverseSearcher<'a>, { match self.into_searcher(haystack).next_back() { SearchStep::Match(_, j) if haystack.len() == j => true, @@ -76,7 +77,7 @@ pub enum SearchStep { Reject(usize, usize), /// Expresses that every byte of the haystack has been visited, ending /// the iteration. - Done + Done, } /// A searcher for a string pattern. @@ -187,7 +188,7 @@ pub unsafe trait ReverseSearcher<'a>: Searcher<'a> { /// Find the next `Match` result. See `next_back()` #[inline] - fn next_match_back(&mut self) -> Option<(usize, usize)>{ + fn next_match_back(&mut self) -> Option<(usize, usize)> { loop { match self.next_back() { SearchStep::Match(a, b) => return Some((a, b)), @@ -199,7 +200,7 @@ pub unsafe trait ReverseSearcher<'a>: Searcher<'a> { /// Find the next `Reject` result. See `next_back()` #[inline] - fn next_reject_back(&mut self) -> Option<(usize, usize)>{ + fn next_reject_back(&mut self) -> Option<(usize, usize)> { loop { match self.next_back() { SearchStep::Reject(a, b) => return Some((a, b)), @@ -233,7 +234,6 @@ pub unsafe trait ReverseSearcher<'a>: Searcher<'a> { /// `"[aa]a"` or `"a[aa]"`, depending from which side it is searched. pub trait DoubleEndedSearcher<'a>: ReverseSearcher<'a> {} - ///////////////////////////////////////////////////////////////////////////// // Impl for char ///////////////////////////////////////////////////////////////////////////// @@ -245,7 +245,6 @@ pub struct CharSearcher<'a> { // safety invariant: `finger`/`finger_back` must be a valid utf8 byte index of `haystack` // This invariant can be broken *within* next_match and next_match_back, however // they must exit with fingers on valid code point boundaries. - /// `finger` is the current byte index of the forward search. /// Imagine that it exists before the byte at its index, i.e. 
/// `haystack[finger]` is the first byte of the slice we must inspect during @@ -294,12 +293,12 @@ unsafe impl<'a> Searcher<'a> for CharSearcher<'a> { fn next_match(&mut self) -> Option<(usize, usize)> { loop { // get the haystack after the last character found - let bytes = if let Some(slice) = self.haystack.as_bytes() - .get(self.finger..self.finger_back) { - slice - } else { - return None; - }; + let bytes = + if let Some(slice) = self.haystack.as_bytes().get(self.finger..self.finger_back) { + slice + } else { + return None; + }; // the last byte of the utf8 encoded needle let last_byte = unsafe { *self.utf8_encoded.get_unchecked(self.utf8_size - 1) }; if let Some(index) = memchr::memchr(last_byte, bytes) { @@ -433,7 +432,7 @@ impl<'a> Pattern<'a> for char { finger_back: haystack.len(), needle: self, utf8_size, - utf8_encoded + utf8_encoded, } } @@ -457,7 +456,9 @@ impl<'a> Pattern<'a> for char { } #[inline] - fn is_suffix_of(self, haystack: &'a str) -> bool where Self::Searcher: ReverseSearcher<'a> + fn is_suffix_of(self, haystack: &'a str) -> bool + where + Self::Searcher: ReverseSearcher<'a>, { if let Some(ch) = haystack.chars().next_back() { self == ch @@ -476,15 +477,20 @@ trait MultiCharEq { fn matches(&mut self, c: char) -> bool; } -impl MultiCharEq for F where F: FnMut(char) -> bool { +impl MultiCharEq for F +where + F: FnMut(char) -> bool, +{ #[inline] - fn matches(&mut self, c: char) -> bool { (*self)(c) } + fn matches(&mut self, c: char) -> bool { + (*self)(c) + } } impl MultiCharEq for &[char] { #[inline] fn matches(&mut self, c: char) -> bool { - self.iter().any(|&m| { m == c }) + self.iter().any(|&m| m == c) } } @@ -644,7 +650,11 @@ impl<'a, 'b> DoubleEndedSearcher<'a> for CharSliceSearcher<'a, 'b> {} /// Searches for chars that are equal to any of the chars in the array impl<'a, 'b> Pattern<'a> for &'b [char] { - pattern_methods!(CharSliceSearcher<'a, 'b>, MultiCharEqPattern, CharSliceSearcher); + pattern_methods!( + CharSliceSearcher<'a, 'b>, + MultiCharEqPattern, + CharSliceSearcher + ); } ///////////////////////////////////////////////////////////////////////////// @@ -654,10 +664,12 @@ impl<'a, 'b> Pattern<'a> for &'b [char] { /// Associated type for `>::Searcher`. 
#[derive(Clone)] pub struct CharPredicateSearcher<'a, F>( as Pattern<'a>>::Searcher) - where F: FnMut(char) -> bool; +where + F: FnMut(char) -> bool; impl fmt::Debug for CharPredicateSearcher<'_, F> - where F: FnMut(char) -> bool +where + F: FnMut(char) -> bool, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("CharPredicateSearcher") @@ -667,23 +679,31 @@ impl fmt::Debug for CharPredicateSearcher<'_, F> } } unsafe impl<'a, F> Searcher<'a> for CharPredicateSearcher<'a, F> - where F: FnMut(char) -> bool +where + F: FnMut(char) -> bool, { searcher_methods!(forward); } unsafe impl<'a, F> ReverseSearcher<'a> for CharPredicateSearcher<'a, F> - where F: FnMut(char) -> bool +where + F: FnMut(char) -> bool, { searcher_methods!(reverse); } -impl<'a, F> DoubleEndedSearcher<'a> for CharPredicateSearcher<'a, F> - where F: FnMut(char) -> bool {} +impl<'a, F> DoubleEndedSearcher<'a> for CharPredicateSearcher<'a, F> where F: FnMut(char) -> bool {} /// Searches for chars that match the given predicate -impl<'a, F> Pattern<'a> for F where F: FnMut(char) -> bool { - pattern_methods!(CharPredicateSearcher<'a, F>, MultiCharEqPattern, CharPredicateSearcher); +impl<'a, F> Pattern<'a> for F +where + F: FnMut(char) -> bool, +{ + pattern_methods!( + CharPredicateSearcher<'a, F>, + MultiCharEqPattern, + CharPredicateSearcher + ); } ///////////////////////////////////////////////////////////////////////////// @@ -714,20 +734,18 @@ impl<'a, 'b> Pattern<'a> for &'b str { /// Checks whether the pattern matches at the front of the haystack #[inline] fn is_prefix_of(self, haystack: &'a str) -> bool { - haystack.is_char_boundary(self.len()) && - self == &haystack[..self.len()] + haystack.is_char_boundary(self.len()) && self == &haystack[..self.len()] } /// Checks whether the pattern matches at the back of the haystack #[inline] fn is_suffix_of(self, haystack: &'a str) -> bool { - self.len() <= haystack.len() && - haystack.is_char_boundary(haystack.len() - self.len()) && - self == &haystack[haystack.len() - self.len()..] + self.len() <= haystack.len() + && haystack.is_char_boundary(haystack.len() - self.len()) + && self == &haystack[haystack.len() - self.len()..] } } - ///////////////////////////////////////////////////////////////////////////// // Two Way substring searcher ///////////////////////////////////////////////////////////////////////////// @@ -772,9 +790,10 @@ impl<'a, 'b> StrSearcher<'a, 'b> { StrSearcher { haystack, needle, - searcher: StrSearcherImpl::TwoWay( - TwoWaySearcher::new(needle.as_bytes(), haystack.len()) - ), + searcher: StrSearcherImpl::TwoWay(TwoWaySearcher::new( + needle.as_bytes(), + haystack.len(), + )), } } } @@ -813,10 +832,11 @@ unsafe impl<'a, 'b> Searcher<'a> for StrSearcher<'a, 'b> { return SearchStep::Done; } let is_long = searcher.memory == usize::MAX; - match searcher.next::(self.haystack.as_bytes(), - self.needle.as_bytes(), - is_long) - { + match searcher.next::( + self.haystack.as_bytes(), + self.needle.as_bytes(), + is_long, + ) { SearchStep::Reject(a, mut b) => { // skip to next char boundary while !self.haystack.is_char_boundary(b) { @@ -834,27 +854,29 @@ unsafe impl<'a, 'b> Searcher<'a> for StrSearcher<'a, 'b> { #[inline] fn next_match(&mut self) -> Option<(usize, usize)> { match self.searcher { - StrSearcherImpl::Empty(..) => { - loop { - match self.next() { - SearchStep::Match(a, b) => return Some((a, b)), - SearchStep::Done => return None, - SearchStep::Reject(..) => { } - } + StrSearcherImpl::Empty(..) 
=> loop { + match self.next() { + SearchStep::Match(a, b) => return Some((a, b)), + SearchStep::Done => return None, + SearchStep::Reject(..) => {} } - } + }, StrSearcherImpl::TwoWay(ref mut searcher) => { let is_long = searcher.memory == usize::MAX; // write out `true` and `false` cases to encourage the compiler // to specialize the two cases separately. if is_long { - searcher.next::(self.haystack.as_bytes(), - self.needle.as_bytes(), - true) + searcher.next::( + self.haystack.as_bytes(), + self.needle.as_bytes(), + true, + ) } else { - searcher.next::(self.haystack.as_bytes(), - self.needle.as_bytes(), - false) + searcher.next::( + self.haystack.as_bytes(), + self.needle.as_bytes(), + false, + ) } } } @@ -883,10 +905,11 @@ unsafe impl<'a, 'b> ReverseSearcher<'a> for StrSearcher<'a, 'b> { return SearchStep::Done; } let is_long = searcher.memory == usize::MAX; - match searcher.next_back::(self.haystack.as_bytes(), - self.needle.as_bytes(), - is_long) - { + match searcher.next_back::( + self.haystack.as_bytes(), + self.needle.as_bytes(), + is_long, + ) { SearchStep::Reject(mut a, b) => { // skip to next char boundary while !self.haystack.is_char_boundary(a) { @@ -904,26 +927,28 @@ unsafe impl<'a, 'b> ReverseSearcher<'a> for StrSearcher<'a, 'b> { #[inline] fn next_match_back(&mut self) -> Option<(usize, usize)> { match self.searcher { - StrSearcherImpl::Empty(..) => { - loop { - match self.next_back() { - SearchStep::Match(a, b) => return Some((a, b)), - SearchStep::Done => return None, - SearchStep::Reject(..) => { } - } + StrSearcherImpl::Empty(..) => loop { + match self.next_back() { + SearchStep::Match(a, b) => return Some((a, b)), + SearchStep::Done => return None, + SearchStep::Reject(..) => {} } - } + }, StrSearcherImpl::TwoWay(ref mut searcher) => { let is_long = searcher.memory == usize::MAX; // write out `true` and `false`, like `next_match` if is_long { - searcher.next_back::(self.haystack.as_bytes(), - self.needle.as_bytes(), - true) + searcher.next_back::( + self.haystack.as_bytes(), + self.needle.as_bytes(), + true, + ) } else { - searcher.next_back::(self.haystack.as_bytes(), - self.needle.as_bytes(), - false) + searcher.next_back::( + self.haystack.as_bytes(), + self.needle.as_bytes(), + false, + ) } } } @@ -1031,12 +1056,11 @@ impl TwoWaySearcher { let (crit_pos_false, period_false) = TwoWaySearcher::maximal_suffix(needle, false); let (crit_pos_true, period_true) = TwoWaySearcher::maximal_suffix(needle, true); - let (crit_pos, period) = - if crit_pos_false > crit_pos_true { - (crit_pos_false, period_false) - } else { - (crit_pos_true, period_true) - }; + let (crit_pos, period) = if crit_pos_false > crit_pos_true { + (crit_pos_false, period_false) + } else { + (crit_pos_true, period_true) + }; // A particularly readable explanation of what's going on here can be found // in Crochemore and Rytter's book "Text Algorithms", ch 13. Specifically @@ -1047,7 +1071,7 @@ impl TwoWaySearcher { // &v[..period]. If it is, we use "Algorithm CP1". Otherwise we use // "Algorithm CP2", which is optimized for when the period of the needle // is large. - if &needle[..crit_pos] == &needle[period.. period + crit_pos] { + if &needle[..crit_pos] == &needle[period..period + crit_pos] { // short period case -- the period is exact // compute a separate critical factorization for the reversed needle // x = u' v' where |v'| < period(x). @@ -1057,9 +1081,11 @@ impl TwoWaySearcher { // (crit_pos = 1, period = 3) while being factored with approximate // period in reverse (crit_pos = 2, period = 2). 
We use the given // reverse factorization but keep the exact period. - let crit_pos_back = needle.len() - cmp::max( - TwoWaySearcher::reverse_maximal_suffix(needle, period, false), - TwoWaySearcher::reverse_maximal_suffix(needle, period, true)); + let crit_pos_back = needle.len() + - cmp::max( + TwoWaySearcher::reverse_maximal_suffix(needle, period, false), + TwoWaySearcher::reverse_maximal_suffix(needle, period, true), + ); TwoWaySearcher { crit_pos, @@ -1110,9 +1136,9 @@ impl TwoWaySearcher { // How far we can jump when we encounter a mismatch is all based on the fact // that (u, v) is a critical factorization for the needle. #[inline] - fn next(&mut self, haystack: &[u8], needle: &[u8], long_period: bool) - -> S::Output - where S: TwoWayStrategy + fn next(&mut self, haystack: &[u8], needle: &[u8], long_period: bool) -> S::Output + where + S: TwoWayStrategy, { // `next()` uses `self.position` as its cursor let old_pos = self.position; @@ -1143,8 +1169,11 @@ impl TwoWaySearcher { } // See if the right part of the needle matches - let start = if long_period { self.crit_pos } - else { cmp::max(self.crit_pos, self.memory) }; + let start = if long_period { + self.crit_pos + } else { + cmp::max(self.crit_pos, self.memory) + }; for i in start..needle.len() { if needle[i] != haystack[self.position + i] { self.position += i - self.crit_pos + 1; @@ -1193,9 +1222,9 @@ impl TwoWaySearcher { // To search in reverse through the haystack, we search forward through // a reversed haystack with a reversed needle, matching first u' and then v'. #[inline] - fn next_back(&mut self, haystack: &[u8], needle: &[u8], long_period: bool) - -> S::Output - where S: TwoWayStrategy + fn next_back(&mut self, haystack: &[u8], needle: &[u8], long_period: bool) -> S::Output + where + S: TwoWayStrategy, { // `next_back()` uses `self.end` as its cursor -- so that `next()` and `next_back()` // are independent. @@ -1227,8 +1256,11 @@ impl TwoWaySearcher { } // See if the left part of the needle matches - let crit = if long_period { self.crit_pos_back } - else { cmp::min(self.crit_pos_back, self.memory_back) }; + let crit = if long_period { + self.crit_pos_back + } else { + cmp::min(self.crit_pos_back, self.memory_back) + }; for i in (0..crit).rev() { if needle[i] != haystack[self.end - needle.len() + i] { self.end -= self.crit_pos_back - i; @@ -1240,8 +1272,11 @@ impl TwoWaySearcher { } // See if the right part of the needle matches - let needle_end = if long_period { needle.len() } - else { self.memory_back }; + let needle_end = if long_period { + needle.len() + } else { + self.memory_back + }; for i in self.crit_pos_back..needle_end { if needle[i] != haystack[self.end - needle.len() + i] { self.end -= self.period; @@ -1323,9 +1358,7 @@ impl TwoWaySearcher { // a critical factorization. // // For long period cases, the resulting period is not exact (it is too short). 
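
[Editor's note: illustrative sketch, not part of the patch. The TwoWaySearcher reformatted in this hunk is the engine behind `&str` (substring) patterns, reachable through the stable `str` search APIs; the helper fn name is arbitrary.]

fn substring_search_example() {
    let haystack = "abcXabcXabc";
    assert_eq!(haystack.find("cX"), Some(2));
    assert_eq!(haystack.rfind("cX"), Some(6));
    // `match_indices` reports every (start, match) pair the forward searcher yields.
    let hits: Vec<(usize, &str)> = haystack.match_indices("abc").collect();
    assert_eq!(hits, vec![(0, "abc"), (4, "abc"), (8, "abc")]);
}
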
- fn reverse_maximal_suffix(arr: &[u8], known_period: usize, - order_greater: bool) -> usize - { + fn reverse_maximal_suffix(arr: &[u8], known_period: usize, order_greater: bool) -> usize { let mut left = 0; // Corresponds to i in the paper let mut right = 1; // Corresponds to j in the paper let mut offset = 0; // Corresponds to k in the paper, but starting at 0 @@ -1375,29 +1408,41 @@ trait TwoWayStrategy { } /// Skip to match intervals as quickly as possible -enum MatchOnly { } +enum MatchOnly {} impl TwoWayStrategy for MatchOnly { type Output = Option<(usize, usize)>; #[inline] - fn use_early_reject() -> bool { false } + fn use_early_reject() -> bool { + false + } #[inline] - fn rejecting(_a: usize, _b: usize) -> Self::Output { None } + fn rejecting(_a: usize, _b: usize) -> Self::Output { + None + } #[inline] - fn matching(a: usize, b: usize) -> Self::Output { Some((a, b)) } + fn matching(a: usize, b: usize) -> Self::Output { + Some((a, b)) + } } /// Emit Rejects regularly -enum RejectAndMatch { } +enum RejectAndMatch {} impl TwoWayStrategy for RejectAndMatch { type Output = SearchStep; #[inline] - fn use_early_reject() -> bool { true } + fn use_early_reject() -> bool { + true + } #[inline] - fn rejecting(a: usize, b: usize) -> Self::Output { SearchStep::Reject(a, b) } + fn rejecting(a: usize, b: usize) -> Self::Output { + SearchStep::Reject(a, b) + } #[inline] - fn matching(a: usize, b: usize) -> Self::Output { SearchStep::Match(a, b) } + fn matching(a: usize, b: usize) -> Self::Output { + SearchStep::Match(a, b) + } } diff --git a/src/libcore/sync/atomic.rs b/src/libcore/sync/atomic.rs index 99e6365d15ec0..2d41124cab89e 100644 --- a/src/libcore/sync/atomic.rs +++ b/src/libcore/sync/atomic.rs @@ -80,9 +80,9 @@ use self::Ordering::*; -use intrinsics; use cell::UnsafeCell; use fmt; +use intrinsics; /// Save power or switch hyperthreads in a busy-wait spin-loop. /// @@ -275,7 +275,9 @@ impl AtomicBool { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub const fn new(v: bool) -> AtomicBool { - AtomicBool { v: UnsafeCell::new(v as u8) } + AtomicBool { + v: UnsafeCell::new(v as u8), + } } /// Returns a mutable reference to the underlying [`bool`]. @@ -499,12 +501,13 @@ impl AtomicBool { #[inline] #[stable(feature = "extended_compare_and_swap", since = "1.10.0")] #[cfg(target_has_atomic = "cas")] - pub fn compare_exchange(&self, - current: bool, - new: bool, - success: Ordering, - failure: Ordering) - -> Result { + pub fn compare_exchange( + &self, + current: bool, + new: bool, + success: Ordering, + failure: Ordering, + ) -> Result { match unsafe { atomic_compare_exchange(self.v.get(), current as u8, new as u8, success, failure) } { @@ -555,12 +558,13 @@ impl AtomicBool { #[inline] #[stable(feature = "extended_compare_and_swap", since = "1.10.0")] #[cfg(target_has_atomic = "cas")] - pub fn compare_exchange_weak(&self, - current: bool, - new: bool, - success: Ordering, - failure: Ordering) - -> Result { + pub fn compare_exchange_weak( + &self, + current: bool, + new: bool, + success: Ordering, + failure: Ordering, + ) -> Result { match unsafe { atomic_compare_exchange_weak(self.v.get(), current as u8, new as u8, success, failure) } { @@ -762,7 +766,9 @@ impl AtomicPtr { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub const fn new(p: *mut T) -> AtomicPtr { - AtomicPtr { p: UnsafeCell::new(p) } + AtomicPtr { + p: UnsafeCell::new(p), + } } /// Returns a mutable reference to the underlying pointer. 
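
[Editor's note: illustrative sketch, not part of the patch. The (success, failure) ordering pairs threaded through the reformatted `compare_exchange` signatures above are used like this on the stable atomic types; the helper fn name is arbitrary.]

fn compare_exchange_example() {
    use std::sync::atomic::{AtomicUsize, Ordering};
    let v = AtomicUsize::new(5);
    // Succeeds: the current value matches `current`, so 5 is replaced by 10.
    assert_eq!(v.compare_exchange(5, 10, Ordering::AcqRel, Ordering::Acquire), Ok(5));
    // Fails: the current value is now 10, and the previous value comes back as Err.
    assert_eq!(v.compare_exchange(5, 20, Ordering::AcqRel, Ordering::Acquire), Err(10));
}
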
@@ -979,18 +985,21 @@ impl AtomicPtr { #[inline] #[stable(feature = "extended_compare_and_swap", since = "1.10.0")] #[cfg(target_has_atomic = "cas")] - pub fn compare_exchange(&self, - current: *mut T, - new: *mut T, - success: Ordering, - failure: Ordering) - -> Result<*mut T, *mut T> { + pub fn compare_exchange( + &self, + current: *mut T, + new: *mut T, + success: Ordering, + failure: Ordering, + ) -> Result<*mut T, *mut T> { unsafe { - let res = atomic_compare_exchange(self.p.get() as *mut usize, - current as usize, - new as usize, - success, - failure); + let res = atomic_compare_exchange( + self.p.get() as *mut usize, + current as usize, + new as usize, + success, + failure, + ); match res { Ok(x) => Ok(x as *mut T), Err(x) => Err(x as *mut T), @@ -1039,18 +1048,21 @@ impl AtomicPtr { #[inline] #[stable(feature = "extended_compare_and_swap", since = "1.10.0")] #[cfg(target_has_atomic = "cas")] - pub fn compare_exchange_weak(&self, - current: *mut T, - new: *mut T, - success: Ordering, - failure: Ordering) - -> Result<*mut T, *mut T> { + pub fn compare_exchange_weak( + &self, + current: *mut T, + new: *mut T, + success: Ordering, + failure: Ordering, + ) -> Result<*mut T, *mut T> { unsafe { - let res = atomic_compare_exchange_weak(self.p.get() as *mut usize, - current as usize, - new as usize, - success, - failure); + let res = atomic_compare_exchange_weak( + self.p.get() as *mut usize, + current as usize, + new as usize, + success, + failure, + ); match res { Ok(x) => Ok(x as *mut T), Err(x) => Err(x as *mut T), @@ -1072,14 +1084,18 @@ impl From for AtomicBool { /// assert_eq!(format!("{:?}", atomic_bool), "true") /// ``` #[inline] - fn from(b: bool) -> Self { Self::new(b) } + fn from(b: bool) -> Self { + Self::new(b) + } } #[cfg(target_has_atomic = "ptr")] #[stable(feature = "atomic_from", since = "1.23.0")] impl From<*mut T> for AtomicPtr { #[inline] - fn from(p: *mut T) -> Self { Self::new(p) } + fn from(p: *mut T) -> Self { + Self::new(p) + } } #[cfg(target_has_atomic = "ptr")] @@ -1973,18 +1989,24 @@ atomic_int! { } #[cfg(target_pointer_width = "16")] macro_rules! ptr_width { - () => { 2 } + () => { + 2 + }; } #[cfg(target_pointer_width = "32")] macro_rules! ptr_width { - () => { 4 } + () => { + 4 + }; } #[cfg(target_pointer_width = "64")] macro_rules! ptr_width { - () => { 8 } + () => { + 8 + }; } #[cfg(target_has_atomic = "ptr")] -atomic_int!{ +atomic_int! { stable(feature = "rust1", since = "1.0.0"), stable(feature = "extended_compare_and_swap", since = "1.10.0"), stable(feature = "atomic_debug", since = "1.3.0"), @@ -1998,7 +2020,7 @@ atomic_int!{ isize AtomicIsize ATOMIC_ISIZE_INIT } #[cfg(target_has_atomic = "ptr")] -atomic_int!{ +atomic_int! 
{ stable(feature = "rust1", since = "1.0.0"), stable(feature = "extended_compare_and_swap", since = "1.10.0"), stable(feature = "atomic_debug", since = "1.3.0"), @@ -2086,12 +2108,13 @@ unsafe fn atomic_sub(dst: *mut T, val: T, order: Ordering) -> T { #[inline] #[cfg(target_has_atomic = "cas")] -unsafe fn atomic_compare_exchange(dst: *mut T, - old: T, - new: T, - success: Ordering, - failure: Ordering) - -> Result { +unsafe fn atomic_compare_exchange( + dst: *mut T, + old: T, + new: T, + success: Ordering, + failure: Ordering, +) -> Result { let (val, ok) = match (success, failure) { (Acquire, Acquire) => intrinsics::atomic_cxchg_acq(dst, old, new), (Release, Relaxed) => intrinsics::atomic_cxchg_rel(dst, old, new), @@ -2106,17 +2129,22 @@ unsafe fn atomic_compare_exchange(dst: *mut T, (_, Release) => panic!("there is no such thing as a release failure ordering"), _ => panic!("a failure ordering can't be stronger than a success ordering"), }; - if ok { Ok(val) } else { Err(val) } + if ok { + Ok(val) + } else { + Err(val) + } } #[inline] #[cfg(target_has_atomic = "cas")] -unsafe fn atomic_compare_exchange_weak(dst: *mut T, - old: T, - new: T, - success: Ordering, - failure: Ordering) - -> Result { +unsafe fn atomic_compare_exchange_weak( + dst: *mut T, + old: T, + new: T, + success: Ordering, + failure: Ordering, +) -> Result { let (val, ok) = match (success, failure) { (Acquire, Acquire) => intrinsics::atomic_cxchgweak_acq(dst, old, new), (Release, Relaxed) => intrinsics::atomic_cxchgweak_rel(dst, old, new), @@ -2131,7 +2159,11 @@ unsafe fn atomic_compare_exchange_weak(dst: *mut T, (_, Release) => panic!("there is no such thing as a release failure ordering"), _ => panic!("a failure ordering can't be stronger than a success ordering"), }; - if ok { Ok(val) } else { Err(val) } + if ok { + Ok(val) + } else { + Err(val) + } } #[inline] @@ -2332,7 +2364,6 @@ pub fn fence(order: Ordering) { } } - /// A compiler memory fence. 
/// /// `compiler_fence` does not emit any machine code, but restricts the kinds @@ -2421,7 +2452,6 @@ pub fn compiler_fence(order: Ordering) { } } - #[cfg(target_has_atomic = "8")] #[stable(feature = "atomic_debug", since = "1.3.0")] impl fmt::Debug for AtomicBool { diff --git a/src/libcore/task/mod.rs b/src/libcore/task/mod.rs index 9552e53ebf849..bca31f5f3551a 100644 --- a/src/libcore/task/mod.rs +++ b/src/libcore/task/mod.rs @@ -8,4 +8,4 @@ mod poll; pub use self::poll::Poll; mod wake; -pub use self::wake::{Waker, LocalWaker, UnsafeWake}; +pub use self::wake::{LocalWaker, UnsafeWake, Waker}; diff --git a/src/libcore/task/poll.rs b/src/libcore/task/poll.rs index 27b1139e15c79..cb8a2bf0e2576 100644 --- a/src/libcore/task/poll.rs +++ b/src/libcore/task/poll.rs @@ -23,7 +23,8 @@ pub enum Poll { impl Poll { /// Change the ready value of this `Poll` with the closure provided pub fn map(self, f: F) -> Poll - where F: FnOnce(T) -> U + where + F: FnOnce(T) -> U, { match self { Poll::Ready(t) => Poll::Ready(f(t)), @@ -50,7 +51,8 @@ impl Poll { impl Poll> { /// Change the success value of this `Poll` with the closure provided pub fn map_ok(self, f: F) -> Poll> - where F: FnOnce(T) -> U + where + F: FnOnce(T) -> U, { match self { Poll::Ready(Ok(t)) => Poll::Ready(Ok(f(t))), @@ -61,7 +63,8 @@ impl Poll> { /// Change the error value of this `Poll` with the closure provided pub fn map_err(self, f: F) -> Poll> - where F: FnOnce(E) -> U + where + F: FnOnce(E) -> U, { match self { Poll::Ready(Ok(t)) => Poll::Ready(Ok(t)), diff --git a/src/libcore/task/wake.rs b/src/libcore/task/wake.rs index 3f7098f1ef934..56d7208e34182 100644 --- a/src/libcore/task/wake.rs +++ b/src/libcore/task/wake.rs @@ -72,25 +72,20 @@ impl Waker { impl Clone for Waker { #[inline] fn clone(&self) -> Self { - unsafe { - self.inner.as_ref().clone_raw() - } + unsafe { self.inner.as_ref().clone_raw() } } } impl fmt::Debug for Waker { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("Waker") - .finish() + f.debug_struct("Waker").finish() } } impl Drop for Waker { #[inline] fn drop(&mut self) { - unsafe { - self.inner.as_ref().drop_raw() - } + unsafe { self.inner.as_ref().drop_raw() } } } @@ -191,8 +186,7 @@ impl From for Waker { impl fmt::Debug for LocalWaker { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("LocalWaker") - .finish() + f.debug_struct("LocalWaker").finish() } } diff --git a/src/libcore/tests/any.rs b/src/libcore/tests/any.rs index 62bebcb03c96a..530b1e6bf000c 100644 --- a/src/libcore/tests/any.rs +++ b/src/libcore/tests/any.rs @@ -49,12 +49,12 @@ fn any_downcast_ref() { match a.downcast_ref::() { Some(&5) => {} - x => panic!("Unexpected value {:?}", x) + x => panic!("Unexpected value {:?}", x), } match a.downcast_ref::() { None => {} - x => panic!("Unexpected value {:?}", x) + x => panic!("Unexpected value {:?}", x), } } @@ -72,7 +72,7 @@ fn any_downcast_mut() { assert_eq!(*x, 5); *x = 612; } - x => panic!("Unexpected value {:?}", x) + x => panic!("Unexpected value {:?}", x), } match b_r.downcast_mut::() { @@ -80,27 +80,27 @@ fn any_downcast_mut() { assert_eq!(*x, 7); *x = 413; } - x => panic!("Unexpected value {:?}", x) + x => panic!("Unexpected value {:?}", x), } match a_r.downcast_mut::() { None => (), - x => panic!("Unexpected value {:?}", x) + x => panic!("Unexpected value {:?}", x), } match b_r.downcast_mut::() { None => (), - x => panic!("Unexpected value {:?}", x) + x => panic!("Unexpected value {:?}", x), } match a_r.downcast_mut::() { Some(&mut 612) => {} - x => 
panic!("Unexpected value {:?}", x) + x => panic!("Unexpected value {:?}", x), } match b_r.downcast_mut::() { Some(&mut 413) => {} - x => panic!("Unexpected value {:?}", x) + x => panic!("Unexpected value {:?}", x), } } diff --git a/src/libcore/tests/ascii.rs b/src/libcore/tests/ascii.rs index ec98e0464c9e6..8b80ae6c331d0 100644 --- a/src/libcore/tests/ascii.rs +++ b/src/libcore/tests/ascii.rs @@ -12,47 +12,65 @@ fn test_is_ascii() { assert!("".is_ascii()); assert!("banana\0\u{7F}".is_ascii()); assert!("banana\0\u{7F}".chars().all(|c| c.is_ascii())); - assert!(!"ประเทศไทย中华Việt Nam".chars().all(|c| c.is_ascii())); - assert!(!"ประเทศไทย中华ệ ".chars().any(|c| c.is_ascii())); + assert!(!"ประเทศไทย中华Việt Nam" + .chars() + .all(|c| c.is_ascii())); + assert!(!"ประเทศไทย中华ệ " + .chars() + .any(|c| c.is_ascii())); } #[test] fn test_to_ascii_uppercase() { - assert_eq!("url()URL()uRl()ürl".to_ascii_uppercase(), "URL()URL()URL()üRL"); + assert_eq!( + "url()URL()uRl()ürl".to_ascii_uppercase(), + "URL()URL()URL()üRL" + ); assert_eq!("hıKß".to_ascii_uppercase(), "HıKß"); for i in 0..501 { - let upper = if 'a' as u32 <= i && i <= 'z' as u32 { i + 'A' as u32 - 'a' as u32 } - else { i }; - assert_eq!((from_u32(i).unwrap()).to_string().to_ascii_uppercase(), - (from_u32(upper).unwrap()).to_string()); + let upper = if 'a' as u32 <= i && i <= 'z' as u32 { + i + 'A' as u32 - 'a' as u32 + } else { + i + }; + assert_eq!( + (from_u32(i).unwrap()).to_string().to_ascii_uppercase(), + (from_u32(upper).unwrap()).to_string() + ); } } #[test] fn test_to_ascii_lowercase() { - assert_eq!("url()URL()uRl()Ürl".to_ascii_lowercase(), "url()url()url()Ürl"); + assert_eq!( + "url()URL()uRl()Ürl".to_ascii_lowercase(), + "url()url()url()Ürl" + ); // Dotted capital I, Kelvin sign, Sharp S. assert_eq!("HİKß".to_ascii_lowercase(), "hİKß"); for i in 0..501 { - let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { i + 'a' as u32 - 'A' as u32 } - else { i }; - assert_eq!((from_u32(i).unwrap()).to_string().to_ascii_lowercase(), - (from_u32(lower).unwrap()).to_string()); + let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { + i + 'a' as u32 - 'A' as u32 + } else { + i + }; + assert_eq!( + (from_u32(i).unwrap()).to_string().to_ascii_lowercase(), + (from_u32(lower).unwrap()).to_string() + ); } } #[test] fn test_make_ascii_lower_case() { macro_rules! test { - ($from: expr, $to: expr) => { - { - let mut x = $from; - x.make_ascii_lowercase(); - assert_eq!(x, $to); - } - } + ($from: expr, $to: expr) => {{ + let mut x = $from; + x.make_ascii_lowercase(); + assert_eq!(x, $to); + }}; } test!(b'A', b'a'); test!(b'a', b'a'); @@ -65,17 +83,14 @@ fn test_make_ascii_lower_case() { test!("HİKß".to_string(), "hİKß"); } - #[test] fn test_make_ascii_upper_case() { macro_rules! test { - ($from: expr, $to: expr) => { - { - let mut x = $from; - x.make_ascii_uppercase(); - assert_eq!(x, $to); - } - } + ($from: expr, $to: expr) => {{ + let mut x = $from; + x.make_ascii_uppercase(); + assert_eq!(x, $to); + }}; } test!(b'a', b'A'); test!(b'A', b'A'); @@ -88,7 +103,7 @@ fn test_make_ascii_upper_case() { test!("hıKß".to_string(), "HıKß"); let mut x = "Hello".to_string(); - x[..3].make_ascii_uppercase(); // Test IndexMut on String. + x[..3].make_ascii_uppercase(); // Test IndexMut on String. 
assert_eq!(x, "HELlo") } @@ -103,10 +118,14 @@ fn test_eq_ignore_ascii_case() { assert!(!"ß".eq_ignore_ascii_case("s")); for i in 0..501 { - let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { i + 'a' as u32 - 'A' as u32 } - else { i }; - assert!((from_u32(i).unwrap()).to_string().eq_ignore_ascii_case( - &from_u32(lower).unwrap().to_string())); + let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { + i + 'a' as u32 - 'A' as u32 + } else { + i + }; + assert!((from_u32(i).unwrap()) + .to_string() + .eq_ignore_ascii_case(&from_u32(lower).unwrap().to_string())); } } @@ -158,12 +177,14 @@ macro_rules! assert_none { #[test] fn test_is_ascii_alphabetic() { - assert_all!(is_ascii_alphabetic, + assert_all!( + is_ascii_alphabetic, "", "abcdefghijklmnopqrstuvwxyz", "ABCDEFGHIJKLMNOQPRSTUVWXYZ", ); - assert_none!(is_ascii_alphabetic, + assert_none!( + is_ascii_alphabetic, "0123456789", "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~", " \t\n\x0c\r", @@ -177,11 +198,9 @@ fn test_is_ascii_alphabetic() { #[test] fn test_is_ascii_uppercase() { - assert_all!(is_ascii_uppercase, - "", - "ABCDEFGHIJKLMNOQPRSTUVWXYZ", - ); - assert_none!(is_ascii_uppercase, + assert_all!(is_ascii_uppercase, "", "ABCDEFGHIJKLMNOQPRSTUVWXYZ",); + assert_none!( + is_ascii_uppercase, "abcdefghijklmnopqrstuvwxyz", "0123456789", "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~", @@ -196,10 +215,9 @@ fn test_is_ascii_uppercase() { #[test] fn test_is_ascii_lowercase() { - assert_all!(is_ascii_lowercase, - "abcdefghijklmnopqrstuvwxyz", - ); - assert_none!(is_ascii_lowercase, + assert_all!(is_ascii_lowercase, "abcdefghijklmnopqrstuvwxyz",); + assert_none!( + is_ascii_lowercase, "ABCDEFGHIJKLMNOQPRSTUVWXYZ", "0123456789", "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~", @@ -214,13 +232,15 @@ fn test_is_ascii_lowercase() { #[test] fn test_is_ascii_alphanumeric() { - assert_all!(is_ascii_alphanumeric, + assert_all!( + is_ascii_alphanumeric, "", "abcdefghijklmnopqrstuvwxyz", "ABCDEFGHIJKLMNOQPRSTUVWXYZ", "0123456789", ); - assert_none!(is_ascii_alphanumeric, + assert_none!( + is_ascii_alphanumeric, "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~", " \t\n\x0c\r", "\x00\x01\x02\x03\x04\x05\x06\x07", @@ -233,11 +253,9 @@ fn test_is_ascii_alphanumeric() { #[test] fn test_is_ascii_digit() { - assert_all!(is_ascii_digit, - "", - "0123456789", - ); - assert_none!(is_ascii_digit, + assert_all!(is_ascii_digit, "", "0123456789",); + assert_none!( + is_ascii_digit, "abcdefghijklmnopqrstuvwxyz", "ABCDEFGHIJKLMNOQPRSTUVWXYZ", "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~", @@ -252,12 +270,9 @@ fn test_is_ascii_digit() { #[test] fn test_is_ascii_hexdigit() { - assert_all!(is_ascii_hexdigit, - "", - "0123456789", - "abcdefABCDEF", - ); - assert_none!(is_ascii_hexdigit, + assert_all!(is_ascii_hexdigit, "", "0123456789", "abcdefABCDEF",); + assert_none!( + is_ascii_hexdigit, "ghijklmnopqrstuvwxyz", "GHIJKLMNOQPRSTUVWXYZ", "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~", @@ -272,11 +287,13 @@ fn test_is_ascii_hexdigit() { #[test] fn test_is_ascii_punctuation() { - assert_all!(is_ascii_punctuation, + assert_all!( + is_ascii_punctuation, "", "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~", ); - assert_none!(is_ascii_punctuation, + assert_none!( + is_ascii_punctuation, "abcdefghijklmnopqrstuvwxyz", "ABCDEFGHIJKLMNOQPRSTUVWXYZ", "0123456789", @@ -291,14 +308,16 @@ fn test_is_ascii_punctuation() { #[test] fn test_is_ascii_graphic() { - assert_all!(is_ascii_graphic, + assert_all!( + is_ascii_graphic, "", "abcdefghijklmnopqrstuvwxyz", "ABCDEFGHIJKLMNOQPRSTUVWXYZ", "0123456789", "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~", ); - 
assert_none!(is_ascii_graphic, + assert_none!( + is_ascii_graphic, " \t\n\x0c\r", "\x00\x01\x02\x03\x04\x05\x06\x07", "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", @@ -310,11 +329,9 @@ fn test_is_ascii_graphic() { #[test] fn test_is_ascii_whitespace() { - assert_all!(is_ascii_whitespace, - "", - " \t\n\x0c\r", - ); - assert_none!(is_ascii_whitespace, + assert_all!(is_ascii_whitespace, "", " \t\n\x0c\r",); + assert_none!( + is_ascii_whitespace, "abcdefghijklmnopqrstuvwxyz", "ABCDEFGHIJKLMNOQPRSTUVWXYZ", "0123456789", @@ -329,7 +346,8 @@ fn test_is_ascii_whitespace() { #[test] fn test_is_ascii_control() { - assert_all!(is_ascii_control, + assert_all!( + is_ascii_control, "", "\x00\x01\x02\x03\x04\x05\x06\x07", "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", @@ -337,7 +355,8 @@ fn test_is_ascii_control() { "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", "\x7f", ); - assert_none!(is_ascii_control, + assert_none!( + is_ascii_control, "abcdefghijklmnopqrstuvwxyz", "ABCDEFGHIJKLMNOQPRSTUVWXYZ", "0123456789", diff --git a/src/libcore/tests/atomic.rs b/src/libcore/tests/atomic.rs index 05fe8460f324e..acbd913982c1f 100644 --- a/src/libcore/tests/atomic.rs +++ b/src/libcore/tests/atomic.rs @@ -1,5 +1,5 @@ -use core::sync::atomic::*; use core::sync::atomic::Ordering::SeqCst; +use core::sync::atomic::*; #[test] fn bool_() { @@ -15,7 +15,7 @@ fn bool_() { fn bool_and() { let a = AtomicBool::new(true); assert_eq!(a.fetch_and(false, SeqCst), true); - assert_eq!(a.load(SeqCst),false); + assert_eq!(a.load(SeqCst), false); } #[test] @@ -89,7 +89,7 @@ fn int_xor() { static S_FALSE: AtomicBool = AtomicBool::new(false); static S_TRUE: AtomicBool = AtomicBool::new(true); -static S_INT: AtomicIsize = AtomicIsize::new(0); +static S_INT: AtomicIsize = AtomicIsize::new(0); static S_UINT: AtomicUsize = AtomicUsize::new(0); #[test] diff --git a/src/libcore/tests/cell.rs b/src/libcore/tests/cell.rs index 56f295dff8e43..1774287824364 100644 --- a/src/libcore/tests/cell.rs +++ b/src/libcore/tests/cell.rs @@ -250,7 +250,9 @@ fn as_ptr() { assert_eq!(1, unsafe { *c1.as_ptr() }); let c2: Cell = Cell::new(0); - unsafe { *c2.as_ptr() = 1; } + unsafe { + *c2.as_ptr() = 1; + } assert_eq!(1, c2.get()); let r1: RefCell = RefCell::new(0); @@ -258,7 +260,9 @@ fn as_ptr() { assert_eq!(1, unsafe { *r1.as_ptr() }); let r2: RefCell = RefCell::new(0); - unsafe { *r2.as_ptr() = 1; } + unsafe { + *r2.as_ptr() = 1; + } assert_eq!(1, *r2.borrow()); } diff --git a/src/libcore/tests/char.rs b/src/libcore/tests/char.rs index 61856242c5706..4bfa163796c59 100644 --- a/src/libcore/tests/char.rs +++ b/src/libcore/tests/char.rs @@ -1,6 +1,6 @@ -use std::{char,str}; use std::convert::TryFrom; use std::str::FromStr; +use std::{char, str}; #[test] fn test_convert() { @@ -139,13 +139,13 @@ fn test_is_control() { #[test] fn test_is_numeric() { - assert!('2'.is_numeric()); - assert!('7'.is_numeric()); - assert!('¾'.is_numeric()); - assert!(!'c'.is_numeric()); - assert!(!'i'.is_numeric()); - assert!(!'z'.is_numeric()); - assert!(!'Q'.is_numeric()); + assert!('2'.is_numeric()); + assert!('7'.is_numeric()); + assert!('¾'.is_numeric()); + assert!(!'c'.is_numeric()); + assert!(!'i'.is_numeric()); + assert!(!'z'.is_numeric()); + assert!(!'Q'.is_numeric()); } #[test] @@ -172,9 +172,9 @@ fn test_escape_debug() { assert_eq!(string('\u{ff}'), "\u{ff}"); assert_eq!(string('\u{11b}'), "\u{11b}"); assert_eq!(string('\u{1d4b6}'), "\u{1d4b6}"); - assert_eq!(string('\u{301}'), "\\u{301}"); // combining character - assert_eq!(string('\u{200b}'),"\\u{200b}"); // zero width space - 
assert_eq!(string('\u{e000}'), "\\u{e000}"); // private use 1 + assert_eq!(string('\u{301}'), "\\u{301}"); // combining character + assert_eq!(string('\u{200b}'), "\\u{200b}"); // zero width space + assert_eq!(string('\u{e000}'), "\\u{e000}"); // private use 1 assert_eq!(string('\u{100000}'), "\\u{100000}"); // private use 2 } @@ -268,8 +268,8 @@ fn test_len_utf16() { fn test_decode_utf16() { fn check(s: &[u16], expected: &[Result]) { let v = char::decode_utf16(s.iter().cloned()) - .map(|r| r.map_err(|e| e.unpaired_surrogate())) - .collect::>(); + .map(|r| r.map_err(|e| e.unpaired_surrogate())) + .collect::>(); assert_eq!(v, expected); } check(&[0xD800, 0x41, 0x42], &[Err(0xD800), Ok('A'), Ok('B')]); diff --git a/src/libcore/tests/cmp.rs b/src/libcore/tests/cmp.rs index 4e624e5eb126e..62065ee39482f 100644 --- a/src/libcore/tests/cmp.rs +++ b/src/libcore/tests/cmp.rs @@ -1,4 +1,4 @@ -use core::cmp::Ordering::{Less, Greater, Equal}; +use core::cmp::Ordering::{Equal, Greater, Less}; #[test] fn test_int_totalord() { @@ -71,7 +71,7 @@ fn test_ordering_then_with() { fn test_user_defined_eq() { // Our type. struct SketchyNum { - num : isize + num: isize, } // Our implementation of `PartialEq` to support `==` and `!=`. @@ -83,6 +83,6 @@ fn test_user_defined_eq() { } // Now these binary operators will work when applied! - assert!(SketchyNum {num: 37} == SketchyNum {num: 34}); - assert!(SketchyNum {num: 25} != SketchyNum {num: 57}); + assert!(SketchyNum { num: 37 } == SketchyNum { num: 34 }); + assert!(SketchyNum { num: 25 } != SketchyNum { num: 57 }); } diff --git a/src/libcore/tests/fmt/builders.rs b/src/libcore/tests/fmt/builders.rs index fd7192cc15119..186f12fd13f75 100644 --- a/src/libcore/tests/fmt/builders.rs +++ b/src/libcore/tests/fmt/builders.rs @@ -21,18 +21,17 @@ mod debug_struct { impl fmt::Debug for Foo { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_struct("Foo") - .field("bar", &true) - .finish() + fmt.debug_struct("Foo").field("bar", &true).finish() } } assert_eq!("Foo { bar: true }", format!("{:?}", Foo)); assert_eq!( -"Foo { + "Foo { bar: true }", - format!("{:#?}", Foo)); + format!("{:#?}", Foo) + ); } #[test] @@ -50,11 +49,12 @@ mod debug_struct { assert_eq!("Foo { bar: true, baz: 10/20 }", format!("{:?}", Foo)); assert_eq!( -"Foo { + "Foo { bar: true, baz: 10/20 }", - format!("{:#?}", Foo)); + format!("{:#?}", Foo) + ); } #[test] @@ -81,17 +81,20 @@ mod debug_struct { } } - assert_eq!("Bar { foo: Foo { bar: true, baz: 10/20 }, hello: \"world\" }", - format!("{:?}", Bar)); assert_eq!( -"Bar { + "Bar { foo: Foo { bar: true, baz: 10/20 }, hello: \"world\" }", + format!("{:?}", Bar) + ); + assert_eq!( + "Bar { foo: Foo { bar: true, baz: 10/20 }, hello: \"world\" }", - format!("{:#?}", Bar)); + format!("{:#?}", Bar) + ); } } @@ -118,18 +121,17 @@ mod debug_tuple { impl fmt::Debug for Foo { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_tuple("Foo") - .field(&true) - .finish() + fmt.debug_tuple("Foo").field(&true).finish() } } assert_eq!("Foo(true)", format!("{:?}", Foo)); assert_eq!( -"Foo( + "Foo( true )", - format!("{:#?}", Foo)); + format!("{:#?}", Foo) + ); } #[test] @@ -147,11 +149,12 @@ mod debug_tuple { assert_eq!("Foo(true, 10/20)", format!("{:?}", Foo)); assert_eq!( -"Foo( + "Foo( true, 10/20 )", - format!("{:#?}", Foo)); + format!("{:#?}", Foo) + ); } #[test] @@ -171,24 +174,21 @@ mod debug_tuple { impl fmt::Debug for Bar { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_tuple("Bar") - .field(&Foo) - 
.field(&"world") - .finish() + fmt.debug_tuple("Bar").field(&Foo).field(&"world").finish() } } - assert_eq!("Bar(Foo(true, 10/20), \"world\")", - format!("{:?}", Bar)); + assert_eq!("Bar(Foo(true, 10/20), \"world\")", format!("{:?}", Bar)); assert_eq!( -"Bar( + "Bar( Foo( true, 10/20 ), \"world\" )", - format!("{:#?}", Bar)); + format!("{:#?}", Bar) + ); } } @@ -215,18 +215,17 @@ mod debug_map { impl fmt::Debug for Foo { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_map() - .entry(&"bar", &true) - .finish() + fmt.debug_map().entry(&"bar", &true).finish() } } assert_eq!("{\"bar\": true}", format!("{:?}", Foo)); assert_eq!( -"{ + "{ \"bar\": true }", - format!("{:#?}", Foo)); + format!("{:#?}", Foo) + ); } #[test] @@ -244,11 +243,12 @@ mod debug_map { assert_eq!("{\"bar\": true, 10: 10/20}", format!("{:?}", Foo)); assert_eq!( -"{ + "{ \"bar\": true, 10: 10/20 }", - format!("{:#?}", Foo)); + format!("{:#?}", Foo) + ); } #[test] @@ -275,11 +275,13 @@ mod debug_map { } } - assert_eq!("{\"foo\": {\"bar\": true, 10: 10/20}, \ - {\"bar\": true, 10: 10/20}: \"world\"}", - format!("{:?}", Bar)); assert_eq!( -"{ + "{\"foo\": {\"bar\": true, 10: 10/20}, \ + {\"bar\": true, 10: 10/20}: \"world\"}", + format!("{:?}", Bar) + ); + assert_eq!( + "{ \"foo\": { \"bar\": true, 10: 10/20 @@ -289,7 +291,8 @@ mod debug_map { 10: 10/20 }: \"world\" }", - format!("{:#?}", Bar)); + format!("{:#?}", Bar) + ); } } @@ -316,18 +319,17 @@ mod debug_set { impl fmt::Debug for Foo { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_set() - .entry(&true) - .finish() + fmt.debug_set().entry(&true).finish() } } assert_eq!("{true}", format!("{:?}", Foo)); assert_eq!( -"{ + "{ true }", - format!("{:#?}", Foo)); + format!("{:#?}", Foo) + ); } #[test] @@ -345,11 +347,12 @@ mod debug_set { assert_eq!("{true, 10/20}", format!("{:?}", Foo)); assert_eq!( -"{ + "{ true, 10/20 }", - format!("{:#?}", Foo)); + format!("{:#?}", Foo) + ); } #[test] @@ -369,24 +372,21 @@ mod debug_set { impl fmt::Debug for Bar { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_set() - .entry(&Foo) - .entry(&"world") - .finish() + fmt.debug_set().entry(&Foo).entry(&"world").finish() } } - assert_eq!("{{true, 10/20}, \"world\"}", - format!("{:?}", Bar)); + assert_eq!("{{true, 10/20}, \"world\"}", format!("{:?}", Bar)); assert_eq!( -"{ + "{ { true, 10/20 }, \"world\" }", - format!("{:#?}", Bar)); + format!("{:#?}", Bar) + ); } } @@ -413,18 +413,17 @@ mod debug_list { impl fmt::Debug for Foo { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_list() - .entry(&true) - .finish() + fmt.debug_list().entry(&true).finish() } } assert_eq!("[true]", format!("{:?}", Foo)); assert_eq!( -"[ + "[ true ]", - format!("{:#?}", Foo)); + format!("{:#?}", Foo) + ); } #[test] @@ -442,11 +441,12 @@ mod debug_list { assert_eq!("[true, 10/20]", format!("{:?}", Foo)); assert_eq!( -"[ + "[ true, 10/20 ]", - format!("{:#?}", Foo)); + format!("{:#?}", Foo) + ); } #[test] @@ -466,24 +466,21 @@ mod debug_list { impl fmt::Debug for Bar { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_list() - .entry(&Foo) - .entry(&"world") - .finish() + fmt.debug_list().entry(&Foo).entry(&"world").finish() } } - assert_eq!("[[true, 10/20], \"world\"]", - format!("{:?}", Bar)); + assert_eq!("[[true, 10/20], \"world\"]", format!("{:?}", Bar)); assert_eq!( -"[ + "[ [ true, 10/20 ], \"world\" ]", - format!("{:#?}", Bar)); + format!("{:#?}", Bar) + ); } } @@ -510,34 +507,54 @@ fn 
test_formatting_parameters_are_forwarded() { assert_eq!(format!("{:03?}", list), "[1024, 007]"); assert_eq!(format!("{:03?}", map), r#"{"bar": 1024, "baz": 007}"#); assert_eq!(format!("{:03?}", set), "{007, 1024}"); - assert_eq!(format!("{:#03?}", struct_), " + assert_eq!( + format!("{:#03?}", struct_), + " Foo { bar: 1024, baz: 007 } - ".trim()); - assert_eq!(format!("{:#03?}", tuple), " + " + .trim() + ); + assert_eq!( + format!("{:#03?}", tuple), + " ( 1024, 007 ) - ".trim()); - assert_eq!(format!("{:#03?}", list), " + " + .trim() + ); + assert_eq!( + format!("{:#03?}", list), + " [ 1024, 007 ] - ".trim()); - assert_eq!(format!("{:#03?}", map), r#" + " + .trim() + ); + assert_eq!( + format!("{:#03?}", map), + r#" { "bar": 1024, "baz": 007 } - "#.trim()); - assert_eq!(format!("{:#03?}", set), " + "# + .trim() + ); + assert_eq!( + format!("{:#03?}", set), + " { 007, 1024 } - ".trim()); + " + .trim() + ); } diff --git a/src/libcore/tests/fmt/mod.rs b/src/libcore/tests/fmt/mod.rs index d86e21cf40b6e..23802c52cf814 100644 --- a/src/libcore/tests/fmt/mod.rs +++ b/src/libcore/tests/fmt/mod.rs @@ -26,5 +26,8 @@ fn test_estimated_capacity() { assert_eq!(format_args!("Hello").estimated_capacity(), 5); assert_eq!(format_args!("Hello, {}!", "").estimated_capacity(), 16); assert_eq!(format_args!("{}, hello!", "World").estimated_capacity(), 0); - assert_eq!(format_args!("{}. 16-bytes piece", "World").estimated_capacity(), 32); + assert_eq!( + format_args!("{}. 16-bytes piece", "World").estimated_capacity(), + 32 + ); } diff --git a/src/libcore/tests/fmt/num.rs b/src/libcore/tests/fmt/num.rs index 6d9494ec289aa..fb308fdce3693 100644 --- a/src/libcore/tests/fmt/num.rs +++ b/src/libcore/tests/fmt/num.rs @@ -134,7 +134,7 @@ fn test_format_int_sign_padding() { #[test] fn test_format_int_twos_complement() { - use core::{i8, i16, i32, i64}; + use core::{i16, i32, i64, i8}; assert!(format!("{}", i8::MIN) == "-128"); assert!(format!("{}", i16::MIN) == "-32768"); assert!(format!("{}", i32::MIN) == "-2147483648"); diff --git a/src/libcore/tests/hash/mod.rs b/src/libcore/tests/hash/mod.rs index 135f4dfcac7d5..fed63a17cd388 100644 --- a/src/libcore/tests/hash/mod.rs +++ b/src/libcore/tests/hash/mod.rs @@ -1,7 +1,7 @@ mod sip; -use std::hash::{Hash, Hasher}; use std::default::Default; +use std::hash::{Hash, Hasher}; use std::rc::Rc; struct MyHasher { @@ -20,10 +20,11 @@ impl Hasher for MyHasher { self.hash += *byte as u64; } } - fn finish(&self) -> u64 { self.hash } + fn finish(&self) -> u64 { + self.hash + } } - #[test] fn test_writer_hasher() { fn hash(t: &T) -> u64 { @@ -52,17 +53,17 @@ fn test_writer_hasher() { assert_eq!(hash(&'a'), 97); let s: &str = "a"; - assert_eq!(hash(& s), 97 + 0xFF); + assert_eq!(hash(&s), 97 + 0xFF); let s: Box = String::from("a").into_boxed_str(); - assert_eq!(hash(& s), 97 + 0xFF); + assert_eq!(hash(&s), 97 + 0xFF); let s: Rc<&str> = Rc::new("a"); assert_eq!(hash(&s), 97 + 0xFF); let cs: &[u8] = &[1, 2, 3]; - assert_eq!(hash(& cs), 9); + assert_eq!(hash(&cs), 9); let cs: Box<[u8]> = Box::new([1, 2, 3]); - assert_eq!(hash(& cs), 9); + assert_eq!(hash(&cs), 9); let cs: Rc<[u8]> = Rc::new([1, 2, 3]); - assert_eq!(hash(& cs), 9); + assert_eq!(hash(&cs), 9); let ptr = 5_usize as *const i32; assert_eq!(hash(&ptr), 5); @@ -79,13 +80,23 @@ fn test_writer_hasher() { assert_eq!(hash(&slice_ptr), hash(&ptr) + cs.len() as u64); } -struct Custom { hash: u64 } -struct CustomHasher { output: u64 } +struct Custom { + hash: u64, +} +struct CustomHasher { + output: u64, +} impl Hasher for 
CustomHasher { - fn finish(&self) -> u64 { self.output } - fn write(&mut self, _: &[u8]) { panic!() } - fn write_u64(&mut self, data: u64) { self.output = data; } + fn finish(&self) -> u64 { + self.output + } + fn write(&mut self, _: &[u8]) { + panic!() + } + fn write_u64(&mut self, data: u64) { + self.output = data; + } } impl Default for CustomHasher { diff --git a/src/libcore/tests/hash/sip.rs b/src/libcore/tests/hash/sip.rs index b615cfd77ef1d..894778f955d59 100644 --- a/src/libcore/tests/hash/sip.rs +++ b/src/libcore/tests/hash/sip.rs @@ -2,7 +2,7 @@ use core::hash::{Hash, Hasher}; use core::hash::{SipHasher, SipHasher13}; -use core::{slice, mem}; +use core::{mem, slice}; // Hash just the bytes of the slice, without length prefix struct Bytes<'a>(&'a [u8]); @@ -16,25 +16,25 @@ impl<'a> Hash for Bytes<'a> { } macro_rules! u8to64_le { - ($buf:expr, $i:expr) => - ($buf[0+$i] as u64 | - ($buf[1+$i] as u64) << 8 | - ($buf[2+$i] as u64) << 16 | - ($buf[3+$i] as u64) << 24 | - ($buf[4+$i] as u64) << 32 | - ($buf[5+$i] as u64) << 40 | - ($buf[6+$i] as u64) << 48 | - ($buf[7+$i] as u64) << 56); - ($buf:expr, $i:expr, $len:expr) => - ({ + ($buf:expr, $i:expr) => { + $buf[0 + $i] as u64 + | ($buf[1 + $i] as u64) << 8 + | ($buf[2 + $i] as u64) << 16 + | ($buf[3 + $i] as u64) << 24 + | ($buf[4 + $i] as u64) << 32 + | ($buf[5 + $i] as u64) << 40 + | ($buf[6 + $i] as u64) << 48 + | ($buf[7 + $i] as u64) << 56 + }; + ($buf:expr, $i:expr, $len:expr) => {{ let mut t = 0; let mut out = 0; while t < $len { - out |= ($buf[t+$i] as u64) << t*8; + out |= ($buf[t + $i] as u64) << t * 8; t += 1; } out - }); + }}; } fn hash_with(mut st: H, x: &T) -> u64 { @@ -49,71 +49,71 @@ fn hash(x: &T) -> u64 { #[test] #[allow(unused_must_use)] fn test_siphash_1_3() { - let vecs : [[u8; 8]; 64] = [ - [ 0xdc, 0xc4, 0x0f, 0x05, 0x58, 0x01, 0xac, 0xab ], - [ 0x93, 0xca, 0x57, 0x7d, 0xf3, 0x9b, 0xf4, 0xc9 ], - [ 0x4d, 0xd4, 0xc7, 0x4d, 0x02, 0x9b, 0xcb, 0x82 ], - [ 0xfb, 0xf7, 0xdd, 0xe7, 0xb8, 0x0a, 0xf8, 0x8b ], - [ 0x28, 0x83, 0xd3, 0x88, 0x60, 0x57, 0x75, 0xcf ], - [ 0x67, 0x3b, 0x53, 0x49, 0x2f, 0xd5, 0xf9, 0xde ], - [ 0xa7, 0x22, 0x9f, 0xc5, 0x50, 0x2b, 0x0d, 0xc5 ], - [ 0x40, 0x11, 0xb1, 0x9b, 0x98, 0x7d, 0x92, 0xd3 ], - [ 0x8e, 0x9a, 0x29, 0x8d, 0x11, 0x95, 0x90, 0x36 ], - [ 0xe4, 0x3d, 0x06, 0x6c, 0xb3, 0x8e, 0xa4, 0x25 ], - [ 0x7f, 0x09, 0xff, 0x92, 0xee, 0x85, 0xde, 0x79 ], - [ 0x52, 0xc3, 0x4d, 0xf9, 0xc1, 0x18, 0xc1, 0x70 ], - [ 0xa2, 0xd9, 0xb4, 0x57, 0xb1, 0x84, 0xa3, 0x78 ], - [ 0xa7, 0xff, 0x29, 0x12, 0x0c, 0x76, 0x6f, 0x30 ], - [ 0x34, 0x5d, 0xf9, 0xc0, 0x11, 0xa1, 0x5a, 0x60 ], - [ 0x56, 0x99, 0x51, 0x2a, 0x6d, 0xd8, 0x20, 0xd3 ], - [ 0x66, 0x8b, 0x90, 0x7d, 0x1a, 0xdd, 0x4f, 0xcc ], - [ 0x0c, 0xd8, 0xdb, 0x63, 0x90, 0x68, 0xf2, 0x9c ], - [ 0x3e, 0xe6, 0x73, 0xb4, 0x9c, 0x38, 0xfc, 0x8f ], - [ 0x1c, 0x7d, 0x29, 0x8d, 0xe5, 0x9d, 0x1f, 0xf2 ], - [ 0x40, 0xe0, 0xcc, 0xa6, 0x46, 0x2f, 0xdc, 0xc0 ], - [ 0x44, 0xf8, 0x45, 0x2b, 0xfe, 0xab, 0x92, 0xb9 ], - [ 0x2e, 0x87, 0x20, 0xa3, 0x9b, 0x7b, 0xfe, 0x7f ], - [ 0x23, 0xc1, 0xe6, 0xda, 0x7f, 0x0e, 0x5a, 0x52 ], - [ 0x8c, 0x9c, 0x34, 0x67, 0xb2, 0xae, 0x64, 0xf4 ], - [ 0x79, 0x09, 0x5b, 0x70, 0x28, 0x59, 0xcd, 0x45 ], - [ 0xa5, 0x13, 0x99, 0xca, 0xe3, 0x35, 0x3e, 0x3a ], - [ 0x35, 0x3b, 0xde, 0x4a, 0x4e, 0xc7, 0x1d, 0xa9 ], - [ 0x0d, 0xd0, 0x6c, 0xef, 0x02, 0xed, 0x0b, 0xfb ], - [ 0xf4, 0xe1, 0xb1, 0x4a, 0xb4, 0x3c, 0xd9, 0x88 ], - [ 0x63, 0xe6, 0xc5, 0x43, 0xd6, 0x11, 0x0f, 0x54 ], - [ 0xbc, 0xd1, 0x21, 0x8c, 0x1f, 0xdd, 0x70, 0x23 ], - [ 0x0d, 0xb6, 0xa7, 0x16, 
0x6c, 0x7b, 0x15, 0x81 ], - [ 0xbf, 0xf9, 0x8f, 0x7a, 0xe5, 0xb9, 0x54, 0x4d ], - [ 0x3e, 0x75, 0x2a, 0x1f, 0x78, 0x12, 0x9f, 0x75 ], - [ 0x91, 0x6b, 0x18, 0xbf, 0xbe, 0xa3, 0xa1, 0xce ], - [ 0x06, 0x62, 0xa2, 0xad, 0xd3, 0x08, 0xf5, 0x2c ], - [ 0x57, 0x30, 0xc3, 0xa3, 0x2d, 0x1c, 0x10, 0xb6 ], - [ 0xa1, 0x36, 0x3a, 0xae, 0x96, 0x74, 0xf4, 0xb3 ], - [ 0x92, 0x83, 0x10, 0x7b, 0x54, 0x57, 0x6b, 0x62 ], - [ 0x31, 0x15, 0xe4, 0x99, 0x32, 0x36, 0xd2, 0xc1 ], - [ 0x44, 0xd9, 0x1a, 0x3f, 0x92, 0xc1, 0x7c, 0x66 ], - [ 0x25, 0x88, 0x13, 0xc8, 0xfe, 0x4f, 0x70, 0x65 ], - [ 0xa6, 0x49, 0x89, 0xc2, 0xd1, 0x80, 0xf2, 0x24 ], - [ 0x6b, 0x87, 0xf8, 0xfa, 0xed, 0x1c, 0xca, 0xc2 ], - [ 0x96, 0x21, 0x04, 0x9f, 0xfc, 0x4b, 0x16, 0xc2 ], - [ 0x23, 0xd6, 0xb1, 0x68, 0x93, 0x9c, 0x6e, 0xa1 ], - [ 0xfd, 0x14, 0x51, 0x8b, 0x9c, 0x16, 0xfb, 0x49 ], - [ 0x46, 0x4c, 0x07, 0xdf, 0xf8, 0x43, 0x31, 0x9f ], - [ 0xb3, 0x86, 0xcc, 0x12, 0x24, 0xaf, 0xfd, 0xc6 ], - [ 0x8f, 0x09, 0x52, 0x0a, 0xd1, 0x49, 0xaf, 0x7e ], - [ 0x9a, 0x2f, 0x29, 0x9d, 0x55, 0x13, 0xf3, 0x1c ], - [ 0x12, 0x1f, 0xf4, 0xa2, 0xdd, 0x30, 0x4a, 0xc4 ], - [ 0xd0, 0x1e, 0xa7, 0x43, 0x89, 0xe9, 0xfa, 0x36 ], - [ 0xe6, 0xbc, 0xf0, 0x73, 0x4c, 0xb3, 0x8f, 0x31 ], - [ 0x80, 0xe9, 0xa7, 0x70, 0x36, 0xbf, 0x7a, 0xa2 ], - [ 0x75, 0x6d, 0x3c, 0x24, 0xdb, 0xc0, 0xbc, 0xb4 ], - [ 0x13, 0x15, 0xb7, 0xfd, 0x52, 0xd8, 0xf8, 0x23 ], - [ 0x08, 0x8a, 0x7d, 0xa6, 0x4d, 0x5f, 0x03, 0x8f ], - [ 0x48, 0xf1, 0xe8, 0xb7, 0xe5, 0xd0, 0x9c, 0xd8 ], - [ 0xee, 0x44, 0xa6, 0xf7, 0xbc, 0xe6, 0xf4, 0xf6 ], - [ 0xf2, 0x37, 0x18, 0x0f, 0xd8, 0x9a, 0xc5, 0xae ], - [ 0xe0, 0x94, 0x66, 0x4b, 0x15, 0xf6, 0xb2, 0xc3 ], - [ 0xa8, 0xb3, 0xbb, 0xb7, 0x62, 0x90, 0x19, 0x9d ] + let vecs: [[u8; 8]; 64] = [ + [0xdc, 0xc4, 0x0f, 0x05, 0x58, 0x01, 0xac, 0xab], + [0x93, 0xca, 0x57, 0x7d, 0xf3, 0x9b, 0xf4, 0xc9], + [0x4d, 0xd4, 0xc7, 0x4d, 0x02, 0x9b, 0xcb, 0x82], + [0xfb, 0xf7, 0xdd, 0xe7, 0xb8, 0x0a, 0xf8, 0x8b], + [0x28, 0x83, 0xd3, 0x88, 0x60, 0x57, 0x75, 0xcf], + [0x67, 0x3b, 0x53, 0x49, 0x2f, 0xd5, 0xf9, 0xde], + [0xa7, 0x22, 0x9f, 0xc5, 0x50, 0x2b, 0x0d, 0xc5], + [0x40, 0x11, 0xb1, 0x9b, 0x98, 0x7d, 0x92, 0xd3], + [0x8e, 0x9a, 0x29, 0x8d, 0x11, 0x95, 0x90, 0x36], + [0xe4, 0x3d, 0x06, 0x6c, 0xb3, 0x8e, 0xa4, 0x25], + [0x7f, 0x09, 0xff, 0x92, 0xee, 0x85, 0xde, 0x79], + [0x52, 0xc3, 0x4d, 0xf9, 0xc1, 0x18, 0xc1, 0x70], + [0xa2, 0xd9, 0xb4, 0x57, 0xb1, 0x84, 0xa3, 0x78], + [0xa7, 0xff, 0x29, 0x12, 0x0c, 0x76, 0x6f, 0x30], + [0x34, 0x5d, 0xf9, 0xc0, 0x11, 0xa1, 0x5a, 0x60], + [0x56, 0x99, 0x51, 0x2a, 0x6d, 0xd8, 0x20, 0xd3], + [0x66, 0x8b, 0x90, 0x7d, 0x1a, 0xdd, 0x4f, 0xcc], + [0x0c, 0xd8, 0xdb, 0x63, 0x90, 0x68, 0xf2, 0x9c], + [0x3e, 0xe6, 0x73, 0xb4, 0x9c, 0x38, 0xfc, 0x8f], + [0x1c, 0x7d, 0x29, 0x8d, 0xe5, 0x9d, 0x1f, 0xf2], + [0x40, 0xe0, 0xcc, 0xa6, 0x46, 0x2f, 0xdc, 0xc0], + [0x44, 0xf8, 0x45, 0x2b, 0xfe, 0xab, 0x92, 0xb9], + [0x2e, 0x87, 0x20, 0xa3, 0x9b, 0x7b, 0xfe, 0x7f], + [0x23, 0xc1, 0xe6, 0xda, 0x7f, 0x0e, 0x5a, 0x52], + [0x8c, 0x9c, 0x34, 0x67, 0xb2, 0xae, 0x64, 0xf4], + [0x79, 0x09, 0x5b, 0x70, 0x28, 0x59, 0xcd, 0x45], + [0xa5, 0x13, 0x99, 0xca, 0xe3, 0x35, 0x3e, 0x3a], + [0x35, 0x3b, 0xde, 0x4a, 0x4e, 0xc7, 0x1d, 0xa9], + [0x0d, 0xd0, 0x6c, 0xef, 0x02, 0xed, 0x0b, 0xfb], + [0xf4, 0xe1, 0xb1, 0x4a, 0xb4, 0x3c, 0xd9, 0x88], + [0x63, 0xe6, 0xc5, 0x43, 0xd6, 0x11, 0x0f, 0x54], + [0xbc, 0xd1, 0x21, 0x8c, 0x1f, 0xdd, 0x70, 0x23], + [0x0d, 0xb6, 0xa7, 0x16, 0x6c, 0x7b, 0x15, 0x81], + [0xbf, 0xf9, 0x8f, 0x7a, 0xe5, 0xb9, 0x54, 0x4d], + [0x3e, 0x75, 0x2a, 0x1f, 0x78, 0x12, 0x9f, 0x75], + 
[0x91, 0x6b, 0x18, 0xbf, 0xbe, 0xa3, 0xa1, 0xce], + [0x06, 0x62, 0xa2, 0xad, 0xd3, 0x08, 0xf5, 0x2c], + [0x57, 0x30, 0xc3, 0xa3, 0x2d, 0x1c, 0x10, 0xb6], + [0xa1, 0x36, 0x3a, 0xae, 0x96, 0x74, 0xf4, 0xb3], + [0x92, 0x83, 0x10, 0x7b, 0x54, 0x57, 0x6b, 0x62], + [0x31, 0x15, 0xe4, 0x99, 0x32, 0x36, 0xd2, 0xc1], + [0x44, 0xd9, 0x1a, 0x3f, 0x92, 0xc1, 0x7c, 0x66], + [0x25, 0x88, 0x13, 0xc8, 0xfe, 0x4f, 0x70, 0x65], + [0xa6, 0x49, 0x89, 0xc2, 0xd1, 0x80, 0xf2, 0x24], + [0x6b, 0x87, 0xf8, 0xfa, 0xed, 0x1c, 0xca, 0xc2], + [0x96, 0x21, 0x04, 0x9f, 0xfc, 0x4b, 0x16, 0xc2], + [0x23, 0xd6, 0xb1, 0x68, 0x93, 0x9c, 0x6e, 0xa1], + [0xfd, 0x14, 0x51, 0x8b, 0x9c, 0x16, 0xfb, 0x49], + [0x46, 0x4c, 0x07, 0xdf, 0xf8, 0x43, 0x31, 0x9f], + [0xb3, 0x86, 0xcc, 0x12, 0x24, 0xaf, 0xfd, 0xc6], + [0x8f, 0x09, 0x52, 0x0a, 0xd1, 0x49, 0xaf, 0x7e], + [0x9a, 0x2f, 0x29, 0x9d, 0x55, 0x13, 0xf3, 0x1c], + [0x12, 0x1f, 0xf4, 0xa2, 0xdd, 0x30, 0x4a, 0xc4], + [0xd0, 0x1e, 0xa7, 0x43, 0x89, 0xe9, 0xfa, 0x36], + [0xe6, 0xbc, 0xf0, 0x73, 0x4c, 0xb3, 0x8f, 0x31], + [0x80, 0xe9, 0xa7, 0x70, 0x36, 0xbf, 0x7a, 0xa2], + [0x75, 0x6d, 0x3c, 0x24, 0xdb, 0xc0, 0xbc, 0xb4], + [0x13, 0x15, 0xb7, 0xfd, 0x52, 0xd8, 0xf8, 0x23], + [0x08, 0x8a, 0x7d, 0xa6, 0x4d, 0x5f, 0x03, 0x8f], + [0x48, 0xf1, 0xe8, 0xb7, 0xe5, 0xd0, 0x9c, 0xd8], + [0xee, 0x44, 0xa6, 0xf7, 0xbc, 0xe6, 0xf4, 0xf6], + [0xf2, 0x37, 0x18, 0x0f, 0xd8, 0x9a, 0xc5, 0xae], + [0xe0, 0x94, 0x66, 0x4b, 0x15, 0xf6, 0xb2, 0xc3], + [0xa8, 0xb3, 0xbb, 0xb7, 0x62, 0x90, 0x19, 0x9d], ]; let k0 = 0x_07_06_05_04_03_02_01_00; @@ -143,71 +143,71 @@ fn test_siphash_1_3() { #[test] #[allow(unused_must_use)] fn test_siphash_2_4() { - let vecs : [[u8; 8]; 64] = [ - [ 0x31, 0x0e, 0x0e, 0xdd, 0x47, 0xdb, 0x6f, 0x72, ], - [ 0xfd, 0x67, 0xdc, 0x93, 0xc5, 0x39, 0xf8, 0x74, ], - [ 0x5a, 0x4f, 0xa9, 0xd9, 0x09, 0x80, 0x6c, 0x0d, ], - [ 0x2d, 0x7e, 0xfb, 0xd7, 0x96, 0x66, 0x67, 0x85, ], - [ 0xb7, 0x87, 0x71, 0x27, 0xe0, 0x94, 0x27, 0xcf, ], - [ 0x8d, 0xa6, 0x99, 0xcd, 0x64, 0x55, 0x76, 0x18, ], - [ 0xce, 0xe3, 0xfe, 0x58, 0x6e, 0x46, 0xc9, 0xcb, ], - [ 0x37, 0xd1, 0x01, 0x8b, 0xf5, 0x00, 0x02, 0xab, ], - [ 0x62, 0x24, 0x93, 0x9a, 0x79, 0xf5, 0xf5, 0x93, ], - [ 0xb0, 0xe4, 0xa9, 0x0b, 0xdf, 0x82, 0x00, 0x9e, ], - [ 0xf3, 0xb9, 0xdd, 0x94, 0xc5, 0xbb, 0x5d, 0x7a, ], - [ 0xa7, 0xad, 0x6b, 0x22, 0x46, 0x2f, 0xb3, 0xf4, ], - [ 0xfb, 0xe5, 0x0e, 0x86, 0xbc, 0x8f, 0x1e, 0x75, ], - [ 0x90, 0x3d, 0x84, 0xc0, 0x27, 0x56, 0xea, 0x14, ], - [ 0xee, 0xf2, 0x7a, 0x8e, 0x90, 0xca, 0x23, 0xf7, ], - [ 0xe5, 0x45, 0xbe, 0x49, 0x61, 0xca, 0x29, 0xa1, ], - [ 0xdb, 0x9b, 0xc2, 0x57, 0x7f, 0xcc, 0x2a, 0x3f, ], - [ 0x94, 0x47, 0xbe, 0x2c, 0xf5, 0xe9, 0x9a, 0x69, ], - [ 0x9c, 0xd3, 0x8d, 0x96, 0xf0, 0xb3, 0xc1, 0x4b, ], - [ 0xbd, 0x61, 0x79, 0xa7, 0x1d, 0xc9, 0x6d, 0xbb, ], - [ 0x98, 0xee, 0xa2, 0x1a, 0xf2, 0x5c, 0xd6, 0xbe, ], - [ 0xc7, 0x67, 0x3b, 0x2e, 0xb0, 0xcb, 0xf2, 0xd0, ], - [ 0x88, 0x3e, 0xa3, 0xe3, 0x95, 0x67, 0x53, 0x93, ], - [ 0xc8, 0xce, 0x5c, 0xcd, 0x8c, 0x03, 0x0c, 0xa8, ], - [ 0x94, 0xaf, 0x49, 0xf6, 0xc6, 0x50, 0xad, 0xb8, ], - [ 0xea, 0xb8, 0x85, 0x8a, 0xde, 0x92, 0xe1, 0xbc, ], - [ 0xf3, 0x15, 0xbb, 0x5b, 0xb8, 0x35, 0xd8, 0x17, ], - [ 0xad, 0xcf, 0x6b, 0x07, 0x63, 0x61, 0x2e, 0x2f, ], - [ 0xa5, 0xc9, 0x1d, 0xa7, 0xac, 0xaa, 0x4d, 0xde, ], - [ 0x71, 0x65, 0x95, 0x87, 0x66, 0x50, 0xa2, 0xa6, ], - [ 0x28, 0xef, 0x49, 0x5c, 0x53, 0xa3, 0x87, 0xad, ], - [ 0x42, 0xc3, 0x41, 0xd8, 0xfa, 0x92, 0xd8, 0x32, ], - [ 0xce, 0x7c, 0xf2, 0x72, 0x2f, 0x51, 0x27, 0x71, ], - [ 0xe3, 0x78, 0x59, 0xf9, 0x46, 0x23, 0xf3, 0xa7, ], - [ 
0x38, 0x12, 0x05, 0xbb, 0x1a, 0xb0, 0xe0, 0x12, ], - [ 0xae, 0x97, 0xa1, 0x0f, 0xd4, 0x34, 0xe0, 0x15, ], - [ 0xb4, 0xa3, 0x15, 0x08, 0xbe, 0xff, 0x4d, 0x31, ], - [ 0x81, 0x39, 0x62, 0x29, 0xf0, 0x90, 0x79, 0x02, ], - [ 0x4d, 0x0c, 0xf4, 0x9e, 0xe5, 0xd4, 0xdc, 0xca, ], - [ 0x5c, 0x73, 0x33, 0x6a, 0x76, 0xd8, 0xbf, 0x9a, ], - [ 0xd0, 0xa7, 0x04, 0x53, 0x6b, 0xa9, 0x3e, 0x0e, ], - [ 0x92, 0x59, 0x58, 0xfc, 0xd6, 0x42, 0x0c, 0xad, ], - [ 0xa9, 0x15, 0xc2, 0x9b, 0xc8, 0x06, 0x73, 0x18, ], - [ 0x95, 0x2b, 0x79, 0xf3, 0xbc, 0x0a, 0xa6, 0xd4, ], - [ 0xf2, 0x1d, 0xf2, 0xe4, 0x1d, 0x45, 0x35, 0xf9, ], - [ 0x87, 0x57, 0x75, 0x19, 0x04, 0x8f, 0x53, 0xa9, ], - [ 0x10, 0xa5, 0x6c, 0xf5, 0xdf, 0xcd, 0x9a, 0xdb, ], - [ 0xeb, 0x75, 0x09, 0x5c, 0xcd, 0x98, 0x6c, 0xd0, ], - [ 0x51, 0xa9, 0xcb, 0x9e, 0xcb, 0xa3, 0x12, 0xe6, ], - [ 0x96, 0xaf, 0xad, 0xfc, 0x2c, 0xe6, 0x66, 0xc7, ], - [ 0x72, 0xfe, 0x52, 0x97, 0x5a, 0x43, 0x64, 0xee, ], - [ 0x5a, 0x16, 0x45, 0xb2, 0x76, 0xd5, 0x92, 0xa1, ], - [ 0xb2, 0x74, 0xcb, 0x8e, 0xbf, 0x87, 0x87, 0x0a, ], - [ 0x6f, 0x9b, 0xb4, 0x20, 0x3d, 0xe7, 0xb3, 0x81, ], - [ 0xea, 0xec, 0xb2, 0xa3, 0x0b, 0x22, 0xa8, 0x7f, ], - [ 0x99, 0x24, 0xa4, 0x3c, 0xc1, 0x31, 0x57, 0x24, ], - [ 0xbd, 0x83, 0x8d, 0x3a, 0xaf, 0xbf, 0x8d, 0xb7, ], - [ 0x0b, 0x1a, 0x2a, 0x32, 0x65, 0xd5, 0x1a, 0xea, ], - [ 0x13, 0x50, 0x79, 0xa3, 0x23, 0x1c, 0xe6, 0x60, ], - [ 0x93, 0x2b, 0x28, 0x46, 0xe4, 0xd7, 0x06, 0x66, ], - [ 0xe1, 0x91, 0x5f, 0x5c, 0xb1, 0xec, 0xa4, 0x6c, ], - [ 0xf3, 0x25, 0x96, 0x5c, 0xa1, 0x6d, 0x62, 0x9f, ], - [ 0x57, 0x5f, 0xf2, 0x8e, 0x60, 0x38, 0x1b, 0xe5, ], - [ 0x72, 0x45, 0x06, 0xeb, 0x4c, 0x32, 0x8a, 0x95, ] + let vecs: [[u8; 8]; 64] = [ + [0x31, 0x0e, 0x0e, 0xdd, 0x47, 0xdb, 0x6f, 0x72], + [0xfd, 0x67, 0xdc, 0x93, 0xc5, 0x39, 0xf8, 0x74], + [0x5a, 0x4f, 0xa9, 0xd9, 0x09, 0x80, 0x6c, 0x0d], + [0x2d, 0x7e, 0xfb, 0xd7, 0x96, 0x66, 0x67, 0x85], + [0xb7, 0x87, 0x71, 0x27, 0xe0, 0x94, 0x27, 0xcf], + [0x8d, 0xa6, 0x99, 0xcd, 0x64, 0x55, 0x76, 0x18], + [0xce, 0xe3, 0xfe, 0x58, 0x6e, 0x46, 0xc9, 0xcb], + [0x37, 0xd1, 0x01, 0x8b, 0xf5, 0x00, 0x02, 0xab], + [0x62, 0x24, 0x93, 0x9a, 0x79, 0xf5, 0xf5, 0x93], + [0xb0, 0xe4, 0xa9, 0x0b, 0xdf, 0x82, 0x00, 0x9e], + [0xf3, 0xb9, 0xdd, 0x94, 0xc5, 0xbb, 0x5d, 0x7a], + [0xa7, 0xad, 0x6b, 0x22, 0x46, 0x2f, 0xb3, 0xf4], + [0xfb, 0xe5, 0x0e, 0x86, 0xbc, 0x8f, 0x1e, 0x75], + [0x90, 0x3d, 0x84, 0xc0, 0x27, 0x56, 0xea, 0x14], + [0xee, 0xf2, 0x7a, 0x8e, 0x90, 0xca, 0x23, 0xf7], + [0xe5, 0x45, 0xbe, 0x49, 0x61, 0xca, 0x29, 0xa1], + [0xdb, 0x9b, 0xc2, 0x57, 0x7f, 0xcc, 0x2a, 0x3f], + [0x94, 0x47, 0xbe, 0x2c, 0xf5, 0xe9, 0x9a, 0x69], + [0x9c, 0xd3, 0x8d, 0x96, 0xf0, 0xb3, 0xc1, 0x4b], + [0xbd, 0x61, 0x79, 0xa7, 0x1d, 0xc9, 0x6d, 0xbb], + [0x98, 0xee, 0xa2, 0x1a, 0xf2, 0x5c, 0xd6, 0xbe], + [0xc7, 0x67, 0x3b, 0x2e, 0xb0, 0xcb, 0xf2, 0xd0], + [0x88, 0x3e, 0xa3, 0xe3, 0x95, 0x67, 0x53, 0x93], + [0xc8, 0xce, 0x5c, 0xcd, 0x8c, 0x03, 0x0c, 0xa8], + [0x94, 0xaf, 0x49, 0xf6, 0xc6, 0x50, 0xad, 0xb8], + [0xea, 0xb8, 0x85, 0x8a, 0xde, 0x92, 0xe1, 0xbc], + [0xf3, 0x15, 0xbb, 0x5b, 0xb8, 0x35, 0xd8, 0x17], + [0xad, 0xcf, 0x6b, 0x07, 0x63, 0x61, 0x2e, 0x2f], + [0xa5, 0xc9, 0x1d, 0xa7, 0xac, 0xaa, 0x4d, 0xde], + [0x71, 0x65, 0x95, 0x87, 0x66, 0x50, 0xa2, 0xa6], + [0x28, 0xef, 0x49, 0x5c, 0x53, 0xa3, 0x87, 0xad], + [0x42, 0xc3, 0x41, 0xd8, 0xfa, 0x92, 0xd8, 0x32], + [0xce, 0x7c, 0xf2, 0x72, 0x2f, 0x51, 0x27, 0x71], + [0xe3, 0x78, 0x59, 0xf9, 0x46, 0x23, 0xf3, 0xa7], + [0x38, 0x12, 0x05, 0xbb, 0x1a, 0xb0, 0xe0, 0x12], + [0xae, 0x97, 0xa1, 0x0f, 0xd4, 0x34, 0xe0, 0x15], + 
[0xb4, 0xa3, 0x15, 0x08, 0xbe, 0xff, 0x4d, 0x31], + [0x81, 0x39, 0x62, 0x29, 0xf0, 0x90, 0x79, 0x02], + [0x4d, 0x0c, 0xf4, 0x9e, 0xe5, 0xd4, 0xdc, 0xca], + [0x5c, 0x73, 0x33, 0x6a, 0x76, 0xd8, 0xbf, 0x9a], + [0xd0, 0xa7, 0x04, 0x53, 0x6b, 0xa9, 0x3e, 0x0e], + [0x92, 0x59, 0x58, 0xfc, 0xd6, 0x42, 0x0c, 0xad], + [0xa9, 0x15, 0xc2, 0x9b, 0xc8, 0x06, 0x73, 0x18], + [0x95, 0x2b, 0x79, 0xf3, 0xbc, 0x0a, 0xa6, 0xd4], + [0xf2, 0x1d, 0xf2, 0xe4, 0x1d, 0x45, 0x35, 0xf9], + [0x87, 0x57, 0x75, 0x19, 0x04, 0x8f, 0x53, 0xa9], + [0x10, 0xa5, 0x6c, 0xf5, 0xdf, 0xcd, 0x9a, 0xdb], + [0xeb, 0x75, 0x09, 0x5c, 0xcd, 0x98, 0x6c, 0xd0], + [0x51, 0xa9, 0xcb, 0x9e, 0xcb, 0xa3, 0x12, 0xe6], + [0x96, 0xaf, 0xad, 0xfc, 0x2c, 0xe6, 0x66, 0xc7], + [0x72, 0xfe, 0x52, 0x97, 0x5a, 0x43, 0x64, 0xee], + [0x5a, 0x16, 0x45, 0xb2, 0x76, 0xd5, 0x92, 0xa1], + [0xb2, 0x74, 0xcb, 0x8e, 0xbf, 0x87, 0x87, 0x0a], + [0x6f, 0x9b, 0xb4, 0x20, 0x3d, 0xe7, 0xb3, 0x81], + [0xea, 0xec, 0xb2, 0xa3, 0x0b, 0x22, 0xa8, 0x7f], + [0x99, 0x24, 0xa4, 0x3c, 0xc1, 0x31, 0x57, 0x24], + [0xbd, 0x83, 0x8d, 0x3a, 0xaf, 0xbf, 0x8d, 0xb7], + [0x0b, 0x1a, 0x2a, 0x32, 0x65, 0xd5, 0x1a, 0xea], + [0x13, 0x50, 0x79, 0xa3, 0x23, 0x1c, 0xe6, 0x60], + [0x93, 0x2b, 0x28, 0x46, 0xe4, 0xd7, 0x06, 0x66], + [0xe1, 0x91, 0x5f, 0x5c, 0xb1, 0xec, 0xa4, 0x6c], + [0xf3, 0x25, 0x96, 0x5c, 0xa1, 0x6d, 0x62, 0x9f], + [0x57, 0x5f, 0xf2, 0x8e, 0x60, 0x38, 0x1b, 0xe5], + [0x72, 0x45, 0x06, 0xeb, 0x4c, 0x32, 0x8a, 0x95], ]; let k0 = 0x_07_06_05_04_03_02_01_00; @@ -320,8 +320,10 @@ fn test_write_short_works() { h1.write_u8(0x01u8); let mut h2 = SipHasher::new(); h2.write(unsafe { - slice::from_raw_parts(&test_usize as *const _ as *const u8, - mem::size_of::()) + slice::from_raw_parts( + &test_usize as *const _ as *const u8, + mem::size_of::(), + ) }); h2.write(b"bytes"); h2.write(b"string"); diff --git a/src/libcore/tests/intrinsics.rs b/src/libcore/tests/intrinsics.rs index 7544c13dee4bf..fed7c4a5bf399 100644 --- a/src/libcore/tests/intrinsics.rs +++ b/src/libcore/tests/intrinsics.rs @@ -2,7 +2,8 @@ use core::any::TypeId; #[test] fn test_typeid_sized_types() { - struct X; struct Y(u32); + struct X; + struct Y(u32); assert_eq!(TypeId::of::(), TypeId::of::()); assert_eq!(TypeId::of::(), TypeId::of::()); @@ -12,7 +13,8 @@ fn test_typeid_sized_types() { #[test] fn test_typeid_unsized_types() { trait Z {} - struct X(str); struct Y(dyn Z + 'static); + struct X(str); + struct Y(dyn Z + 'static); assert_eq!(TypeId::of::(), TypeId::of::()); assert_eq!(TypeId::of::(), TypeId::of::()); diff --git a/src/libcore/tests/iter.rs b/src/libcore/tests/iter.rs index cf19851c17b35..a5562a7c8cda2 100644 --- a/src/libcore/tests/iter.rs +++ b/src/libcore/tests/iter.rs @@ -1,56 +1,56 @@ use core::iter::*; -use core::{i8, i16, isize}; use core::usize; +use core::{i16, i8, isize}; #[test] fn test_lt() { let empty: [isize; 0] = []; - let xs = [1,2,3]; - let ys = [1,2,0]; + let xs = [1, 2, 3]; + let ys = [1, 2, 0]; assert!(!xs.iter().lt(ys.iter())); assert!(!xs.iter().le(ys.iter())); - assert!( xs.iter().gt(ys.iter())); - assert!( xs.iter().ge(ys.iter())); + assert!(xs.iter().gt(ys.iter())); + assert!(xs.iter().ge(ys.iter())); - assert!( ys.iter().lt(xs.iter())); - assert!( ys.iter().le(xs.iter())); + assert!(ys.iter().lt(xs.iter())); + assert!(ys.iter().le(xs.iter())); assert!(!ys.iter().gt(xs.iter())); assert!(!ys.iter().ge(xs.iter())); - assert!( empty.iter().lt(xs.iter())); - assert!( empty.iter().le(xs.iter())); + assert!(empty.iter().lt(xs.iter())); + assert!(empty.iter().le(xs.iter())); 
assert!(!empty.iter().gt(xs.iter())); assert!(!empty.iter().ge(xs.iter())); // Sequence with NaN let u = [1.0f64, 2.0]; - let v = [0.0f64/0.0, 3.0]; + let v = [0.0f64 / 0.0, 3.0]; assert!(!u.iter().lt(v.iter())); assert!(!u.iter().le(v.iter())); assert!(!u.iter().gt(v.iter())); assert!(!u.iter().ge(v.iter())); - let a = [0.0f64/0.0]; + let a = [0.0f64 / 0.0]; let b = [1.0f64]; let c = [2.0f64]; - assert!(a.iter().lt(b.iter()) == (a[0] < b[0])); + assert!(a.iter().lt(b.iter()) == (a[0] < b[0])); assert!(a.iter().le(b.iter()) == (a[0] <= b[0])); - assert!(a.iter().gt(b.iter()) == (a[0] > b[0])); + assert!(a.iter().gt(b.iter()) == (a[0] > b[0])); assert!(a.iter().ge(b.iter()) == (a[0] >= b[0])); - assert!(c.iter().lt(b.iter()) == (c[0] < b[0])); + assert!(c.iter().lt(b.iter()) == (c[0] < b[0])); assert!(c.iter().le(b.iter()) == (c[0] <= b[0])); - assert!(c.iter().gt(b.iter()) == (c[0] > b[0])); + assert!(c.iter().gt(b.iter()) == (c[0] > b[0])); assert!(c.iter().ge(b.iter()) == (c[0] >= b[0])); } #[test] fn test_multi_iter() { - let xs = [1,2,3,4]; - let ys = [4,3,2,1]; + let xs = [1, 2, 3, 4]; + let ys = [4, 3, 2, 1]; assert!(xs.iter().eq(ys.iter().rev())); assert!(xs.iter().lt(xs.iter().skip(2))); } @@ -155,7 +155,9 @@ fn test_zip_nth() { fn test_zip_nth_side_effects() { let mut a = Vec::new(); let mut b = Vec::new(); - let value = [1, 2, 3, 4, 5, 6].iter().cloned() + let value = [1, 2, 3, 4, 5, 6] + .iter() + .cloned() .map(|n| { a.push(n); n * 10 @@ -221,7 +223,9 @@ fn test_iterator_step_by_nth_overflow() { struct Test(Bigger); impl Iterator for &mut Test { type Item = i32; - fn next(&mut self) -> Option { Some(21) } + fn next(&mut self) -> Option { + Some(21) + } fn nth(&mut self, n: usize) -> Option { self.0 += n as Bigger + 1; Some(42) @@ -304,7 +308,7 @@ fn test_iterator_step_by_size_hint() { let mut it = StubSizeHint(usize::MAX, None).step_by(1); assert_eq!(it.size_hint(), (usize::MAX, None)); it.next(); - assert_eq!(it.size_hint(), (usize::MAX-1, None)); + assert_eq!(it.size_hint(), (usize::MAX - 1, None)); // still infinite with larger step let mut it = StubSizeHint(7, None).step_by(3); @@ -313,18 +317,24 @@ fn test_iterator_step_by_size_hint() { assert_eq!(it.size_hint(), (2, None)); // propagates ExactSizeIterator - let a = [1,2,3,4,5]; + let a = [1, 2, 3, 4, 5]; let it = a.iter().step_by(2); assert_eq!(it.len(), 3); // Cannot be TrustedLen as a step greater than one makes an iterator // with (usize::MAX, None) no longer meet the safety requirements - trait TrustedLenCheck { fn test(self) -> bool; } - impl TrustedLenCheck for T { - default fn test(self) -> bool { false } + trait TrustedLenCheck { + fn test(self) -> bool; } - impl TrustedLenCheck for T { - fn test(self) -> bool { true } + impl TrustedLenCheck for T { + default fn test(self) -> bool { + false + } + } + impl TrustedLenCheck for T { + fn test(self) -> bool { + true + } } assert!(TrustedLenCheck::test(a.iter())); assert!(!TrustedLenCheck::test(a.iter().step_by(1))); @@ -332,23 +342,32 @@ fn test_iterator_step_by_size_hint() { #[test] fn test_filter_map() { - let it = (0..).step_by(1).take(10) - .filter_map(|x| if x % 2 == 0 { Some(x*x) } else { None }); - assert_eq!(it.collect::>(), [0*0, 2*2, 4*4, 6*6, 8*8]); + let it = (0..) 
+ .step_by(1) + .take(10) + .filter_map(|x| if x % 2 == 0 { Some(x * x) } else { None }); + assert_eq!( + it.collect::>(), + [0 * 0, 2 * 2, 4 * 4, 6 * 6, 8 * 8] + ); } #[test] fn test_filter_map_fold() { let xs = [0, 1, 2, 3, 4, 5, 6, 7, 8]; - let ys = [0*0, 2*2, 4*4, 6*6, 8*8]; - let it = xs.iter().filter_map(|&x| if x % 2 == 0 { Some(x*x) } else { None }); + let ys = [0 * 0, 2 * 2, 4 * 4, 6 * 6, 8 * 8]; + let it = xs + .iter() + .filter_map(|&x| if x % 2 == 0 { Some(x * x) } else { None }); let i = it.fold(0, |i, x| { assert_eq!(x, ys[i]); i + 1 }); assert_eq!(i, ys.len()); - let it = xs.iter().filter_map(|&x| if x % 2 == 0 { Some(x*x) } else { None }); + let it = xs + .iter() + .filter_map(|&x| if x % 2 == 0 { Some(x * x) } else { None }); let i = it.rfold(ys.len(), |i, x| { assert_eq!(x, ys[i - 1]); i - 1 @@ -496,7 +515,6 @@ fn test_iterator_peekable_count() { let mut it = zs.iter().peekable(); assert_eq!(it.peek(), None); - } #[test] @@ -553,10 +571,7 @@ pub struct CycleIter<'a, T: 'a> { } pub fn cycle(data: &[T]) -> CycleIter { - CycleIter { - index: 0, - data, - } + CycleIter { index: 0, data } } impl<'a, T> Iterator for CycleIter<'a, T> { @@ -580,7 +595,9 @@ fn test_iterator_peekable_remember_peek_none_1() { let is_the_last = iter.peek().is_none(); assert_eq!(is_the_last, n == data.len() - 1); n += 1; - if n > data.len() { break; } + if n > data.len() { + break; + } } assert_eq!(n, data.len()); } @@ -662,7 +679,7 @@ fn test_iterator_skip() { while let Some(&x) = it.next() { assert_eq!(x, ys[i]); i += 1; - assert_eq!(it.len(), xs.len()-5-i); + assert_eq!(it.len(), xs.len() - 5 - i); } assert_eq!(i, ys.len()); assert_eq!(it.len(), 0); @@ -720,7 +737,6 @@ fn test_iterator_skip_nth() { let mut it = xs.iter().skip(12); assert_eq!(it.nth(0), None); - } #[test] @@ -785,7 +801,6 @@ fn test_iterator_skip_fold() { i }); assert_eq!(i, 1); - } #[test] @@ -798,7 +813,7 @@ fn test_iterator_take() { while let Some(&x) = it.next() { assert_eq!(x, ys[i]); i += 1; - assert_eq!(it.len(), 5-i); + assert_eq!(it.len(), 5 - i); } assert_eq!(i, ys.len()); assert_eq!(it.len(), 0); @@ -838,7 +853,7 @@ fn test_iterator_take_short() { while let Some(&x) = it.next() { assert_eq!(x, ys[i]); i += 1; - assert_eq!(it.len(), 4-i); + assert_eq!(it.len(), 4 - i); } assert_eq!(i, ys.len()); assert_eq!(it.len(), 0); @@ -882,7 +897,7 @@ fn test_iterator_flat_map() { fn test_iterator_flat_map_fold() { let xs = [0, 3, 6]; let ys = [1, 2, 3, 4, 5, 6, 7]; - let mut it = xs.iter().flat_map(|&x| x..x+3); + let mut it = xs.iter().flat_map(|&x| x..x + 3); assert_eq!(it.next(), Some(0)); assert_eq!(it.next_back(), Some(8)); let i = it.fold(0, |i, x| { @@ -891,7 +906,7 @@ fn test_iterator_flat_map_fold() { }); assert_eq!(i, ys.len()); - let mut it = xs.iter().flat_map(|&x| x..x+3); + let mut it = xs.iter().flat_map(|&x| x..x + 3); assert_eq!(it.next(), Some(0)); assert_eq!(it.next_back(), Some(8)); let i = it.rfold(ys.len(), |i, x| { @@ -920,7 +935,7 @@ fn test_iterator_flatten() { fn test_iterator_flatten_fold() { let xs = [0, 3, 6]; let ys = [1, 2, 3, 4, 5, 6, 7]; - let mut it = xs.iter().map(|&x| x..x+3).flatten(); + let mut it = xs.iter().map(|&x| x..x + 3).flatten(); assert_eq!(it.next(), Some(0)); assert_eq!(it.next_back(), Some(8)); let i = it.fold(0, |i, x| { @@ -929,7 +944,7 @@ fn test_iterator_flatten_fold() { }); assert_eq!(i, ys.len()); - let mut it = xs.iter().map(|&x| x..x+3).flatten(); + let mut it = xs.iter().map(|&x| x..x + 3).flatten(); assert_eq!(it.next(), Some(0)); assert_eq!(it.next_back(), Some(8)); let i = 
it.rfold(ys.len(), |i, x| { @@ -944,10 +959,11 @@ fn test_inspect() { let xs = [1, 2, 3, 4]; let mut n = 0; - let ys = xs.iter() - .cloned() - .inspect(|_| n += 1) - .collect::>(); + let ys = xs + .iter() + .cloned() + .inspect(|_| n += 1) + .collect::>(); assert_eq!(n, xs.len()); assert_eq!(&xs[..], &ys[..]); @@ -1112,9 +1128,12 @@ fn test_iterator_size_hint() { assert_eq!(c.clone().take_while(|_| false).size_hint(), (0, None)); assert_eq!(c.clone().skip_while(|_| false).size_hint(), (0, None)); assert_eq!(c.clone().enumerate().size_hint(), (usize::MAX, None)); - assert_eq!(c.clone().chain(vi.clone().cloned()).size_hint(), (usize::MAX, None)); + assert_eq!( + c.clone().chain(vi.clone().cloned()).size_hint(), + (usize::MAX, None) + ); assert_eq!(c.clone().zip(vi.clone()).size_hint(), (10, Some(10))); - assert_eq!(c.clone().scan(0, |_,_| Some(0)).size_hint(), (0, None)); + assert_eq!(c.clone().scan(0, |_, _| Some(0)).size_hint(), (0, None)); assert_eq!(c.clone().filter(|_| false).size_hint(), (0, None)); assert_eq!(c.clone().map(|_| 0).size_hint(), (usize::MAX, None)); assert_eq!(c.filter_map(|_| Some(0)).size_hint(), (0, None)); @@ -1128,9 +1147,12 @@ fn test_iterator_size_hint() { assert_eq!(vi.clone().enumerate().size_hint(), (10, Some(10))); assert_eq!(vi.clone().chain(v2).size_hint(), (13, Some(13))); assert_eq!(vi.clone().zip(v2).size_hint(), (3, Some(3))); - assert_eq!(vi.clone().scan(0, |_,_| Some(0)).size_hint(), (0, Some(10))); + assert_eq!( + vi.clone().scan(0, |_, _| Some(0)).size_hint(), + (0, Some(10)) + ); assert_eq!(vi.clone().filter(|_| false).size_hint(), (0, Some(10))); - assert_eq!(vi.clone().map(|&i| i+1).size_hint(), (10, Some(10))); + assert_eq!(vi.clone().map(|&i| i + 1).size_hint(), (10, Some(10))); assert_eq!(vi.filter_map(|_| Some(0)).size_hint(), (0, Some(10))); } @@ -1219,7 +1241,10 @@ fn test_max_by_key() { #[test] fn test_max_by() { let xs: &[isize] = &[-3, 0, 1, 5, -10]; - assert_eq!(*xs.iter().max_by(|x, y| x.abs().cmp(&y.abs())).unwrap(), -10); + assert_eq!( + *xs.iter().max_by(|x, y| x.abs().cmp(&y.abs())).unwrap(), + -10 + ); } #[test] @@ -1249,8 +1274,7 @@ fn test_rev() { let mut it = xs.iter(); it.next(); it.next(); - assert!(it.rev().cloned().collect::>() == - vec![16, 14, 12, 10, 8, 6]); + assert!(it.rev().cloned().collect::>() == vec![16, 14, 12, 10, 8, 6]); } #[test] @@ -1357,7 +1381,9 @@ fn test_double_ended_filter() { #[test] fn test_double_ended_filter_map() { let xs = [1, 2, 3, 4, 5, 6]; - let mut it = xs.iter().filter_map(|&x| if x & 1 == 0 { Some(x * 2) } else { None }); + let mut it = xs + .iter() + .filter_map(|&x| if x & 1 == 0 { Some(x * 2) } else { None }); assert_eq!(it.next_back().unwrap(), 12); assert_eq!(it.next_back().unwrap(), 8); assert_eq!(it.next().unwrap(), 4); @@ -1379,14 +1405,22 @@ fn test_double_ended_chain() { assert_eq!(it.next_back().unwrap(), &7); assert_eq!(it.next_back(), None); - // test that .chain() is well behaved with an unfused iterator struct CrazyIterator(bool); - impl CrazyIterator { fn new() -> CrazyIterator { CrazyIterator(false) } } + impl CrazyIterator { + fn new() -> CrazyIterator { + CrazyIterator(false) + } + } impl Iterator for CrazyIterator { type Item = i32; fn next(&mut self) -> Option { - if self.0 { Some(99) } else { self.0 = true; None } + if self.0 { + Some(99) + } else { + self.0 = true; + None + } } } @@ -1402,8 +1436,14 @@ fn test_double_ended_chain() { #[test] fn test_rposition() { - fn f(xy: &(isize, char)) -> bool { let (_x, y) = *xy; y == 'b' } - fn g(xy: &(isize, char)) -> bool { let 
(_x, y) = *xy; y == 'd' } + fn f(xy: &(isize, char)) -> bool { + let (_x, y) = *xy; + y == 'b' + } + fn g(xy: &(isize, char)) -> bool { + let (_x, y) = *xy; + y == 'd' + } let v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')]; assert_eq!(v.iter().rposition(f), Some(3)); @@ -1413,9 +1453,12 @@ fn test_rposition() { #[test] #[should_panic] fn test_rposition_panic() { - let v: [(Box<_>, Box<_>); 4] = - [(box 0, box 0), (box 0, box 0), - (box 0, box 0), (box 0, box 0)]; + let v: [(Box<_>, Box<_>); 4] = [ + (box 0, box 0), + (box 0, box 0), + (box 0, box 0), + (box 0, box 0), + ]; let mut i = 0; v.iter().rposition(|_elt| { if i == 2 { @@ -1426,38 +1469,37 @@ fn test_rposition_panic() { }); } - #[test] fn test_double_ended_flat_map() { - let u = [0,1]; - let v = [5,6,7,8]; + let u = [0, 1]; + let v = [5, 6, 7, 8]; let mut it = u.iter().flat_map(|x| &v[*x..v.len()]); assert_eq!(it.next_back().unwrap(), &8); - assert_eq!(it.next().unwrap(), &5); + assert_eq!(it.next().unwrap(), &5); assert_eq!(it.next_back().unwrap(), &7); assert_eq!(it.next_back().unwrap(), &6); assert_eq!(it.next_back().unwrap(), &8); - assert_eq!(it.next().unwrap(), &6); + assert_eq!(it.next().unwrap(), &6); assert_eq!(it.next_back().unwrap(), &7); assert_eq!(it.next_back(), None); - assert_eq!(it.next(), None); + assert_eq!(it.next(), None); assert_eq!(it.next_back(), None); } #[test] fn test_double_ended_flatten() { - let u = [0,1]; - let v = [5,6,7,8]; + let u = [0, 1]; + let v = [5, 6, 7, 8]; let mut it = u.iter().map(|x| &v[*x..v.len()]).flatten(); assert_eq!(it.next_back().unwrap(), &8); - assert_eq!(it.next().unwrap(), &5); + assert_eq!(it.next().unwrap(), &5); assert_eq!(it.next_back().unwrap(), &7); assert_eq!(it.next_back().unwrap(), &6); assert_eq!(it.next_back().unwrap(), &8); - assert_eq!(it.next().unwrap(), &6); + assert_eq!(it.next().unwrap(), &6); assert_eq!(it.next_back().unwrap(), &7); assert_eq!(it.next_back(), None); - assert_eq!(it.next(), None); + assert_eq!(it.next(), None); assert_eq!(it.next_back(), None); } @@ -1477,7 +1519,10 @@ fn test_double_ended_range() { #[test] fn test_range() { assert_eq!((0..5).collect::>(), [0, 1, 2, 3, 4]); - assert_eq!((-10..-1).collect::>(), [-10, -9, -8, -7, -6, -5, -4, -3, -2]); + assert_eq!( + (-10..-1).collect::>(), + [-10, -9, -8, -7, -6, -5, -4, -3, -2] + ); assert_eq!((0..5).rev().collect::>(), [4, 3, 2, 1, 0]); assert_eq!((200..-5).count(), 0); assert_eq!((200..-5).rev().count(), 0); @@ -1492,8 +1537,10 @@ fn test_range() { assert_eq!((-70..58).size_hint(), (128, Some(128))); assert_eq!((-128..127).size_hint(), (255, Some(255))); - assert_eq!((-2..isize::MAX).size_hint(), - (isize::MAX as usize + 2, Some(isize::MAX as usize + 2))); + assert_eq!( + (-2..isize::MAX).size_hint(), + (isize::MAX as usize + 2, Some(isize::MAX as usize + 2)) + ); } #[test] @@ -1622,7 +1669,10 @@ fn test_range_from_take() { is_trusted_len((0..).take(3)); assert_eq!((0..).take(3).size_hint(), (3, Some(3))); assert_eq!((0..).take(0).size_hint(), (0, Some(0))); - assert_eq!((0..).take(usize::MAX).size_hint(), (usize::MAX, Some(usize::MAX))); + assert_eq!( + (0..).take(usize::MAX).size_hint(), + (usize::MAX, Some(usize::MAX)) + ); } #[test] @@ -1655,8 +1705,14 @@ fn test_range_step() { #![allow(deprecated)] assert_eq!((0..20).step_by(5).collect::>(), [0, 5, 10, 15]); - assert_eq!((1..21).rev().step_by(5).collect::>(), [20, 15, 10, 5]); - assert_eq!((1..21).rev().step_by(6).collect::>(), [20, 14, 8, 2]); + assert_eq!( + (1..21).rev().step_by(5).collect::>(), + [20, 15, 10, 5] + ); + assert_eq!( + 
(1..21).rev().step_by(6).collect::>(), + [20, 14, 8, 2] + ); assert_eq!((200..255).step_by(50).collect::>(), [200, 250]); assert_eq!((200..-5).step_by(1).collect::>(), []); assert_eq!((200..200).step_by(1).collect::>(), []); @@ -1668,24 +1724,47 @@ fn test_range_step() { assert_eq!((1..21).rev().step_by(6).size_hint(), (4, Some(4))); assert_eq!((20..-5).step_by(1).size_hint(), (0, Some(0))); assert_eq!((20..20).step_by(1).size_hint(), (0, Some(0))); - assert_eq!((i8::MIN..i8::MAX).step_by(-(i8::MIN as i32) as usize).size_hint(), (2, Some(2))); - assert_eq!((i16::MIN..i16::MAX).step_by(i16::MAX as usize).size_hint(), (3, Some(3))); - assert_eq!((isize::MIN..isize::MAX).step_by(1).size_hint(), (usize::MAX, Some(usize::MAX))); + assert_eq!( + (i8::MIN..i8::MAX) + .step_by(-(i8::MIN as i32) as usize) + .size_hint(), + (2, Some(2)) + ); + assert_eq!( + (i16::MIN..i16::MAX).step_by(i16::MAX as usize).size_hint(), + (3, Some(3)) + ); + assert_eq!( + (isize::MIN..isize::MAX).step_by(1).size_hint(), + (usize::MAX, Some(usize::MAX)) + ); } #[test] fn test_step_by_skip() { - assert_eq!((0..640).step_by(128).skip(1).collect::>(), [128, 256, 384, 512]); + assert_eq!( + (0..640).step_by(128).skip(1).collect::>(), + [128, 256, 384, 512] + ); assert_eq!((0..=50).step_by(10).nth(3), Some(30)); assert_eq!((200..=255u8).step_by(10).nth(3), Some(230)); } #[test] fn test_range_inclusive_step() { - assert_eq!((0..=50).step_by(10).collect::>(), [0, 10, 20, 30, 40, 50]); + assert_eq!( + (0..=50).step_by(10).collect::>(), + [0, 10, 20, 30, 40, 50] + ); assert_eq!((0..=5).step_by(1).collect::>(), [0, 1, 2, 3, 4, 5]); - assert_eq!((200..=255u8).step_by(10).collect::>(), [200, 210, 220, 230, 240, 250]); - assert_eq!((250..=255u8).step_by(1).collect::>(), [250, 251, 252, 253, 254, 255]); + assert_eq!( + (200..=255u8).step_by(10).collect::>(), + [200, 210, 220, 230, 240, 250] + ); + assert_eq!( + (250..=255u8).step_by(1).collect::>(), + [250, 251, 252, 253, 254, 255] + ); } #[test] @@ -1745,11 +1824,11 @@ fn test_range_inclusive_folds() { assert_eq!(it, 44..=47); let mut it = 10..=20; - assert_eq!(it.try_fold(0, |a,b| Some(a+b)), Some(165)); + assert_eq!(it.try_fold(0, |a, b| Some(a + b)), Some(165)); assert!(it.is_empty()); let mut it = 10..=20; - assert_eq!(it.try_rfold(0, |a,b| Some(a+b)), Some(165)); + assert_eq!(it.try_rfold(0, |a, b| Some(a + b)), Some(165)); assert!(it.is_empty()); } @@ -1772,7 +1851,10 @@ fn test_repeat_take() { is_trusted_len(repeat(42).take(3)); assert_eq!(repeat(42).take(3).size_hint(), (3, Some(3))); assert_eq!(repeat(42).take(0).size_hint(), (0, Some(0))); - assert_eq!(repeat(42).take(usize::MAX).size_hint(), (usize::MAX, Some(usize::MAX))); + assert_eq!( + repeat(42).take(usize::MAX).size_hint(), + (usize::MAX, Some(usize::MAX)) + ); } #[test] @@ -1802,22 +1884,32 @@ fn test_repeat_with_take() { is_trusted_len(repeat_with(|| 42).take(3)); assert_eq!(repeat_with(|| 42).take(3).size_hint(), (3, Some(3))); assert_eq!(repeat_with(|| 42).take(0).size_hint(), (0, Some(0))); - assert_eq!(repeat_with(|| 42).take(usize::MAX).size_hint(), - (usize::MAX, Some(usize::MAX))); + assert_eq!( + repeat_with(|| 42).take(usize::MAX).size_hint(), + (usize::MAX, Some(usize::MAX)) + ); } #[test] fn test_repeat_with_take_collect() { let mut curr = 1; - let v: Vec<_> = repeat_with(|| { let tmp = curr; curr *= 2; tmp }) - .take(5).collect(); + let v: Vec<_> = repeat_with(|| { + let tmp = curr; + curr *= 2; + tmp + }) + .take(5) + .collect(); assert_eq!(v, vec![1, 2, 4, 8, 16]); } #[test] fn test_successors() { 
let mut powers_of_10 = successors(Some(1_u16), |n| n.checked_mul(10)); - assert_eq!(powers_of_10.by_ref().collect::<Vec<_>>(), &[1, 10, 100, 1_000, 10_000]); + assert_eq!( + powers_of_10.by_ref().collect::<Vec<_>>(), + &[1, 10, 100, 1_000, 10_000] + ); assert_eq!(powers_of_10.next(), None); let mut empty = successors(None::<u32>, |_| unimplemented!()); @@ -1965,7 +2057,7 @@ fn test_step_replace_no_between() { #[test] fn test_rev_try_folds() { - let f = &|acc, x| i32::checked_add(2*acc, x); + let f = &|acc, x| i32::checked_add(2 * acc, x); assert_eq!((1..10).rev().try_fold(7, f), (1..10).try_rfold(7, f)); assert_eq!((1..10).rev().try_rfold(7, f), (1..10).try_fold(7, f)); @@ -1981,10 +2073,16 @@ fn test_rev_try_folds() { #[test] fn test_cloned_try_folds() { let a = [1, 2, 3, 4, 5, 6, 7, 8, 9]; - let f = &|acc, x| i32::checked_add(2*acc, x); - let f_ref = &|acc, &x| i32::checked_add(2*acc, x); - assert_eq!(a.iter().cloned().try_fold(7, f), a.iter().try_fold(7, f_ref)); - assert_eq!(a.iter().cloned().try_rfold(7, f), a.iter().try_rfold(7, f_ref)); + let f = &|acc, x| i32::checked_add(2 * acc, x); + let f_ref = &|acc, &x| i32::checked_add(2 * acc, x); + assert_eq!( + a.iter().cloned().try_fold(7, f), + a.iter().try_fold(7, f_ref) + ); + assert_eq!( + a.iter().cloned().try_rfold(7, f), + a.iter().try_rfold(7, f_ref) + ); let a = [10, 20, 30, 40, 100, 60, 70, 80, 90]; let mut iter = a.iter().cloned(); @@ -1999,7 +2097,7 @@ fn test_cloned_try_folds() { fn test_chain_try_folds() { let c = || (0..10).chain(10..20); - let f = &|acc, x| i32::checked_add(2*acc, x); + let f = &|acc, x| i32::checked_add(2 * acc, x); assert_eq!(c().try_fold(7, f), (0..20).try_fold(7, f)); assert_eq!(c().try_rfold(7, f), (0..20).rev().try_fold(7, f)); @@ -2008,14 +2106,17 @@ fn test_chain_try_folds() { assert_eq!(iter.next(), Some(6), "stopped in front, state Both"); assert_eq!(iter.position(|x| x == 13), Some(6)); assert_eq!(iter.next(), Some(14), "stopped in back, state Back"); - assert_eq!(iter.try_fold(0, |acc, x| Some(acc+x)), Some((15..20).sum())); + assert_eq!( + iter.try_fold(0, |acc, x| Some(acc + x)), + Some((15..20).sum()) + ); let mut iter = c().rev(); // use rev to access try_rfold assert_eq!(iter.position(|x| x == 15), Some(4)); assert_eq!(iter.next(), Some(14), "stopped in back, state Both"); assert_eq!(iter.position(|x| x == 5), Some(8)); assert_eq!(iter.next(), Some(4), "stopped in front, state Front"); - assert_eq!(iter.try_fold(0, |acc, x| Some(acc+x)), Some((0..4).sum())); + assert_eq!(iter.try_fold(0, |acc, x| Some(acc + x)), Some((0..4).sum())); let mut iter = c(); iter.by_ref().rev().nth(14); // skip the last 15, ending in state Front @@ -2028,11 +2129,17 @@ fn test_chain_try_folds() { #[test] fn test_map_try_folds() { - let f = &|acc, x| i32::checked_add(2*acc, x); - assert_eq!((0..10).map(|x| x+3).try_fold(7, f), (3..13).try_fold(7, f)); - assert_eq!((0..10).map(|x| x+3).try_rfold(7, f), (3..13).try_rfold(7, f)); - - let mut iter = (0..40).map(|x| x+10); + let f = &|acc, x| i32::checked_add(2 * acc, x); + assert_eq!( + (0..10).map(|x| x + 3).try_fold(7, f), + (3..13).try_fold(7, f) + ); + assert_eq!( + (0..10).map(|x| x + 3).try_rfold(7, f), + (3..13).try_rfold(7, f) + ); + + let mut iter = (0..40).map(|x| x + 10); assert_eq!(iter.try_fold(0, i8::checked_add), None); assert_eq!(iter.next(), Some(20)); assert_eq!(iter.try_rfold(0, i8::checked_add), None); @@ -2041,8 +2148,10 @@ fn test_map_try_folds() { #[test] fn test_filter_try_folds() { - fn p(&x: &i32) -> bool { 0 <= x && x < 10 } - let f = &|acc, x| 
i32::checked_add(2*acc, x); + fn p(&x: &i32) -> bool { + 0 <= x && x < 10 + } + let f = &|acc, x| i32::checked_add(2 * acc, x); assert_eq!((-10..20).filter(p).try_fold(7, f), (0..10).try_fold(7, f)); assert_eq!((-10..20).filter(p).try_rfold(7, f), (0..10).try_rfold(7, f)); @@ -2055,12 +2164,18 @@ fn test_filter_try_folds() { #[test] fn test_filter_map_try_folds() { - let mp = &|x| if 0 <= x && x < 10 { Some(x*2) } else { None }; - let f = &|acc, x| i32::checked_add(2*acc, x); - assert_eq!((-9..20).filter_map(mp).try_fold(7, f), (0..10).map(|x| 2*x).try_fold(7, f)); - assert_eq!((-9..20).filter_map(mp).try_rfold(7, f), (0..10).map(|x| 2*x).try_rfold(7, f)); - - let mut iter = (0..40).filter_map(|x| if x%2 == 1 { None } else { Some(x*2 + 10) }); + let mp = &|x| if 0 <= x && x < 10 { Some(x * 2) } else { None }; + let f = &|acc, x| i32::checked_add(2 * acc, x); + assert_eq!( + (-9..20).filter_map(mp).try_fold(7, f), + (0..10).map(|x| 2 * x).try_fold(7, f) + ); + assert_eq!( + (-9..20).filter_map(mp).try_rfold(7, f), + (0..10).map(|x| 2 * x).try_rfold(7, f) + ); + + let mut iter = (0..40).filter_map(|x| if x % 2 == 1 { None } else { Some(x * 2 + 10) }); assert_eq!(iter.try_fold(0, i8::checked_add), None); assert_eq!(iter.next(), Some(38)); assert_eq!(iter.try_rfold(0, i8::checked_add), None); @@ -2069,9 +2184,15 @@ fn test_filter_map_try_folds() { #[test] fn test_enumerate_try_folds() { - let f = &|acc, (i, x)| usize::checked_add(2*acc, x/(i+1) + i); - assert_eq!((9..18).enumerate().try_fold(7, f), (0..9).map(|i| (i, i+9)).try_fold(7, f)); - assert_eq!((9..18).enumerate().try_rfold(7, f), (0..9).map(|i| (i, i+9)).try_rfold(7, f)); + let f = &|acc, (i, x)| usize::checked_add(2 * acc, x / (i + 1) + i); + assert_eq!( + (9..18).enumerate().try_fold(7, f), + (0..9).map(|i| (i, i + 9)).try_fold(7, f) + ); + assert_eq!( + (9..18).enumerate().try_rfold(7, f), + (0..9).map(|i| (i, i + 9)).try_rfold(7, f) + ); let mut iter = (100..200).enumerate(); let f = &|acc, (i, x)| u8::checked_add(acc, u8::checked_div(x, i as u8 + 1)?); @@ -2083,7 +2204,7 @@ fn test_enumerate_try_folds() { #[test] fn test_peek_try_fold() { - let f = &|acc, x| i32::checked_add(2*acc, x); + let f = &|acc, x| i32::checked_add(2 * acc, x); assert_eq!((1..20).peekable().try_fold(7, f), (1..20).try_fold(7, f)); let mut iter = (1..20).peekable(); assert_eq!(iter.peek(), Some(&1)); @@ -2097,8 +2218,10 @@ fn test_peek_try_fold() { #[test] fn test_skip_while_try_fold() { - let f = &|acc, x| i32::checked_add(2*acc, x); - fn p(&x: &i32) -> bool { (x % 10) <= 5 } + let f = &|acc, x| i32::checked_add(2 * acc, x); + fn p(&x: &i32) -> bool { + (x % 10) <= 5 + } assert_eq!((1..20).skip_while(p).try_fold(7, f), (6..20).try_fold(7, f)); let mut iter = (1..20).skip_while(p); assert_eq!(iter.nth(5), Some(11)); @@ -2111,13 +2234,16 @@ fn test_skip_while_try_fold() { #[test] fn test_take_while_folds() { - let f = &|acc, x| i32::checked_add(2*acc, x); - assert_eq!((1..20).take_while(|&x| x != 10).try_fold(7, f), (1..10).try_fold(7, f)); + let f = &|acc, x| i32::checked_add(2 * acc, x); + assert_eq!( + (1..20).take_while(|&x| x != 10).try_fold(7, f), + (1..10).try_fold(7, f) + ); let mut iter = (1..20).take_while(|&x| x != 10); - assert_eq!(iter.try_fold(0, |x, y| Some(x+y)), Some((1..10).sum())); + assert_eq!(iter.try_fold(0, |x, y| Some(x + y)), Some((1..10).sum())); assert_eq!(iter.next(), None, "flag should be set"); let iter = (1..20).take_while(|&x| x != 10); - assert_eq!(iter.fold(0, |x, y| x+y), (1..10).sum()); + assert_eq!(iter.fold(0, |x, y| x + 
y), (1..10).sum()); let mut iter = (10..50).take_while(|&x| x != 40); assert_eq!(iter.try_fold(0, i8::checked_add), None); @@ -2126,7 +2252,7 @@ fn test_take_while_folds() { #[test] fn test_skip_try_folds() { - let f = &|acc, x| i32::checked_add(2*acc, x); + let f = &|acc, x| i32::checked_add(2 * acc, x); assert_eq!((1..20).skip(9).try_fold(7, f), (10..20).try_fold(7, f)); assert_eq!((1..20).skip(9).try_rfold(7, f), (10..20).try_rfold(7, f)); @@ -2139,7 +2265,7 @@ fn test_skip_try_folds() { #[test] fn test_take_try_folds() { - let f = &|acc, x| i32::checked_add(2*acc, x); + let f = &|acc, x| i32::checked_add(2 * acc, x); assert_eq!((10..30).take(10).try_fold(7, f), (10..20).try_fold(7, f)); //assert_eq!((10..30).take(10).try_rfold(7, f), (10..20).try_rfold(7, f)); @@ -2152,15 +2278,19 @@ fn test_take_try_folds() { #[test] fn test_flat_map_try_folds() { - let f = &|acc, x| i32::checked_add(acc*2/3, x); - let mr = &|x| (5*x)..(5*x + 5); + let f = &|acc, x| i32::checked_add(acc * 2 / 3, x); + let mr = &|x| (5 * x)..(5 * x + 5); assert_eq!((0..10).flat_map(mr).try_fold(7, f), (0..50).try_fold(7, f)); - assert_eq!((0..10).flat_map(mr).try_rfold(7, f), (0..50).try_rfold(7, f)); + assert_eq!( + (0..10).flat_map(mr).try_rfold(7, f), + (0..50).try_rfold(7, f) + ); let mut iter = (0..10).flat_map(mr); - iter.next(); iter.next_back(); // have front and back iters in progress + iter.next(); + iter.next_back(); // have front and back iters in progress assert_eq!(iter.try_rfold(7, f), (1..49).try_rfold(7, f)); - let mut iter = (0..10).flat_map(|x| (4*x)..(4*x + 4)); + let mut iter = (0..10).flat_map(|x| (4 * x)..(4 * x + 4)); assert_eq!(iter.try_fold(0, i8::checked_add), None); assert_eq!(iter.next(), Some(17)); assert_eq!(iter.try_rfold(0, i8::checked_add), None); @@ -2169,15 +2299,22 @@ fn test_flat_map_try_folds() { #[test] fn test_flatten_try_folds() { - let f = &|acc, x| i32::checked_add(acc*2/3, x); - let mr = &|x| (5*x)..(5*x + 5); - assert_eq!((0..10).map(mr).flatten().try_fold(7, f), (0..50).try_fold(7, f)); - assert_eq!((0..10).map(mr).flatten().try_rfold(7, f), (0..50).try_rfold(7, f)); + let f = &|acc, x| i32::checked_add(acc * 2 / 3, x); + let mr = &|x| (5 * x)..(5 * x + 5); + assert_eq!( + (0..10).map(mr).flatten().try_fold(7, f), + (0..50).try_fold(7, f) + ); + assert_eq!( + (0..10).map(mr).flatten().try_rfold(7, f), + (0..50).try_rfold(7, f) + ); let mut iter = (0..10).map(mr).flatten(); - iter.next(); iter.next_back(); // have front and back iters in progress + iter.next(); + iter.next_back(); // have front and back iters in progress assert_eq!(iter.try_rfold(7, f), (1..49).try_rfold(7, f)); - let mut iter = (0..10).map(|x| (4*x)..(4*x + 4)).flatten(); + let mut iter = (0..10).map(|x| (4 * x)..(4 * x + 4)).flatten(); assert_eq!(iter.try_fold(0, i8::checked_add), None); assert_eq!(iter.next(), Some(17)); assert_eq!(iter.try_rfold(0, i8::checked_add), None); @@ -2187,13 +2324,21 @@ fn test_flatten_try_folds() { #[test] fn test_functor_laws() { // identity: - fn identity(x: T) -> T { x } + fn identity(x: T) -> T { + x + } assert_eq!((0..10).map(identity).sum::(), (0..10).sum()); // composition: - fn f(x: usize) -> usize { x + 3 } - fn g(x: usize) -> usize { x * 2 } - fn h(x: usize) -> usize { g(f(x)) } + fn f(x: usize) -> usize { + x + 3 + } + fn g(x: usize) -> usize { + x * 2 + } + fn h(x: usize) -> usize { + g(f(x)) + } assert_eq!((0..10).map(f).map(g).sum::(), (0..10).map(h).sum()); } @@ -2212,8 +2357,14 @@ fn test_monad_laws_right_identity() { #[test] fn test_monad_laws_associativity() 
{ - fn f(x: usize) -> impl Iterator<Item = usize> { 0..x } - fn g(x: usize) -> impl Iterator<Item = usize> { (0..x).rev() } - assert_eq!((0..10).flat_map(f).flat_map(g).sum::<usize>(), - (0..10).flat_map(|x| f(x).flat_map(g)).sum::<usize>()); + fn f(x: usize) -> impl Iterator<Item = usize> { + 0..x + } + fn g(x: usize) -> impl Iterator<Item = usize> { + (0..x).rev() + } + assert_eq!( + (0..10).flat_map(f).flat_map(g).sum::<usize>(), + (0..10).flat_map(|x| f(x).flat_map(g)).sum::<usize>() + ); } diff --git a/src/libcore/tests/lib.rs b/src/libcore/tests/lib.rs index 72846daf16a6b..656baadf7abb1 100644 --- a/src/libcore/tests/lib.rs +++ b/src/libcore/tests/lib.rs @@ -35,8 +35,8 @@ #![feature(copy_within)] extern crate core; -extern crate test; extern crate rand; +extern crate test; mod any; mod array; diff --git a/src/libcore/tests/manually_drop.rs b/src/libcore/tests/manually_drop.rs index 49a1c187ea6cd..77a338daf7dcb 100644 --- a/src/libcore/tests/manually_drop.rs +++ b/src/libcore/tests/manually_drop.rs @@ -13,7 +13,7 @@ fn smoke() { drop(x); // also test unsizing - let x : Box<ManuallyDrop<[TypeWithDrop]>> = + let x: Box<ManuallyDrop<[TypeWithDrop]>> = Box::new(ManuallyDrop::new([TypeWithDrop, TypeWithDrop])); drop(x); } diff --git a/src/libcore/tests/mem.rs b/src/libcore/tests/mem.rs index f5b241959fdd2..59588d97787b7 100644 --- a/src/libcore/tests/mem.rs +++ b/src/libcore/tests/mem.rs @@ -96,7 +96,9 @@ fn test_transmute_copy() { #[test] fn test_transmute() { - trait Foo { fn dummy(&self) { } } + trait Foo { + fn dummy(&self) {} + } impl Foo for isize {} let a = box 100isize as Box; @@ -116,13 +118,13 @@ fn test_discriminant_send_sync() { enum Regular { A, - B(i32) + B(i32), } enum NotSendSync { - A(*const i32) + A(*const i32), } - fn is_send_sync<T: Send + Sync>() { } + fn is_send_sync<T: Send + Sync>() {} is_send_sync::<Discriminant<Regular>>(); is_send_sync::<Discriminant<NotSendSync>>(); diff --git a/src/libcore/tests/nonzero.rs b/src/libcore/tests/nonzero.rs index c813bf20cb61a..ac6b0070cb8c5 100644 --- a/src/libcore/tests/nonzero.rs +++ b/src/libcore/tests/nonzero.rs @@ -1,13 +1,11 @@ use core::num::NonZeroU32; use core::option::Option; -use core::option::Option::{Some, None}; +use core::option::Option::{None, Some}; use std::mem::size_of; #[test] fn test_create_nonzero_instance() { - let _a = unsafe { - NonZeroU32::new_unchecked(21) - }; + let _a = unsafe { NonZeroU32::new_unchecked(21) }; } #[test] @@ -17,17 +15,15 @@ fn test_size_nonzero_in_option() { #[test] fn test_match_on_nonzero_option() { - let a = Some(unsafe { - NonZeroU32::new_unchecked(42) - }); + let a = Some(unsafe { NonZeroU32::new_unchecked(42) }); match a { Some(val) => assert_eq!(val.get(), 42), - None => panic!("unexpected None while matching on Some(NonZeroU32(_))") + None => panic!("unexpected None while matching on Some(NonZeroU32(_))"), } match unsafe { Some(NonZeroU32::new_unchecked(43)) } { Some(val) => assert_eq!(val.get(), 43), - None => panic!("unexpected None while matching on Some(NonZeroU32(_))") + None => panic!("unexpected None while matching on Some(NonZeroU32(_))"), } } @@ -45,7 +41,7 @@ fn test_match_option_vec() { let a = Some(vec![1, 2, 3, 4]); match a { Some(v) => assert_eq!(v, [1, 2, 3, 4]), - None => panic!("unexpected None while matching on Some(vec![1, 2, 3, 4])") + None => panic!("unexpected None while matching on Some(vec![1, 2, 3, 4])"), } } @@ -56,7 +52,7 @@ fn test_match_option_rc() { let five = Rc::new(5); match Some(five) { Some(r) => assert_eq!(*r, 5), - None => panic!("unexpected None while matching on Some(Rc::new(5))") + None => panic!("unexpected None while matching on Some(Rc::new(5))"), } } @@ -67,7 +63,7 @@ fn test_match_option_arc() { let five = Arc::new(5); match 
Some(five) { Some(a) => assert_eq!(*a, 5), - None => panic!("unexpected None while matching on Some(Arc::new(5))") + None => panic!("unexpected None while matching on Some(Arc::new(5))"), } } @@ -85,7 +81,7 @@ fn test_match_option_string() { let five = "Five".to_string(); match Some(five) { Some(s) => assert_eq!(s, "Five"), - None => panic!("unexpected None while matching on Some(String { ... })") + None => panic!("unexpected None while matching on Some(String { ... })"), } } @@ -96,11 +92,15 @@ mod atom { pub struct Atom { index: NonZeroU32, // private } - pub const FOO_ATOM: Atom = Atom { index: unsafe { NonZeroU32::new_unchecked(7) } }; + pub const FOO_ATOM: Atom = Atom { + index: unsafe { NonZeroU32::new_unchecked(7) }, + }; } macro_rules! atom { - ("foo") => { atom::FOO_ATOM } + ("foo") => { + atom::FOO_ATOM + }; } #[test] @@ -108,7 +108,7 @@ fn test_match_nonzero_const_pattern() { match atom!("foo") { // Using as a pattern is supported by the compiler: atom!("foo") => {} - _ => panic!("Expected the const item as a pattern to match.") + _ => panic!("Expected the const item as a pattern to match."), } } diff --git a/src/libcore/tests/num/bignum.rs b/src/libcore/tests/num/bignum.rs index b873f1dd0652f..2b8666e76789f 100644 --- a/src/libcore/tests/num/bignum.rs +++ b/src/libcore/tests/num/bignum.rs @@ -1,5 +1,5 @@ -use std::prelude::v1::*; use core::num::bignum::tests::Big8x3 as Big; +use std::prelude::v1::*; #[test] #[should_panic] @@ -9,12 +9,30 @@ fn test_from_u64_overflow() { #[test] fn test_add() { - assert_eq!(*Big::from_small(3).add(&Big::from_small(4)), Big::from_small(7)); - assert_eq!(*Big::from_small(3).add(&Big::from_small(0)), Big::from_small(3)); - assert_eq!(*Big::from_small(0).add(&Big::from_small(3)), Big::from_small(3)); - assert_eq!(*Big::from_small(3).add(&Big::from_u64(0xfffe)), Big::from_u64(0x10001)); - assert_eq!(*Big::from_u64(0xfedc).add(&Big::from_u64(0x789)), Big::from_u64(0x10665)); - assert_eq!(*Big::from_u64(0x789).add(&Big::from_u64(0xfedc)), Big::from_u64(0x10665)); + assert_eq!( + *Big::from_small(3).add(&Big::from_small(4)), + Big::from_small(7) + ); + assert_eq!( + *Big::from_small(3).add(&Big::from_small(0)), + Big::from_small(3) + ); + assert_eq!( + *Big::from_small(0).add(&Big::from_small(3)), + Big::from_small(3) + ); + assert_eq!( + *Big::from_small(3).add(&Big::from_u64(0xfffe)), + Big::from_u64(0x10001) + ); + assert_eq!( + *Big::from_u64(0xfedc).add(&Big::from_u64(0x789)), + Big::from_u64(0x10665) + ); + assert_eq!( + *Big::from_u64(0x789).add(&Big::from_u64(0xfedc)), + Big::from_u64(0x10665) + ); } #[test] @@ -36,7 +54,10 @@ fn test_add_small() { assert_eq!(*Big::from_small(0).add_small(3), Big::from_small(3)); assert_eq!(*Big::from_small(7).add_small(250), Big::from_u64(257)); assert_eq!(*Big::from_u64(0x7fff).add_small(1), Big::from_u64(0x8000)); - assert_eq!(*Big::from_u64(0x2ffe).add_small(0x35), Big::from_u64(0x3033)); + assert_eq!( + *Big::from_u64(0x2ffe).add_small(0x35), + Big::from_u64(0x3033) + ); assert_eq!(*Big::from_small(0xdc).add_small(0x89), Big::from_u64(0x165)); } @@ -48,11 +69,26 @@ fn test_add_small_overflow() { #[test] fn test_sub() { - assert_eq!(*Big::from_small(7).sub(&Big::from_small(4)), Big::from_small(3)); - assert_eq!(*Big::from_u64(0x10665).sub(&Big::from_u64(0x789)), Big::from_u64(0xfedc)); - assert_eq!(*Big::from_u64(0x10665).sub(&Big::from_u64(0xfedc)), Big::from_u64(0x789)); - assert_eq!(*Big::from_u64(0x10665).sub(&Big::from_u64(0x10664)), Big::from_small(1)); - 
assert_eq!(*Big::from_u64(0x10665).sub(&Big::from_u64(0x10665)), Big::from_small(0)); + assert_eq!( + *Big::from_small(7).sub(&Big::from_small(4)), + Big::from_small(3) + ); + assert_eq!( + *Big::from_u64(0x10665).sub(&Big::from_u64(0x789)), + Big::from_u64(0xfedc) + ); + assert_eq!( + *Big::from_u64(0x10665).sub(&Big::from_u64(0xfedc)), + Big::from_u64(0x789) + ); + assert_eq!( + *Big::from_u64(0x10665).sub(&Big::from_u64(0x10664)), + Big::from_small(1) + ); + assert_eq!( + *Big::from_u64(0x10665).sub(&Big::from_u64(0x10665)), + Big::from_small(0) + ); } #[test] @@ -70,8 +106,14 @@ fn test_sub_underflow_2() { #[test] fn test_mul_small() { assert_eq!(*Big::from_small(7).mul_small(5), Big::from_small(35)); - assert_eq!(*Big::from_small(0xff).mul_small(0xff), Big::from_u64(0xfe01)); - assert_eq!(*Big::from_u64(0xffffff/13).mul_small(13), Big::from_u64(0xffffff)); + assert_eq!( + *Big::from_small(0xff).mul_small(0xff), + Big::from_u64(0xfe01) + ); + assert_eq!( + *Big::from_u64(0xffffff / 13).mul_small(13), + Big::from_u64(0xffffff) + ); } #[test] @@ -131,12 +173,30 @@ fn test_mul_pow5_overflow_2() { #[test] fn test_mul_digits() { assert_eq!(*Big::from_small(3).mul_digits(&[5]), Big::from_small(15)); - assert_eq!(*Big::from_small(0xff).mul_digits(&[0xff]), Big::from_u64(0xfe01)); - assert_eq!(*Big::from_u64(0x123).mul_digits(&[0x56, 0x4]), Big::from_u64(0x4edc2)); - assert_eq!(*Big::from_u64(0x12345).mul_digits(&[0x67]), Big::from_u64(0x7530c3)); - assert_eq!(*Big::from_small(0x12).mul_digits(&[0x67, 0x45, 0x3]), Big::from_u64(0x3ae13e)); - assert_eq!(*Big::from_u64(0xffffff/13).mul_digits(&[13]), Big::from_u64(0xffffff)); - assert_eq!(*Big::from_small(13).mul_digits(&[0x3b, 0xb1, 0x13]), Big::from_u64(0xffffff)); + assert_eq!( + *Big::from_small(0xff).mul_digits(&[0xff]), + Big::from_u64(0xfe01) + ); + assert_eq!( + *Big::from_u64(0x123).mul_digits(&[0x56, 0x4]), + Big::from_u64(0x4edc2) + ); + assert_eq!( + *Big::from_u64(0x12345).mul_digits(&[0x67]), + Big::from_u64(0x7530c3) + ); + assert_eq!( + *Big::from_small(0x12).mul_digits(&[0x67, 0x45, 0x3]), + Big::from_u64(0x3ae13e) + ); + assert_eq!( + *Big::from_u64(0xffffff / 13).mul_digits(&[13]), + Big::from_u64(0xffffff) + ); + assert_eq!( + *Big::from_small(13).mul_digits(&[0x3b, 0xb1, 0x13]), + Big::from_u64(0xffffff) + ); } #[test] @@ -154,13 +214,26 @@ fn test_mul_digits_overflow_2() { #[test] fn test_div_rem_small() { let as_val = |(q, r): (&mut Big, u8)| (q.clone(), r); - assert_eq!(as_val(Big::from_small(0xff).div_rem_small(15)), (Big::from_small(17), 0)); - assert_eq!(as_val(Big::from_small(0xff).div_rem_small(16)), (Big::from_small(15), 15)); - assert_eq!(as_val(Big::from_small(3).div_rem_small(40)), (Big::from_small(0), 3)); - assert_eq!(as_val(Big::from_u64(0xffffff).div_rem_small(123)), - (Big::from_u64(0xffffff / 123), (0xffffffu64 % 123) as u8)); - assert_eq!(as_val(Big::from_u64(0x10000).div_rem_small(123)), - (Big::from_u64(0x10000 / 123), (0x10000u64 % 123) as u8)); + assert_eq!( + as_val(Big::from_small(0xff).div_rem_small(15)), + (Big::from_small(17), 0) + ); + assert_eq!( + as_val(Big::from_small(0xff).div_rem_small(16)), + (Big::from_small(15), 15) + ); + assert_eq!( + as_val(Big::from_small(3).div_rem_small(40)), + (Big::from_small(0), 3) + ); + assert_eq!( + as_val(Big::from_u64(0xffffff).div_rem_small(123)), + (Big::from_u64(0xffffff / 123), (0xffffffu64 % 123) as u8) + ); + assert_eq!( + as_val(Big::from_u64(0x10000).div_rem_small(123)), + (Big::from_u64(0x10000 / 123), (0x10000u64 % 123) as u8) + ); } #[test] @@ 
-176,10 +249,22 @@ fn test_div_rem() { assert_eq!(div_rem(1, 7), (Big::from_small(0), Big::from_small(1))); assert_eq!(div_rem(45, 9), (Big::from_small(5), Big::from_small(0))); assert_eq!(div_rem(103, 9), (Big::from_small(11), Big::from_small(4))); - assert_eq!(div_rem(123456, 77), (Big::from_u64(1603), Big::from_small(25))); - assert_eq!(div_rem(0xffff, 1), (Big::from_u64(0xffff), Big::from_small(0))); - assert_eq!(div_rem(0xeeee, 0xffff), (Big::from_small(0), Big::from_u64(0xeeee))); - assert_eq!(div_rem(2_000_000, 2), (Big::from_u64(1_000_000), Big::from_u64(0))); + assert_eq!( + div_rem(123456, 77), + (Big::from_u64(1603), Big::from_small(25)) + ); + assert_eq!( + div_rem(0xffff, 1), + (Big::from_u64(0xffff), Big::from_small(0)) + ); + assert_eq!( + div_rem(0xeeee, 0xffff), + (Big::from_small(0), Big::from_u64(0xeeee)) + ); + assert_eq!( + div_rem(2_000_000, 2), + (Big::from_u64(1_000_000), Big::from_u64(0)) + ); } #[test] @@ -187,8 +272,12 @@ fn test_is_zero() { assert!(Big::from_small(0).is_zero()); assert!(!Big::from_small(3).is_zero()); assert!(!Big::from_u64(0x123).is_zero()); - assert!(!Big::from_u64(0xffffff).sub(&Big::from_u64(0xfffffe)).is_zero()); - assert!(Big::from_u64(0xffffff).sub(&Big::from_u64(0xffffff)).is_zero()); + assert!(!Big::from_u64(0xffffff) + .sub(&Big::from_u64(0xfffffe)) + .is_zero()); + assert!(Big::from_u64(0xffffff) + .sub(&Big::from_u64(0xffffff)) + .is_zero()); } #[test] @@ -236,4 +325,3 @@ fn test_fmt() { assert_eq!(format!("{:?}", Big::from_u64(0x12345)), "0x1_23_45"); assert_eq!(format!("{:?}", Big::from_u64(0x123456)), "0x12_34_56"); } - diff --git a/src/libcore/tests/num/dec2flt/mod.rs b/src/libcore/tests/num/dec2flt/mod.rs index 8f1cd32c3563c..0558b6464c69d 100644 --- a/src/libcore/tests/num/dec2flt/mod.rs +++ b/src/libcore/tests/num/dec2flt/mod.rs @@ -1,6 +1,6 @@ #![allow(overflowing_literals)] -use std::{i64, f32, f64}; +use std::{f32, f64, i64}; mod parse; mod rawfp; @@ -9,10 +9,14 @@ mod rawfp; // to be correct) and see if those strings are parsed back to the value of the literal. // Requires a *polymorphic literal*, i.e., one that can serve as f64 as well as f32. macro_rules! test_literal { - ($x: expr) => ({ + ($x: expr) => {{ let x32: f32 = $x; let x64: f64 = $x; - let inputs = &[stringify!($x).into(), format!("{:?}", x64), format!("{:e}", x64)]; + let inputs = &[ + stringify!($x).into(), + format!("{:?}", x64), + format!("{:e}", x64), + ]; for input in inputs { assert_eq!(input.parse(), Ok(x64)); assert_eq!(input.parse(), Ok(x32)); @@ -20,7 +24,7 @@ macro_rules! 
test_literal { assert_eq!(neg_input.parse(), Ok(-x64)); assert_eq!(neg_input.parse(), Ok(-x32)); } - }) + }}; } #[cfg_attr(all(target_arch = "wasm32", target_os = "emscripten"), ignore)] // issue 42630 diff --git a/src/libcore/tests/num/dec2flt/parse.rs b/src/libcore/tests/num/dec2flt/parse.rs index 1eac484119170..dcabdfda4a8fc 100644 --- a/src/libcore/tests/num/dec2flt/parse.rs +++ b/src/libcore/tests/num/dec2flt/parse.rs @@ -1,5 +1,5 @@ -use core::num::dec2flt::parse::{Decimal, parse_decimal}; -use core::num::dec2flt::parse::ParseResult::{Valid, Invalid}; +use core::num::dec2flt::parse::ParseResult::{Invalid, Valid}; +use core::num::dec2flt::parse::{parse_decimal, Decimal}; #[test] fn missing_pieces() { @@ -19,7 +19,11 @@ fn invalid_chars() { let mut input = String::new(); input.push_str(s); input.insert(i, c); - assert!(parse_decimal(&input) == Invalid, "did not reject invalid {:?}", input); + assert!( + parse_decimal(&input) == Invalid, + "did not reject invalid {:?}", + input + ); } } } @@ -27,14 +31,26 @@ fn invalid_chars() { #[test] fn valid() { - assert_eq!(parse_decimal("123.456e789"), Valid(Decimal::new(b"123", b"456", 789))); - assert_eq!(parse_decimal("123.456e+789"), Valid(Decimal::new(b"123", b"456", 789))); - assert_eq!(parse_decimal("123.456e-789"), Valid(Decimal::new(b"123", b"456", -789))); + assert_eq!( + parse_decimal("123.456e789"), + Valid(Decimal::new(b"123", b"456", 789)) + ); + assert_eq!( + parse_decimal("123.456e+789"), + Valid(Decimal::new(b"123", b"456", 789)) + ); + assert_eq!( + parse_decimal("123.456e-789"), + Valid(Decimal::new(b"123", b"456", -789)) + ); assert_eq!(parse_decimal(".050"), Valid(Decimal::new(b"", b"050", 0))); assert_eq!(parse_decimal("999"), Valid(Decimal::new(b"999", b"", 0))); assert_eq!(parse_decimal("1.e300"), Valid(Decimal::new(b"1", b"", 300))); assert_eq!(parse_decimal(".1e300"), Valid(Decimal::new(b"", b"1", 300))); - assert_eq!(parse_decimal("101e-33"), Valid(Decimal::new(b"101", b"", -33))); + assert_eq!( + parse_decimal("101e-33"), + Valid(Decimal::new(b"101", b"", -33)) + ); let zeros = "0".repeat(25); let s = format!("1.5e{}", zeros); assert_eq!(parse_decimal(&s), Valid(Decimal::new(b"1", b"5", 0))); diff --git a/src/libcore/tests/num/dec2flt/rawfp.rs b/src/libcore/tests/num/dec2flt/rawfp.rs index 747c1bfa3f9c2..db665d583608a 100644 --- a/src/libcore/tests/num/dec2flt/rawfp.rs +++ b/src/libcore/tests/num/dec2flt/rawfp.rs @@ -1,8 +1,8 @@ +use core::num::dec2flt::rawfp::RawFloat; +use core::num::dec2flt::rawfp::{fp_to_float, next_float, prev_float, round_normal}; +use core::num::diy_float::Fp; use std::f32; use std::f64; -use core::num::diy_float::Fp; -use core::num::dec2flt::rawfp::{fp_to_float, prev_float, next_float, round_normal}; -use core::num::dec2flt::rawfp::RawFloat; fn integer_decode(f: f64) -> (u64, i16, i8) { RawFloat::integer_decode(f) @@ -49,27 +49,40 @@ fn fp_to_float_half_to_even() { fn integers_to_f64() { assert_eq!(fp_to_float::(Fp { f: 1, e: 0 }), 1.0); assert_eq!(fp_to_float::(Fp { f: 42, e: 7 }), (42 << 7) as f64); - assert_eq!(fp_to_float::(Fp { f: 1 << 20, e: 30 }), (1u64 << 50) as f64); + assert_eq!( + fp_to_float::(Fp { f: 1 << 20, e: 30 }), + (1u64 << 50) as f64 + ); assert_eq!(fp_to_float::(Fp { f: 4, e: -3 }), 0.5); } -const SOME_FLOATS: [f64; 9] = - [0.1f64, 33.568, 42.1e-5, 777.0e9, 1.1111, 0.347997, - 9843579834.35892, 12456.0e-150, 54389573.0e-150]; - +const SOME_FLOATS: [f64; 9] = [ + 0.1f64, + 33.568, + 42.1e-5, + 777.0e9, + 1.1111, + 0.347997, + 9843579834.35892, + 12456.0e-150, + 
54389573.0e-150, +]; #[test] fn human_f64_roundtrip() { for &x in &SOME_FLOATS { let (f, e, _) = integer_decode(x); - let fp = Fp { f: f, e: e}; + let fp = Fp { f: f, e: e }; assert_eq!(fp_to_float::(fp), x); } } #[test] fn rounding_overflow() { - let x = Fp { f: 0xFF_FF_FF_FF_FF_FF_FF_00u64, e: 42 }; + let x = Fp { + f: 0xFF_FF_FF_FF_FF_FF_FF_00u64, + e: 42, + }; let rounded = round_normal::(x); let adjusted_k = x.e + 64 - 53; assert_eq!(rounded.sig, 1 << 52); @@ -155,13 +168,22 @@ fn test_f32_integer_decode() { #[test] fn test_f64_integer_decode() { - assert_eq!(3.14159265359f64.integer_decode(), (7074237752028906, -51, 1)); - assert_eq!((-8573.5918555f64).integer_decode(), (4713381968463931, -39, -1)); + assert_eq!( + 3.14159265359f64.integer_decode(), + (7074237752028906, -51, 1) + ); + assert_eq!( + (-8573.5918555f64).integer_decode(), + (4713381968463931, -39, -1) + ); assert_eq!(2f64.powf(100.0).integer_decode(), (4503599627370496, 48, 1)); assert_eq!(0f64.integer_decode(), (0, -1075, 1)); assert_eq!((-0f64).integer_decode(), (0, -1075, -1)); assert_eq!(f64::INFINITY.integer_decode(), (4503599627370496, 972, 1)); - assert_eq!(f64::NEG_INFINITY.integer_decode(), (4503599627370496, 972, -1)); + assert_eq!( + f64::NEG_INFINITY.integer_decode(), + (4503599627370496, 972, -1) + ); // Ignore the "sign" (quiet / signalling flag) of NAN. // It can vary between runtime operations and LLVM folding. diff --git a/src/libcore/tests/num/flt2dec/estimator.rs b/src/libcore/tests/num/flt2dec/estimator.rs index fb0888e2720a9..00a681f20d682 100644 --- a/src/libcore/tests/num/flt2dec/estimator.rs +++ b/src/libcore/tests/num/flt2dec/estimator.rs @@ -3,14 +3,24 @@ use core::num::flt2dec::estimator::*; #[test] fn test_estimate_scaling_factor() { macro_rules! assert_almost_eq { - ($actual:expr, $expected:expr) => ({ + ($actual:expr, $expected:expr) => {{ let actual = $actual; let expected = $expected; - println!("{} - {} = {} - {} = {}", stringify!($expected), stringify!($actual), - expected, actual, expected - actual); - assert!(expected == actual || expected == actual + 1, - "expected {}, actual {}", expected, actual); - }) + println!( + "{} - {} = {} - {} = {}", + stringify!($expected), + stringify!($actual), + expected, + actual, + expected - actual + ); + assert!( + expected == actual || expected == actual + 1, + "expected {}, actual {}", + expected, + actual + ); + }}; } assert_almost_eq!(estimate_scaling_factor(1, 0), 0); @@ -28,7 +38,10 @@ fn test_estimate_scaling_factor() { assert_almost_eq!(estimate_scaling_factor(10 * 1048576 / 1000000, -20), -5); assert_almost_eq!(estimate_scaling_factor(10 * 1048576 / 1000000 + 1, -20), -4); assert_almost_eq!(estimate_scaling_factor(100 * 1048576 / 1000000, -20), -4); - assert_almost_eq!(estimate_scaling_factor(100 * 1048576 / 1000000 + 1, -20), -3); + assert_almost_eq!( + estimate_scaling_factor(100 * 1048576 / 1000000 + 1, -20), + -3 + ); assert_almost_eq!(estimate_scaling_factor(1048575, -20), 0); assert_almost_eq!(estimate_scaling_factor(1048576, -20), 0); assert_almost_eq!(estimate_scaling_factor(1048577, -20), 1); @@ -47,4 +60,3 @@ fn test_estimate_scaling_factor() { assert_almost_eq!(estimate_scaling_factor(1, i as i16), expected as i16); } } - diff --git a/src/libcore/tests/num/flt2dec/mod.rs b/src/libcore/tests/num/flt2dec/mod.rs index fed9ce73b2a8e..10211fd238a1d 100644 --- a/src/libcore/tests/num/flt2dec/mod.rs +++ b/src/libcore/tests/num/flt2dec/mod.rs @@ -1,10 +1,11 @@ use std::prelude::v1::*; -use std::{str, i16, f32, f64, fmt}; +use std::{f32, 
f64, fmt, i16, str}; -use core::num::flt2dec::{decode, DecodableFloat, FullDecoded, Decoded}; -use core::num::flt2dec::{MAX_SIG_DIGITS, round_up, Part, Formatted, Sign}; -use core::num::flt2dec::{to_shortest_str, to_shortest_exp_str, - to_exact_exp_str, to_exact_fixed_str}; +use core::num::flt2dec::{decode, DecodableFloat, Decoded, FullDecoded}; +use core::num::flt2dec::{round_up, Formatted, Part, Sign, MAX_SIG_DIGITS}; +use core::num::flt2dec::{ + to_exact_exp_str, to_exact_fixed_str, to_shortest_exp_str, to_shortest_str, +}; pub use test::Bencher; @@ -18,7 +19,7 @@ mod random; pub fn decode_finite(v: T) -> Decoded { match decode(v).1 { FullDecoded::Finite(decoded) => decoded, - full_decoded => panic!("expected finite, got {:?} instead", full_decoded) + full_decoded => panic!("expected finite, got {:?} instead", full_decoded), } } @@ -82,14 +83,17 @@ fn ldexp_f32(a: f32, b: i32) -> f32 { } fn ldexp_f64(a: f64, b: i32) -> f64 { - extern { + extern "C" { fn ldexp(x: f64, n: i32) -> f64; } unsafe { ldexp(a, b) } } fn check_exact(mut f: F, v: T, vstr: &str, expected: &[u8], expectedk: i16) - where T: DecodableFloat, F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16) { +where + T: DecodableFloat, + F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16), +{ // use a large enough buffer let mut buf = [b'_'; 1024]; let mut expected_ = [b'_'; 1024]; @@ -104,14 +108,18 @@ fn check_exact(mut f: F, v: T, vstr: &str, expected: &[u8], expectedk: i16 if expected[i] >= b'5' { // check if this is a rounding-to-even case. // we avoid rounding ...x5000... (with infinite zeroes) to ...(x+1) when x is even. - if !(i+1 < expected.len() && expected[i-1] & 1 == 0 && - expected[i] == b'5' && - expected[i+1] == b' ') { + if !(i + 1 < expected.len() + && expected[i - 1] & 1 == 0 + && expected[i] == b'5' + && expected[i + 1] == b' ') + { // if this returns true, expected_[..i] is all `9`s and being rounded up. // we should always return `100..00` (`i` digits) instead, since that's // what we can came up with `i` digits anyway. `round_up` assumes that // the adjustment to the length is done by caller, which we simply ignore. - if let Some(_) = round_up(&mut expected_, i) { expectedk_ += 1; } + if let Some(_) = round_up(&mut expected_, i) { + expectedk_ += 1; + } } } @@ -145,9 +153,11 @@ fn check_exact(mut f: F, v: T, vstr: &str, expected: &[u8], expectedk: i16 // check infinite zero digits if let Some(cut) = cut { - for i in cut..expected.len()-1 { + for i in cut..expected.len() - 1 { expected_[..cut].copy_from_slice(&expected[..cut]); - for c in &mut expected_[cut..i] { *c = b'0'; } + for c in &mut expected_[cut..i] { + *c = b'0'; + } try_exact!(f(&decoded) => &mut buf, &expected_[..i], expectedk; "exact infzero mismatch for v={v}, i={i}: \ @@ -161,23 +171,29 @@ fn check_exact(mut f: F, v: T, vstr: &str, expected: &[u8], expectedk: i16 } } -trait TestableFloat : DecodableFloat + fmt::Display { +trait TestableFloat: DecodableFloat + fmt::Display { /// Returns `x * 2^exp`. Almost same to `std::{f32,f64}::ldexp`. /// This is used for testing. 
fn ldexpi(f: i64, exp: isize) -> Self; } impl TestableFloat for f32 { - fn ldexpi(f: i64, exp: isize) -> Self { f as Self * (exp as Self).exp2() } + fn ldexpi(f: i64, exp: isize) -> Self { + f as Self * (exp as Self).exp2() + } } impl TestableFloat for f64 { - fn ldexpi(f: i64, exp: isize) -> Self { f as Self * (exp as Self).exp2() } + fn ldexpi(f: i64, exp: isize) -> Self { + f as Self * (exp as Self).exp2() + } } fn check_exact_one(mut f: F, x: i64, e: isize, tstr: &str, expected: &[u8], expectedk: i16) - where T: TestableFloat, - F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16) { +where + T: TestableFloat, + F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16), +{ // use a large enough buffer let mut buf = [b'_'; 1024]; let v: T = TestableFloat::ldexpi(x, e); @@ -192,15 +208,15 @@ fn check_exact_one(mut f: F, x: i64, e: isize, tstr: &str, expected: &[u8] } macro_rules! check_exact { - ($f:ident($v:expr) => $buf:expr, $exp:expr) => ( - check_exact(|d,b,k| $f(d,b,k), $v, stringify!($v), $buf, $exp) - ) + ($f:ident($v:expr) => $buf:expr, $exp:expr) => { + check_exact(|d, b, k| $f(d, b, k), $v, stringify!($v), $buf, $exp) + }; } macro_rules! check_exact_one { - ($f:ident($x:expr, $e:expr; $t:ty) => $buf:expr, $exp:expr) => ( - check_exact_one::<_, $t>(|d,b,k| $f(d,b,k), $x, $e, stringify!($t), $buf, $exp) - ) + ($f:ident($x:expr, $e:expr; $t:ty) => $buf:expr, $exp:expr) => { + check_exact_one::<_, $t>(|d, b, k| $f(d, b, k), $x, $e, stringify!($t), $buf, $exp) + }; } // in the following comments, three numbers are spaced by 1 ulp apart, @@ -211,7 +227,10 @@ macro_rules! check_exact_one { // [1] Vern Paxson, A Program for Testing IEEE Decimal-Binary Conversion // ftp://ftp.ee.lbl.gov/testbase-report.ps.Z -pub fn f32_shortest_sanity_test(mut f: F) where F: FnMut(&Decoded, &mut [u8]) -> (usize, i16) { +pub fn f32_shortest_sanity_test(mut f: F) +where + F: FnMut(&Decoded, &mut [u8]) -> (usize, i16), +{ // 0.0999999940395355224609375 // 0.100000001490116119384765625 // 0.10000000894069671630859375 @@ -256,7 +275,9 @@ pub fn f32_shortest_sanity_test(mut f: F) where F: FnMut(&Decoded, &mut [u8]) } pub fn f32_exact_sanity_test(mut f: F) - where F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16) { +where + F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16), +{ let minf32 = ldexp_f32(1.0, -149); check_exact!(f(0.1f32) => b"100000001490116119384765625 ", 0); @@ -297,7 +318,10 @@ pub fn f32_exact_sanity_test(mut f: F) check_exact_one!(f(13248074, 95; f32) => b"524810279937", 36); } -pub fn f64_shortest_sanity_test(mut f: F) where F: FnMut(&Decoded, &mut [u8]) -> (usize, i16) { +pub fn f64_shortest_sanity_test(mut f: F) +where + F: FnMut(&Decoded, &mut [u8]) -> (usize, i16), +{ // 0.0999999999999999777955395074968691915273... // 0.1000000000000000055511151231257827021181... // 0.1000000000000000333066907387546962127089... 
@@ -361,7 +385,9 @@ pub fn f64_shortest_sanity_test(mut f: F) where F: FnMut(&Decoded, &mut [u8]) } pub fn f64_exact_sanity_test(mut f: F) - where F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16) { +where + F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16), +{ let minf64 = ldexp_f64(1.0, -1074); check_exact!(f(0.1f64) => b"1000000000000000055511151231257827021181", 0); @@ -445,7 +471,10 @@ pub fn f64_exact_sanity_test(mut f: F) check_exact_one!(f(8549497411294502, -448; f64) => b"1176257830728540379990", -118); } -pub fn more_shortest_sanity_test(mut f: F) where F: FnMut(&Decoded, &mut [u8]) -> (usize, i16) { +pub fn more_shortest_sanity_test(mut f: F) +where + F: FnMut(&Decoded, &mut [u8]) -> (usize, i16), +{ check_shortest!(f{mant: 99_999_999_999_999_999, minus: 1, plus: 1, exp: 0, inclusive: true} => b"1", 18); check_shortest!(f{mant: 99_999_999_999_999_999, minus: 1, plus: 1, @@ -453,7 +482,9 @@ pub fn more_shortest_sanity_test(mut f: F) where F: FnMut(&Decoded, &mut [u8] } fn to_string_with_parts(mut f: F) -> String - where F: for<'a> FnMut(&'a mut [u8], &'a mut [Part<'a>]) -> Formatted<'a> { +where + F: for<'a> FnMut(&'a mut [u8], &'a mut [Part<'a>]) -> Formatted<'a>, +{ let mut buf = [0; 1024]; let mut parts = [Part::Zero(0); 16]; let formatted = f(&mut buf, &mut parts); @@ -463,646 +494,988 @@ fn to_string_with_parts(mut f: F) -> String } pub fn to_shortest_str_test(mut f_: F) - where F: FnMut(&Decoded, &mut [u8]) -> (usize, i16) { +where + F: FnMut(&Decoded, &mut [u8]) -> (usize, i16), +{ use core::num::flt2dec::Sign::*; fn to_string(f: &mut F, v: T, sign: Sign, frac_digits: usize, upper: bool) -> String - where T: DecodableFloat, F: FnMut(&Decoded, &mut [u8]) -> (usize, i16) { - to_string_with_parts(|buf, parts| to_shortest_str(|d,b| f(d,b), v, sign, - frac_digits, upper, buf, parts)) + where + T: DecodableFloat, + F: FnMut(&Decoded, &mut [u8]) -> (usize, i16), + { + to_string_with_parts(|buf, parts| { + to_shortest_str(|d, b| f(d, b), v, sign, frac_digits, upper, buf, parts) + }) } let f = &mut f_; - assert_eq!(to_string(f, 0.0, Minus, 0, false), "0"); - assert_eq!(to_string(f, 0.0, MinusRaw, 0, false), "0"); - assert_eq!(to_string(f, 0.0, MinusPlus, 0, false), "+0"); - assert_eq!(to_string(f, 0.0, MinusPlusRaw, 0, false), "+0"); - assert_eq!(to_string(f, -0.0, Minus, 0, false), "0"); - assert_eq!(to_string(f, -0.0, MinusRaw, 0, false), "-0"); - assert_eq!(to_string(f, -0.0, MinusPlus, 0, false), "+0"); + assert_eq!(to_string(f, 0.0, Minus, 0, false), "0"); + assert_eq!(to_string(f, 0.0, MinusRaw, 0, false), "0"); + assert_eq!(to_string(f, 0.0, MinusPlus, 0, false), "+0"); + assert_eq!(to_string(f, 0.0, MinusPlusRaw, 0, false), "+0"); + assert_eq!(to_string(f, -0.0, Minus, 0, false), "0"); + assert_eq!(to_string(f, -0.0, MinusRaw, 0, false), "-0"); + assert_eq!(to_string(f, -0.0, MinusPlus, 0, false), "+0"); assert_eq!(to_string(f, -0.0, MinusPlusRaw, 0, false), "-0"); - assert_eq!(to_string(f, 0.0, Minus, 1, true), "0.0"); - assert_eq!(to_string(f, 0.0, MinusRaw, 1, true), "0.0"); - assert_eq!(to_string(f, 0.0, MinusPlus, 1, true), "+0.0"); - assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1, true), "+0.0"); - assert_eq!(to_string(f, -0.0, Minus, 8, true), "0.00000000"); - assert_eq!(to_string(f, -0.0, MinusRaw, 8, true), "-0.00000000"); - assert_eq!(to_string(f, -0.0, MinusPlus, 8, true), "+0.00000000"); - assert_eq!(to_string(f, -0.0, MinusPlusRaw, 8, true), "-0.00000000"); - - assert_eq!(to_string(f, 1.0/0.0, Minus, 0, false), "inf"); - assert_eq!(to_string(f, 1.0/0.0, 
MinusRaw, 0, true), "inf"); - assert_eq!(to_string(f, 1.0/0.0, MinusPlus, 0, false), "+inf"); - assert_eq!(to_string(f, 1.0/0.0, MinusPlusRaw, 0, true), "+inf"); - assert_eq!(to_string(f, 0.0/0.0, Minus, 0, false), "NaN"); - assert_eq!(to_string(f, 0.0/0.0, MinusRaw, 1, true), "NaN"); - assert_eq!(to_string(f, 0.0/0.0, MinusPlus, 8, false), "NaN"); - assert_eq!(to_string(f, 0.0/0.0, MinusPlusRaw, 64, true), "NaN"); - assert_eq!(to_string(f, -1.0/0.0, Minus, 0, false), "-inf"); - assert_eq!(to_string(f, -1.0/0.0, MinusRaw, 1, true), "-inf"); - assert_eq!(to_string(f, -1.0/0.0, MinusPlus, 8, false), "-inf"); - assert_eq!(to_string(f, -1.0/0.0, MinusPlusRaw, 64, true), "-inf"); - - assert_eq!(to_string(f, 3.14, Minus, 0, false), "3.14"); - assert_eq!(to_string(f, 3.14, MinusRaw, 0, false), "3.14"); - assert_eq!(to_string(f, 3.14, MinusPlus, 0, false), "+3.14"); - assert_eq!(to_string(f, 3.14, MinusPlusRaw, 0, false), "+3.14"); - assert_eq!(to_string(f, -3.14, Minus, 0, false), "-3.14"); - assert_eq!(to_string(f, -3.14, MinusRaw, 0, false), "-3.14"); - assert_eq!(to_string(f, -3.14, MinusPlus, 0, false), "-3.14"); + assert_eq!(to_string(f, 0.0, Minus, 1, true), "0.0"); + assert_eq!(to_string(f, 0.0, MinusRaw, 1, true), "0.0"); + assert_eq!(to_string(f, 0.0, MinusPlus, 1, true), "+0.0"); + assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1, true), "+0.0"); + assert_eq!(to_string(f, -0.0, Minus, 8, true), "0.00000000"); + assert_eq!(to_string(f, -0.0, MinusRaw, 8, true), "-0.00000000"); + assert_eq!(to_string(f, -0.0, MinusPlus, 8, true), "+0.00000000"); + assert_eq!(to_string(f, -0.0, MinusPlusRaw, 8, true), "-0.00000000"); + + assert_eq!(to_string(f, 1.0 / 0.0, Minus, 0, false), "inf"); + assert_eq!(to_string(f, 1.0 / 0.0, MinusRaw, 0, true), "inf"); + assert_eq!(to_string(f, 1.0 / 0.0, MinusPlus, 0, false), "+inf"); + assert_eq!(to_string(f, 1.0 / 0.0, MinusPlusRaw, 0, true), "+inf"); + assert_eq!(to_string(f, 0.0 / 0.0, Minus, 0, false), "NaN"); + assert_eq!(to_string(f, 0.0 / 0.0, MinusRaw, 1, true), "NaN"); + assert_eq!(to_string(f, 0.0 / 0.0, MinusPlus, 8, false), "NaN"); + assert_eq!(to_string(f, 0.0 / 0.0, MinusPlusRaw, 64, true), "NaN"); + assert_eq!(to_string(f, -1.0 / 0.0, Minus, 0, false), "-inf"); + assert_eq!(to_string(f, -1.0 / 0.0, MinusRaw, 1, true), "-inf"); + assert_eq!(to_string(f, -1.0 / 0.0, MinusPlus, 8, false), "-inf"); + assert_eq!(to_string(f, -1.0 / 0.0, MinusPlusRaw, 64, true), "-inf"); + + assert_eq!(to_string(f, 3.14, Minus, 0, false), "3.14"); + assert_eq!(to_string(f, 3.14, MinusRaw, 0, false), "3.14"); + assert_eq!(to_string(f, 3.14, MinusPlus, 0, false), "+3.14"); + assert_eq!(to_string(f, 3.14, MinusPlusRaw, 0, false), "+3.14"); + assert_eq!(to_string(f, -3.14, Minus, 0, false), "-3.14"); + assert_eq!(to_string(f, -3.14, MinusRaw, 0, false), "-3.14"); + assert_eq!(to_string(f, -3.14, MinusPlus, 0, false), "-3.14"); assert_eq!(to_string(f, -3.14, MinusPlusRaw, 0, false), "-3.14"); - assert_eq!(to_string(f, 3.14, Minus, 1, true), "3.14"); - assert_eq!(to_string(f, 3.14, MinusRaw, 2, true), "3.14"); - assert_eq!(to_string(f, 3.14, MinusPlus, 3, true), "+3.140"); - assert_eq!(to_string(f, 3.14, MinusPlusRaw, 4, true), "+3.1400"); - assert_eq!(to_string(f, -3.14, Minus, 8, true), "-3.14000000"); - assert_eq!(to_string(f, -3.14, MinusRaw, 8, true), "-3.14000000"); - assert_eq!(to_string(f, -3.14, MinusPlus, 8, true), "-3.14000000"); - assert_eq!(to_string(f, -3.14, MinusPlusRaw, 8, true), "-3.14000000"); - - assert_eq!(to_string(f, 7.5e-11, Minus, 0, false), 
"0.000000000075"); - assert_eq!(to_string(f, 7.5e-11, Minus, 3, false), "0.000000000075"); + assert_eq!(to_string(f, 3.14, Minus, 1, true), "3.14"); + assert_eq!(to_string(f, 3.14, MinusRaw, 2, true), "3.14"); + assert_eq!(to_string(f, 3.14, MinusPlus, 3, true), "+3.140"); + assert_eq!(to_string(f, 3.14, MinusPlusRaw, 4, true), "+3.1400"); + assert_eq!(to_string(f, -3.14, Minus, 8, true), "-3.14000000"); + assert_eq!(to_string(f, -3.14, MinusRaw, 8, true), "-3.14000000"); + assert_eq!(to_string(f, -3.14, MinusPlus, 8, true), "-3.14000000"); + assert_eq!(to_string(f, -3.14, MinusPlusRaw, 8, true), "-3.14000000"); + + assert_eq!(to_string(f, 7.5e-11, Minus, 0, false), "0.000000000075"); + assert_eq!(to_string(f, 7.5e-11, Minus, 3, false), "0.000000000075"); assert_eq!(to_string(f, 7.5e-11, Minus, 12, false), "0.000000000075"); assert_eq!(to_string(f, 7.5e-11, Minus, 13, false), "0.0000000000750"); - assert_eq!(to_string(f, 1.9971e20, Minus, 0, false), "199710000000000000000"); - assert_eq!(to_string(f, 1.9971e20, Minus, 1, false), "199710000000000000000.0"); - assert_eq!(to_string(f, 1.9971e20, Minus, 8, false), "199710000000000000000.00000000"); + assert_eq!( + to_string(f, 1.9971e20, Minus, 0, false), + "199710000000000000000" + ); + assert_eq!( + to_string(f, 1.9971e20, Minus, 1, false), + "199710000000000000000.0" + ); + assert_eq!( + to_string(f, 1.9971e20, Minus, 8, false), + "199710000000000000000.00000000" + ); - assert_eq!(to_string(f, f32::MAX, Minus, 0, false), format!("34028235{:0>31}", "")); - assert_eq!(to_string(f, f32::MAX, Minus, 1, false), format!("34028235{:0>31}.0", "")); - assert_eq!(to_string(f, f32::MAX, Minus, 8, false), format!("34028235{:0>31}.00000000", "")); + assert_eq!( + to_string(f, f32::MAX, Minus, 0, false), + format!("34028235{:0>31}", "") + ); + assert_eq!( + to_string(f, f32::MAX, Minus, 1, false), + format!("34028235{:0>31}.0", "") + ); + assert_eq!( + to_string(f, f32::MAX, Minus, 8, false), + format!("34028235{:0>31}.00000000", "") + ); let minf32 = ldexp_f32(1.0, -149); - assert_eq!(to_string(f, minf32, Minus, 0, false), format!("0.{:0>44}1", "")); - assert_eq!(to_string(f, minf32, Minus, 45, false), format!("0.{:0>44}1", "")); - assert_eq!(to_string(f, minf32, Minus, 46, false), format!("0.{:0>44}10", "")); + assert_eq!( + to_string(f, minf32, Minus, 0, false), + format!("0.{:0>44}1", "") + ); + assert_eq!( + to_string(f, minf32, Minus, 45, false), + format!("0.{:0>44}1", "") + ); + assert_eq!( + to_string(f, minf32, Minus, 46, false), + format!("0.{:0>44}10", "") + ); - assert_eq!(to_string(f, f64::MAX, Minus, 0, false), - format!("17976931348623157{:0>292}", "")); - assert_eq!(to_string(f, f64::MAX, Minus, 1, false), - format!("17976931348623157{:0>292}.0", "")); - assert_eq!(to_string(f, f64::MAX, Minus, 8, false), - format!("17976931348623157{:0>292}.00000000", "")); + assert_eq!( + to_string(f, f64::MAX, Minus, 0, false), + format!("17976931348623157{:0>292}", "") + ); + assert_eq!( + to_string(f, f64::MAX, Minus, 1, false), + format!("17976931348623157{:0>292}.0", "") + ); + assert_eq!( + to_string(f, f64::MAX, Minus, 8, false), + format!("17976931348623157{:0>292}.00000000", "") + ); let minf64 = ldexp_f64(1.0, -1074); - assert_eq!(to_string(f, minf64, Minus, 0, false), format!("0.{:0>323}5", "")); - assert_eq!(to_string(f, minf64, Minus, 324, false), format!("0.{:0>323}5", "")); - assert_eq!(to_string(f, minf64, Minus, 325, false), format!("0.{:0>323}50", "")); + assert_eq!( + to_string(f, minf64, Minus, 0, false), + format!("0.{:0>323}5", 
"") + ); + assert_eq!( + to_string(f, minf64, Minus, 324, false), + format!("0.{:0>323}5", "") + ); + assert_eq!( + to_string(f, minf64, Minus, 325, false), + format!("0.{:0>323}50", "") + ); // very large output - assert_eq!(to_string(f, 1.1, Minus, 80000, false), format!("1.1{:0>79999}", "")); + assert_eq!( + to_string(f, 1.1, Minus, 80000, false), + format!("1.1{:0>79999}", "") + ); } pub fn to_shortest_exp_str_test(mut f_: F) - where F: FnMut(&Decoded, &mut [u8]) -> (usize, i16) { +where + F: FnMut(&Decoded, &mut [u8]) -> (usize, i16), +{ use core::num::flt2dec::Sign::*; fn to_string(f: &mut F, v: T, sign: Sign, exp_bounds: (i16, i16), upper: bool) -> String - where T: DecodableFloat, F: FnMut(&Decoded, &mut [u8]) -> (usize, i16) { - to_string_with_parts(|buf, parts| to_shortest_exp_str(|d,b| f(d,b), v, sign, - exp_bounds, upper, buf, parts)) + where + T: DecodableFloat, + F: FnMut(&Decoded, &mut [u8]) -> (usize, i16), + { + to_string_with_parts(|buf, parts| { + to_shortest_exp_str(|d, b| f(d, b), v, sign, exp_bounds, upper, buf, parts) + }) } let f = &mut f_; - assert_eq!(to_string(f, 0.0, Minus, (-4, 16), false), "0"); - assert_eq!(to_string(f, 0.0, MinusRaw, (-4, 16), false), "0"); - assert_eq!(to_string(f, 0.0, MinusPlus, (-4, 16), false), "+0"); - assert_eq!(to_string(f, 0.0, MinusPlusRaw, (-4, 16), false), "+0"); - assert_eq!(to_string(f, -0.0, Minus, (-4, 16), false), "0"); - assert_eq!(to_string(f, -0.0, MinusRaw, (-4, 16), false), "-0"); - assert_eq!(to_string(f, -0.0, MinusPlus, (-4, 16), false), "+0"); + assert_eq!(to_string(f, 0.0, Minus, (-4, 16), false), "0"); + assert_eq!(to_string(f, 0.0, MinusRaw, (-4, 16), false), "0"); + assert_eq!(to_string(f, 0.0, MinusPlus, (-4, 16), false), "+0"); + assert_eq!(to_string(f, 0.0, MinusPlusRaw, (-4, 16), false), "+0"); + assert_eq!(to_string(f, -0.0, Minus, (-4, 16), false), "0"); + assert_eq!(to_string(f, -0.0, MinusRaw, (-4, 16), false), "-0"); + assert_eq!(to_string(f, -0.0, MinusPlus, (-4, 16), false), "+0"); assert_eq!(to_string(f, -0.0, MinusPlusRaw, (-4, 16), false), "-0"); - assert_eq!(to_string(f, 0.0, Minus, ( 0, 0), true), "0E0"); - assert_eq!(to_string(f, 0.0, MinusRaw, ( 0, 0), false), "0e0"); - assert_eq!(to_string(f, 0.0, MinusPlus, (-9, -5), true), "+0E0"); - assert_eq!(to_string(f, 0.0, MinusPlusRaw, ( 5, 9), false), "+0e0"); - assert_eq!(to_string(f, -0.0, Minus, ( 0, 0), true), "0E0"); - assert_eq!(to_string(f, -0.0, MinusRaw, ( 0, 0), false), "-0e0"); - assert_eq!(to_string(f, -0.0, MinusPlus, (-9, -5), true), "+0E0"); - assert_eq!(to_string(f, -0.0, MinusPlusRaw, ( 5, 9), false), "-0e0"); - - assert_eq!(to_string(f, 1.0/0.0, Minus, (-4, 16), false), "inf"); - assert_eq!(to_string(f, 1.0/0.0, MinusRaw, (-4, 16), true), "inf"); - assert_eq!(to_string(f, 1.0/0.0, MinusPlus, (-4, 16), false), "+inf"); - assert_eq!(to_string(f, 1.0/0.0, MinusPlusRaw, (-4, 16), true), "+inf"); - assert_eq!(to_string(f, 0.0/0.0, Minus, ( 0, 0), false), "NaN"); - assert_eq!(to_string(f, 0.0/0.0, MinusRaw, ( 0, 0), true), "NaN"); - assert_eq!(to_string(f, 0.0/0.0, MinusPlus, (-9, -5), false), "NaN"); - assert_eq!(to_string(f, 0.0/0.0, MinusPlusRaw, ( 5, 9), true), "NaN"); - assert_eq!(to_string(f, -1.0/0.0, Minus, ( 0, 0), false), "-inf"); - assert_eq!(to_string(f, -1.0/0.0, MinusRaw, ( 0, 0), true), "-inf"); - assert_eq!(to_string(f, -1.0/0.0, MinusPlus, (-9, -5), false), "-inf"); - assert_eq!(to_string(f, -1.0/0.0, MinusPlusRaw, ( 5, 9), true), "-inf"); - - assert_eq!(to_string(f, 3.14, Minus, (-4, 16), false), "3.14"); - 
assert_eq!(to_string(f, 3.14, MinusRaw, (-4, 16), false), "3.14"); - assert_eq!(to_string(f, 3.14, MinusPlus, (-4, 16), false), "+3.14"); - assert_eq!(to_string(f, 3.14, MinusPlusRaw, (-4, 16), false), "+3.14"); - assert_eq!(to_string(f, -3.14, Minus, (-4, 16), false), "-3.14"); - assert_eq!(to_string(f, -3.14, MinusRaw, (-4, 16), false), "-3.14"); - assert_eq!(to_string(f, -3.14, MinusPlus, (-4, 16), false), "-3.14"); + assert_eq!(to_string(f, 0.0, Minus, (0, 0), true), "0E0"); + assert_eq!(to_string(f, 0.0, MinusRaw, (0, 0), false), "0e0"); + assert_eq!(to_string(f, 0.0, MinusPlus, (-9, -5), true), "+0E0"); + assert_eq!(to_string(f, 0.0, MinusPlusRaw, (5, 9), false), "+0e0"); + assert_eq!(to_string(f, -0.0, Minus, (0, 0), true), "0E0"); + assert_eq!(to_string(f, -0.0, MinusRaw, (0, 0), false), "-0e0"); + assert_eq!(to_string(f, -0.0, MinusPlus, (-9, -5), true), "+0E0"); + assert_eq!(to_string(f, -0.0, MinusPlusRaw, (5, 9), false), "-0e0"); + + assert_eq!(to_string(f, 1.0 / 0.0, Minus, (-4, 16), false), "inf"); + assert_eq!(to_string(f, 1.0 / 0.0, MinusRaw, (-4, 16), true), "inf"); + assert_eq!(to_string(f, 1.0 / 0.0, MinusPlus, (-4, 16), false), "+inf"); + assert_eq!( + to_string(f, 1.0 / 0.0, MinusPlusRaw, (-4, 16), true), + "+inf" + ); + assert_eq!(to_string(f, 0.0 / 0.0, Minus, (0, 0), false), "NaN"); + assert_eq!(to_string(f, 0.0 / 0.0, MinusRaw, (0, 0), true), "NaN"); + assert_eq!(to_string(f, 0.0 / 0.0, MinusPlus, (-9, -5), false), "NaN"); + assert_eq!(to_string(f, 0.0 / 0.0, MinusPlusRaw, (5, 9), true), "NaN"); + assert_eq!(to_string(f, -1.0 / 0.0, Minus, (0, 0), false), "-inf"); + assert_eq!(to_string(f, -1.0 / 0.0, MinusRaw, (0, 0), true), "-inf"); + assert_eq!(to_string(f, -1.0 / 0.0, MinusPlus, (-9, -5), false), "-inf"); + assert_eq!(to_string(f, -1.0 / 0.0, MinusPlusRaw, (5, 9), true), "-inf"); + + assert_eq!(to_string(f, 3.14, Minus, (-4, 16), false), "3.14"); + assert_eq!(to_string(f, 3.14, MinusRaw, (-4, 16), false), "3.14"); + assert_eq!(to_string(f, 3.14, MinusPlus, (-4, 16), false), "+3.14"); + assert_eq!(to_string(f, 3.14, MinusPlusRaw, (-4, 16), false), "+3.14"); + assert_eq!(to_string(f, -3.14, Minus, (-4, 16), false), "-3.14"); + assert_eq!(to_string(f, -3.14, MinusRaw, (-4, 16), false), "-3.14"); + assert_eq!(to_string(f, -3.14, MinusPlus, (-4, 16), false), "-3.14"); assert_eq!(to_string(f, -3.14, MinusPlusRaw, (-4, 16), false), "-3.14"); - assert_eq!(to_string(f, 3.14, Minus, ( 0, 0), true), "3.14E0"); - assert_eq!(to_string(f, 3.14, MinusRaw, ( 0, 0), false), "3.14e0"); - assert_eq!(to_string(f, 3.14, MinusPlus, (-9, -5), true), "+3.14E0"); - assert_eq!(to_string(f, 3.14, MinusPlusRaw, ( 5, 9), false), "+3.14e0"); - assert_eq!(to_string(f, -3.14, Minus, ( 0, 0), true), "-3.14E0"); - assert_eq!(to_string(f, -3.14, MinusRaw, ( 0, 0), false), "-3.14e0"); - assert_eq!(to_string(f, -3.14, MinusPlus, (-9, -5), true), "-3.14E0"); - assert_eq!(to_string(f, -3.14, MinusPlusRaw, ( 5, 9), false), "-3.14e0"); - - assert_eq!(to_string(f, 0.1, Minus, (-4, 16), false), "0.1"); - assert_eq!(to_string(f, 0.1, MinusRaw, (-4, 16), false), "0.1"); - assert_eq!(to_string(f, 0.1, MinusPlus, (-4, 16), false), "+0.1"); - assert_eq!(to_string(f, 0.1, MinusPlusRaw, (-4, 16), false), "+0.1"); - assert_eq!(to_string(f, -0.1, Minus, (-4, 16), false), "-0.1"); - assert_eq!(to_string(f, -0.1, MinusRaw, (-4, 16), false), "-0.1"); - assert_eq!(to_string(f, -0.1, MinusPlus, (-4, 16), false), "-0.1"); + assert_eq!(to_string(f, 3.14, Minus, (0, 0), true), "3.14E0"); + assert_eq!(to_string(f, 3.14, 
MinusRaw, (0, 0), false), "3.14e0"); + assert_eq!(to_string(f, 3.14, MinusPlus, (-9, -5), true), "+3.14E0"); + assert_eq!(to_string(f, 3.14, MinusPlusRaw, (5, 9), false), "+3.14e0"); + assert_eq!(to_string(f, -3.14, Minus, (0, 0), true), "-3.14E0"); + assert_eq!(to_string(f, -3.14, MinusRaw, (0, 0), false), "-3.14e0"); + assert_eq!(to_string(f, -3.14, MinusPlus, (-9, -5), true), "-3.14E0"); + assert_eq!(to_string(f, -3.14, MinusPlusRaw, (5, 9), false), "-3.14e0"); + + assert_eq!(to_string(f, 0.1, Minus, (-4, 16), false), "0.1"); + assert_eq!(to_string(f, 0.1, MinusRaw, (-4, 16), false), "0.1"); + assert_eq!(to_string(f, 0.1, MinusPlus, (-4, 16), false), "+0.1"); + assert_eq!(to_string(f, 0.1, MinusPlusRaw, (-4, 16), false), "+0.1"); + assert_eq!(to_string(f, -0.1, Minus, (-4, 16), false), "-0.1"); + assert_eq!(to_string(f, -0.1, MinusRaw, (-4, 16), false), "-0.1"); + assert_eq!(to_string(f, -0.1, MinusPlus, (-4, 16), false), "-0.1"); assert_eq!(to_string(f, -0.1, MinusPlusRaw, (-4, 16), false), "-0.1"); - assert_eq!(to_string(f, 0.1, Minus, ( 0, 0), true), "1E-1"); - assert_eq!(to_string(f, 0.1, MinusRaw, ( 0, 0), false), "1e-1"); - assert_eq!(to_string(f, 0.1, MinusPlus, (-9, -5), true), "+1E-1"); - assert_eq!(to_string(f, 0.1, MinusPlusRaw, ( 5, 9), false), "+1e-1"); - assert_eq!(to_string(f, -0.1, Minus, ( 0, 0), true), "-1E-1"); - assert_eq!(to_string(f, -0.1, MinusRaw, ( 0, 0), false), "-1e-1"); - assert_eq!(to_string(f, -0.1, MinusPlus, (-9, -5), true), "-1E-1"); - assert_eq!(to_string(f, -0.1, MinusPlusRaw, ( 5, 9), false), "-1e-1"); - - assert_eq!(to_string(f, 7.5e-11, Minus, ( -4, 16), false), "7.5e-11"); - assert_eq!(to_string(f, 7.5e-11, Minus, (-11, 10), false), "0.000000000075"); + assert_eq!(to_string(f, 0.1, Minus, (0, 0), true), "1E-1"); + assert_eq!(to_string(f, 0.1, MinusRaw, (0, 0), false), "1e-1"); + assert_eq!(to_string(f, 0.1, MinusPlus, (-9, -5), true), "+1E-1"); + assert_eq!(to_string(f, 0.1, MinusPlusRaw, (5, 9), false), "+1e-1"); + assert_eq!(to_string(f, -0.1, Minus, (0, 0), true), "-1E-1"); + assert_eq!(to_string(f, -0.1, MinusRaw, (0, 0), false), "-1e-1"); + assert_eq!(to_string(f, -0.1, MinusPlus, (-9, -5), true), "-1E-1"); + assert_eq!(to_string(f, -0.1, MinusPlusRaw, (5, 9), false), "-1e-1"); + + assert_eq!(to_string(f, 7.5e-11, Minus, (-4, 16), false), "7.5e-11"); + assert_eq!( + to_string(f, 7.5e-11, Minus, (-11, 10), false), + "0.000000000075" + ); assert_eq!(to_string(f, 7.5e-11, Minus, (-10, 11), false), "7.5e-11"); - assert_eq!(to_string(f, 1.9971e20, Minus, ( -4, 16), false), "1.9971e20"); - assert_eq!(to_string(f, 1.9971e20, Minus, (-20, 21), false), "199710000000000000000"); - assert_eq!(to_string(f, 1.9971e20, Minus, (-21, 20), false), "1.9971e20"); + assert_eq!(to_string(f, 1.9971e20, Minus, (-4, 16), false), "1.9971e20"); + assert_eq!( + to_string(f, 1.9971e20, Minus, (-20, 21), false), + "199710000000000000000" + ); + assert_eq!( + to_string(f, 1.9971e20, Minus, (-21, 20), false), + "1.9971e20" + ); // the true value of 1.0e23f64 is less than 10^23, but that shouldn't matter here assert_eq!(to_string(f, 1.0e23, Minus, (22, 23), false), "1e23"); - assert_eq!(to_string(f, 1.0e23, Minus, (23, 24), false), "100000000000000000000000"); + assert_eq!( + to_string(f, 1.0e23, Minus, (23, 24), false), + "100000000000000000000000" + ); assert_eq!(to_string(f, 1.0e23, Minus, (24, 25), false), "1e23"); - assert_eq!(to_string(f, f32::MAX, Minus, ( -4, 16), false), "3.4028235e38"); - assert_eq!(to_string(f, f32::MAX, Minus, (-39, 38), false), "3.4028235e38"); 
- assert_eq!(to_string(f, f32::MAX, Minus, (-38, 39), false), format!("34028235{:0>31}", "")); + assert_eq!( + to_string(f, f32::MAX, Minus, (-4, 16), false), + "3.4028235e38" + ); + assert_eq!( + to_string(f, f32::MAX, Minus, (-39, 38), false), + "3.4028235e38" + ); + assert_eq!( + to_string(f, f32::MAX, Minus, (-38, 39), false), + format!("34028235{:0>31}", "") + ); let minf32 = ldexp_f32(1.0, -149); - assert_eq!(to_string(f, minf32, Minus, ( -4, 16), false), "1e-45"); + assert_eq!(to_string(f, minf32, Minus, (-4, 16), false), "1e-45"); assert_eq!(to_string(f, minf32, Minus, (-44, 45), false), "1e-45"); - assert_eq!(to_string(f, minf32, Minus, (-45, 44), false), format!("0.{:0>44}1", "")); + assert_eq!( + to_string(f, minf32, Minus, (-45, 44), false), + format!("0.{:0>44}1", "") + ); - assert_eq!(to_string(f, f64::MAX, Minus, ( -4, 16), false), - "1.7976931348623157e308"); - assert_eq!(to_string(f, f64::MAX, Minus, (-308, 309), false), - format!("17976931348623157{:0>292}", "")); - assert_eq!(to_string(f, f64::MAX, Minus, (-309, 308), false), - "1.7976931348623157e308"); + assert_eq!( + to_string(f, f64::MAX, Minus, (-4, 16), false), + "1.7976931348623157e308" + ); + assert_eq!( + to_string(f, f64::MAX, Minus, (-308, 309), false), + format!("17976931348623157{:0>292}", "") + ); + assert_eq!( + to_string(f, f64::MAX, Minus, (-309, 308), false), + "1.7976931348623157e308" + ); let minf64 = ldexp_f64(1.0, -1074); - assert_eq!(to_string(f, minf64, Minus, ( -4, 16), false), "5e-324"); - assert_eq!(to_string(f, minf64, Minus, (-324, 323), false), format!("0.{:0>323}5", "")); + assert_eq!(to_string(f, minf64, Minus, (-4, 16), false), "5e-324"); + assert_eq!( + to_string(f, minf64, Minus, (-324, 323), false), + format!("0.{:0>323}5", "") + ); assert_eq!(to_string(f, minf64, Minus, (-323, 324), false), "5e-324"); assert_eq!(to_string(f, 1.1, Minus, (i16::MIN, i16::MAX), false), "1.1"); } pub fn to_exact_exp_str_test(mut f_: F) - where F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16) { +where + F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16), +{ use core::num::flt2dec::Sign::*; fn to_string(f: &mut F, v: T, sign: Sign, ndigits: usize, upper: bool) -> String - where T: DecodableFloat, F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16) { - to_string_with_parts(|buf, parts| to_exact_exp_str(|d,b,l| f(d,b,l), v, sign, - ndigits, upper, buf, parts)) + where + T: DecodableFloat, + F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16), + { + to_string_with_parts(|buf, parts| { + to_exact_exp_str(|d, b, l| f(d, b, l), v, sign, ndigits, upper, buf, parts) + }) } let f = &mut f_; - assert_eq!(to_string(f, 0.0, Minus, 1, true), "0E0"); - assert_eq!(to_string(f, 0.0, MinusRaw, 1, false), "0e0"); - assert_eq!(to_string(f, 0.0, MinusPlus, 1, true), "+0E0"); - assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1, false), "+0e0"); - assert_eq!(to_string(f, -0.0, Minus, 1, true), "0E0"); - assert_eq!(to_string(f, -0.0, MinusRaw, 1, false), "-0e0"); - assert_eq!(to_string(f, -0.0, MinusPlus, 1, true), "+0E0"); + assert_eq!(to_string(f, 0.0, Minus, 1, true), "0E0"); + assert_eq!(to_string(f, 0.0, MinusRaw, 1, false), "0e0"); + assert_eq!(to_string(f, 0.0, MinusPlus, 1, true), "+0E0"); + assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1, false), "+0e0"); + assert_eq!(to_string(f, -0.0, Minus, 1, true), "0E0"); + assert_eq!(to_string(f, -0.0, MinusRaw, 1, false), "-0e0"); + assert_eq!(to_string(f, -0.0, MinusPlus, 1, true), "+0E0"); assert_eq!(to_string(f, -0.0, MinusPlusRaw, 1, false), "-0e0"); - assert_eq!(to_string(f, 0.0, 
Minus, 2, true), "0.0E0"); - assert_eq!(to_string(f, 0.0, MinusRaw, 2, false), "0.0e0"); - assert_eq!(to_string(f, 0.0, MinusPlus, 2, true), "+0.0E0"); - assert_eq!(to_string(f, 0.0, MinusPlusRaw, 2, false), "+0.0e0"); - assert_eq!(to_string(f, -0.0, Minus, 8, true), "0.0000000E0"); - assert_eq!(to_string(f, -0.0, MinusRaw, 8, false), "-0.0000000e0"); - assert_eq!(to_string(f, -0.0, MinusPlus, 8, true), "+0.0000000E0"); + assert_eq!(to_string(f, 0.0, Minus, 2, true), "0.0E0"); + assert_eq!(to_string(f, 0.0, MinusRaw, 2, false), "0.0e0"); + assert_eq!(to_string(f, 0.0, MinusPlus, 2, true), "+0.0E0"); + assert_eq!(to_string(f, 0.0, MinusPlusRaw, 2, false), "+0.0e0"); + assert_eq!(to_string(f, -0.0, Minus, 8, true), "0.0000000E0"); + assert_eq!(to_string(f, -0.0, MinusRaw, 8, false), "-0.0000000e0"); + assert_eq!(to_string(f, -0.0, MinusPlus, 8, true), "+0.0000000E0"); assert_eq!(to_string(f, -0.0, MinusPlusRaw, 8, false), "-0.0000000e0"); - assert_eq!(to_string(f, 1.0/0.0, Minus, 1, false), "inf"); - assert_eq!(to_string(f, 1.0/0.0, MinusRaw, 1, true), "inf"); - assert_eq!(to_string(f, 1.0/0.0, MinusPlus, 1, false), "+inf"); - assert_eq!(to_string(f, 1.0/0.0, MinusPlusRaw, 1, true), "+inf"); - assert_eq!(to_string(f, 0.0/0.0, Minus, 8, false), "NaN"); - assert_eq!(to_string(f, 0.0/0.0, MinusRaw, 8, true), "NaN"); - assert_eq!(to_string(f, 0.0/0.0, MinusPlus, 8, false), "NaN"); - assert_eq!(to_string(f, 0.0/0.0, MinusPlusRaw, 8, true), "NaN"); - assert_eq!(to_string(f, -1.0/0.0, Minus, 64, false), "-inf"); - assert_eq!(to_string(f, -1.0/0.0, MinusRaw, 64, true), "-inf"); - assert_eq!(to_string(f, -1.0/0.0, MinusPlus, 64, false), "-inf"); - assert_eq!(to_string(f, -1.0/0.0, MinusPlusRaw, 64, true), "-inf"); - - assert_eq!(to_string(f, 3.14, Minus, 1, true), "3E0"); - assert_eq!(to_string(f, 3.14, MinusRaw, 1, false), "3e0"); - assert_eq!(to_string(f, 3.14, MinusPlus, 1, true), "+3E0"); - assert_eq!(to_string(f, 3.14, MinusPlusRaw, 1, false), "+3e0"); - assert_eq!(to_string(f, -3.14, Minus, 2, true), "-3.1E0"); - assert_eq!(to_string(f, -3.14, MinusRaw, 2, false), "-3.1e0"); - assert_eq!(to_string(f, -3.14, MinusPlus, 2, true), "-3.1E0"); + assert_eq!(to_string(f, 1.0 / 0.0, Minus, 1, false), "inf"); + assert_eq!(to_string(f, 1.0 / 0.0, MinusRaw, 1, true), "inf"); + assert_eq!(to_string(f, 1.0 / 0.0, MinusPlus, 1, false), "+inf"); + assert_eq!(to_string(f, 1.0 / 0.0, MinusPlusRaw, 1, true), "+inf"); + assert_eq!(to_string(f, 0.0 / 0.0, Minus, 8, false), "NaN"); + assert_eq!(to_string(f, 0.0 / 0.0, MinusRaw, 8, true), "NaN"); + assert_eq!(to_string(f, 0.0 / 0.0, MinusPlus, 8, false), "NaN"); + assert_eq!(to_string(f, 0.0 / 0.0, MinusPlusRaw, 8, true), "NaN"); + assert_eq!(to_string(f, -1.0 / 0.0, Minus, 64, false), "-inf"); + assert_eq!(to_string(f, -1.0 / 0.0, MinusRaw, 64, true), "-inf"); + assert_eq!(to_string(f, -1.0 / 0.0, MinusPlus, 64, false), "-inf"); + assert_eq!(to_string(f, -1.0 / 0.0, MinusPlusRaw, 64, true), "-inf"); + + assert_eq!(to_string(f, 3.14, Minus, 1, true), "3E0"); + assert_eq!(to_string(f, 3.14, MinusRaw, 1, false), "3e0"); + assert_eq!(to_string(f, 3.14, MinusPlus, 1, true), "+3E0"); + assert_eq!(to_string(f, 3.14, MinusPlusRaw, 1, false), "+3e0"); + assert_eq!(to_string(f, -3.14, Minus, 2, true), "-3.1E0"); + assert_eq!(to_string(f, -3.14, MinusRaw, 2, false), "-3.1e0"); + assert_eq!(to_string(f, -3.14, MinusPlus, 2, true), "-3.1E0"); assert_eq!(to_string(f, -3.14, MinusPlusRaw, 2, false), "-3.1e0"); - assert_eq!(to_string(f, 3.14, Minus, 3, true), "3.14E0"); - 
assert_eq!(to_string(f, 3.14, MinusRaw, 3, false), "3.14e0"); - assert_eq!(to_string(f, 3.14, MinusPlus, 3, true), "+3.14E0"); - assert_eq!(to_string(f, 3.14, MinusPlusRaw, 3, false), "+3.14e0"); - assert_eq!(to_string(f, -3.14, Minus, 4, true), "-3.140E0"); - assert_eq!(to_string(f, -3.14, MinusRaw, 4, false), "-3.140e0"); - assert_eq!(to_string(f, -3.14, MinusPlus, 4, true), "-3.140E0"); + assert_eq!(to_string(f, 3.14, Minus, 3, true), "3.14E0"); + assert_eq!(to_string(f, 3.14, MinusRaw, 3, false), "3.14e0"); + assert_eq!(to_string(f, 3.14, MinusPlus, 3, true), "+3.14E0"); + assert_eq!(to_string(f, 3.14, MinusPlusRaw, 3, false), "+3.14e0"); + assert_eq!(to_string(f, -3.14, Minus, 4, true), "-3.140E0"); + assert_eq!(to_string(f, -3.14, MinusRaw, 4, false), "-3.140e0"); + assert_eq!(to_string(f, -3.14, MinusPlus, 4, true), "-3.140E0"); assert_eq!(to_string(f, -3.14, MinusPlusRaw, 4, false), "-3.140e0"); - assert_eq!(to_string(f, 0.195, Minus, 1, false), "2e-1"); - assert_eq!(to_string(f, 0.195, MinusRaw, 1, true), "2E-1"); - assert_eq!(to_string(f, 0.195, MinusPlus, 1, false), "+2e-1"); - assert_eq!(to_string(f, 0.195, MinusPlusRaw, 1, true), "+2E-1"); - assert_eq!(to_string(f, -0.195, Minus, 2, false), "-2.0e-1"); - assert_eq!(to_string(f, -0.195, MinusRaw, 2, true), "-2.0E-1"); - assert_eq!(to_string(f, -0.195, MinusPlus, 2, false), "-2.0e-1"); - assert_eq!(to_string(f, -0.195, MinusPlusRaw, 2, true), "-2.0E-1"); - assert_eq!(to_string(f, 0.195, Minus, 3, false), "1.95e-1"); - assert_eq!(to_string(f, 0.195, MinusRaw, 3, true), "1.95E-1"); - assert_eq!(to_string(f, 0.195, MinusPlus, 3, false), "+1.95e-1"); - assert_eq!(to_string(f, 0.195, MinusPlusRaw, 3, true), "+1.95E-1"); - assert_eq!(to_string(f, -0.195, Minus, 4, false), "-1.950e-1"); - assert_eq!(to_string(f, -0.195, MinusRaw, 4, true), "-1.950E-1"); - assert_eq!(to_string(f, -0.195, MinusPlus, 4, false), "-1.950e-1"); - assert_eq!(to_string(f, -0.195, MinusPlusRaw, 4, true), "-1.950E-1"); - - assert_eq!(to_string(f, 9.5, Minus, 1, false), "1e1"); - assert_eq!(to_string(f, 9.5, Minus, 2, false), "9.5e0"); - assert_eq!(to_string(f, 9.5, Minus, 3, false), "9.50e0"); - assert_eq!(to_string(f, 9.5, Minus, 30, false), "9.50000000000000000000000000000e0"); - - assert_eq!(to_string(f, 1.0e25, Minus, 1, false), "1e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 2, false), "1.0e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 15, false), "1.00000000000000e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 16, false), "1.000000000000000e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 17, false), "1.0000000000000001e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 18, false), "1.00000000000000009e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 19, false), "1.000000000000000091e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 20, false), "1.0000000000000000906e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 21, false), "1.00000000000000009060e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 22, false), "1.000000000000000090597e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 23, false), "1.0000000000000000905970e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 24, false), "1.00000000000000009059697e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 25, false), "1.000000000000000090596966e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 26, false), "1.0000000000000000905969664e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 27, false), "1.00000000000000009059696640e25"); - assert_eq!(to_string(f, 1.0e25, Minus, 30, false), "1.00000000000000009059696640000e25"); - - 
assert_eq!(to_string(f, 1.0e-6, Minus, 1, false), "1e-6"); - assert_eq!(to_string(f, 1.0e-6, Minus, 2, false), "1.0e-6"); - assert_eq!(to_string(f, 1.0e-6, Minus, 16, false), "1.000000000000000e-6"); - assert_eq!(to_string(f, 1.0e-6, Minus, 17, false), "9.9999999999999995e-7"); - assert_eq!(to_string(f, 1.0e-6, Minus, 18, false), "9.99999999999999955e-7"); - assert_eq!(to_string(f, 1.0e-6, Minus, 19, false), "9.999999999999999547e-7"); - assert_eq!(to_string(f, 1.0e-6, Minus, 20, false), "9.9999999999999995475e-7"); - assert_eq!(to_string(f, 1.0e-6, Minus, 30, false), "9.99999999999999954748111825886e-7"); - assert_eq!(to_string(f, 1.0e-6, Minus, 40, false), - "9.999999999999999547481118258862586856139e-7"); - assert_eq!(to_string(f, 1.0e-6, Minus, 50, false), - "9.9999999999999995474811182588625868561393872369081e-7"); - assert_eq!(to_string(f, 1.0e-6, Minus, 60, false), - "9.99999999999999954748111825886258685613938723690807819366455e-7"); - assert_eq!(to_string(f, 1.0e-6, Minus, 70, false), - "9.999999999999999547481118258862586856139387236908078193664550781250000e-7"); - - assert_eq!(to_string(f, f32::MAX, Minus, 1, false), "3e38"); - assert_eq!(to_string(f, f32::MAX, Minus, 2, false), "3.4e38"); - assert_eq!(to_string(f, f32::MAX, Minus, 4, false), "3.403e38"); - assert_eq!(to_string(f, f32::MAX, Minus, 8, false), "3.4028235e38"); - assert_eq!(to_string(f, f32::MAX, Minus, 16, false), "3.402823466385289e38"); - assert_eq!(to_string(f, f32::MAX, Minus, 32, false), "3.4028234663852885981170418348452e38"); - assert_eq!(to_string(f, f32::MAX, Minus, 64, false), - "3.402823466385288598117041834845169254400000000000000000000000000e38"); + assert_eq!(to_string(f, 0.195, Minus, 1, false), "2e-1"); + assert_eq!(to_string(f, 0.195, MinusRaw, 1, true), "2E-1"); + assert_eq!(to_string(f, 0.195, MinusPlus, 1, false), "+2e-1"); + assert_eq!(to_string(f, 0.195, MinusPlusRaw, 1, true), "+2E-1"); + assert_eq!(to_string(f, -0.195, Minus, 2, false), "-2.0e-1"); + assert_eq!(to_string(f, -0.195, MinusRaw, 2, true), "-2.0E-1"); + assert_eq!(to_string(f, -0.195, MinusPlus, 2, false), "-2.0e-1"); + assert_eq!(to_string(f, -0.195, MinusPlusRaw, 2, true), "-2.0E-1"); + assert_eq!(to_string(f, 0.195, Minus, 3, false), "1.95e-1"); + assert_eq!(to_string(f, 0.195, MinusRaw, 3, true), "1.95E-1"); + assert_eq!(to_string(f, 0.195, MinusPlus, 3, false), "+1.95e-1"); + assert_eq!(to_string(f, 0.195, MinusPlusRaw, 3, true), "+1.95E-1"); + assert_eq!(to_string(f, -0.195, Minus, 4, false), "-1.950e-1"); + assert_eq!(to_string(f, -0.195, MinusRaw, 4, true), "-1.950E-1"); + assert_eq!(to_string(f, -0.195, MinusPlus, 4, false), "-1.950e-1"); + assert_eq!(to_string(f, -0.195, MinusPlusRaw, 4, true), "-1.950E-1"); + + assert_eq!(to_string(f, 9.5, Minus, 1, false), "1e1"); + assert_eq!(to_string(f, 9.5, Minus, 2, false), "9.5e0"); + assert_eq!(to_string(f, 9.5, Minus, 3, false), "9.50e0"); + assert_eq!( + to_string(f, 9.5, Minus, 30, false), + "9.50000000000000000000000000000e0" + ); + + assert_eq!(to_string(f, 1.0e25, Minus, 1, false), "1e25"); + assert_eq!(to_string(f, 1.0e25, Minus, 2, false), "1.0e25"); + assert_eq!( + to_string(f, 1.0e25, Minus, 15, false), + "1.00000000000000e25" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 16, false), + "1.000000000000000e25" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 17, false), + "1.0000000000000001e25" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 18, false), + "1.00000000000000009e25" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 19, false), + 
"1.000000000000000091e25" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 20, false), + "1.0000000000000000906e25" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 21, false), + "1.00000000000000009060e25" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 22, false), + "1.000000000000000090597e25" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 23, false), + "1.0000000000000000905970e25" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 24, false), + "1.00000000000000009059697e25" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 25, false), + "1.000000000000000090596966e25" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 26, false), + "1.0000000000000000905969664e25" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 27, false), + "1.00000000000000009059696640e25" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 30, false), + "1.00000000000000009059696640000e25" + ); + + assert_eq!(to_string(f, 1.0e-6, Minus, 1, false), "1e-6"); + assert_eq!(to_string(f, 1.0e-6, Minus, 2, false), "1.0e-6"); + assert_eq!( + to_string(f, 1.0e-6, Minus, 16, false), + "1.000000000000000e-6" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 17, false), + "9.9999999999999995e-7" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 18, false), + "9.99999999999999955e-7" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 19, false), + "9.999999999999999547e-7" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 20, false), + "9.9999999999999995475e-7" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 30, false), + "9.99999999999999954748111825886e-7" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 40, false), + "9.999999999999999547481118258862586856139e-7" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 50, false), + "9.9999999999999995474811182588625868561393872369081e-7" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 60, false), + "9.99999999999999954748111825886258685613938723690807819366455e-7" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 70, false), + "9.999999999999999547481118258862586856139387236908078193664550781250000e-7" + ); + + assert_eq!(to_string(f, f32::MAX, Minus, 1, false), "3e38"); + assert_eq!(to_string(f, f32::MAX, Minus, 2, false), "3.4e38"); + assert_eq!(to_string(f, f32::MAX, Minus, 4, false), "3.403e38"); + assert_eq!(to_string(f, f32::MAX, Minus, 8, false), "3.4028235e38"); + assert_eq!( + to_string(f, f32::MAX, Minus, 16, false), + "3.402823466385289e38" + ); + assert_eq!( + to_string(f, f32::MAX, Minus, 32, false), + "3.4028234663852885981170418348452e38" + ); + assert_eq!( + to_string(f, f32::MAX, Minus, 64, false), + "3.402823466385288598117041834845169254400000000000000000000000000e38" + ); let minf32 = ldexp_f32(1.0, -149); - assert_eq!(to_string(f, minf32, Minus, 1, false), "1e-45"); - assert_eq!(to_string(f, minf32, Minus, 2, false), "1.4e-45"); - assert_eq!(to_string(f, minf32, Minus, 4, false), "1.401e-45"); - assert_eq!(to_string(f, minf32, Minus, 8, false), "1.4012985e-45"); - assert_eq!(to_string(f, minf32, Minus, 16, false), "1.401298464324817e-45"); - assert_eq!(to_string(f, minf32, Minus, 32, false), "1.4012984643248170709237295832899e-45"); - assert_eq!(to_string(f, minf32, Minus, 64, false), - "1.401298464324817070923729583289916131280261941876515771757068284e-45"); - assert_eq!(to_string(f, minf32, Minus, 128, false), - "1.401298464324817070923729583289916131280261941876515771757068283\ - 8897910826858606014866381883621215820312500000000000000000000000e-45"); - - assert_eq!(to_string(f, f64::MAX, Minus, 1, false), "2e308"); - assert_eq!(to_string(f, f64::MAX, Minus, 2, 
false), "1.8e308"); - assert_eq!(to_string(f, f64::MAX, Minus, 4, false), "1.798e308"); - assert_eq!(to_string(f, f64::MAX, Minus, 8, false), "1.7976931e308"); - assert_eq!(to_string(f, f64::MAX, Minus, 16, false), "1.797693134862316e308"); - assert_eq!(to_string(f, f64::MAX, Minus, 32, false), "1.7976931348623157081452742373170e308"); - assert_eq!(to_string(f, f64::MAX, Minus, 64, false), - "1.797693134862315708145274237317043567980705675258449965989174768e308"); - assert_eq!(to_string(f, f64::MAX, Minus, 128, false), - "1.797693134862315708145274237317043567980705675258449965989174768\ - 0315726078002853876058955863276687817154045895351438246423432133e308"); - assert_eq!(to_string(f, f64::MAX, Minus, 256, false), - "1.797693134862315708145274237317043567980705675258449965989174768\ - 0315726078002853876058955863276687817154045895351438246423432132\ - 6889464182768467546703537516986049910576551282076245490090389328\ - 9440758685084551339423045832369032229481658085593321233482747978e308"); - assert_eq!(to_string(f, f64::MAX, Minus, 512, false), - "1.797693134862315708145274237317043567980705675258449965989174768\ - 0315726078002853876058955863276687817154045895351438246423432132\ - 6889464182768467546703537516986049910576551282076245490090389328\ - 9440758685084551339423045832369032229481658085593321233482747978\ - 2620414472316873817718091929988125040402618412485836800000000000\ - 0000000000000000000000000000000000000000000000000000000000000000\ - 0000000000000000000000000000000000000000000000000000000000000000\ - 0000000000000000000000000000000000000000000000000000000000000000e308"); + assert_eq!(to_string(f, minf32, Minus, 1, false), "1e-45"); + assert_eq!(to_string(f, minf32, Minus, 2, false), "1.4e-45"); + assert_eq!(to_string(f, minf32, Minus, 4, false), "1.401e-45"); + assert_eq!(to_string(f, minf32, Minus, 8, false), "1.4012985e-45"); + assert_eq!( + to_string(f, minf32, Minus, 16, false), + "1.401298464324817e-45" + ); + assert_eq!( + to_string(f, minf32, Minus, 32, false), + "1.4012984643248170709237295832899e-45" + ); + assert_eq!( + to_string(f, minf32, Minus, 64, false), + "1.401298464324817070923729583289916131280261941876515771757068284e-45" + ); + assert_eq!( + to_string(f, minf32, Minus, 128, false), + "1.401298464324817070923729583289916131280261941876515771757068283\ + 8897910826858606014866381883621215820312500000000000000000000000e-45" + ); + + assert_eq!(to_string(f, f64::MAX, Minus, 1, false), "2e308"); + assert_eq!(to_string(f, f64::MAX, Minus, 2, false), "1.8e308"); + assert_eq!(to_string(f, f64::MAX, Minus, 4, false), "1.798e308"); + assert_eq!(to_string(f, f64::MAX, Minus, 8, false), "1.7976931e308"); + assert_eq!( + to_string(f, f64::MAX, Minus, 16, false), + "1.797693134862316e308" + ); + assert_eq!( + to_string(f, f64::MAX, Minus, 32, false), + "1.7976931348623157081452742373170e308" + ); + assert_eq!( + to_string(f, f64::MAX, Minus, 64, false), + "1.797693134862315708145274237317043567980705675258449965989174768e308" + ); + assert_eq!( + to_string(f, f64::MAX, Minus, 128, false), + "1.797693134862315708145274237317043567980705675258449965989174768\ + 0315726078002853876058955863276687817154045895351438246423432133e308" + ); + assert_eq!( + to_string(f, f64::MAX, Minus, 256, false), + "1.797693134862315708145274237317043567980705675258449965989174768\ + 0315726078002853876058955863276687817154045895351438246423432132\ + 6889464182768467546703537516986049910576551282076245490090389328\ + 9440758685084551339423045832369032229481658085593321233482747978e308" + ); 
+ assert_eq!( + to_string(f, f64::MAX, Minus, 512, false), + "1.797693134862315708145274237317043567980705675258449965989174768\ + 0315726078002853876058955863276687817154045895351438246423432132\ + 6889464182768467546703537516986049910576551282076245490090389328\ + 9440758685084551339423045832369032229481658085593321233482747978\ + 2620414472316873817718091929988125040402618412485836800000000000\ + 0000000000000000000000000000000000000000000000000000000000000000\ + 0000000000000000000000000000000000000000000000000000000000000000\ + 0000000000000000000000000000000000000000000000000000000000000000e308" + ); // okay, this is becoming tough. fortunately for us, this is almost the worst case. let minf64 = ldexp_f64(1.0, -1074); - assert_eq!(to_string(f, minf64, Minus, 1, false), "5e-324"); - assert_eq!(to_string(f, minf64, Minus, 2, false), "4.9e-324"); - assert_eq!(to_string(f, minf64, Minus, 4, false), "4.941e-324"); - assert_eq!(to_string(f, minf64, Minus, 8, false), "4.9406565e-324"); - assert_eq!(to_string(f, minf64, Minus, 16, false), "4.940656458412465e-324"); - assert_eq!(to_string(f, minf64, Minus, 32, false), "4.9406564584124654417656879286822e-324"); - assert_eq!(to_string(f, minf64, Minus, 64, false), - "4.940656458412465441765687928682213723650598026143247644255856825e-324"); - assert_eq!(to_string(f, minf64, Minus, 128, false), - "4.940656458412465441765687928682213723650598026143247644255856825\ - 0067550727020875186529983636163599237979656469544571773092665671e-324"); - assert_eq!(to_string(f, minf64, Minus, 256, false), - "4.940656458412465441765687928682213723650598026143247644255856825\ - 0067550727020875186529983636163599237979656469544571773092665671\ - 0355939796398774796010781878126300713190311404527845817167848982\ - 1036887186360569987307230500063874091535649843873124733972731696e-324"); - assert_eq!(to_string(f, minf64, Minus, 512, false), - "4.940656458412465441765687928682213723650598026143247644255856825\ - 0067550727020875186529983636163599237979656469544571773092665671\ - 0355939796398774796010781878126300713190311404527845817167848982\ - 1036887186360569987307230500063874091535649843873124733972731696\ - 1514003171538539807412623856559117102665855668676818703956031062\ - 4931945271591492455329305456544401127480129709999541931989409080\ - 4165633245247571478690147267801593552386115501348035264934720193\ - 7902681071074917033322268447533357208324319360923828934583680601e-324"); - assert_eq!(to_string(f, minf64, Minus, 1024, false), - "4.940656458412465441765687928682213723650598026143247644255856825\ - 0067550727020875186529983636163599237979656469544571773092665671\ - 0355939796398774796010781878126300713190311404527845817167848982\ - 1036887186360569987307230500063874091535649843873124733972731696\ - 1514003171538539807412623856559117102665855668676818703956031062\ - 4931945271591492455329305456544401127480129709999541931989409080\ - 4165633245247571478690147267801593552386115501348035264934720193\ - 7902681071074917033322268447533357208324319360923828934583680601\ - 0601150616980975307834227731832924790498252473077637592724787465\ - 6084778203734469699533647017972677717585125660551199131504891101\ - 4510378627381672509558373897335989936648099411642057026370902792\ - 4276754456522908753868250641971826553344726562500000000000000000\ - 0000000000000000000000000000000000000000000000000000000000000000\ - 0000000000000000000000000000000000000000000000000000000000000000\ - 0000000000000000000000000000000000000000000000000000000000000000\ - 
0000000000000000000000000000000000000000000000000000000000000000e-324"); + assert_eq!(to_string(f, minf64, Minus, 1, false), "5e-324"); + assert_eq!(to_string(f, minf64, Minus, 2, false), "4.9e-324"); + assert_eq!(to_string(f, minf64, Minus, 4, false), "4.941e-324"); + assert_eq!(to_string(f, minf64, Minus, 8, false), "4.9406565e-324"); + assert_eq!( + to_string(f, minf64, Minus, 16, false), + "4.940656458412465e-324" + ); + assert_eq!( + to_string(f, minf64, Minus, 32, false), + "4.9406564584124654417656879286822e-324" + ); + assert_eq!( + to_string(f, minf64, Minus, 64, false), + "4.940656458412465441765687928682213723650598026143247644255856825e-324" + ); + assert_eq!( + to_string(f, minf64, Minus, 128, false), + "4.940656458412465441765687928682213723650598026143247644255856825\ + 0067550727020875186529983636163599237979656469544571773092665671e-324" + ); + assert_eq!( + to_string(f, minf64, Minus, 256, false), + "4.940656458412465441765687928682213723650598026143247644255856825\ + 0067550727020875186529983636163599237979656469544571773092665671\ + 0355939796398774796010781878126300713190311404527845817167848982\ + 1036887186360569987307230500063874091535649843873124733972731696e-324" + ); + assert_eq!( + to_string(f, minf64, Minus, 512, false), + "4.940656458412465441765687928682213723650598026143247644255856825\ + 0067550727020875186529983636163599237979656469544571773092665671\ + 0355939796398774796010781878126300713190311404527845817167848982\ + 1036887186360569987307230500063874091535649843873124733972731696\ + 1514003171538539807412623856559117102665855668676818703956031062\ + 4931945271591492455329305456544401127480129709999541931989409080\ + 4165633245247571478690147267801593552386115501348035264934720193\ + 7902681071074917033322268447533357208324319360923828934583680601e-324" + ); + assert_eq!( + to_string(f, minf64, Minus, 1024, false), + "4.940656458412465441765687928682213723650598026143247644255856825\ + 0067550727020875186529983636163599237979656469544571773092665671\ + 0355939796398774796010781878126300713190311404527845817167848982\ + 1036887186360569987307230500063874091535649843873124733972731696\ + 1514003171538539807412623856559117102665855668676818703956031062\ + 4931945271591492455329305456544401127480129709999541931989409080\ + 4165633245247571478690147267801593552386115501348035264934720193\ + 7902681071074917033322268447533357208324319360923828934583680601\ + 0601150616980975307834227731832924790498252473077637592724787465\ + 6084778203734469699533647017972677717585125660551199131504891101\ + 4510378627381672509558373897335989936648099411642057026370902792\ + 4276754456522908753868250641971826553344726562500000000000000000\ + 0000000000000000000000000000000000000000000000000000000000000000\ + 0000000000000000000000000000000000000000000000000000000000000000\ + 0000000000000000000000000000000000000000000000000000000000000000\ + 0000000000000000000000000000000000000000000000000000000000000000e-324" + ); // very large output - assert_eq!(to_string(f, 0.0, Minus, 80000, false), format!("0.{:0>79999}e0", "")); - assert_eq!(to_string(f, 1.0e1, Minus, 80000, false), format!("1.{:0>79999}e1", "")); - assert_eq!(to_string(f, 1.0e0, Minus, 80000, false), format!("1.{:0>79999}e0", "")); - assert_eq!(to_string(f, 1.0e-1, Minus, 80000, false), - format!("1.000000000000000055511151231257827021181583404541015625{:0>79945}\ - e-1", "")); - assert_eq!(to_string(f, 1.0e-20, Minus, 80000, false), - format!("9.999999999999999451532714542095716517295037027873924471077157760\ - 
66783064379706047475337982177734375{:0>79901}e-21", "")); + assert_eq!( + to_string(f, 0.0, Minus, 80000, false), + format!("0.{:0>79999}e0", "") + ); + assert_eq!( + to_string(f, 1.0e1, Minus, 80000, false), + format!("1.{:0>79999}e1", "") + ); + assert_eq!( + to_string(f, 1.0e0, Minus, 80000, false), + format!("1.{:0>79999}e0", "") + ); + assert_eq!( + to_string(f, 1.0e-1, Minus, 80000, false), + format!( + "1.000000000000000055511151231257827021181583404541015625{:0>79945}\ + e-1", + "" + ) + ); + assert_eq!( + to_string(f, 1.0e-20, Minus, 80000, false), + format!( + "9.999999999999999451532714542095716517295037027873924471077157760\ + 66783064379706047475337982177734375{:0>79901}e-21", + "" + ) + ); } pub fn to_exact_fixed_str_test(mut f_: F) - where F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16) { +where + F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16), +{ use core::num::flt2dec::Sign::*; fn to_string(f: &mut F, v: T, sign: Sign, frac_digits: usize, upper: bool) -> String - where T: DecodableFloat, F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16) { - to_string_with_parts(|buf, parts| to_exact_fixed_str(|d,b,l| f(d,b,l), v, sign, - frac_digits, upper, buf, parts)) + where + T: DecodableFloat, + F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16), + { + to_string_with_parts(|buf, parts| { + to_exact_fixed_str( + |d, b, l| f(d, b, l), + v, + sign, + frac_digits, + upper, + buf, + parts, + ) + }) } let f = &mut f_; - assert_eq!(to_string(f, 0.0, Minus, 0, false), "0"); - assert_eq!(to_string(f, 0.0, MinusRaw, 0, false), "0"); - assert_eq!(to_string(f, 0.0, MinusPlus, 0, false), "+0"); - assert_eq!(to_string(f, 0.0, MinusPlusRaw, 0, false), "+0"); - assert_eq!(to_string(f, -0.0, Minus, 0, false), "0"); - assert_eq!(to_string(f, -0.0, MinusRaw, 0, false), "-0"); - assert_eq!(to_string(f, -0.0, MinusPlus, 0, false), "+0"); + assert_eq!(to_string(f, 0.0, Minus, 0, false), "0"); + assert_eq!(to_string(f, 0.0, MinusRaw, 0, false), "0"); + assert_eq!(to_string(f, 0.0, MinusPlus, 0, false), "+0"); + assert_eq!(to_string(f, 0.0, MinusPlusRaw, 0, false), "+0"); + assert_eq!(to_string(f, -0.0, Minus, 0, false), "0"); + assert_eq!(to_string(f, -0.0, MinusRaw, 0, false), "-0"); + assert_eq!(to_string(f, -0.0, MinusPlus, 0, false), "+0"); assert_eq!(to_string(f, -0.0, MinusPlusRaw, 0, false), "-0"); - assert_eq!(to_string(f, 0.0, Minus, 1, true), "0.0"); - assert_eq!(to_string(f, 0.0, MinusRaw, 1, true), "0.0"); - assert_eq!(to_string(f, 0.0, MinusPlus, 1, true), "+0.0"); - assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1, true), "+0.0"); - assert_eq!(to_string(f, -0.0, Minus, 8, true), "0.00000000"); - assert_eq!(to_string(f, -0.0, MinusRaw, 8, true), "-0.00000000"); - assert_eq!(to_string(f, -0.0, MinusPlus, 8, true), "+0.00000000"); - assert_eq!(to_string(f, -0.0, MinusPlusRaw, 8, true), "-0.00000000"); - - assert_eq!(to_string(f, 1.0/0.0, Minus, 0, false), "inf"); - assert_eq!(to_string(f, 1.0/0.0, MinusRaw, 1, true), "inf"); - assert_eq!(to_string(f, 1.0/0.0, MinusPlus, 8, false), "+inf"); - assert_eq!(to_string(f, 1.0/0.0, MinusPlusRaw, 64, true), "+inf"); - assert_eq!(to_string(f, 0.0/0.0, Minus, 0, false), "NaN"); - assert_eq!(to_string(f, 0.0/0.0, MinusRaw, 1, true), "NaN"); - assert_eq!(to_string(f, 0.0/0.0, MinusPlus, 8, false), "NaN"); - assert_eq!(to_string(f, 0.0/0.0, MinusPlusRaw, 64, true), "NaN"); - assert_eq!(to_string(f, -1.0/0.0, Minus, 0, false), "-inf"); - assert_eq!(to_string(f, -1.0/0.0, MinusRaw, 1, true), "-inf"); - assert_eq!(to_string(f, -1.0/0.0, MinusPlus, 8, false), 
"-inf"); - assert_eq!(to_string(f, -1.0/0.0, MinusPlusRaw, 64, true), "-inf"); - - assert_eq!(to_string(f, 3.14, Minus, 0, false), "3"); - assert_eq!(to_string(f, 3.14, MinusRaw, 0, false), "3"); - assert_eq!(to_string(f, 3.14, MinusPlus, 0, false), "+3"); - assert_eq!(to_string(f, 3.14, MinusPlusRaw, 0, false), "+3"); - assert_eq!(to_string(f, -3.14, Minus, 0, false), "-3"); - assert_eq!(to_string(f, -3.14, MinusRaw, 0, false), "-3"); - assert_eq!(to_string(f, -3.14, MinusPlus, 0, false), "-3"); + assert_eq!(to_string(f, 0.0, Minus, 1, true), "0.0"); + assert_eq!(to_string(f, 0.0, MinusRaw, 1, true), "0.0"); + assert_eq!(to_string(f, 0.0, MinusPlus, 1, true), "+0.0"); + assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1, true), "+0.0"); + assert_eq!(to_string(f, -0.0, Minus, 8, true), "0.00000000"); + assert_eq!(to_string(f, -0.0, MinusRaw, 8, true), "-0.00000000"); + assert_eq!(to_string(f, -0.0, MinusPlus, 8, true), "+0.00000000"); + assert_eq!(to_string(f, -0.0, MinusPlusRaw, 8, true), "-0.00000000"); + + assert_eq!(to_string(f, 1.0 / 0.0, Minus, 0, false), "inf"); + assert_eq!(to_string(f, 1.0 / 0.0, MinusRaw, 1, true), "inf"); + assert_eq!(to_string(f, 1.0 / 0.0, MinusPlus, 8, false), "+inf"); + assert_eq!(to_string(f, 1.0 / 0.0, MinusPlusRaw, 64, true), "+inf"); + assert_eq!(to_string(f, 0.0 / 0.0, Minus, 0, false), "NaN"); + assert_eq!(to_string(f, 0.0 / 0.0, MinusRaw, 1, true), "NaN"); + assert_eq!(to_string(f, 0.0 / 0.0, MinusPlus, 8, false), "NaN"); + assert_eq!(to_string(f, 0.0 / 0.0, MinusPlusRaw, 64, true), "NaN"); + assert_eq!(to_string(f, -1.0 / 0.0, Minus, 0, false), "-inf"); + assert_eq!(to_string(f, -1.0 / 0.0, MinusRaw, 1, true), "-inf"); + assert_eq!(to_string(f, -1.0 / 0.0, MinusPlus, 8, false), "-inf"); + assert_eq!(to_string(f, -1.0 / 0.0, MinusPlusRaw, 64, true), "-inf"); + + assert_eq!(to_string(f, 3.14, Minus, 0, false), "3"); + assert_eq!(to_string(f, 3.14, MinusRaw, 0, false), "3"); + assert_eq!(to_string(f, 3.14, MinusPlus, 0, false), "+3"); + assert_eq!(to_string(f, 3.14, MinusPlusRaw, 0, false), "+3"); + assert_eq!(to_string(f, -3.14, Minus, 0, false), "-3"); + assert_eq!(to_string(f, -3.14, MinusRaw, 0, false), "-3"); + assert_eq!(to_string(f, -3.14, MinusPlus, 0, false), "-3"); assert_eq!(to_string(f, -3.14, MinusPlusRaw, 0, false), "-3"); - assert_eq!(to_string(f, 3.14, Minus, 1, true), "3.1"); - assert_eq!(to_string(f, 3.14, MinusRaw, 2, true), "3.14"); - assert_eq!(to_string(f, 3.14, MinusPlus, 3, true), "+3.140"); - assert_eq!(to_string(f, 3.14, MinusPlusRaw, 4, true), "+3.1400"); - assert_eq!(to_string(f, -3.14, Minus, 8, true), "-3.14000000"); - assert_eq!(to_string(f, -3.14, MinusRaw, 8, true), "-3.14000000"); - assert_eq!(to_string(f, -3.14, MinusPlus, 8, true), "-3.14000000"); - assert_eq!(to_string(f, -3.14, MinusPlusRaw, 8, true), "-3.14000000"); - - assert_eq!(to_string(f, 0.195, Minus, 0, false), "0"); - assert_eq!(to_string(f, 0.195, MinusRaw, 0, false), "0"); - assert_eq!(to_string(f, 0.195, MinusPlus, 0, false), "+0"); - assert_eq!(to_string(f, 0.195, MinusPlusRaw, 0, false), "+0"); - assert_eq!(to_string(f, -0.195, Minus, 0, false), "-0"); - assert_eq!(to_string(f, -0.195, MinusRaw, 0, false), "-0"); - assert_eq!(to_string(f, -0.195, MinusPlus, 0, false), "-0"); + assert_eq!(to_string(f, 3.14, Minus, 1, true), "3.1"); + assert_eq!(to_string(f, 3.14, MinusRaw, 2, true), "3.14"); + assert_eq!(to_string(f, 3.14, MinusPlus, 3, true), "+3.140"); + assert_eq!(to_string(f, 3.14, MinusPlusRaw, 4, true), "+3.1400"); + assert_eq!(to_string(f, -3.14, 
Minus, 8, true), "-3.14000000"); + assert_eq!(to_string(f, -3.14, MinusRaw, 8, true), "-3.14000000"); + assert_eq!(to_string(f, -3.14, MinusPlus, 8, true), "-3.14000000"); + assert_eq!(to_string(f, -3.14, MinusPlusRaw, 8, true), "-3.14000000"); + + assert_eq!(to_string(f, 0.195, Minus, 0, false), "0"); + assert_eq!(to_string(f, 0.195, MinusRaw, 0, false), "0"); + assert_eq!(to_string(f, 0.195, MinusPlus, 0, false), "+0"); + assert_eq!(to_string(f, 0.195, MinusPlusRaw, 0, false), "+0"); + assert_eq!(to_string(f, -0.195, Minus, 0, false), "-0"); + assert_eq!(to_string(f, -0.195, MinusRaw, 0, false), "-0"); + assert_eq!(to_string(f, -0.195, MinusPlus, 0, false), "-0"); assert_eq!(to_string(f, -0.195, MinusPlusRaw, 0, false), "-0"); - assert_eq!(to_string(f, 0.195, Minus, 1, true), "0.2"); - assert_eq!(to_string(f, 0.195, MinusRaw, 2, true), "0.20"); - assert_eq!(to_string(f, 0.195, MinusPlus, 3, true), "+0.195"); - assert_eq!(to_string(f, 0.195, MinusPlusRaw, 4, true), "+0.1950"); - assert_eq!(to_string(f, -0.195, Minus, 5, true), "-0.19500"); - assert_eq!(to_string(f, -0.195, MinusRaw, 6, true), "-0.195000"); - assert_eq!(to_string(f, -0.195, MinusPlus, 7, true), "-0.1950000"); - assert_eq!(to_string(f, -0.195, MinusPlusRaw, 8, true), "-0.19500000"); - - assert_eq!(to_string(f, 999.5, Minus, 0, false), "1000"); - assert_eq!(to_string(f, 999.5, Minus, 1, false), "999.5"); - assert_eq!(to_string(f, 999.5, Minus, 2, false), "999.50"); - assert_eq!(to_string(f, 999.5, Minus, 3, false), "999.500"); - assert_eq!(to_string(f, 999.5, Minus, 30, false), "999.500000000000000000000000000000"); + assert_eq!(to_string(f, 0.195, Minus, 1, true), "0.2"); + assert_eq!(to_string(f, 0.195, MinusRaw, 2, true), "0.20"); + assert_eq!(to_string(f, 0.195, MinusPlus, 3, true), "+0.195"); + assert_eq!(to_string(f, 0.195, MinusPlusRaw, 4, true), "+0.1950"); + assert_eq!(to_string(f, -0.195, Minus, 5, true), "-0.19500"); + assert_eq!(to_string(f, -0.195, MinusRaw, 6, true), "-0.195000"); + assert_eq!(to_string(f, -0.195, MinusPlus, 7, true), "-0.1950000"); + assert_eq!(to_string(f, -0.195, MinusPlusRaw, 8, true), "-0.19500000"); + + assert_eq!(to_string(f, 999.5, Minus, 0, false), "1000"); + assert_eq!(to_string(f, 999.5, Minus, 1, false), "999.5"); + assert_eq!(to_string(f, 999.5, Minus, 2, false), "999.50"); + assert_eq!(to_string(f, 999.5, Minus, 3, false), "999.500"); + assert_eq!( + to_string(f, 999.5, Minus, 30, false), + "999.500000000000000000000000000000" + ); assert_eq!(to_string(f, 0.5, Minus, 0, false), "1"); assert_eq!(to_string(f, 0.5, Minus, 1, false), "0.5"); assert_eq!(to_string(f, 0.5, Minus, 2, false), "0.50"); assert_eq!(to_string(f, 0.5, Minus, 3, false), "0.500"); - assert_eq!(to_string(f, 0.95, Minus, 0, false), "1"); - assert_eq!(to_string(f, 0.95, Minus, 1, false), "0.9"); // because it really is less than 0.95 - assert_eq!(to_string(f, 0.95, Minus, 2, false), "0.95"); - assert_eq!(to_string(f, 0.95, Minus, 3, false), "0.950"); + assert_eq!(to_string(f, 0.95, Minus, 0, false), "1"); + assert_eq!(to_string(f, 0.95, Minus, 1, false), "0.9"); // because it really is less than 0.95 + assert_eq!(to_string(f, 0.95, Minus, 2, false), "0.95"); + assert_eq!(to_string(f, 0.95, Minus, 3, false), "0.950"); assert_eq!(to_string(f, 0.95, Minus, 10, false), "0.9500000000"); - assert_eq!(to_string(f, 0.95, Minus, 30, false), "0.949999999999999955591079014994"); + assert_eq!( + to_string(f, 0.95, Minus, 30, false), + "0.949999999999999955591079014994" + ); - assert_eq!(to_string(f, 0.095, Minus, 0, false), 
"0"); - assert_eq!(to_string(f, 0.095, Minus, 1, false), "0.1"); - assert_eq!(to_string(f, 0.095, Minus, 2, false), "0.10"); - assert_eq!(to_string(f, 0.095, Minus, 3, false), "0.095"); - assert_eq!(to_string(f, 0.095, Minus, 4, false), "0.0950"); + assert_eq!(to_string(f, 0.095, Minus, 0, false), "0"); + assert_eq!(to_string(f, 0.095, Minus, 1, false), "0.1"); + assert_eq!(to_string(f, 0.095, Minus, 2, false), "0.10"); + assert_eq!(to_string(f, 0.095, Minus, 3, false), "0.095"); + assert_eq!(to_string(f, 0.095, Minus, 4, false), "0.0950"); assert_eq!(to_string(f, 0.095, Minus, 10, false), "0.0950000000"); - assert_eq!(to_string(f, 0.095, Minus, 30, false), "0.095000000000000001110223024625"); - - assert_eq!(to_string(f, 0.0095, Minus, 0, false), "0"); - assert_eq!(to_string(f, 0.0095, Minus, 1, false), "0.0"); - assert_eq!(to_string(f, 0.0095, Minus, 2, false), "0.01"); - assert_eq!(to_string(f, 0.0095, Minus, 3, false), "0.009"); // really is less than 0.0095 - assert_eq!(to_string(f, 0.0095, Minus, 4, false), "0.0095"); - assert_eq!(to_string(f, 0.0095, Minus, 5, false), "0.00950"); + assert_eq!( + to_string(f, 0.095, Minus, 30, false), + "0.095000000000000001110223024625" + ); + + assert_eq!(to_string(f, 0.0095, Minus, 0, false), "0"); + assert_eq!(to_string(f, 0.0095, Minus, 1, false), "0.0"); + assert_eq!(to_string(f, 0.0095, Minus, 2, false), "0.01"); + assert_eq!(to_string(f, 0.0095, Minus, 3, false), "0.009"); // really is less than 0.0095 + assert_eq!(to_string(f, 0.0095, Minus, 4, false), "0.0095"); + assert_eq!(to_string(f, 0.0095, Minus, 5, false), "0.00950"); assert_eq!(to_string(f, 0.0095, Minus, 10, false), "0.0095000000"); - assert_eq!(to_string(f, 0.0095, Minus, 30, false), "0.009499999999999999764077607267"); + assert_eq!( + to_string(f, 0.0095, Minus, 30, false), + "0.009499999999999999764077607267" + ); - assert_eq!(to_string(f, 7.5e-11, Minus, 0, false), "0"); - assert_eq!(to_string(f, 7.5e-11, Minus, 3, false), "0.000"); + assert_eq!(to_string(f, 7.5e-11, Minus, 0, false), "0"); + assert_eq!(to_string(f, 7.5e-11, Minus, 3, false), "0.000"); assert_eq!(to_string(f, 7.5e-11, Minus, 10, false), "0.0000000001"); assert_eq!(to_string(f, 7.5e-11, Minus, 11, false), "0.00000000007"); // ditto assert_eq!(to_string(f, 7.5e-11, Minus, 12, false), "0.000000000075"); assert_eq!(to_string(f, 7.5e-11, Minus, 13, false), "0.0000000000750"); - assert_eq!(to_string(f, 7.5e-11, Minus, 20, false), "0.00000000007500000000"); - assert_eq!(to_string(f, 7.5e-11, Minus, 30, false), "0.000000000074999999999999999501"); + assert_eq!( + to_string(f, 7.5e-11, Minus, 20, false), + "0.00000000007500000000" + ); + assert_eq!( + to_string(f, 7.5e-11, Minus, 30, false), + "0.000000000074999999999999999501" + ); - assert_eq!(to_string(f, 1.0e25, Minus, 0, false), "10000000000000000905969664"); - assert_eq!(to_string(f, 1.0e25, Minus, 1, false), "10000000000000000905969664.0"); - assert_eq!(to_string(f, 1.0e25, Minus, 3, false), "10000000000000000905969664.000"); + assert_eq!( + to_string(f, 1.0e25, Minus, 0, false), + "10000000000000000905969664" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 1, false), + "10000000000000000905969664.0" + ); + assert_eq!( + to_string(f, 1.0e25, Minus, 3, false), + "10000000000000000905969664.000" + ); - assert_eq!(to_string(f, 1.0e-6, Minus, 0, false), "0"); - assert_eq!(to_string(f, 1.0e-6, Minus, 3, false), "0.000"); - assert_eq!(to_string(f, 1.0e-6, Minus, 6, false), "0.000001"); - assert_eq!(to_string(f, 1.0e-6, Minus, 9, false), "0.000001000"); + 
assert_eq!(to_string(f, 1.0e-6, Minus, 0, false), "0"); + assert_eq!(to_string(f, 1.0e-6, Minus, 3, false), "0.000"); + assert_eq!(to_string(f, 1.0e-6, Minus, 6, false), "0.000001"); + assert_eq!(to_string(f, 1.0e-6, Minus, 9, false), "0.000001000"); assert_eq!(to_string(f, 1.0e-6, Minus, 12, false), "0.000001000000"); - assert_eq!(to_string(f, 1.0e-6, Minus, 22, false), "0.0000010000000000000000"); - assert_eq!(to_string(f, 1.0e-6, Minus, 23, false), "0.00000099999999999999995"); - assert_eq!(to_string(f, 1.0e-6, Minus, 24, false), "0.000000999999999999999955"); - assert_eq!(to_string(f, 1.0e-6, Minus, 25, false), "0.0000009999999999999999547"); - assert_eq!(to_string(f, 1.0e-6, Minus, 35, false), "0.00000099999999999999995474811182589"); - assert_eq!(to_string(f, 1.0e-6, Minus, 45, false), - "0.000000999999999999999954748111825886258685614"); - assert_eq!(to_string(f, 1.0e-6, Minus, 55, false), - "0.0000009999999999999999547481118258862586856139387236908"); - assert_eq!(to_string(f, 1.0e-6, Minus, 65, false), - "0.00000099999999999999995474811182588625868561393872369080781936646"); - assert_eq!(to_string(f, 1.0e-6, Minus, 75, false), - "0.000000999999999999999954748111825886258685613938723690807819366455078125000"); - - assert_eq!(to_string(f, f32::MAX, Minus, 0, false), - "340282346638528859811704183484516925440"); - assert_eq!(to_string(f, f32::MAX, Minus, 1, false), - "340282346638528859811704183484516925440.0"); - assert_eq!(to_string(f, f32::MAX, Minus, 2, false), - "340282346638528859811704183484516925440.00"); + assert_eq!( + to_string(f, 1.0e-6, Minus, 22, false), + "0.0000010000000000000000" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 23, false), + "0.00000099999999999999995" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 24, false), + "0.000000999999999999999955" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 25, false), + "0.0000009999999999999999547" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 35, false), + "0.00000099999999999999995474811182589" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 45, false), + "0.000000999999999999999954748111825886258685614" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 55, false), + "0.0000009999999999999999547481118258862586856139387236908" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 65, false), + "0.00000099999999999999995474811182588625868561393872369080781936646" + ); + assert_eq!( + to_string(f, 1.0e-6, Minus, 75, false), + "0.000000999999999999999954748111825886258685613938723690807819366455078125000" + ); + + assert_eq!( + to_string(f, f32::MAX, Minus, 0, false), + "340282346638528859811704183484516925440" + ); + assert_eq!( + to_string(f, f32::MAX, Minus, 1, false), + "340282346638528859811704183484516925440.0" + ); + assert_eq!( + to_string(f, f32::MAX, Minus, 2, false), + "340282346638528859811704183484516925440.00" + ); let minf32 = ldexp_f32(1.0, -149); - assert_eq!(to_string(f, minf32, Minus, 0, false), "0"); - assert_eq!(to_string(f, minf32, Minus, 1, false), "0.0"); - assert_eq!(to_string(f, minf32, Minus, 2, false), "0.00"); - assert_eq!(to_string(f, minf32, Minus, 4, false), "0.0000"); - assert_eq!(to_string(f, minf32, Minus, 8, false), "0.00000000"); - assert_eq!(to_string(f, minf32, Minus, 16, false), "0.0000000000000000"); - assert_eq!(to_string(f, minf32, Minus, 32, false), "0.00000000000000000000000000000000"); - assert_eq!(to_string(f, minf32, Minus, 64, false), - "0.0000000000000000000000000000000000000000000014012984643248170709"); - assert_eq!(to_string(f, minf32, Minus, 128, false), - 
"0.0000000000000000000000000000000000000000000014012984643248170709\ - 2372958328991613128026194187651577175706828388979108268586060149"); - assert_eq!(to_string(f, minf32, Minus, 256, false), - "0.0000000000000000000000000000000000000000000014012984643248170709\ - 2372958328991613128026194187651577175706828388979108268586060148\ - 6638188362121582031250000000000000000000000000000000000000000000\ - 0000000000000000000000000000000000000000000000000000000000000000"); - - assert_eq!(to_string(f, f64::MAX, Minus, 0, false), - "1797693134862315708145274237317043567980705675258449965989174768\ - 0315726078002853876058955863276687817154045895351438246423432132\ - 6889464182768467546703537516986049910576551282076245490090389328\ - 9440758685084551339423045832369032229481658085593321233482747978\ - 26204144723168738177180919299881250404026184124858368"); - assert_eq!(to_string(f, f64::MAX, Minus, 10, false), - "1797693134862315708145274237317043567980705675258449965989174768\ - 0315726078002853876058955863276687817154045895351438246423432132\ - 6889464182768467546703537516986049910576551282076245490090389328\ - 9440758685084551339423045832369032229481658085593321233482747978\ - 26204144723168738177180919299881250404026184124858368.0000000000"); + assert_eq!(to_string(f, minf32, Minus, 0, false), "0"); + assert_eq!(to_string(f, minf32, Minus, 1, false), "0.0"); + assert_eq!(to_string(f, minf32, Minus, 2, false), "0.00"); + assert_eq!(to_string(f, minf32, Minus, 4, false), "0.0000"); + assert_eq!(to_string(f, minf32, Minus, 8, false), "0.00000000"); + assert_eq!(to_string(f, minf32, Minus, 16, false), "0.0000000000000000"); + assert_eq!( + to_string(f, minf32, Minus, 32, false), + "0.00000000000000000000000000000000" + ); + assert_eq!( + to_string(f, minf32, Minus, 64, false), + "0.0000000000000000000000000000000000000000000014012984643248170709" + ); + assert_eq!( + to_string(f, minf32, Minus, 128, false), + "0.0000000000000000000000000000000000000000000014012984643248170709\ + 2372958328991613128026194187651577175706828388979108268586060149" + ); + assert_eq!( + to_string(f, minf32, Minus, 256, false), + "0.0000000000000000000000000000000000000000000014012984643248170709\ + 2372958328991613128026194187651577175706828388979108268586060148\ + 6638188362121582031250000000000000000000000000000000000000000000\ + 0000000000000000000000000000000000000000000000000000000000000000" + ); + + assert_eq!( + to_string(f, f64::MAX, Minus, 0, false), + "1797693134862315708145274237317043567980705675258449965989174768\ + 0315726078002853876058955863276687817154045895351438246423432132\ + 6889464182768467546703537516986049910576551282076245490090389328\ + 9440758685084551339423045832369032229481658085593321233482747978\ + 26204144723168738177180919299881250404026184124858368" + ); + assert_eq!( + to_string(f, f64::MAX, Minus, 10, false), + "1797693134862315708145274237317043567980705675258449965989174768\ + 0315726078002853876058955863276687817154045895351438246423432132\ + 6889464182768467546703537516986049910576551282076245490090389328\ + 9440758685084551339423045832369032229481658085593321233482747978\ + 26204144723168738177180919299881250404026184124858368.0000000000" + ); let minf64 = ldexp_f64(1.0, -1074); assert_eq!(to_string(f, minf64, Minus, 0, false), "0"); assert_eq!(to_string(f, minf64, Minus, 1, false), "0.0"); assert_eq!(to_string(f, minf64, Minus, 10, false), "0.0000000000"); - assert_eq!(to_string(f, minf64, Minus, 100, false), - "0.0000000000000000000000000000000000000000000000000000000000000000\ - 
000000000000000000000000000000000000"); - assert_eq!(to_string(f, minf64, Minus, 1000, false), - "0.0000000000000000000000000000000000000000000000000000000000000000\ - 0000000000000000000000000000000000000000000000000000000000000000\ - 0000000000000000000000000000000000000000000000000000000000000000\ - 0000000000000000000000000000000000000000000000000000000000000000\ - 0000000000000000000000000000000000000000000000000000000000000000\ - 0004940656458412465441765687928682213723650598026143247644255856\ - 8250067550727020875186529983636163599237979656469544571773092665\ - 6710355939796398774796010781878126300713190311404527845817167848\ - 9821036887186360569987307230500063874091535649843873124733972731\ - 6961514003171538539807412623856559117102665855668676818703956031\ - 0624931945271591492455329305456544401127480129709999541931989409\ - 0804165633245247571478690147267801593552386115501348035264934720\ - 1937902681071074917033322268447533357208324319360923828934583680\ - 6010601150616980975307834227731832924790498252473077637592724787\ - 4656084778203734469699533647017972677717585125660551199131504891\ - 1014510378627381672509558373897335989937"); + assert_eq!( + to_string(f, minf64, Minus, 100, false), + "0.0000000000000000000000000000000000000000000000000000000000000000\ + 000000000000000000000000000000000000" + ); + assert_eq!( + to_string(f, minf64, Minus, 1000, false), + "0.0000000000000000000000000000000000000000000000000000000000000000\ + 0000000000000000000000000000000000000000000000000000000000000000\ + 0000000000000000000000000000000000000000000000000000000000000000\ + 0000000000000000000000000000000000000000000000000000000000000000\ + 0000000000000000000000000000000000000000000000000000000000000000\ + 0004940656458412465441765687928682213723650598026143247644255856\ + 8250067550727020875186529983636163599237979656469544571773092665\ + 6710355939796398774796010781878126300713190311404527845817167848\ + 9821036887186360569987307230500063874091535649843873124733972731\ + 6961514003171538539807412623856559117102665855668676818703956031\ + 0624931945271591492455329305456544401127480129709999541931989409\ + 0804165633245247571478690147267801593552386115501348035264934720\ + 1937902681071074917033322268447533357208324319360923828934583680\ + 6010601150616980975307834227731832924790498252473077637592724787\ + 4656084778203734469699533647017972677717585125660551199131504891\ + 1014510378627381672509558373897335989937" + ); // very large output - assert_eq!(to_string(f, 0.0, Minus, 80000, false), format!("0.{:0>80000}", "")); - assert_eq!(to_string(f, 1.0e1, Minus, 80000, false), format!("10.{:0>80000}", "")); - assert_eq!(to_string(f, 1.0e0, Minus, 80000, false), format!("1.{:0>80000}", "")); - assert_eq!(to_string(f, 1.0e-1, Minus, 80000, false), - format!("0.1000000000000000055511151231257827021181583404541015625{:0>79945}", "")); - assert_eq!(to_string(f, 1.0e-20, Minus, 80000, false), - format!("0.0000000000000000000099999999999999994515327145420957165172950370\ - 2787392447107715776066783064379706047475337982177734375{:0>79881}", "")); + assert_eq!( + to_string(f, 0.0, Minus, 80000, false), + format!("0.{:0>80000}", "") + ); + assert_eq!( + to_string(f, 1.0e1, Minus, 80000, false), + format!("10.{:0>80000}", "") + ); + assert_eq!( + to_string(f, 1.0e0, Minus, 80000, false), + format!("1.{:0>80000}", "") + ); + assert_eq!( + to_string(f, 1.0e-1, Minus, 80000, false), + format!( + "0.1000000000000000055511151231257827021181583404541015625{:0>79945}", + "" + ) + ); + assert_eq!( + 
to_string(f, 1.0e-20, Minus, 80000, false), + format!( + "0.0000000000000000000099999999999999994515327145420957165172950370\ + 2787392447107715776066783064379706047475337982177734375{:0>79881}", + "" + ) + ); } - diff --git a/src/libcore/tests/num/flt2dec/random.rs b/src/libcore/tests/num/flt2dec/random.rs index 1c36af6af0ee4..13e6315e64560 100644 --- a/src/libcore/tests/num/flt2dec/random.rs +++ b/src/libcore/tests/num/flt2dec/random.rs @@ -3,27 +3,28 @@ use std::i16; use std::str; -use core::num::flt2dec::MAX_SIG_DIGITS; use core::num::flt2dec::strategy::grisu::format_exact_opt; use core::num::flt2dec::strategy::grisu::format_shortest_opt; -use core::num::flt2dec::{decode, DecodableFloat, FullDecoded, Decoded}; +use core::num::flt2dec::MAX_SIG_DIGITS; +use core::num::flt2dec::{decode, DecodableFloat, Decoded, FullDecoded}; -use rand::FromEntropy; -use rand::rngs::SmallRng; use rand::distributions::{Distribution, Uniform}; +use rand::rngs::SmallRng; +use rand::FromEntropy; pub fn decode_finite(v: T) -> Decoded { match decode(v).1 { FullDecoded::Finite(decoded) => decoded, - full_decoded => panic!("expected finite, got {:?} instead", full_decoded) + full_decoded => panic!("expected finite, got {:?} instead", full_decoded), } } - fn iterate(func: &str, k: usize, n: usize, mut f: F, mut g: G, mut v: V) -> (usize, usize) - where F: FnMut(&Decoded, &mut [u8]) -> Option<(usize, i16)>, - G: FnMut(&Decoded, &mut [u8]) -> (usize, i16), - V: FnMut(usize) -> Decoded { +where + F: FnMut(&Decoded, &mut [u8]) -> Option<(usize, i16)>, + G: FnMut(&Decoded, &mut [u8]) -> (usize, i16), + V: FnMut(usize) -> Decoded, +{ assert!(k <= 1024); let mut npassed = 0; // f(x) = Some(g(x)) @@ -31,8 +32,14 @@ fn iterate(func: &str, k: usize, n: usize, mut f: F, mut g: G, mut v: V for i in 0..n { if (i & 0xfffff) == 0 { - println!("in progress, {:x}/{:x} (ignored={} passed={} failed={})", - i, n, nignored, npassed, i - nignored - npassed); + println!( + "in progress, {:x}/{:x} (ignored={} passed={} failed={})", + i, + n, + nignored, + npassed, + i - nignored - npassed + ); } let decoded = v(i); @@ -43,27 +50,47 @@ fn iterate(func: &str, k: usize, n: usize, mut f: F, mut g: G, mut v: V if e1 == e2 && &buf1[..len1] == &buf2[..len2] { npassed += 1; } else { - println!("equivalence test failed, {:x}/{:x}: {:?} f(i)={}e{} g(i)={}e{}", - i, n, decoded, str::from_utf8(&buf1[..len1]).unwrap(), e1, - str::from_utf8(&buf2[..len2]).unwrap(), e2); + println!( + "equivalence test failed, {:x}/{:x}: {:?} f(i)={}e{} g(i)={}e{}", + i, + n, + decoded, + str::from_utf8(&buf1[..len1]).unwrap(), + e1, + str::from_utf8(&buf2[..len2]).unwrap(), + e2 + ); } } else { nignored += 1; } } - println!("{}({}): done, ignored={} passed={} failed={}", - func, k, nignored, npassed, n - nignored - npassed); - assert!(nignored + npassed == n, - "{}({}): {} out of {} values returns an incorrect value!", - func, k, n - nignored - npassed, n); + println!( + "{}({}): done, ignored={} passed={} failed={}", + func, + k, + nignored, + npassed, + n - nignored - npassed + ); + assert!( + nignored + npassed == n, + "{}({}): {} out of {} values returns an incorrect value!", + func, + k, + n - nignored - npassed, + n + ); (npassed, nignored) } pub fn f32_random_equivalence_test(f: F, g: G, k: usize, n: usize) - where F: FnMut(&Decoded, &mut [u8]) -> Option<(usize, i16)>, - G: FnMut(&Decoded, &mut [u8]) -> (usize, i16) { +where + F: FnMut(&Decoded, &mut [u8]) -> Option<(usize, i16)>, + G: FnMut(&Decoded, &mut [u8]) -> (usize, i16), +{ if cfg!(target_os = 
"emscripten") { - return // using rng pulls in i128 support, which doesn't work + return; // using rng pulls in i128 support, which doesn't work } let mut rng = SmallRng::from_entropy(); let f32_range = Uniform::new(0x0000_0001u32, 0x7f80_0000); @@ -74,10 +101,12 @@ pub fn f32_random_equivalence_test(f: F, g: G, k: usize, n: usize) } pub fn f64_random_equivalence_test(f: F, g: G, k: usize, n: usize) - where F: FnMut(&Decoded, &mut [u8]) -> Option<(usize, i16)>, - G: FnMut(&Decoded, &mut [u8]) -> (usize, i16) { +where + F: FnMut(&Decoded, &mut [u8]) -> Option<(usize, i16)>, + G: FnMut(&Decoded, &mut [u8]) -> (usize, i16), +{ if cfg!(target_os = "emscripten") { - return // using rng pulls in i128 support, which doesn't work + return; // using rng pulls in i128 support, which doesn't work } let mut rng = SmallRng::from_entropy(); let f64_range = Uniform::new(0x0000_0000_0000_0001u64, 0x7ff0_0000_0000_0000); @@ -88,8 +117,10 @@ pub fn f64_random_equivalence_test(f: F, g: G, k: usize, n: usize) } pub fn f32_exhaustive_equivalence_test(f: F, g: G, k: usize) - where F: FnMut(&Decoded, &mut [u8]) -> Option<(usize, i16)>, - G: FnMut(&Decoded, &mut [u8]) -> (usize, i16) { +where + F: FnMut(&Decoded, &mut [u8]) -> Option<(usize, i16)>, + G: FnMut(&Decoded, &mut [u8]) -> (usize, i16), +{ // we have only 2^23 * (2^8 - 1) - 1 = 2,139,095,039 positive finite f32 values, // so why not simply testing all of them? // @@ -97,12 +128,17 @@ pub fn f32_exhaustive_equivalence_test(f: F, g: G, k: usize) // but with `-C opt-level=3 -C lto` this only takes about an hour or so. // iterate from 0x0000_0001 to 0x7f7f_ffff, i.e., all finite ranges - let (npassed, nignored) = iterate("f32_exhaustive_equivalence_test", - k, 0x7f7f_ffff, f, g, |i: usize| { - - let x = f32::from_bits(i as u32 + 1); - decode_finite(x) - }); + let (npassed, nignored) = iterate( + "f32_exhaustive_equivalence_test", + k, + 0x7f7f_ffff, + f, + g, + |i: usize| { + let x = f32::from_bits(i as u32 + 1); + decode_finite(x) + }, + ); assert_eq!((npassed, nignored), (2121451881, 17643158)); } @@ -113,7 +149,8 @@ fn shortest_random_equivalence_test() { f32_random_equivalence_test(format_shortest_opt, fallback, MAX_SIG_DIGITS, 10_000); } -#[test] #[ignore] // it is too expensive +#[test] +#[ignore] // it is too expensive fn shortest_f32_exhaustive_equivalence_test() { // it is hard to directly test the optimality of the output, but we can at least test if // two different algorithms agree to each other. @@ -126,21 +163,25 @@ fn shortest_f32_exhaustive_equivalence_test() { f32_exhaustive_equivalence_test(format_shortest_opt, fallback, MAX_SIG_DIGITS); } -#[test] #[ignore] // it is too expensive +#[test] +#[ignore] // it is too expensive fn shortest_f64_hard_random_equivalence_test() { // this again probably has to use appropriate rustc flags. 
use core::num::flt2dec::strategy::dragon::format_shortest as fallback; - f64_random_equivalence_test(format_shortest_opt, fallback, - MAX_SIG_DIGITS, 100_000_000); + f64_random_equivalence_test(format_shortest_opt, fallback, MAX_SIG_DIGITS, 100_000_000); } #[test] fn exact_f32_random_equivalence_test() { use core::num::flt2dec::strategy::dragon::format_exact as fallback; for k in 1..21 { - f32_random_equivalence_test(|d, buf| format_exact_opt(d, buf, i16::MIN), - |d, buf| fallback(d, buf, i16::MIN), k, 1_000); + f32_random_equivalence_test( + |d, buf| format_exact_opt(d, buf, i16::MIN), + |d, buf| fallback(d, buf, i16::MIN), + k, + 1_000, + ); } } @@ -148,8 +189,11 @@ fn exact_f32_random_equivalence_test() { fn exact_f64_random_equivalence_test() { use core::num::flt2dec::strategy::dragon::format_exact as fallback; for k in 1..21 { - f64_random_equivalence_test(|d, buf| format_exact_opt(d, buf, i16::MIN), - |d, buf| fallback(d, buf, i16::MIN), k, 1_000); + f64_random_equivalence_test( + |d, buf| format_exact_opt(d, buf, i16::MIN), + |d, buf| fallback(d, buf, i16::MIN), + k, + 1_000, + ); } } - diff --git a/src/libcore/tests/num/flt2dec/strategy/dragon.rs b/src/libcore/tests/num/flt2dec/strategy/dragon.rs index 1803e39b46df3..3025eb8660223 100644 --- a/src/libcore/tests/num/flt2dec/strategy/dragon.rs +++ b/src/libcore/tests/num/flt2dec/strategy/dragon.rs @@ -1,7 +1,7 @@ -use std::prelude::v1::*; use super::super::*; use core::num::bignum::Big32x40 as Big; use core::num::flt2dec::strategy::dragon::*; +use std::prelude::v1::*; #[test] fn test_mul_pow10() { @@ -62,4 +62,3 @@ fn test_to_exact_exp_str() { fn test_to_exact_fixed_str() { to_exact_fixed_str_test(format_exact); } - diff --git a/src/libcore/tests/num/flt2dec/strategy/grisu.rs b/src/libcore/tests/num/flt2dec/strategy/grisu.rs index 53e9f12ae0f14..ede0ec1dc3b07 100644 --- a/src/libcore/tests/num/flt2dec/strategy/grisu.rs +++ b/src/libcore/tests/num/flt2dec/strategy/grisu.rs @@ -6,12 +6,18 @@ fn test_cached_power() { assert_eq!(CACHED_POW10.first().unwrap().1, CACHED_POW10_FIRST_E); assert_eq!(CACHED_POW10.last().unwrap().1, CACHED_POW10_LAST_E); - for e in -1137..961 { // full range for f64 + for e in -1137..961 { + // full range for f64 let low = ALPHA - e - 64; let high = GAMMA - e - 64; let (_k, cached) = cached_power(low, high); - assert!(low <= cached.e && cached.e <= high, - "cached_power({}, {}) = {:?} is incorrect", low, high, cached); + assert!( + low <= cached.e && cached.e <= high, + "cached_power({}, {}) = {:?} is incorrect", + low, + high, + cached + ); } } @@ -26,7 +32,6 @@ fn test_max_pow10_no_more_than() { } } - #[cfg_attr(all(target_arch = "wasm32", target_os = "emscripten"), ignore)] // issue 42630 #[test] fn shortest_sanity_test() { @@ -64,4 +69,3 @@ fn test_to_exact_exp_str() { fn test_to_exact_fixed_str() { to_exact_fixed_str_test(format_exact); } - diff --git a/src/libcore/tests/num/int_macros.rs b/src/libcore/tests/num/int_macros.rs index 5c6ee8f8ba038..e4147c2969896 100644 --- a/src/libcore/tests/num/int_macros.rs +++ b/src/libcore/tests/num/int_macros.rs @@ -1,214 +1,215 @@ -macro_rules! 
int_module { ($T:ident, $T_i:ident) => ( -#[cfg(test)] -mod tests { - use core::$T_i::*; - use core::isize; - use core::ops::{Shl, Shr, Not, BitXor, BitAnd, BitOr}; - use core::mem; - - use num; - - #[test] - fn test_overflows() { - assert!(MAX > 0); - assert!(MIN <= 0); - assert!(MIN + MAX + 1 == 0); - } - - #[test] - fn test_num() { - num::test_num(10 as $T, 2 as $T); - } - - #[test] - fn test_rem_euclid() { - assert!((-1 as $T).rem_euclid(MIN) == MAX); - } - - #[test] - pub fn test_abs() { - assert!((1 as $T).abs() == 1 as $T); - assert!((0 as $T).abs() == 0 as $T); - assert!((-1 as $T).abs() == 1 as $T); - } - - #[test] - fn test_signum() { - assert!((1 as $T).signum() == 1 as $T); - assert!((0 as $T).signum() == 0 as $T); - assert!((-0 as $T).signum() == 0 as $T); - assert!((-1 as $T).signum() == -1 as $T); - } - - #[test] - fn test_is_positive() { - assert!((1 as $T).is_positive()); - assert!(!(0 as $T).is_positive()); - assert!(!(-0 as $T).is_positive()); - assert!(!(-1 as $T).is_positive()); - } - - #[test] - fn test_is_negative() { - assert!(!(1 as $T).is_negative()); - assert!(!(0 as $T).is_negative()); - assert!(!(-0 as $T).is_negative()); - assert!((-1 as $T).is_negative()); - } - - #[test] - fn test_bitwise_operators() { - assert!(0b1110 as $T == (0b1100 as $T).bitor(0b1010 as $T)); - assert!(0b1000 as $T == (0b1100 as $T).bitand(0b1010 as $T)); - assert!(0b0110 as $T == (0b1100 as $T).bitxor(0b1010 as $T)); - assert!(0b1110 as $T == (0b0111 as $T).shl(1)); - assert!(0b0111 as $T == (0b1110 as $T).shr(1)); - assert!(-(0b11 as $T) - (1 as $T) == (0b11 as $T).not()); - } - - const A: $T = 0b0101100; - const B: $T = 0b0100001; - const C: $T = 0b1111001; - - const _0: $T = 0; - const _1: $T = !0; - - #[test] - fn test_count_ones() { - assert!(A.count_ones() == 3); - assert!(B.count_ones() == 2); - assert!(C.count_ones() == 5); - } - - #[test] - fn test_count_zeros() { - let bits = mem::size_of::<$T>() * 8; - assert!(A.count_zeros() == bits as u32 - 3); - assert!(B.count_zeros() == bits as u32 - 2); - assert!(C.count_zeros() == bits as u32 - 5); - } - - #[test] - fn test_rotate() { - assert_eq!(A.rotate_left(6).rotate_right(2).rotate_right(4), A); - assert_eq!(B.rotate_left(3).rotate_left(2).rotate_right(5), B); - assert_eq!(C.rotate_left(6).rotate_right(2).rotate_right(4), C); - - // Rotating these should make no difference - // - // We test using 124 bits because to ensure that overlong bit shifts do - // not cause undefined behaviour. See #10183. 
- assert_eq!(_0.rotate_left(124), _0); - assert_eq!(_1.rotate_left(124), _1); - assert_eq!(_0.rotate_right(124), _0); - assert_eq!(_1.rotate_right(124), _1); - - // Rotating by 0 should have no effect - assert_eq!(A.rotate_left(0), A); - assert_eq!(B.rotate_left(0), B); - assert_eq!(C.rotate_left(0), C); - // Rotating by a multiple of word size should also have no effect - assert_eq!(A.rotate_left(64), A); - assert_eq!(B.rotate_left(64), B); - assert_eq!(C.rotate_left(64), C); - } - - #[test] - fn test_swap_bytes() { - assert_eq!(A.swap_bytes().swap_bytes(), A); - assert_eq!(B.swap_bytes().swap_bytes(), B); - assert_eq!(C.swap_bytes().swap_bytes(), C); - - // Swapping these should make no difference - assert_eq!(_0.swap_bytes(), _0); - assert_eq!(_1.swap_bytes(), _1); - } - - #[test] - fn test_le() { - assert_eq!($T::from_le(A.to_le()), A); - assert_eq!($T::from_le(B.to_le()), B); - assert_eq!($T::from_le(C.to_le()), C); - assert_eq!($T::from_le(_0), _0); - assert_eq!($T::from_le(_1), _1); - assert_eq!(_0.to_le(), _0); - assert_eq!(_1.to_le(), _1); - } - - #[test] - fn test_be() { - assert_eq!($T::from_be(A.to_be()), A); - assert_eq!($T::from_be(B.to_be()), B); - assert_eq!($T::from_be(C.to_be()), C); - assert_eq!($T::from_be(_0), _0); - assert_eq!($T::from_be(_1), _1); - assert_eq!(_0.to_be(), _0); - assert_eq!(_1.to_be(), _1); - } - - #[test] - fn test_signed_checked_div() { - assert!((10 as $T).checked_div(2) == Some(5)); - assert!((5 as $T).checked_div(0) == None); - assert!(isize::MIN.checked_div(-1) == None); - } - - #[test] - fn test_from_str() { - fn from_str(t: &str) -> Option { - ::std::str::FromStr::from_str(t).ok() +macro_rules! int_module { + ($T:ident, $T_i:ident) => { + #[cfg(test)] + mod tests { + use core::isize; + use core::mem; + use core::ops::{BitAnd, BitOr, BitXor, Not, Shl, Shr}; + use core::$T_i::*; + + use num; + + #[test] + fn test_overflows() { + assert!(MAX > 0); + assert!(MIN <= 0); + assert!(MIN + MAX + 1 == 0); + } + + #[test] + fn test_num() { + num::test_num(10 as $T, 2 as $T); + } + + #[test] + fn test_rem_euclid() { + assert!((-1 as $T).rem_euclid(MIN) == MAX); + } + + #[test] + pub fn test_abs() { + assert!((1 as $T).abs() == 1 as $T); + assert!((0 as $T).abs() == 0 as $T); + assert!((-1 as $T).abs() == 1 as $T); + } + + #[test] + fn test_signum() { + assert!((1 as $T).signum() == 1 as $T); + assert!((0 as $T).signum() == 0 as $T); + assert!((-0 as $T).signum() == 0 as $T); + assert!((-1 as $T).signum() == -1 as $T); + } + + #[test] + fn test_is_positive() { + assert!((1 as $T).is_positive()); + assert!(!(0 as $T).is_positive()); + assert!(!(-0 as $T).is_positive()); + assert!(!(-1 as $T).is_positive()); + } + + #[test] + fn test_is_negative() { + assert!(!(1 as $T).is_negative()); + assert!(!(0 as $T).is_negative()); + assert!(!(-0 as $T).is_negative()); + assert!((-1 as $T).is_negative()); + } + + #[test] + fn test_bitwise_operators() { + assert!(0b1110 as $T == (0b1100 as $T).bitor(0b1010 as $T)); + assert!(0b1000 as $T == (0b1100 as $T).bitand(0b1010 as $T)); + assert!(0b0110 as $T == (0b1100 as $T).bitxor(0b1010 as $T)); + assert!(0b1110 as $T == (0b0111 as $T).shl(1)); + assert!(0b0111 as $T == (0b1110 as $T).shr(1)); + assert!(-(0b11 as $T) - (1 as $T) == (0b11 as $T).not()); + } + + const A: $T = 0b0101100; + const B: $T = 0b0100001; + const C: $T = 0b1111001; + + const _0: $T = 0; + const _1: $T = !0; + + #[test] + fn test_count_ones() { + assert!(A.count_ones() == 3); + assert!(B.count_ones() == 2); + assert!(C.count_ones() == 5); + } + + 
#[test] + fn test_count_zeros() { + let bits = mem::size_of::<$T>() * 8; + assert!(A.count_zeros() == bits as u32 - 3); + assert!(B.count_zeros() == bits as u32 - 2); + assert!(C.count_zeros() == bits as u32 - 5); + } + + #[test] + fn test_rotate() { + assert_eq!(A.rotate_left(6).rotate_right(2).rotate_right(4), A); + assert_eq!(B.rotate_left(3).rotate_left(2).rotate_right(5), B); + assert_eq!(C.rotate_left(6).rotate_right(2).rotate_right(4), C); + + // Rotating these should make no difference + // + // We test using 124 bits because to ensure that overlong bit shifts do + // not cause undefined behaviour. See #10183. + assert_eq!(_0.rotate_left(124), _0); + assert_eq!(_1.rotate_left(124), _1); + assert_eq!(_0.rotate_right(124), _0); + assert_eq!(_1.rotate_right(124), _1); + + // Rotating by 0 should have no effect + assert_eq!(A.rotate_left(0), A); + assert_eq!(B.rotate_left(0), B); + assert_eq!(C.rotate_left(0), C); + // Rotating by a multiple of word size should also have no effect + assert_eq!(A.rotate_left(64), A); + assert_eq!(B.rotate_left(64), B); + assert_eq!(C.rotate_left(64), C); + } + + #[test] + fn test_swap_bytes() { + assert_eq!(A.swap_bytes().swap_bytes(), A); + assert_eq!(B.swap_bytes().swap_bytes(), B); + assert_eq!(C.swap_bytes().swap_bytes(), C); + + // Swapping these should make no difference + assert_eq!(_0.swap_bytes(), _0); + assert_eq!(_1.swap_bytes(), _1); + } + + #[test] + fn test_le() { + assert_eq!($T::from_le(A.to_le()), A); + assert_eq!($T::from_le(B.to_le()), B); + assert_eq!($T::from_le(C.to_le()), C); + assert_eq!($T::from_le(_0), _0); + assert_eq!($T::from_le(_1), _1); + assert_eq!(_0.to_le(), _0); + assert_eq!(_1.to_le(), _1); + } + + #[test] + fn test_be() { + assert_eq!($T::from_be(A.to_be()), A); + assert_eq!($T::from_be(B.to_be()), B); + assert_eq!($T::from_be(C.to_be()), C); + assert_eq!($T::from_be(_0), _0); + assert_eq!($T::from_be(_1), _1); + assert_eq!(_0.to_be(), _0); + assert_eq!(_1.to_be(), _1); + } + + #[test] + fn test_signed_checked_div() { + assert!((10 as $T).checked_div(2) == Some(5)); + assert!((5 as $T).checked_div(0) == None); + assert!(isize::MIN.checked_div(-1) == None); + } + + #[test] + fn test_from_str() { + fn from_str(t: &str) -> Option { + ::std::str::FromStr::from_str(t).ok() + } + assert_eq!(from_str::<$T>("0"), Some(0 as $T)); + assert_eq!(from_str::<$T>("3"), Some(3 as $T)); + assert_eq!(from_str::<$T>("10"), Some(10 as $T)); + assert_eq!(from_str::("123456789"), Some(123456789 as i32)); + assert_eq!(from_str::<$T>("00100"), Some(100 as $T)); + + assert_eq!(from_str::<$T>("-1"), Some(-1 as $T)); + assert_eq!(from_str::<$T>("-3"), Some(-3 as $T)); + assert_eq!(from_str::<$T>("-10"), Some(-10 as $T)); + assert_eq!(from_str::("-123456789"), Some(-123456789 as i32)); + assert_eq!(from_str::<$T>("-00100"), Some(-100 as $T)); + + assert_eq!(from_str::<$T>(""), None); + assert_eq!(from_str::<$T>(" "), None); + assert_eq!(from_str::<$T>("x"), None); + } + + #[test] + fn test_from_str_radix() { + assert_eq!($T::from_str_radix("123", 10), Ok(123 as $T)); + assert_eq!($T::from_str_radix("1001", 2), Ok(9 as $T)); + assert_eq!($T::from_str_radix("123", 8), Ok(83 as $T)); + assert_eq!(i32::from_str_radix("123", 16), Ok(291 as i32)); + assert_eq!(i32::from_str_radix("ffff", 16), Ok(65535 as i32)); + assert_eq!(i32::from_str_radix("FFFF", 16), Ok(65535 as i32)); + assert_eq!($T::from_str_radix("z", 36), Ok(35 as $T)); + assert_eq!($T::from_str_radix("Z", 36), Ok(35 as $T)); + + assert_eq!($T::from_str_radix("-123", 10), Ok(-123 as $T)); 
+ assert_eq!($T::from_str_radix("-1001", 2), Ok(-9 as $T)); + assert_eq!($T::from_str_radix("-123", 8), Ok(-83 as $T)); + assert_eq!(i32::from_str_radix("-123", 16), Ok(-291 as i32)); + assert_eq!(i32::from_str_radix("-ffff", 16), Ok(-65535 as i32)); + assert_eq!(i32::from_str_radix("-FFFF", 16), Ok(-65535 as i32)); + assert_eq!($T::from_str_radix("-z", 36), Ok(-35 as $T)); + assert_eq!($T::from_str_radix("-Z", 36), Ok(-35 as $T)); + + assert_eq!($T::from_str_radix("Z", 35).ok(), None::<$T>); + assert_eq!($T::from_str_radix("-9", 2).ok(), None::<$T>); + } + + #[test] + fn test_pow() { + let mut r = 2 as $T; + + assert_eq!(r.pow(2), 4 as $T); + assert_eq!(r.pow(0), 1 as $T); + r = -2 as $T; + assert_eq!(r.pow(2), 4 as $T); + assert_eq!(r.pow(3), -8 as $T); + } } - assert_eq!(from_str::<$T>("0"), Some(0 as $T)); - assert_eq!(from_str::<$T>("3"), Some(3 as $T)); - assert_eq!(from_str::<$T>("10"), Some(10 as $T)); - assert_eq!(from_str::("123456789"), Some(123456789 as i32)); - assert_eq!(from_str::<$T>("00100"), Some(100 as $T)); - - assert_eq!(from_str::<$T>("-1"), Some(-1 as $T)); - assert_eq!(from_str::<$T>("-3"), Some(-3 as $T)); - assert_eq!(from_str::<$T>("-10"), Some(-10 as $T)); - assert_eq!(from_str::("-123456789"), Some(-123456789 as i32)); - assert_eq!(from_str::<$T>("-00100"), Some(-100 as $T)); - - assert_eq!(from_str::<$T>(""), None); - assert_eq!(from_str::<$T>(" "), None); - assert_eq!(from_str::<$T>("x"), None); - } - - #[test] - fn test_from_str_radix() { - assert_eq!($T::from_str_radix("123", 10), Ok(123 as $T)); - assert_eq!($T::from_str_radix("1001", 2), Ok(9 as $T)); - assert_eq!($T::from_str_radix("123", 8), Ok(83 as $T)); - assert_eq!(i32::from_str_radix("123", 16), Ok(291 as i32)); - assert_eq!(i32::from_str_radix("ffff", 16), Ok(65535 as i32)); - assert_eq!(i32::from_str_radix("FFFF", 16), Ok(65535 as i32)); - assert_eq!($T::from_str_radix("z", 36), Ok(35 as $T)); - assert_eq!($T::from_str_radix("Z", 36), Ok(35 as $T)); - - assert_eq!($T::from_str_radix("-123", 10), Ok(-123 as $T)); - assert_eq!($T::from_str_radix("-1001", 2), Ok(-9 as $T)); - assert_eq!($T::from_str_radix("-123", 8), Ok(-83 as $T)); - assert_eq!(i32::from_str_radix("-123", 16), Ok(-291 as i32)); - assert_eq!(i32::from_str_radix("-ffff", 16), Ok(-65535 as i32)); - assert_eq!(i32::from_str_radix("-FFFF", 16), Ok(-65535 as i32)); - assert_eq!($T::from_str_radix("-z", 36), Ok(-35 as $T)); - assert_eq!($T::from_str_radix("-Z", 36), Ok(-35 as $T)); - - assert_eq!($T::from_str_radix("Z", 35).ok(), None::<$T>); - assert_eq!($T::from_str_radix("-9", 2).ok(), None::<$T>); - } - - #[test] - fn test_pow() { - let mut r = 2 as $T; - - assert_eq!(r.pow(2), 4 as $T); - assert_eq!(r.pow(0), 1 as $T); - r = -2 as $T; - assert_eq!(r.pow(2), 4 as $T); - assert_eq!(r.pow(3), -8 as $T); - } + }; } - -)} diff --git a/src/libcore/tests/num/mod.rs b/src/libcore/tests/num/mod.rs index a17c094679ea8..fdbb82957a9ca 100644 --- a/src/libcore/tests/num/mod.rs +++ b/src/libcore/tests/num/mod.rs @@ -1,32 +1,31 @@ -use core::convert::{TryFrom, TryInto}; use core::cmp::PartialEq; +use core::convert::{TryFrom, TryInto}; use core::fmt::Debug; use core::marker::Copy; use core::num::TryFromIntError; -use core::ops::{Add, Sub, Mul, Div, Rem}; +use core::ops::{Add, Div, Mul, Rem, Sub}; use core::option::Option; -use core::option::Option::{Some, None}; +use core::option::Option::{None, Some}; #[macro_use] mod int_macros; -mod i8; mod i16; mod i32; mod i64; +mod i8; #[macro_use] mod uint_macros; -mod u8; mod u16; mod u32; mod u64; +mod u8; 
-mod flt2dec; -mod dec2flt; mod bignum; - +mod dec2flt; +mod flt2dec; /// Adds the attribute to all items in the block. macro_rules! cfg_block { @@ -48,25 +47,29 @@ macro_rules! assume_usize_width { } /// Helper function for testing numeric operations -pub fn test_num(ten: T, two: T) where +pub fn test_num(ten: T, two: T) +where T: PartialEq - + Add + Sub - + Mul + Div - + Rem + Debug - + Copy + + Add + + Sub + + Mul + + Div + + Rem + + Debug + + Copy, { - assert_eq!(ten.add(two), ten + two); - assert_eq!(ten.sub(two), ten - two); - assert_eq!(ten.mul(two), ten * two); - assert_eq!(ten.div(two), ten / two); - assert_eq!(ten.rem(two), ten % two); + assert_eq!(ten.add(two), ten + two); + assert_eq!(ten.sub(two), ten - two); + assert_eq!(ten.mul(two), ten * two); + assert_eq!(ten.div(two), ten / two); + assert_eq!(ten.rem(two), ten % two); } #[test] fn from_str_issue7588() { - let u : Option = u8::from_str_radix("1000", 10).ok(); + let u: Option = u8::from_str_radix("1000", 10).ok(); assert_eq!(u, None); - let s : Option = i16::from_str_radix("80000", 10).ok(); + let s: Option = i16::from_str_radix("80000", 10).ok(); assert_eq!(s, None); } @@ -90,17 +93,26 @@ fn test_int_from_str_overflow() { assert_eq!("-2147483648".parse::().ok(), Some(-2_147_483_648i32)); assert_eq!("-2147483649".parse::().ok(), None); - assert_eq!("9223372036854775807".parse::().ok(), Some(9_223_372_036_854_775_807i64)); + assert_eq!( + "9223372036854775807".parse::().ok(), + Some(9_223_372_036_854_775_807i64) + ); assert_eq!("9223372036854775808".parse::().ok(), None); - assert_eq!("-9223372036854775808".parse::().ok(), Some(-9_223_372_036_854_775_808i64)); + assert_eq!( + "-9223372036854775808".parse::().ok(), + Some(-9_223_372_036_854_775_808i64) + ); assert_eq!("-9223372036854775809".parse::().ok(), None); } #[test] fn test_leading_plus() { assert_eq!("+127".parse::().ok(), Some(127)); - assert_eq!("+9223372036854775807".parse::().ok(), Some(9223372036854775807)); + assert_eq!( + "+9223372036854775807".parse::().ok(), + Some(9223372036854775807) + ); } #[test] @@ -144,7 +156,7 @@ macro_rules! test_impl_from { assert_eq!(large_max as $Small, small_max); assert_eq!(large_min as $Small, small_min); } - } + }; } // Unsigned -> Unsigned @@ -242,7 +254,6 @@ fn test_f32f64() { assert!(nan.is_nan()); } - /// Conversions where the full width of $source can be represented as $target macro_rules! test_impl_try_from_always_ok { ($fn_name:ident, $source:ty, $target: ty) => { @@ -251,14 +262,20 @@ macro_rules! test_impl_try_from_always_ok { let max = <$source>::max_value(); let min = <$source>::min_value(); let zero: $source = 0; - assert_eq!(<$target as TryFrom<$source>>::try_from(max).unwrap(), - max as $target); - assert_eq!(<$target as TryFrom<$source>>::try_from(min).unwrap(), - min as $target); - assert_eq!(<$target as TryFrom<$source>>::try_from(zero).unwrap(), - zero as $target); + assert_eq!( + <$target as TryFrom<$source>>::try_from(max).unwrap(), + max as $target + ); + assert_eq!( + <$target as TryFrom<$source>>::try_from(min).unwrap(), + min as $target + ); + assert_eq!( + <$target as TryFrom<$source>>::try_from(zero).unwrap(), + zero as $target + ); } - } + }; } test_impl_try_from_always_ok! { test_try_u8u8, u8, u8 } @@ -368,14 +385,18 @@ macro_rules! 
test_impl_try_from_signed_to_unsigned_upper_ok { let min = <$source>::min_value(); let zero: $source = 0; let neg_one: $source = -1; - assert_eq!(<$target as TryFrom<$source>>::try_from(max).unwrap(), - max as $target); + assert_eq!( + <$target as TryFrom<$source>>::try_from(max).unwrap(), + max as $target + ); assert!(<$target as TryFrom<$source>>::try_from(min).is_err()); - assert_eq!(<$target as TryFrom<$source>>::try_from(zero).unwrap(), - zero as $target); + assert_eq!( + <$target as TryFrom<$source>>::try_from(zero).unwrap(), + zero as $target + ); assert!(<$target as TryFrom<$source>>::try_from(neg_one).is_err()); } - } + }; } test_impl_try_from_signed_to_unsigned_upper_ok! { test_try_i8u8, i8, u8 } @@ -435,12 +456,16 @@ macro_rules! test_impl_try_from_unsigned_to_signed_upper_err { let min = <$source>::min_value(); let zero: $source = 0; assert!(<$target as TryFrom<$source>>::try_from(max).is_err()); - assert_eq!(<$target as TryFrom<$source>>::try_from(min).unwrap(), - min as $target); - assert_eq!(<$target as TryFrom<$source>>::try_from(zero).unwrap(), - zero as $target); + assert_eq!( + <$target as TryFrom<$source>>::try_from(min).unwrap(), + min as $target + ); + assert_eq!( + <$target as TryFrom<$source>>::try_from(zero).unwrap(), + zero as $target + ); } - } + }; } test_impl_try_from_unsigned_to_signed_upper_err! { test_try_u8i8, u8, i8 } @@ -503,16 +528,20 @@ macro_rules! test_impl_try_from_same_sign_err { if min != 0 { assert!(<$target as TryFrom<$source>>::try_from(min).is_err()); } - assert_eq!(<$target as TryFrom<$source>>::try_from(zero).unwrap(), - zero as $target); - assert_eq!(<$target as TryFrom<$source>>::try_from(t_max as $source) - .unwrap(), - t_max as $target); - assert_eq!(<$target as TryFrom<$source>>::try_from(t_min as $source) - .unwrap(), - t_min as $target); + assert_eq!( + <$target as TryFrom<$source>>::try_from(zero).unwrap(), + zero as $target + ); + assert_eq!( + <$target as TryFrom<$source>>::try_from(t_max as $source).unwrap(), + t_max as $target + ); + assert_eq!( + <$target as TryFrom<$source>>::try_from(t_min as $source).unwrap(), + t_min as $target + ); } - } + }; } test_impl_try_from_same_sign_err! { test_try_u16u8, u16, u8 } @@ -589,16 +618,20 @@ macro_rules! test_impl_try_from_signed_to_unsigned_err { let t_min = <$target>::min_value(); assert!(<$target as TryFrom<$source>>::try_from(max).is_err()); assert!(<$target as TryFrom<$source>>::try_from(min).is_err()); - assert_eq!(<$target as TryFrom<$source>>::try_from(zero).unwrap(), - zero as $target); - assert_eq!(<$target as TryFrom<$source>>::try_from(t_max as $source) - .unwrap(), - t_max as $target); - assert_eq!(<$target as TryFrom<$source>>::try_from(t_min as $source) - .unwrap(), - t_min as $target); + assert_eq!( + <$target as TryFrom<$source>>::try_from(zero).unwrap(), + zero as $target + ); + assert_eq!( + <$target as TryFrom<$source>>::try_from(t_max as $source).unwrap(), + t_max as $target + ); + assert_eq!( + <$target as TryFrom<$source>>::try_from(t_min as $source).unwrap(), + t_min as $target + ); } - } + }; } test_impl_try_from_signed_to_unsigned_err! { test_try_i16u8, i16, u8 } @@ -637,73 +670,87 @@ assume_usize_width! { } macro_rules! 
test_float { - ($modname: ident, $fty: ty, $inf: expr, $neginf: expr, $nan: expr) => { mod $modname { - // FIXME(nagisa): these tests should test for sign of -0.0 - #[test] - fn min() { - assert_eq!((0.0 as $fty).min(0.0), 0.0); - assert_eq!((-0.0 as $fty).min(-0.0), -0.0); - assert_eq!((9.0 as $fty).min(9.0), 9.0); - assert_eq!((-9.0 as $fty).min(0.0), -9.0); - assert_eq!((0.0 as $fty).min(9.0), 0.0); - assert_eq!((-0.0 as $fty).min(-9.0), -9.0); - assert_eq!(($inf as $fty).min(9.0), 9.0); - assert_eq!((9.0 as $fty).min($inf), 9.0); - assert_eq!(($inf as $fty).min(-9.0), -9.0); - assert_eq!((-9.0 as $fty).min($inf), -9.0); - assert_eq!(($neginf as $fty).min(9.0), $neginf); - assert_eq!((9.0 as $fty).min($neginf), $neginf); - assert_eq!(($neginf as $fty).min(-9.0), $neginf); - assert_eq!((-9.0 as $fty).min($neginf), $neginf); - assert_eq!(($nan as $fty).min(9.0), 9.0); - assert_eq!(($nan as $fty).min(-9.0), -9.0); - assert_eq!((9.0 as $fty).min($nan), 9.0); - assert_eq!((-9.0 as $fty).min($nan), -9.0); - assert!(($nan as $fty).min($nan).is_nan()); - } - #[test] - fn max() { - assert_eq!((0.0 as $fty).max(0.0), 0.0); - assert_eq!((-0.0 as $fty).max(-0.0), -0.0); - assert_eq!((9.0 as $fty).max(9.0), 9.0); - assert_eq!((-9.0 as $fty).max(0.0), 0.0); - assert_eq!((0.0 as $fty).max(9.0), 9.0); - assert_eq!((-0.0 as $fty).max(-9.0), -0.0); - assert_eq!(($inf as $fty).max(9.0), $inf); - assert_eq!((9.0 as $fty).max($inf), $inf); - assert_eq!(($inf as $fty).max(-9.0), $inf); - assert_eq!((-9.0 as $fty).max($inf), $inf); - assert_eq!(($neginf as $fty).max(9.0), 9.0); - assert_eq!((9.0 as $fty).max($neginf), 9.0); - assert_eq!(($neginf as $fty).max(-9.0), -9.0); - assert_eq!((-9.0 as $fty).max($neginf), -9.0); - assert_eq!(($nan as $fty).max(9.0), 9.0); - assert_eq!(($nan as $fty).max(-9.0), -9.0); - assert_eq!((9.0 as $fty).max($nan), 9.0); - assert_eq!((-9.0 as $fty).max($nan), -9.0); - assert!(($nan as $fty).max($nan).is_nan()); - } - #[test] - fn rem_euclid() { - let a: $fty = 42.0; - assert!($inf.rem_euclid(a).is_nan()); - assert_eq!(a.rem_euclid($inf), a); - assert!(a.rem_euclid($nan).is_nan()); - assert!($inf.rem_euclid($inf).is_nan()); - assert!($inf.rem_euclid($nan).is_nan()); - assert!($nan.rem_euclid($inf).is_nan()); - } - #[test] - fn div_euclid() { - let a: $fty = 42.0; - assert_eq!(a.div_euclid($inf), 0.0); - assert!(a.div_euclid($nan).is_nan()); - assert!($inf.div_euclid($inf).is_nan()); - assert!($inf.div_euclid($nan).is_nan()); - assert!($nan.div_euclid($inf).is_nan()); + ($modname: ident, $fty: ty, $inf: expr, $neginf: expr, $nan: expr) => { + mod $modname { + // FIXME(nagisa): these tests should test for sign of -0.0 + #[test] + fn min() { + assert_eq!((0.0 as $fty).min(0.0), 0.0); + assert_eq!((-0.0 as $fty).min(-0.0), -0.0); + assert_eq!((9.0 as $fty).min(9.0), 9.0); + assert_eq!((-9.0 as $fty).min(0.0), -9.0); + assert_eq!((0.0 as $fty).min(9.0), 0.0); + assert_eq!((-0.0 as $fty).min(-9.0), -9.0); + assert_eq!(($inf as $fty).min(9.0), 9.0); + assert_eq!((9.0 as $fty).min($inf), 9.0); + assert_eq!(($inf as $fty).min(-9.0), -9.0); + assert_eq!((-9.0 as $fty).min($inf), -9.0); + assert_eq!(($neginf as $fty).min(9.0), $neginf); + assert_eq!((9.0 as $fty).min($neginf), $neginf); + assert_eq!(($neginf as $fty).min(-9.0), $neginf); + assert_eq!((-9.0 as $fty).min($neginf), $neginf); + assert_eq!(($nan as $fty).min(9.0), 9.0); + assert_eq!(($nan as $fty).min(-9.0), -9.0); + assert_eq!((9.0 as $fty).min($nan), 9.0); + assert_eq!((-9.0 as $fty).min($nan), -9.0); + assert!(($nan as 
$fty).min($nan).is_nan()); + } + #[test] + fn max() { + assert_eq!((0.0 as $fty).max(0.0), 0.0); + assert_eq!((-0.0 as $fty).max(-0.0), -0.0); + assert_eq!((9.0 as $fty).max(9.0), 9.0); + assert_eq!((-9.0 as $fty).max(0.0), 0.0); + assert_eq!((0.0 as $fty).max(9.0), 9.0); + assert_eq!((-0.0 as $fty).max(-9.0), -0.0); + assert_eq!(($inf as $fty).max(9.0), $inf); + assert_eq!((9.0 as $fty).max($inf), $inf); + assert_eq!(($inf as $fty).max(-9.0), $inf); + assert_eq!((-9.0 as $fty).max($inf), $inf); + assert_eq!(($neginf as $fty).max(9.0), 9.0); + assert_eq!((9.0 as $fty).max($neginf), 9.0); + assert_eq!(($neginf as $fty).max(-9.0), -9.0); + assert_eq!((-9.0 as $fty).max($neginf), -9.0); + assert_eq!(($nan as $fty).max(9.0), 9.0); + assert_eq!(($nan as $fty).max(-9.0), -9.0); + assert_eq!((9.0 as $fty).max($nan), 9.0); + assert_eq!((-9.0 as $fty).max($nan), -9.0); + assert!(($nan as $fty).max($nan).is_nan()); + } + #[test] + fn rem_euclid() { + let a: $fty = 42.0; + assert!($inf.rem_euclid(a).is_nan()); + assert_eq!(a.rem_euclid($inf), a); + assert!(a.rem_euclid($nan).is_nan()); + assert!($inf.rem_euclid($inf).is_nan()); + assert!($inf.rem_euclid($nan).is_nan()); + assert!($nan.rem_euclid($inf).is_nan()); + } + #[test] + fn div_euclid() { + let a: $fty = 42.0; + assert_eq!(a.div_euclid($inf), 0.0); + assert!(a.div_euclid($nan).is_nan()); + assert!($inf.div_euclid($inf).is_nan()); + assert!($inf.div_euclid($nan).is_nan()); + assert!($nan.div_euclid($inf).is_nan()); + } } - } } + }; } -test_float!(f32, f32, ::core::f32::INFINITY, ::core::f32::NEG_INFINITY, ::core::f32::NAN); -test_float!(f64, f64, ::core::f64::INFINITY, ::core::f64::NEG_INFINITY, ::core::f64::NAN); +test_float!( + f32, + f32, + ::core::f32::INFINITY, + ::core::f32::NEG_INFINITY, + ::core::f32::NAN +); +test_float!( + f64, + f64, + ::core::f64::INFINITY, + ::core::f64::NEG_INFINITY, + ::core::f64::NAN +); diff --git a/src/libcore/tests/num/uint_macros.rs b/src/libcore/tests/num/uint_macros.rs index 6e81542b6ec88..561e143603a1e 100644 --- a/src/libcore/tests/num/uint_macros.rs +++ b/src/libcore/tests/num/uint_macros.rs @@ -1,159 +1,161 @@ -macro_rules! 
uint_module { ($T:ident, $T_i:ident) => ( -#[cfg(test)] -mod tests { - use core::$T_i::*; - use num; - use core::ops::{BitOr, BitAnd, BitXor, Shl, Shr, Not}; - use std::str::FromStr; - use std::mem; - - #[test] - fn test_overflows() { - assert!(MAX > 0); - assert!(MIN <= 0); - assert!((MIN + MAX).wrapping_add(1) == 0); - } - - #[test] - fn test_num() { - num::test_num(10 as $T, 2 as $T); - } - - #[test] - fn test_bitwise_operators() { - assert!(0b1110 as $T == (0b1100 as $T).bitor(0b1010 as $T)); - assert!(0b1000 as $T == (0b1100 as $T).bitand(0b1010 as $T)); - assert!(0b0110 as $T == (0b1100 as $T).bitxor(0b1010 as $T)); - assert!(0b1110 as $T == (0b0111 as $T).shl(1)); - assert!(0b0111 as $T == (0b1110 as $T).shr(1)); - assert!(MAX - (0b1011 as $T) == (0b1011 as $T).not()); - } - - const A: $T = 0b0101100; - const B: $T = 0b0100001; - const C: $T = 0b1111001; - - const _0: $T = 0; - const _1: $T = !0; - - #[test] - fn test_count_ones() { - assert!(A.count_ones() == 3); - assert!(B.count_ones() == 2); - assert!(C.count_ones() == 5); - } - - #[test] - fn test_count_zeros() { - let bits = mem::size_of::<$T>() * 8; - assert!(A.count_zeros() == bits as u32 - 3); - assert!(B.count_zeros() == bits as u32 - 2); - assert!(C.count_zeros() == bits as u32 - 5); - } - - #[test] - fn test_rotate() { - assert_eq!(A.rotate_left(6).rotate_right(2).rotate_right(4), A); - assert_eq!(B.rotate_left(3).rotate_left(2).rotate_right(5), B); - assert_eq!(C.rotate_left(6).rotate_right(2).rotate_right(4), C); - - // Rotating these should make no difference - // - // We test using 124 bits because to ensure that overlong bit shifts do - // not cause undefined behaviour. See #10183. - assert_eq!(_0.rotate_left(124), _0); - assert_eq!(_1.rotate_left(124), _1); - assert_eq!(_0.rotate_right(124), _0); - assert_eq!(_1.rotate_right(124), _1); - - // Rotating by 0 should have no effect - assert_eq!(A.rotate_left(0), A); - assert_eq!(B.rotate_left(0), B); - assert_eq!(C.rotate_left(0), C); - // Rotating by a multiple of word size should also have no effect - assert_eq!(A.rotate_left(64), A); - assert_eq!(B.rotate_left(64), B); - assert_eq!(C.rotate_left(64), C); - } - - #[test] - fn test_swap_bytes() { - assert_eq!(A.swap_bytes().swap_bytes(), A); - assert_eq!(B.swap_bytes().swap_bytes(), B); - assert_eq!(C.swap_bytes().swap_bytes(), C); - - // Swapping these should make no difference - assert_eq!(_0.swap_bytes(), _0); - assert_eq!(_1.swap_bytes(), _1); - } - - #[test] - fn test_reverse_bits() { - assert_eq!(A.reverse_bits().reverse_bits(), A); - assert_eq!(B.reverse_bits().reverse_bits(), B); - assert_eq!(C.reverse_bits().reverse_bits(), C); - - // Swapping these should make no difference - assert_eq!(_0.reverse_bits(), _0); - assert_eq!(_1.reverse_bits(), _1); - } - - #[test] - fn test_le() { - assert_eq!($T::from_le(A.to_le()), A); - assert_eq!($T::from_le(B.to_le()), B); - assert_eq!($T::from_le(C.to_le()), C); - assert_eq!($T::from_le(_0), _0); - assert_eq!($T::from_le(_1), _1); - assert_eq!(_0.to_le(), _0); - assert_eq!(_1.to_le(), _1); - } - - #[test] - fn test_be() { - assert_eq!($T::from_be(A.to_be()), A); - assert_eq!($T::from_be(B.to_be()), B); - assert_eq!($T::from_be(C.to_be()), C); - assert_eq!($T::from_be(_0), _0); - assert_eq!($T::from_be(_1), _1); - assert_eq!(_0.to_be(), _0); - assert_eq!(_1.to_be(), _1); - } - - #[test] - fn test_unsigned_checked_div() { - assert!((10 as $T).checked_div(2) == Some(5)); - assert!((5 as $T).checked_div(0) == None); - } - - fn from_str(t: &str) -> Option { - 
FromStr::from_str(t).ok() - } - - #[test] - pub fn test_from_str() { - assert_eq!(from_str::<$T>("0"), Some(0 as $T)); - assert_eq!(from_str::<$T>("3"), Some(3 as $T)); - assert_eq!(from_str::<$T>("10"), Some(10 as $T)); - assert_eq!(from_str::("123456789"), Some(123456789 as u32)); - assert_eq!(from_str::<$T>("00100"), Some(100 as $T)); - - assert_eq!(from_str::<$T>(""), None); - assert_eq!(from_str::<$T>(" "), None); - assert_eq!(from_str::<$T>("x"), None); - } - - #[test] - pub fn test_parse_bytes() { - assert_eq!($T::from_str_radix("123", 10), Ok(123 as $T)); - assert_eq!($T::from_str_radix("1001", 2), Ok(9 as $T)); - assert_eq!($T::from_str_radix("123", 8), Ok(83 as $T)); - assert_eq!(u16::from_str_radix("123", 16), Ok(291 as u16)); - assert_eq!(u16::from_str_radix("ffff", 16), Ok(65535 as u16)); - assert_eq!($T::from_str_radix("z", 36), Ok(35 as $T)); - - assert_eq!($T::from_str_radix("Z", 10).ok(), None::<$T>); - assert_eq!($T::from_str_radix("_", 2).ok(), None::<$T>); - } +macro_rules! uint_module { + ($T:ident, $T_i:ident) => { + #[cfg(test)] + mod tests { + use core::ops::{BitAnd, BitOr, BitXor, Not, Shl, Shr}; + use core::$T_i::*; + use num; + use std::mem; + use std::str::FromStr; + + #[test] + fn test_overflows() { + assert!(MAX > 0); + assert!(MIN <= 0); + assert!((MIN + MAX).wrapping_add(1) == 0); + } + + #[test] + fn test_num() { + num::test_num(10 as $T, 2 as $T); + } + + #[test] + fn test_bitwise_operators() { + assert!(0b1110 as $T == (0b1100 as $T).bitor(0b1010 as $T)); + assert!(0b1000 as $T == (0b1100 as $T).bitand(0b1010 as $T)); + assert!(0b0110 as $T == (0b1100 as $T).bitxor(0b1010 as $T)); + assert!(0b1110 as $T == (0b0111 as $T).shl(1)); + assert!(0b0111 as $T == (0b1110 as $T).shr(1)); + assert!(MAX - (0b1011 as $T) == (0b1011 as $T).not()); + } + + const A: $T = 0b0101100; + const B: $T = 0b0100001; + const C: $T = 0b1111001; + + const _0: $T = 0; + const _1: $T = !0; + + #[test] + fn test_count_ones() { + assert!(A.count_ones() == 3); + assert!(B.count_ones() == 2); + assert!(C.count_ones() == 5); + } + + #[test] + fn test_count_zeros() { + let bits = mem::size_of::<$T>() * 8; + assert!(A.count_zeros() == bits as u32 - 3); + assert!(B.count_zeros() == bits as u32 - 2); + assert!(C.count_zeros() == bits as u32 - 5); + } + + #[test] + fn test_rotate() { + assert_eq!(A.rotate_left(6).rotate_right(2).rotate_right(4), A); + assert_eq!(B.rotate_left(3).rotate_left(2).rotate_right(5), B); + assert_eq!(C.rotate_left(6).rotate_right(2).rotate_right(4), C); + + // Rotating these should make no difference + // + // We test using 124 bits because to ensure that overlong bit shifts do + // not cause undefined behaviour. See #10183. 
+ assert_eq!(_0.rotate_left(124), _0); + assert_eq!(_1.rotate_left(124), _1); + assert_eq!(_0.rotate_right(124), _0); + assert_eq!(_1.rotate_right(124), _1); + + // Rotating by 0 should have no effect + assert_eq!(A.rotate_left(0), A); + assert_eq!(B.rotate_left(0), B); + assert_eq!(C.rotate_left(0), C); + // Rotating by a multiple of word size should also have no effect + assert_eq!(A.rotate_left(64), A); + assert_eq!(B.rotate_left(64), B); + assert_eq!(C.rotate_left(64), C); + } + + #[test] + fn test_swap_bytes() { + assert_eq!(A.swap_bytes().swap_bytes(), A); + assert_eq!(B.swap_bytes().swap_bytes(), B); + assert_eq!(C.swap_bytes().swap_bytes(), C); + + // Swapping these should make no difference + assert_eq!(_0.swap_bytes(), _0); + assert_eq!(_1.swap_bytes(), _1); + } + + #[test] + fn test_reverse_bits() { + assert_eq!(A.reverse_bits().reverse_bits(), A); + assert_eq!(B.reverse_bits().reverse_bits(), B); + assert_eq!(C.reverse_bits().reverse_bits(), C); + + // Swapping these should make no difference + assert_eq!(_0.reverse_bits(), _0); + assert_eq!(_1.reverse_bits(), _1); + } + + #[test] + fn test_le() { + assert_eq!($T::from_le(A.to_le()), A); + assert_eq!($T::from_le(B.to_le()), B); + assert_eq!($T::from_le(C.to_le()), C); + assert_eq!($T::from_le(_0), _0); + assert_eq!($T::from_le(_1), _1); + assert_eq!(_0.to_le(), _0); + assert_eq!(_1.to_le(), _1); + } + + #[test] + fn test_be() { + assert_eq!($T::from_be(A.to_be()), A); + assert_eq!($T::from_be(B.to_be()), B); + assert_eq!($T::from_be(C.to_be()), C); + assert_eq!($T::from_be(_0), _0); + assert_eq!($T::from_be(_1), _1); + assert_eq!(_0.to_be(), _0); + assert_eq!(_1.to_be(), _1); + } + + #[test] + fn test_unsigned_checked_div() { + assert!((10 as $T).checked_div(2) == Some(5)); + assert!((5 as $T).checked_div(0) == None); + } + + fn from_str(t: &str) -> Option { + FromStr::from_str(t).ok() + } + + #[test] + pub fn test_from_str() { + assert_eq!(from_str::<$T>("0"), Some(0 as $T)); + assert_eq!(from_str::<$T>("3"), Some(3 as $T)); + assert_eq!(from_str::<$T>("10"), Some(10 as $T)); + assert_eq!(from_str::("123456789"), Some(123456789 as u32)); + assert_eq!(from_str::<$T>("00100"), Some(100 as $T)); + + assert_eq!(from_str::<$T>(""), None); + assert_eq!(from_str::<$T>(" "), None); + assert_eq!(from_str::<$T>("x"), None); + } + + #[test] + pub fn test_parse_bytes() { + assert_eq!($T::from_str_radix("123", 10), Ok(123 as $T)); + assert_eq!($T::from_str_radix("1001", 2), Ok(9 as $T)); + assert_eq!($T::from_str_radix("123", 8), Ok(83 as $T)); + assert_eq!(u16::from_str_radix("123", 16), Ok(291 as u16)); + assert_eq!(u16::from_str_radix("ffff", 16), Ok(65535 as u16)); + assert_eq!($T::from_str_radix("z", 36), Ok(35 as $T)); + + assert_eq!($T::from_str_radix("Z", 10).ok(), None::<$T>); + assert_eq!($T::from_str_radix("_", 2).ok(), None::<$T>); + } + } + }; } -)} diff --git a/src/libcore/tests/ops.rs b/src/libcore/tests/ops.rs index 401644e120d16..19d46faa913d3 100644 --- a/src/libcore/tests/ops.rs +++ b/src/libcore/tests/ops.rs @@ -1,4 +1,4 @@ -use core::ops::{Range, RangeFull, RangeFrom, RangeTo, RangeInclusive}; +use core::ops::{Range, RangeFrom, RangeFull, RangeInclusive, RangeTo}; // Test the Range structs without the syntactic sugar. @@ -59,26 +59,25 @@ fn test_range_inclusive() { assert_eq!(r.next(), None); } - #[test] fn test_range_is_empty() { use core::f32::*; - assert!(!(0.0 .. 10.0).is_empty()); - assert!( (-0.0 .. 0.0).is_empty()); - assert!( (10.0 .. 
0.0).is_empty()); + assert!(!(0.0..10.0).is_empty()); + assert!((-0.0..0.0).is_empty()); + assert!((10.0..0.0).is_empty()); - assert!(!(NEG_INFINITY .. INFINITY).is_empty()); - assert!( (EPSILON .. NAN).is_empty()); - assert!( (NAN .. EPSILON).is_empty()); - assert!( (NAN .. NAN).is_empty()); + assert!(!(NEG_INFINITY..INFINITY).is_empty()); + assert!((EPSILON..NAN).is_empty()); + assert!((NAN..EPSILON).is_empty()); + assert!((NAN..NAN).is_empty()); - assert!(!(0.0 ..= 10.0).is_empty()); - assert!(!(-0.0 ..= 0.0).is_empty()); - assert!( (10.0 ..= 0.0).is_empty()); + assert!(!(0.0..=10.0).is_empty()); + assert!(!(-0.0..=0.0).is_empty()); + assert!((10.0..=0.0).is_empty()); - assert!(!(NEG_INFINITY ..= INFINITY).is_empty()); - assert!( (EPSILON ..= NAN).is_empty()); - assert!( (NAN ..= EPSILON).is_empty()); - assert!( (NAN ..= NAN).is_empty()); + assert!(!(NEG_INFINITY..=INFINITY).is_empty()); + assert!((EPSILON..=NAN).is_empty()); + assert!((NAN..=EPSILON).is_empty()); + assert!((NAN..=NAN).is_empty()); } diff --git a/src/libcore/tests/option.rs b/src/libcore/tests/option.rs index b059b134868d9..ec6bbb34645a9 100644 --- a/src/libcore/tests/option.rs +++ b/src/libcore/tests/option.rs @@ -1,6 +1,6 @@ -use core::option::*; -use core::mem; use core::clone::Clone; +use core::mem; +use core::option::*; #[test] fn test_get_ptr() { @@ -26,15 +26,15 @@ fn test_get_str() { #[test] fn test_get_resource() { - use std::rc::Rc; use core::cell::RefCell; + use std::rc::Rc; struct R { - i: Rc>, + i: Rc>, } - impl Drop for R { - fn drop(&mut self) { + impl Drop for R { + fn drop(&mut self) { let ii = &*self.i; let i = *ii.borrow(); *ii.borrow_mut() = i + 1; @@ -42,9 +42,7 @@ fn test_get_resource() { } fn r(i: Rc>) -> R { - R { - i, - } + R { i } } let i = Rc::new(RefCell::new(0)); @@ -68,7 +66,8 @@ fn test_option_dance() { assert!(y.is_none()); } -#[test] #[should_panic] +#[test] +#[should_panic] fn test_option_too_much_dance() { struct A; let mut y = Some(A); @@ -208,7 +207,7 @@ fn test_mut_iter() { fn test_ord() { let small = Some(1.0f64); let big = Some(5.0f64); - let nan = Some(0.0f64/0.0); + let nan = Some(0.0f64 / 0.0); assert!(!(nan < big)); assert!(!(nan > big)); assert!(small < big); @@ -224,9 +223,7 @@ fn test_collect() { let v: Option> = (0..3).map(|x| Some(x)).collect(); assert!(v == Some(vec![0, 1, 2])); - let v: Option> = (0..3).map(|x| { - if x > 1 { None } else { Some(x) } - }).collect(); + let v: Option> = (0..3).map(|x| if x > 1 { None } else { Some(x) }).collect(); assert!(v == None); // test that it does not take more elements than it needs diff --git a/src/libcore/tests/pattern.rs b/src/libcore/tests/pattern.rs index b78ed0210770f..8d27586d697b3 100644 --- a/src/libcore/tests/pattern.rs +++ b/src/libcore/tests/pattern.rs @@ -18,7 +18,7 @@ enum Step { Matches(usize, usize), Rejects(usize, usize), InRange(usize, usize), - Done + Done, } use self::Step::*; @@ -28,7 +28,7 @@ impl From for Step { match x { SearchStep::Match(a, b) => Matches(a, b), SearchStep::Reject(a, b) => Rejects(a, b), - SearchStep::Done => Done + SearchStep::Done => Done, } } } @@ -37,7 +37,7 @@ impl From> for Step { fn from(x: Option<(usize, usize)>) -> Self { match x { Some((a, b)) => InRange(a, b), - None => Done + None => Done, } } } @@ -52,57 +52,170 @@ impl From> for Step { #[test] fn test_simple_iteration() { - search_asserts! 
("abcdeabcd", 'a', "forward iteration for ASCII string", + search_asserts!( + "abcdeabcd", + 'a', + "forward iteration for ASCII string", // a b c d e a b c d EOF - [next, next, next, next, next, next, next, next, next, next], - [Matches(0, 1), Rejects(1, 2), Rejects(2, 3), Rejects(3, 4), Rejects(4, 5), Matches(5, 6), Rejects(6, 7), Rejects(7, 8), Rejects(8, 9), Done] - ); - - search_asserts! ("abcdeabcd", 'a', "reverse iteration for ASCII string", + [next, next, next, next, next, next, next, next, next, next], + [ + Matches(0, 1), + Rejects(1, 2), + Rejects(2, 3), + Rejects(3, 4), + Rejects(4, 5), + Matches(5, 6), + Rejects(6, 7), + Rejects(7, 8), + Rejects(8, 9), + Done + ] + ); + + search_asserts!( + "abcdeabcd", + 'a', + "reverse iteration for ASCII string", // d c b a e d c b a EOF - [next_back, next_back, next_back, next_back, next_back, next_back, next_back, next_back, next_back, next_back], - [Rejects(8, 9), Rejects(7, 8), Rejects(6, 7), Matches(5, 6), Rejects(4, 5), Rejects(3, 4), Rejects(2, 3), Rejects(1, 2), Matches(0, 1), Done] - ); - - search_asserts! ("我爱我的猫", '我', "forward iteration for Chinese string", + [ + next_back, next_back, next_back, next_back, next_back, next_back, next_back, next_back, + next_back, next_back + ], + [ + Rejects(8, 9), + Rejects(7, 8), + Rejects(6, 7), + Matches(5, 6), + Rejects(4, 5), + Rejects(3, 4), + Rejects(2, 3), + Rejects(1, 2), + Matches(0, 1), + Done + ] + ); + + search_asserts!( + "我爱我的猫", + '我', + "forward iteration for Chinese string", // 我 愛 我 的 貓 EOF - [next, next, next, next, next, next], - [Matches(0, 3), Rejects(3, 6), Matches(6, 9), Rejects(9, 12), Rejects(12, 15), Done] - ); - - search_asserts! ("我的猫说meow", 'm', "forward iteration for mixed string", + [next, next, next, next, next, next], + [ + Matches(0, 3), + Rejects(3, 6), + Matches(6, 9), + Rejects(9, 12), + Rejects(12, 15), + Done + ] + ); + + search_asserts!( + "我的猫说meow", + 'm', + "forward iteration for mixed string", // 我 的 猫 说 m e o w EOF - [next, next, next, next, next, next, next, next, next], - [Rejects(0, 3), Rejects(3, 6), Rejects(6, 9), Rejects(9, 12), Matches(12, 13), Rejects(13, 14), Rejects(14, 15), Rejects(15, 16), Done] - ); - - search_asserts! 
("我的猫说meow", '猫', "reverse iteration for mixed string", + [next, next, next, next, next, next, next, next, next], + [ + Rejects(0, 3), + Rejects(3, 6), + Rejects(6, 9), + Rejects(9, 12), + Matches(12, 13), + Rejects(13, 14), + Rejects(14, 15), + Rejects(15, 16), + Done + ] + ); + + search_asserts!( + "我的猫说meow", + '猫', + "reverse iteration for mixed string", // w o e m 说 猫 的 我 EOF - [next_back, next_back, next_back, next_back, next_back, next_back, next_back, next_back, next_back], - [Rejects(15, 16), Rejects(14, 15), Rejects(13, 14), Rejects(12, 13), Rejects(9, 12), Matches(6, 9), Rejects(3, 6), Rejects(0, 3), Done] + [ + next_back, next_back, next_back, next_back, next_back, next_back, next_back, next_back, + next_back + ], + [ + Rejects(15, 16), + Rejects(14, 15), + Rejects(13, 14), + Rejects(12, 13), + Rejects(9, 12), + Matches(6, 9), + Rejects(3, 6), + Rejects(0, 3), + Done + ] ); } #[test] fn test_simple_search() { - search_asserts!("abcdeabcdeabcde", 'a', "next_match for ASCII string", - [next_match, next_match, next_match, next_match], + search_asserts!( + "abcdeabcdeabcde", + 'a', + "next_match for ASCII string", + [next_match, next_match, next_match, next_match], [InRange(0, 1), InRange(5, 6), InRange(10, 11), Done] ); - search_asserts!("abcdeabcdeabcde", 'a', "next_match_back for ASCII string", - [next_match_back, next_match_back, next_match_back, next_match_back], - [InRange(10, 11), InRange(5, 6), InRange(0, 1), Done] - ); - - search_asserts!("abcdeab", 'a', "next_reject for ASCII string", - [next_reject, next_reject, next_match, next_reject, next_reject], - [InRange(1, 2), InRange(2, 3), InRange(5, 6), InRange(6, 7), Done] - ); - - search_asserts!("abcdeabcdeabcde", 'a', "next_reject_back for ASCII string", - [next_reject_back, next_reject_back, next_match_back, next_reject_back, next_reject_back, next_reject_back], - [InRange(14, 15), InRange(13, 14), InRange(10, 11), InRange(9, 10), InRange(8, 9), InRange(7, 8)] + search_asserts!( + "abcdeabcdeabcde", + 'a', + "next_match_back for ASCII string", + [ + next_match_back, + next_match_back, + next_match_back, + next_match_back + ], + [InRange(10, 11), InRange(5, 6), InRange(0, 1), Done] + ); + + search_asserts!( + "abcdeab", + 'a', + "next_reject for ASCII string", + [ + next_reject, + next_reject, + next_match, + next_reject, + next_reject + ], + [ + InRange(1, 2), + InRange(2, 3), + InRange(5, 6), + InRange(6, 7), + Done + ] + ); + + search_asserts!( + "abcdeabcdeabcde", + 'a', + "next_reject_back for ASCII string", + [ + next_reject_back, + next_reject_back, + next_match_back, + next_reject_back, + next_reject_back, + next_reject_back + ], + [ + InRange(14, 15), + InRange(13, 14), + InRange(10, 11), + InRange(9, 10), + InRange(8, 9), + InRange(7, 8) + ] ); } @@ -121,77 +234,164 @@ const STRESS: &str = "Áa🁀bÁꁁfg😁각กᘀ각aÁ각ꁁก😁a"; fn test_stress_indices() { // this isn't really a test, more of documentation on the indices of each character in the stresstest string - search_asserts!(STRESS, 'x', "Indices of characters in stress test", - [next, next, next, next, next, next, next, next, next, next, next, next, next, next, next, next, next, next, next, next, next], - [Rejects(0, 2), // Á - Rejects(2, 3), // a - Rejects(3, 7), // 🁀 - Rejects(7, 8), // b - Rejects(8, 10), // Á - Rejects(10, 13), // ꁁ - Rejects(13, 14), // f - Rejects(14, 15), // g - Rejects(15, 19), // 😀 - Rejects(19, 22), // 각 - Rejects(22, 25), // ก - Rejects(25, 28), // ᘀ - Rejects(28, 31), // 각 - Rejects(31, 32), // a - Rejects(32, 34), // Á - Rejects(34, 37), // 각 
- Rejects(37, 40), // ꁁ - Rejects(40, 43), // ก - Rejects(43, 47), // 😀 - Rejects(47, 48), // a - Done] + search_asserts!( + STRESS, + 'x', + "Indices of characters in stress test", + [ + next, next, next, next, next, next, next, next, next, next, next, next, next, next, + next, next, next, next, next, next, next + ], + [ + Rejects(0, 2), // Á + Rejects(2, 3), // a + Rejects(3, 7), // 🁀 + Rejects(7, 8), // b + Rejects(8, 10), // Á + Rejects(10, 13), // ꁁ + Rejects(13, 14), // f + Rejects(14, 15), // g + Rejects(15, 19), // 😀 + Rejects(19, 22), // 각 + Rejects(22, 25), // ก + Rejects(25, 28), // ᘀ + Rejects(28, 31), // 각 + Rejects(31, 32), // a + Rejects(32, 34), // Á + Rejects(34, 37), // 각 + Rejects(37, 40), // ꁁ + Rejects(40, 43), // ก + Rejects(43, 47), // 😀 + Rejects(47, 48), // a + Done + ] ); } #[test] fn test_forward_search_shared_bytes() { - search_asserts!(STRESS, 'Á', "Forward search for two-byte Latin character", - [next_match, next_match, next_match, next_match], + search_asserts!( + STRESS, + 'Á', + "Forward search for two-byte Latin character", + [next_match, next_match, next_match, next_match], [InRange(0, 2), InRange(8, 10), InRange(32, 34), Done] ); - search_asserts!(STRESS, 'Á', "Forward search for two-byte Latin character; check if next() still works", - [next_match, next, next_match, next, next_match, next, next_match], - [InRange(0, 2), Rejects(2, 3), InRange(8, 10), Rejects(10, 13), InRange(32, 34), Rejects(34, 37), Done] - ); - - search_asserts!(STRESS, '각', "Forward search for three-byte Hangul character", - [next_match, next, next_match, next_match, next_match], - [InRange(19, 22), Rejects(22, 25), InRange(28, 31), InRange(34, 37), Done] - ); - - search_asserts!(STRESS, '각', "Forward search for three-byte Hangul character; check if next() still works", - [next_match, next, next_match, next, next_match, next, next_match], - [InRange(19, 22), Rejects(22, 25), InRange(28, 31), Rejects(31, 32), InRange(34, 37), Rejects(37, 40), Done] - ); - - search_asserts!(STRESS, 'ก', "Forward search for three-byte Thai character", - [next_match, next, next_match, next, next_match], - [InRange(22, 25), Rejects(25, 28), InRange(40, 43), Rejects(43, 47), Done] - ); - - search_asserts!(STRESS, 'ก', "Forward search for three-byte Thai character; check if next() still works", - [next_match, next, next_match, next, next_match], - [InRange(22, 25), Rejects(25, 28), InRange(40, 43), Rejects(43, 47), Done] - ); - - search_asserts!(STRESS, '😁', "Forward search for four-byte emoji", - [next_match, next, next_match, next, next_match], - [InRange(15, 19), Rejects(19, 22), InRange(43, 47), Rejects(47, 48), Done] - ); - - search_asserts!(STRESS, '😁', "Forward search for four-byte emoji; check if next() still works", - [next_match, next, next_match, next, next_match], - [InRange(15, 19), Rejects(19, 22), InRange(43, 47), Rejects(47, 48), Done] - ); - - search_asserts!(STRESS, 'ꁁ', "Forward search for three-byte Yi character with repeated bytes", - [next_match, next, next_match, next, next_match], - [InRange(10, 13), Rejects(13, 14), InRange(37, 40), Rejects(40, 43), Done] + search_asserts!( + STRESS, + 'Á', + "Forward search for two-byte Latin character; check if next() still works", + [next_match, next, next_match, next, next_match, next, next_match], + [ + InRange(0, 2), + Rejects(2, 3), + InRange(8, 10), + Rejects(10, 13), + InRange(32, 34), + Rejects(34, 37), + Done + ] + ); + + search_asserts!( + STRESS, + '각', + "Forward search for three-byte Hangul character", + [next_match, next, next_match, 
next_match, next_match], + [ + InRange(19, 22), + Rejects(22, 25), + InRange(28, 31), + InRange(34, 37), + Done + ] + ); + + search_asserts!( + STRESS, + '각', + "Forward search for three-byte Hangul character; check if next() still works", + [next_match, next, next_match, next, next_match, next, next_match], + [ + InRange(19, 22), + Rejects(22, 25), + InRange(28, 31), + Rejects(31, 32), + InRange(34, 37), + Rejects(37, 40), + Done + ] + ); + + search_asserts!( + STRESS, + 'ก', + "Forward search for three-byte Thai character", + [next_match, next, next_match, next, next_match], + [ + InRange(22, 25), + Rejects(25, 28), + InRange(40, 43), + Rejects(43, 47), + Done + ] + ); + + search_asserts!( + STRESS, + 'ก', + "Forward search for three-byte Thai character; check if next() still works", + [next_match, next, next_match, next, next_match], + [ + InRange(22, 25), + Rejects(25, 28), + InRange(40, 43), + Rejects(43, 47), + Done + ] + ); + + search_asserts!( + STRESS, + '😁', + "Forward search for four-byte emoji", + [next_match, next, next_match, next, next_match], + [ + InRange(15, 19), + Rejects(19, 22), + InRange(43, 47), + Rejects(47, 48), + Done + ] + ); + + search_asserts!( + STRESS, + '😁', + "Forward search for four-byte emoji; check if next() still works", + [next_match, next, next_match, next, next_match], + [ + InRange(15, 19), + Rejects(19, 22), + InRange(43, 47), + Rejects(47, 48), + Done + ] + ); + + search_asserts!( + STRESS, + 'ꁁ', + "Forward search for three-byte Yi character with repeated bytes", + [next_match, next, next_match, next, next_match], + [ + InRange(10, 13), + Rejects(13, 14), + InRange(37, 40), + Rejects(40, 43), + Done + ] ); search_asserts!(STRESS, 'ꁁ', "Forward search for three-byte Yi character with repeated bytes; check if next() still works", @@ -202,49 +402,183 @@ fn test_forward_search_shared_bytes() { #[test] fn test_reverse_search_shared_bytes() { - search_asserts!(STRESS, 'Á', "Reverse search for two-byte Latin character", - [next_match_back, next_match_back, next_match_back, next_match_back], - [InRange(32, 34), InRange(8, 10), InRange(0, 2), Done] - ); - - search_asserts!(STRESS, 'Á', "Reverse search for two-byte Latin character; check if next_back() still works", - [next_match_back, next_back, next_match_back, next_back, next_match_back, next_back], - [InRange(32, 34), Rejects(31, 32), InRange(8, 10), Rejects(7, 8), InRange(0, 2), Done] - ); - - search_asserts!(STRESS, '각', "Reverse search for three-byte Hangul character", - [next_match_back, next_back, next_match_back, next_match_back, next_match_back], - [InRange(34, 37), Rejects(32, 34), InRange(28, 31), InRange(19, 22), Done] - ); - - search_asserts!(STRESS, '각', "Reverse search for three-byte Hangul character; check if next_back() still works", - [next_match_back, next_back, next_match_back, next_back, next_match_back, next_back, next_match_back], - [InRange(34, 37), Rejects(32, 34), InRange(28, 31), Rejects(25, 28), InRange(19, 22), Rejects(15, 19), Done] - ); - - search_asserts!(STRESS, 'ก', "Reverse search for three-byte Thai character", - [next_match_back, next_back, next_match_back, next_back, next_match_back], - [InRange(40, 43), Rejects(37, 40), InRange(22, 25), Rejects(19, 22), Done] - ); - - search_asserts!(STRESS, 'ก', "Reverse search for three-byte Thai character; check if next_back() still works", - [next_match_back, next_back, next_match_back, next_back, next_match_back], - [InRange(40, 43), Rejects(37, 40), InRange(22, 25), Rejects(19, 22), Done] - ); - - search_asserts!(STRESS, '😁', 
"Reverse search for four-byte emoji", - [next_match_back, next_back, next_match_back, next_back, next_match_back], - [InRange(43, 47), Rejects(40, 43), InRange(15, 19), Rejects(14, 15), Done] - ); - - search_asserts!(STRESS, '😁', "Reverse search for four-byte emoji; check if next_back() still works", - [next_match_back, next_back, next_match_back, next_back, next_match_back], - [InRange(43, 47), Rejects(40, 43), InRange(15, 19), Rejects(14, 15), Done] - ); - - search_asserts!(STRESS, 'ꁁ', "Reverse search for three-byte Yi character with repeated bytes", - [next_match_back, next_back, next_match_back, next_back, next_match_back], - [InRange(37, 40), Rejects(34, 37), InRange(10, 13), Rejects(8, 10), Done] + search_asserts!( + STRESS, + 'Á', + "Reverse search for two-byte Latin character", + [ + next_match_back, + next_match_back, + next_match_back, + next_match_back + ], + [InRange(32, 34), InRange(8, 10), InRange(0, 2), Done] + ); + + search_asserts!( + STRESS, + 'Á', + "Reverse search for two-byte Latin character; check if next_back() still works", + [ + next_match_back, + next_back, + next_match_back, + next_back, + next_match_back, + next_back + ], + [ + InRange(32, 34), + Rejects(31, 32), + InRange(8, 10), + Rejects(7, 8), + InRange(0, 2), + Done + ] + ); + + search_asserts!( + STRESS, + '각', + "Reverse search for three-byte Hangul character", + [ + next_match_back, + next_back, + next_match_back, + next_match_back, + next_match_back + ], + [ + InRange(34, 37), + Rejects(32, 34), + InRange(28, 31), + InRange(19, 22), + Done + ] + ); + + search_asserts!( + STRESS, + '각', + "Reverse search for three-byte Hangul character; check if next_back() still works", + [ + next_match_back, + next_back, + next_match_back, + next_back, + next_match_back, + next_back, + next_match_back + ], + [ + InRange(34, 37), + Rejects(32, 34), + InRange(28, 31), + Rejects(25, 28), + InRange(19, 22), + Rejects(15, 19), + Done + ] + ); + + search_asserts!( + STRESS, + 'ก', + "Reverse search for three-byte Thai character", + [ + next_match_back, + next_back, + next_match_back, + next_back, + next_match_back + ], + [ + InRange(40, 43), + Rejects(37, 40), + InRange(22, 25), + Rejects(19, 22), + Done + ] + ); + + search_asserts!( + STRESS, + 'ก', + "Reverse search for three-byte Thai character; check if next_back() still works", + [ + next_match_back, + next_back, + next_match_back, + next_back, + next_match_back + ], + [ + InRange(40, 43), + Rejects(37, 40), + InRange(22, 25), + Rejects(19, 22), + Done + ] + ); + + search_asserts!( + STRESS, + '😁', + "Reverse search for four-byte emoji", + [ + next_match_back, + next_back, + next_match_back, + next_back, + next_match_back + ], + [ + InRange(43, 47), + Rejects(40, 43), + InRange(15, 19), + Rejects(14, 15), + Done + ] + ); + + search_asserts!( + STRESS, + '😁', + "Reverse search for four-byte emoji; check if next_back() still works", + [ + next_match_back, + next_back, + next_match_back, + next_back, + next_match_back + ], + [ + InRange(43, 47), + Rejects(40, 43), + InRange(15, 19), + Rejects(14, 15), + Done + ] + ); + + search_asserts!( + STRESS, + 'ꁁ', + "Reverse search for three-byte Yi character with repeated bytes", + [ + next_match_back, + next_back, + next_match_back, + next_back, + next_match_back + ], + [ + InRange(37, 40), + Rejects(34, 37), + InRange(10, 13), + Rejects(8, 10), + Done + ] ); search_asserts!(STRESS, 'ꁁ', "Reverse search for three-byte Yi character with repeated bytes; check if next_back() still works", @@ -257,36 +591,102 @@ fn 
test_reverse_search_shared_bytes() { fn double_ended_regression_test() { // https://github.com/rust-lang/rust/issues/47175 // Ensures that double ended searching comes to a convergence - search_asserts!("abcdeabcdeabcde", 'a', "alternating double ended search", - [next_match, next_match_back, next_match, next_match_back], + search_asserts!( + "abcdeabcdeabcde", + 'a', + "alternating double ended search", + [next_match, next_match_back, next_match, next_match_back], [InRange(0, 1), InRange(10, 11), InRange(5, 6), Done] ); - search_asserts!("abcdeabcdeabcde", 'a', "triple double ended search for a", - [next_match, next_match_back, next_match_back, next_match_back], + search_asserts!( + "abcdeabcdeabcde", + 'a', + "triple double ended search for a", + [ + next_match, + next_match_back, + next_match_back, + next_match_back + ], [InRange(0, 1), InRange(10, 11), InRange(5, 6), Done] ); - search_asserts!("abcdeabcdeabcde", 'd', "triple double ended search for d", - [next_match, next_match_back, next_match_back, next_match_back], + search_asserts!( + "abcdeabcdeabcde", + 'd', + "triple double ended search for d", + [ + next_match, + next_match_back, + next_match_back, + next_match_back + ], [InRange(3, 4), InRange(13, 14), InRange(8, 9), Done] ); - search_asserts!(STRESS, 'Á', "Double ended search for two-byte Latin character", - [next_match, next_match_back, next_match, next_match_back], + search_asserts!( + STRESS, + 'Á', + "Double ended search for two-byte Latin character", + [next_match, next_match_back, next_match, next_match_back], [InRange(0, 2), InRange(32, 34), InRange(8, 10), Done] ); - search_asserts!(STRESS, '각', "Reverse double ended search for three-byte Hangul character", - [next_match_back, next_back, next_match, next, next_match_back, next_match], - [InRange(34, 37), Rejects(32, 34), InRange(19, 22), Rejects(22, 25), InRange(28, 31), Done] - ); - search_asserts!(STRESS, 'ก', "Double ended search for three-byte Thai character", - [next_match, next_back, next, next_match_back, next_match], - [InRange(22, 25), Rejects(47, 48), Rejects(25, 28), InRange(40, 43), Done] - ); - search_asserts!(STRESS, '😁', "Double ended search for four-byte emoji", - [next_match_back, next, next_match, next_back, next_match], - [InRange(43, 47), Rejects(0, 2), InRange(15, 19), Rejects(40, 43), Done] - ); - search_asserts!(STRESS, 'ꁁ', "Double ended search for three-byte Yi character with repeated bytes", - [next_match, next, next_match_back, next_back, next_match], - [InRange(10, 13), Rejects(13, 14), InRange(37, 40), Rejects(34, 37), Done] + search_asserts!( + STRESS, + '각', + "Reverse double ended search for three-byte Hangul character", + [ + next_match_back, + next_back, + next_match, + next, + next_match_back, + next_match + ], + [ + InRange(34, 37), + Rejects(32, 34), + InRange(19, 22), + Rejects(22, 25), + InRange(28, 31), + Done + ] + ); + search_asserts!( + STRESS, + 'ก', + "Double ended search for three-byte Thai character", + [next_match, next_back, next, next_match_back, next_match], + [ + InRange(22, 25), + Rejects(47, 48), + Rejects(25, 28), + InRange(40, 43), + Done + ] + ); + search_asserts!( + STRESS, + '😁', + "Double ended search for four-byte emoji", + [next_match_back, next, next_match, next_back, next_match], + [ + InRange(43, 47), + Rejects(0, 2), + InRange(15, 19), + Rejects(40, 43), + Done + ] + ); + search_asserts!( + STRESS, + 'ꁁ', + "Double ended search for three-byte Yi character with repeated bytes", + [next_match, next, next_match_back, next_back, next_match], + [ + 
InRange(10, 13), + Rejects(13, 14), + InRange(37, 40), + Rejects(34, 37), + Done + ] ); } diff --git a/src/libcore/tests/ptr.rs b/src/libcore/tests/ptr.rs index 65c1a3e0254d2..a482af925a736 100644 --- a/src/libcore/tests/ptr.rs +++ b/src/libcore/tests/ptr.rs @@ -1,14 +1,14 @@ -use core::ptr::*; use core::cell::RefCell; +use core::ptr::*; #[test] fn test() { unsafe { struct Pair { fst: isize, - snd: isize + snd: isize, }; - let mut p = Pair {fst: 10, snd: 20}; + let mut p = Pair { fst: 10, snd: 20 }; let pptr: *mut Pair = &mut p; let iptr: *mut isize = pptr as *mut isize; assert_eq!(*iptr, 10); @@ -16,7 +16,7 @@ fn test() { assert_eq!(*iptr, 30); assert_eq!(p.fst, 30); - *pptr = Pair {fst: 50, snd: 60}; + *pptr = Pair { fst: 50, snd: 60 }; assert_eq!(*iptr, 50); assert_eq!(p.fst, 50); assert_eq!(p.snd, 60); @@ -25,17 +25,11 @@ fn test() { let mut v1 = vec![0u16, 0u16, 0u16]; copy(v0.as_ptr().offset(1), v1.as_mut_ptr().offset(1), 1); - assert!((v1[0] == 0u16 && - v1[1] == 32001u16 && - v1[2] == 0u16)); + assert!((v1[0] == 0u16 && v1[1] == 32001u16 && v1[2] == 0u16)); copy(v0.as_ptr().offset(2), v1.as_mut_ptr(), 1); - assert!((v1[0] == 32002u16 && - v1[1] == 32001u16 && - v1[2] == 0u16)); + assert!((v1[0] == 32002u16 && v1[1] == 32001u16 && v1[2] == 0u16)); copy(v0.as_ptr(), v1.as_mut_ptr().offset(2), 1); - assert!((v1[0] == 32002u16 && - v1[1] == 32001u16 && - v1[2] == 32000u16)); + assert!((v1[0] == 32002u16 && v1[1] == 32001u16 && v1[2] == 32000u16)); } } @@ -208,7 +202,7 @@ fn test_ptr_addition() { #[test] fn test_ptr_subtraction() { unsafe { - let xs = vec![0,1,2,3,4,5,6,7,8,9]; + let xs = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; let mut idx = 9; let ptr = xs.as_ptr(); @@ -226,7 +220,7 @@ fn test_ptr_subtraction() { m_ptr = m_ptr.offset(-1); } - assert_eq!(xs_mut, [0,2,4,6,8,10,12,14,16,18]); + assert_eq!(xs_mut, [0, 2, 4, 6, 8, 10, 12, 14, 16, 18]); } } @@ -234,7 +228,9 @@ fn test_ptr_subtraction() { fn test_set_memory() { let mut xs = [0u8; 20]; let ptr = xs.as_mut_ptr(); - unsafe { write_bytes(ptr, 5u8, xs.len()); } + unsafe { + write_bytes(ptr, 5u8, xs.len()); + } assert!(xs == [5u8; 20]); } @@ -254,10 +250,10 @@ fn test_unsized_nonnull() { #[no_mangle] pub fn test_variadic_fnptr() { use core::hash::{Hash, SipHasher}; - extern { + extern "C" { fn test_variadic_fnptr(_: u64, ...) -> f64; } - let p: unsafe extern fn(u64, ...) -> f64 = test_variadic_fnptr; + let p: unsafe extern "C" fn(u64, ...) -> f64 = test_variadic_fnptr; let q = p.clone(); assert_eq!(p, q); assert!(!(p < q)); @@ -282,7 +278,9 @@ fn write_unaligned_drop() { { let c = Dropper(0); let mut t = Dropper(1); - unsafe { write_unaligned(&mut t, c); } + unsafe { + write_unaligned(&mut t, c); + } } DROPS.with(|d| assert_eq!(*d.borrow(), [0])); } @@ -307,11 +305,16 @@ fn align_offset_stride1() { // number of bytes. 
let mut align = 1; while align < 1024 { - for ptr in 1..2*align { + for ptr in 1..2 * align { let expected = ptr % align; let offset = if expected == 0 { 0 } else { align - expected }; - assert_eq!((ptr as *const u8).align_offset(align), offset, - "ptr = {}, align = {}, size = 1", ptr, align); + assert_eq!( + (ptr as *const u8).align_offset(align), + offset, + "ptr = {}, align = {}, size = 1", + ptr, + align + ); } align = (align + 1).next_power_of_two(); } @@ -347,8 +350,14 @@ fn align_offset_weird_strides() { } let got = ptr.align_offset(align); if got != expected { - eprintln!("aligning {:p} (with stride of {}) to {}, expected {}, got {}", ptr, - ::std::mem::size_of::(), align, expected, got); + eprintln!( + "aligning {:p} (with stride of {}) to {}, expected {}, got {}", + ptr, + ::std::mem::size_of::(), + align, + expected, + got + ); return true; } return false; @@ -359,7 +368,7 @@ fn align_offset_weird_strides() { let mut align = 1; let mut x = false; while align < 1024 { - for ptr in 1usize..4*align { + for ptr in 1usize..4 * align { unsafe { x |= test_weird_stride::(ptr as *const A3, align); x |= test_weird_stride::(ptr as *const A4, align); diff --git a/src/libcore/tests/result.rs b/src/libcore/tests/result.rs index 1fab07526a07f..a99743c74b887 100644 --- a/src/libcore/tests/result.rs +++ b/src/libcore/tests/result.rs @@ -1,29 +1,54 @@ use core::option::*; -fn op1() -> Result { Ok(666) } -fn op2() -> Result { Err("sadface") } +fn op1() -> Result { + Ok(666) +} +fn op2() -> Result { + Err("sadface") +} #[test] fn test_and() { assert_eq!(op1().and(Ok(667)).unwrap(), 667); - assert_eq!(op1().and(Err::("bad")).unwrap_err(), - "bad"); + assert_eq!( + op1().and(Err::("bad")).unwrap_err(), + "bad" + ); assert_eq!(op2().and(Ok(667)).unwrap_err(), "sadface"); - assert_eq!(op2().and(Err::("bad")).unwrap_err(), - "sadface"); + assert_eq!( + op2().and(Err::("bad")).unwrap_err(), + "sadface" + ); } #[test] fn test_and_then() { - assert_eq!(op1().and_then(|i| Ok::(i + 1)).unwrap(), 667); - assert_eq!(op1().and_then(|_| Err::("bad")).unwrap_err(), - "bad"); - - assert_eq!(op2().and_then(|i| Ok::(i + 1)).unwrap_err(), - "sadface"); - assert_eq!(op2().and_then(|_| Err::("bad")).unwrap_err(), - "sadface"); + assert_eq!( + op1() + .and_then(|i| Ok::(i + 1)) + .unwrap(), + 667 + ); + assert_eq!( + op1() + .and_then(|_| Err::("bad")) + .unwrap_err(), + "bad" + ); + + assert_eq!( + op2() + .and_then(|i| Ok::(i + 1)) + .unwrap_err(), + "sadface" + ); + assert_eq!( + op2() + .and_then(|_| Err::("bad")) + .unwrap_err(), + "sadface" + ); } #[test] @@ -37,12 +62,25 @@ fn test_or() { #[test] fn test_or_else() { - assert_eq!(op1().or_else(|_| Ok::(667)).unwrap(), 666); - assert_eq!(op1().or_else(|e| Err::(e)).unwrap(), 666); - - assert_eq!(op2().or_else(|_| Ok::(667)).unwrap(), 667); - assert_eq!(op2().or_else(|e| Err::(e)).unwrap_err(), - "sadface"); + assert_eq!( + op1().or_else(|_| Ok::(667)).unwrap(), + 666 + ); + assert_eq!( + op1().or_else(|e| Err::(e)).unwrap(), + 666 + ); + + assert_eq!( + op2().or_else(|_| Ok::(667)).unwrap(), + 667 + ); + assert_eq!( + op2() + .or_else(|e| Err::(e)) + .unwrap_err(), + "sadface" + ); } #[test] @@ -65,9 +103,7 @@ fn test_collect() { let v: Result, ()> = (0..3).map(|x| Ok::(x)).collect(); assert!(v == Ok(vec![0, 1, 2])); - let v: Result, isize> = (0..3).map(|x| { - if x > 1 { Err(x) } else { Ok(x) } - }).collect(); + let v: Result, isize> = (0..3).map(|x| if x > 1 { Err(x) } else { Ok(x) }).collect(); assert!(v == Err(2)); // test that it does not take more elements 
than it needs @@ -127,30 +163,28 @@ pub fn test_unwrap_or_else_panic() { } let bad_err: Result = Err("Unrecoverable mess."); - let _ : isize = bad_err.unwrap_or_else(handler); + let _: isize = bad_err.unwrap_or_else(handler); } - #[test] pub fn test_expect_ok() { let ok: Result = Ok(100); assert_eq!(ok.expect("Unexpected error"), 100); } #[test] -#[should_panic(expected="Got expected error: \"All good\"")] +#[should_panic(expected = "Got expected error: \"All good\"")] pub fn test_expect_err() { let err: Result = Err("All good"); err.expect("Got expected error"); } - #[test] pub fn test_expect_err_err() { let ok: Result<&'static str, isize> = Err(100); assert_eq!(ok.expect_err("Unexpected ok"), 100); } #[test] -#[should_panic(expected="Got expected ok: \"All good\"")] +#[should_panic(expected = "Got expected ok: \"All good\"")] pub fn test_expect_err_ok() { let err: Result<&'static str, isize> = Ok("All good"); err.expect_err("Got expected ok"); diff --git a/src/libcore/tests/slice.rs b/src/libcore/tests/slice.rs index 2c96efbda7673..c35ee9bd0c824 100644 --- a/src/libcore/tests/slice.rs +++ b/src/libcore/tests/slice.rs @@ -1,4 +1,4 @@ -use core::result::Result::{Ok, Err}; +use core::result::Result::{Err, Ok}; #[test] fn test_position() { @@ -50,8 +50,14 @@ fn test_binary_search() { assert_eq!(b.binary_search(&0), Err(0)); assert_eq!(b.binary_search(&1), Ok(0)); assert_eq!(b.binary_search(&2), Err(1)); - assert!(match b.binary_search(&3) { Ok(1..=3) => true, _ => false }); - assert!(match b.binary_search(&3) { Ok(1..=3) => true, _ => false }); + assert!(match b.binary_search(&3) { + Ok(1..=3) => true, + _ => false, + }); + assert!(match b.binary_search(&3) { + Ok(1..=3) => true, + _ => false, + }); assert_eq!(b.binary_search(&4), Err(4)); assert_eq!(b.binary_search(&5), Err(4)); assert_eq!(b.binary_search(&6), Err(4)); @@ -150,7 +156,8 @@ fn test_chunks_zip() { let v1: &[i32] = &[0, 1, 2, 3, 4]; let v2: &[i32] = &[6, 7, 8, 9, 10]; - let res = v1.chunks(2) + let res = v1 + .chunks(2) .zip(v2.chunks(2)) .map(|(a, b)| a.iter().sum::() + b.iter().sum::()) .collect::>(); @@ -261,7 +268,8 @@ fn test_chunks_exact_zip() { let v1: &[i32] = &[0, 1, 2, 3, 4]; let v2: &[i32] = &[6, 7, 8, 9, 10]; - let res = v1.chunks_exact(2) + let res = v1 + .chunks_exact(2) .zip(v2.chunks_exact(2)) .map(|(a, b)| a.iter().sum::() + b.iter().sum::()) .collect::>(); @@ -372,7 +380,8 @@ fn test_rchunks_zip() { let v1: &[i32] = &[0, 1, 2, 3, 4]; let v2: &[i32] = &[6, 7, 8, 9, 10]; - let res = v1.rchunks(2) + let res = v1 + .rchunks(2) .zip(v2.rchunks(2)) .map(|(a, b)| a.iter().sum::() + b.iter().sum::()) .collect::>(); @@ -483,7 +492,8 @@ fn test_rchunks_exact_zip() { let v1: &[i32] = &[0, 1, 2, 3, 4]; let v2: &[i32] = &[6, 7, 8, 9, 10]; - let res = v1.rchunks_exact(2) + let res = v1 + .rchunks_exact(2) .zip(v2.rchunks_exact(2)) .map(|(a, b)| a.iter().sum::() + b.iter().sum::()) .collect::>(); @@ -594,7 +604,8 @@ fn test_windows_zip() { let v1: &[i32] = &[0, 1, 2, 3, 4]; let v2: &[i32] = &[6, 7, 8, 9, 10]; - let res = v1.windows(2) + let res = v1 + .windows(2) .zip(v2.windows(2)) .map(|(a, b)| a.iter().sum::() + b.iter().sum::()) .collect::>(); @@ -607,11 +618,11 @@ fn test_windows_zip() { fn test_iter_ref_consistency() { use std::fmt::Debug; - fn test(x : T) { - let v : &[T] = &[x, x, x]; - let v_ptrs : [*const T; 3] = match v { + fn test(x: T) { + let v: &[T] = &[x, x, x]; + let v_ptrs: [*const T; 3] = match v { [ref v1, ref v2, ref v3] => [v1 as *const _, v2 as *const _, v3 as *const _], - _ => unreachable!() + _ => 
unreachable!(), }; let len = v.len(); @@ -644,7 +655,11 @@ fn test_iter_ref_consistency() { assert_eq!(next as *const _, v_ptrs[i]); } assert_eq!(it.size_hint(), (0, Some(0))); - assert_eq!(it.next(), None, "The final call to next() should return None"); + assert_eq!( + it.next(), + None, + "The final call to next() should return None" + ); } // next_back() @@ -655,19 +670,26 @@ fn test_iter_ref_consistency() { assert_eq!(it.size_hint(), (remaining, Some(remaining))); let prev = it.next_back().unwrap(); - assert_eq!(prev as *const _, v_ptrs[remaining-1]); + assert_eq!(prev as *const _, v_ptrs[remaining - 1]); } assert_eq!(it.size_hint(), (0, Some(0))); - assert_eq!(it.next_back(), None, "The final call to next_back() should return None"); + assert_eq!( + it.next_back(), + None, + "The final call to next_back() should return None" + ); } } - fn test_mut(x : T) { - let v : &mut [T] = &mut [x, x, x]; - let v_ptrs : [*mut T; 3] = match v { - [ref v1, ref v2, ref v3] => - [v1 as *const _ as *mut _, v2 as *const _ as *mut _, v3 as *const _ as *mut _], - _ => unreachable!() + fn test_mut(x: T) { + let v: &mut [T] = &mut [x, x, x]; + let v_ptrs: [*mut T; 3] = match v { + [ref v1, ref v2, ref v3] => [ + v1 as *const _ as *mut _, + v2 as *const _ as *mut _, + v3 as *const _ as *mut _, + ], + _ => unreachable!(), }; let len = v.len(); @@ -700,7 +722,11 @@ fn test_iter_ref_consistency() { assert_eq!(next as *mut _, v_ptrs[i]); } assert_eq!(it.size_hint(), (0, Some(0))); - assert_eq!(it.next(), None, "The final call to next() should return None"); + assert_eq!( + it.next(), + None, + "The final call to next() should return None" + ); } // next_back() @@ -711,10 +737,14 @@ fn test_iter_ref_consistency() { assert_eq!(it.size_hint(), (remaining, Some(remaining))); let prev = it.next_back().unwrap(); - assert_eq!(prev as *mut _, v_ptrs[remaining-1]); + assert_eq!(prev as *mut _, v_ptrs[remaining - 1]); } assert_eq!(it.size_hint(), (0, Some(0))); - assert_eq!(it.next_back(), None, "The final call to next_back() should return None"); + assert_eq!( + it.next_back(), + None, + "The final call to next_back() should return None" + ); } } @@ -735,8 +765,7 @@ mod slice_index { // This checks all six indexing methods, given an input range that // should succeed. (it is NOT suitable for testing invalid inputs) macro_rules! 
assert_range_eq { - ($arr:expr, $range:expr, $expected:expr) - => { + ($arr:expr, $range:expr, $expected:expr) => { let mut arr = $arr; let mut expected = $expected; { @@ -747,7 +776,8 @@ mod slice_index { assert_eq!(s.get($range), Some(expected), "(in assertion for: get)"); unsafe { assert_eq!( - s.get_unchecked($range), expected, + s.get_unchecked($range), + expected, "(in assertion for: get_unchecked)", ); } @@ -756,22 +786,21 @@ mod slice_index { let s: &mut [_] = &mut arr; let expected: &mut [_] = &mut expected; + assert_eq!(&mut s[$range], expected, "(in assertion for: index_mut)",); assert_eq!( - &mut s[$range], expected, - "(in assertion for: index_mut)", - ); - assert_eq!( - s.get_mut($range), Some(&mut expected[..]), + s.get_mut($range), + Some(&mut expected[..]), "(in assertion for: get_mut)", ); unsafe { assert_eq!( - s.get_unchecked_mut($range), expected, + s.get_unchecked_mut($range), + expected, "(in assertion for: get_unchecked_mut)", ); } } - } + }; } // Make sure the macro can actually detect bugs, @@ -964,8 +993,8 @@ fn test_find_rfind() { #[test] fn test_iter_folds() { let a = [1, 2, 3, 4, 5]; // len>4 so the unroll is used - assert_eq!(a.iter().fold(0, |acc, &x| 2*acc + x), 57); - assert_eq!(a.iter().rfold(0, |acc, &x| 2*acc + x), 129); + assert_eq!(a.iter().fold(0, |acc, &x| 2 * acc + x), 57); + assert_eq!(a.iter().rfold(0, |acc, &x| 2 * acc + x), 129); let fold = |acc: i32, &x| acc.checked_mul(2)?.checked_add(x); assert_eq!(a.iter().try_fold(0, &fold), Some(57)); assert_eq!(a.iter().try_rfold(0, &fold), Some(129)); @@ -1014,7 +1043,7 @@ fn test_rotate_right() { fn sort_unstable() { use core::cmp::Ordering::{Equal, Greater, Less}; use core::slice::heapsort; - use rand::{FromEntropy, Rng, rngs::SmallRng, seq::SliceRandom}; + use rand::{rngs::SmallRng, seq::SliceRandom, FromEntropy, Rng}; let mut v = [0; 600]; let mut tmp = [0; 600]; @@ -1175,9 +1204,15 @@ fn test_align_to_simple() { let expect2 = [1 | 2 << 8, 3 | 4 << 8, 5 | 6 << 8]; let expect3 = [2 << 8 | 3, 4 << 8 | 5, 6 << 8 | 7]; let expect4 = [2 | 3 << 8, 4 | 5 << 8, 6 | 7 << 8]; - assert!(aligned == expect1 || aligned == expect2 || aligned == expect3 || aligned == expect4, - "aligned={:?} expected={:?} || {:?} || {:?} || {:?}", - aligned, expect1, expect2, expect3, expect4); + assert!( + aligned == expect1 || aligned == expect2 || aligned == expect3 || aligned == expect4, + "aligned={:?} expected={:?} || {:?} || {:?} || {:?}", + aligned, + expect1, + expect2, + expect3, + expect4 + ); } #[test] @@ -1190,10 +1225,20 @@ fn test_align_to_zst() { #[test] fn test_align_to_non_trivial() { - #[repr(align(8))] struct U64(u64, u64); - #[repr(align(8))] struct U64U64U32(u64, u64, u32); - let data = [U64(1, 2), U64(3, 4), U64(5, 6), U64(7, 8), U64(9, 10), U64(11, 12), U64(13, 14), - U64(15, 16)]; + #[repr(align(8))] + struct U64(u64, u64); + #[repr(align(8))] + struct U64U64U32(u64, u64, u32); + let data = [ + U64(1, 2), + U64(3, 4), + U64(5, 6), + U64(7, 8), + U64(9, 10), + U64(11, 12), + U64(13, 14), + U64(15, 16), + ]; let (prefix, aligned, suffix) = unsafe { data.align_to::() }; assert_eq!(aligned.len(), 4); assert_eq!(prefix.len() + suffix.len(), 2); @@ -1208,7 +1253,7 @@ fn test_align_to_empty_mid() { let bytes = [1, 2, 3, 4, 5, 6, 7]; type Chunk = u32; for offset in 0..4 { - let (_, mid, _) = unsafe { bytes[offset..offset+1].align_to::() }; + let (_, mid, _) = unsafe { bytes[offset..offset + 1].align_to::() }; assert_eq!(mid.as_ptr() as usize % mem::align_of::(), 0); } } diff --git a/src/libcore/tests/str_lossy.rs 
b/src/libcore/tests/str_lossy.rs index f9fd333cca712..5fdbc47cd2d8b 100644 --- a/src/libcore/tests/str_lossy.rs +++ b/src/libcore/tests/str_lossy.rs @@ -3,65 +3,269 @@ use core::str::lossy::*; #[test] fn chunks() { let mut iter = Utf8Lossy::from_bytes(b"hello").chunks(); - assert_eq!(Some(Utf8LossyChunk { valid: "hello", broken: b"", }), iter.next()); + assert_eq!( + Some(Utf8LossyChunk { + valid: "hello", + broken: b"", + }), + iter.next() + ); assert_eq!(None, iter.next()); let mut iter = Utf8Lossy::from_bytes("ศไทย中华Việt Nam".as_bytes()).chunks(); - assert_eq!(Some(Utf8LossyChunk { valid: "ศไทย中华Việt Nam", broken: b"", }), iter.next()); + assert_eq!( + Some(Utf8LossyChunk { + valid: "ศไทย中华Việt Nam", + broken: b"", + }), + iter.next() + ); assert_eq!(None, iter.next()); let mut iter = Utf8Lossy::from_bytes(b"Hello\xC2 There\xFF Goodbye").chunks(); - assert_eq!(Some(Utf8LossyChunk { valid: "Hello", broken: b"\xC2", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: " There", broken: b"\xFF", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: " Goodbye", broken: b"", }), iter.next()); + assert_eq!( + Some(Utf8LossyChunk { + valid: "Hello", + broken: b"\xC2", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: " There", + broken: b"\xFF", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: " Goodbye", + broken: b"", + }), + iter.next() + ); assert_eq!(None, iter.next()); let mut iter = Utf8Lossy::from_bytes(b"Hello\xC0\x80 There\xE6\x83 Goodbye").chunks(); - assert_eq!(Some(Utf8LossyChunk { valid: "Hello", broken: b"\xC0", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\x80", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: " There", broken: b"\xE6\x83", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: " Goodbye", broken: b"", }), iter.next()); + assert_eq!( + Some(Utf8LossyChunk { + valid: "Hello", + broken: b"\xC0", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\x80", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: " There", + broken: b"\xE6\x83", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: " Goodbye", + broken: b"", + }), + iter.next() + ); assert_eq!(None, iter.next()); let mut iter = Utf8Lossy::from_bytes(b"\xF5foo\xF5\x80bar").chunks(); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\xF5", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "foo", broken: b"\xF5", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\x80", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "bar", broken: b"", }), iter.next()); + assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\xF5", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "foo", + broken: b"\xF5", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\x80", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "bar", + broken: b"", + }), + iter.next() + ); assert_eq!(None, iter.next()); let mut iter = Utf8Lossy::from_bytes(b"\xF1foo\xF1\x80bar\xF1\x80\x80baz").chunks(); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\xF1", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "foo", broken: b"\xF1\x80", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "bar", broken: b"\xF1\x80\x80", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "baz", broken: b"", }), iter.next()); + 
assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\xF1", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "foo", + broken: b"\xF1\x80", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "bar", + broken: b"\xF1\x80\x80", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "baz", + broken: b"", + }), + iter.next() + ); assert_eq!(None, iter.next()); let mut iter = Utf8Lossy::from_bytes(b"\xF4foo\xF4\x80bar\xF4\xBFbaz").chunks(); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\xF4", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "foo", broken: b"\xF4\x80", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "bar", broken: b"\xF4", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\xBF", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "baz", broken: b"", }), iter.next()); + assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\xF4", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "foo", + broken: b"\xF4\x80", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "bar", + broken: b"\xF4", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\xBF", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "baz", + broken: b"", + }), + iter.next() + ); assert_eq!(None, iter.next()); let mut iter = Utf8Lossy::from_bytes(b"\xF0\x80\x80\x80foo\xF0\x90\x80\x80bar").chunks(); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\xF0", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\x80", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\x80", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\x80", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "foo\u{10000}bar", broken: b"", }), iter.next()); + assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\xF0", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\x80", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\x80", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\x80", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "foo\u{10000}bar", + broken: b"", + }), + iter.next() + ); assert_eq!(None, iter.next()); // surrogates let mut iter = Utf8Lossy::from_bytes(b"\xED\xA0\x80foo\xED\xBF\xBFbar").chunks(); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\xED", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\xA0", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\x80", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "foo", broken: b"\xED", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\xBF", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "", broken: b"\xBF", }), iter.next()); - assert_eq!(Some(Utf8LossyChunk { valid: "bar", broken: b"", }), iter.next()); + assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\xED", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\xA0", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\x80", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "foo", + broken: b"\xED", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: 
"", + broken: b"\xBF", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "", + broken: b"\xBF", + }), + iter.next() + ); + assert_eq!( + Some(Utf8LossyChunk { + valid: "bar", + broken: b"", + }), + iter.next() + ); assert_eq!(None, iter.next()); } @@ -69,13 +273,17 @@ fn chunks() { fn display() { assert_eq!( "Hello\u{FFFD}\u{FFFD} There\u{FFFD} Goodbye", - &Utf8Lossy::from_bytes(b"Hello\xC0\x80 There\xE6\x83 Goodbye").to_string()); + &Utf8Lossy::from_bytes(b"Hello\xC0\x80 There\xE6\x83 Goodbye").to_string() + ); } #[test] fn debug() { assert_eq!( "\"Hello\\xc0\\x80 There\\xe6\\x83 Goodbye\\u{10d4ea}\"", - &format!("{:?}", Utf8Lossy::from_bytes( - b"Hello\xC0\x80 There\xE6\x83 Goodbye\xf4\x8d\x93\xaa"))); + &format!( + "{:?}", + Utf8Lossy::from_bytes(b"Hello\xC0\x80 There\xE6\x83 Goodbye\xf4\x8d\x93\xaa") + ) + ); } diff --git a/src/libcore/tests/time.rs b/src/libcore/tests/time.rs index 6efd22572dc18..d42aea2b86582 100644 --- a/src/libcore/tests/time.rs +++ b/src/libcore/tests/time.rs @@ -3,10 +3,14 @@ use core::time::Duration; #[test] fn creation() { assert!(Duration::from_secs(1) != Duration::from_secs(0)); - assert_eq!(Duration::from_secs(1) + Duration::from_secs(2), - Duration::from_secs(3)); - assert_eq!(Duration::from_millis(10) + Duration::from_secs(4), - Duration::new(4, 10 * 1_000_000)); + assert_eq!( + Duration::from_secs(1) + Duration::from_secs(2), + Duration::from_secs(3) + ); + assert_eq!( + Duration::from_millis(10) + Duration::from_secs(4), + Duration::new(4, 10 * 1_000_000) + ); assert_eq!(Duration::from_millis(4000), Duration::new(4, 0)); } @@ -62,35 +66,55 @@ fn nanos() { assert_eq!(Duration::from_millis(1001).subsec_nanos(), 1_000_000); assert_eq!(Duration::from_micros(999_999).subsec_nanos(), 999_999_000); assert_eq!(Duration::from_micros(1_000_001).subsec_nanos(), 1000); - assert_eq!(Duration::from_nanos(999_999_999).subsec_nanos(), 999_999_999); + assert_eq!( + Duration::from_nanos(999_999_999).subsec_nanos(), + 999_999_999 + ); assert_eq!(Duration::from_nanos(1_000_000_001).subsec_nanos(), 1); } #[test] fn add() { - assert_eq!(Duration::new(0, 0) + Duration::new(0, 1), - Duration::new(0, 1)); - assert_eq!(Duration::new(0, 500_000_000) + Duration::new(0, 500_000_001), - Duration::new(1, 1)); + assert_eq!( + Duration::new(0, 0) + Duration::new(0, 1), + Duration::new(0, 1) + ); + assert_eq!( + Duration::new(0, 500_000_000) + Duration::new(0, 500_000_001), + Duration::new(1, 1) + ); } #[test] fn checked_add() { - assert_eq!(Duration::new(0, 0).checked_add(Duration::new(0, 1)), - Some(Duration::new(0, 1))); - assert_eq!(Duration::new(0, 500_000_000).checked_add(Duration::new(0, 500_000_001)), - Some(Duration::new(1, 1))); - assert_eq!(Duration::new(1, 0).checked_add(Duration::new(::core::u64::MAX, 0)), None); + assert_eq!( + Duration::new(0, 0).checked_add(Duration::new(0, 1)), + Some(Duration::new(0, 1)) + ); + assert_eq!( + Duration::new(0, 500_000_000).checked_add(Duration::new(0, 500_000_001)), + Some(Duration::new(1, 1)) + ); + assert_eq!( + Duration::new(1, 0).checked_add(Duration::new(::core::u64::MAX, 0)), + None + ); } #[test] fn sub() { - assert_eq!(Duration::new(0, 1) - Duration::new(0, 0), - Duration::new(0, 1)); - assert_eq!(Duration::new(0, 500_000_001) - Duration::new(0, 500_000_000), - Duration::new(0, 1)); - assert_eq!(Duration::new(1, 0) - Duration::new(0, 1), - Duration::new(0, 999_999_999)); + assert_eq!( + Duration::new(0, 1) - Duration::new(0, 0), + Duration::new(0, 1) + ); + assert_eq!( + Duration::new(0, 500_000_001) - 
Duration::new(0, 500_000_000), + Duration::new(0, 1) + ); + assert_eq!( + Duration::new(1, 0) - Duration::new(0, 1), + Duration::new(0, 999_999_999) + ); } #[test] @@ -99,8 +123,10 @@ fn checked_sub() { let one_nano = Duration::new(0, 1); let one_sec = Duration::new(1, 0); assert_eq!(one_nano.checked_sub(zero), Some(Duration::new(0, 1))); - assert_eq!(one_sec.checked_sub(one_nano), - Some(Duration::new(0, 999_999_999))); + assert_eq!( + one_sec.checked_sub(one_nano), + Some(Duration::new(0, 999_999_999)) + ); assert_eq!(zero.checked_sub(one_nano), None); assert_eq!(zero.checked_sub(one_sec), None); } @@ -122,17 +148,30 @@ fn mul() { assert_eq!(Duration::new(0, 1) * 2, Duration::new(0, 2)); assert_eq!(Duration::new(1, 1) * 3, Duration::new(3, 3)); assert_eq!(Duration::new(0, 500_000_001) * 4, Duration::new(2, 4)); - assert_eq!(Duration::new(0, 500_000_001) * 4000, - Duration::new(2000, 4000)); + assert_eq!( + Duration::new(0, 500_000_001) * 4000, + Duration::new(2000, 4000) + ); } #[test] fn checked_mul() { - assert_eq!(Duration::new(0, 1).checked_mul(2), Some(Duration::new(0, 2))); - assert_eq!(Duration::new(1, 1).checked_mul(3), Some(Duration::new(3, 3))); - assert_eq!(Duration::new(0, 500_000_001).checked_mul(4), Some(Duration::new(2, 4))); - assert_eq!(Duration::new(0, 500_000_001).checked_mul(4000), - Some(Duration::new(2000, 4000))); + assert_eq!( + Duration::new(0, 1).checked_mul(2), + Some(Duration::new(0, 2)) + ); + assert_eq!( + Duration::new(1, 1).checked_mul(3), + Some(Duration::new(3, 3)) + ); + assert_eq!( + Duration::new(0, 500_000_001).checked_mul(4), + Some(Duration::new(2, 4)) + ); + assert_eq!( + Duration::new(0, 500_000_001).checked_mul(4000), + Some(Duration::new(2000, 4000)) + ); assert_eq!(Duration::new(::core::u64::MAX - 1, 0).checked_mul(2), None); } @@ -140,14 +179,22 @@ fn checked_mul() { fn div() { assert_eq!(Duration::new(0, 1) / 2, Duration::new(0, 0)); assert_eq!(Duration::new(1, 1) / 3, Duration::new(0, 333_333_333)); - assert_eq!(Duration::new(99, 999_999_000) / 100, - Duration::new(0, 999_999_990)); + assert_eq!( + Duration::new(99, 999_999_000) / 100, + Duration::new(0, 999_999_990) + ); } #[test] fn checked_div() { - assert_eq!(Duration::new(2, 0).checked_div(2), Some(Duration::new(1, 0))); - assert_eq!(Duration::new(1, 0).checked_div(2), Some(Duration::new(0, 500_000_000))); + assert_eq!( + Duration::new(2, 0).checked_div(2), + Some(Duration::new(1, 0)) + ); + assert_eq!( + Duration::new(1, 0).checked_div(2), + Some(Duration::new(0, 500_000_000)) + ); assert_eq!(Duration::new(2, 0).checked_div(0), None); } @@ -162,13 +209,16 @@ fn correct_sum() { Duration::new(5, 0), ]; let sum = durations.iter().sum::(); - assert_eq!(sum, Duration::new(1+2+5+4, 1_000_000_000 - 5)); + assert_eq!(sum, Duration::new(1 + 2 + 5 + 4, 1_000_000_000 - 5)); } #[test] fn debug_formatting_extreme_values() { assert_eq!( - format!("{:?}", Duration::new(18_446_744_073_709_551_615, 123_456_789)), + format!( + "{:?}", + Duration::new(18_446_744_073_709_551_615, 123_456_789) + ), "18446744073709551615.123456789s" ); } @@ -178,20 +228,41 @@ fn debug_formatting_secs() { assert_eq!(format!("{:?}", Duration::new(7, 000_000_000)), "7s"); assert_eq!(format!("{:?}", Duration::new(7, 100_000_000)), "7.1s"); assert_eq!(format!("{:?}", Duration::new(7, 000_010_000)), "7.00001s"); - assert_eq!(format!("{:?}", Duration::new(7, 000_000_001)), "7.000000001s"); - assert_eq!(format!("{:?}", Duration::new(7, 123_456_789)), "7.123456789s"); + assert_eq!( + format!("{:?}", Duration::new(7, 000_000_001)), 
+ "7.000000001s" + ); + assert_eq!( + format!("{:?}", Duration::new(7, 123_456_789)), + "7.123456789s" + ); assert_eq!(format!("{:?}", Duration::new(88, 000_000_000)), "88s"); assert_eq!(format!("{:?}", Duration::new(88, 100_000_000)), "88.1s"); assert_eq!(format!("{:?}", Duration::new(88, 000_010_000)), "88.00001s"); - assert_eq!(format!("{:?}", Duration::new(88, 000_000_001)), "88.000000001s"); - assert_eq!(format!("{:?}", Duration::new(88, 123_456_789)), "88.123456789s"); + assert_eq!( + format!("{:?}", Duration::new(88, 000_000_001)), + "88.000000001s" + ); + assert_eq!( + format!("{:?}", Duration::new(88, 123_456_789)), + "88.123456789s" + ); assert_eq!(format!("{:?}", Duration::new(999, 000_000_000)), "999s"); assert_eq!(format!("{:?}", Duration::new(999, 100_000_000)), "999.1s"); - assert_eq!(format!("{:?}", Duration::new(999, 000_010_000)), "999.00001s"); - assert_eq!(format!("{:?}", Duration::new(999, 000_000_001)), "999.000000001s"); - assert_eq!(format!("{:?}", Duration::new(999, 123_456_789)), "999.123456789s"); + assert_eq!( + format!("{:?}", Duration::new(999, 000_010_000)), + "999.00001s" + ); + assert_eq!( + format!("{:?}", Duration::new(999, 000_000_001)), + "999.000000001s" + ); + assert_eq!( + format!("{:?}", Duration::new(999, 123_456_789)), + "999.123456789s" + ); } #[test] @@ -208,8 +279,14 @@ fn debug_formatting_millis() { assert_eq!(format!("{:?}", Duration::new(0, 999_000_000)), "999ms"); assert_eq!(format!("{:?}", Duration::new(0, 999_100_000)), "999.1ms"); - assert_eq!(format!("{:?}", Duration::new(0, 999_000_001)), "999.000001ms"); - assert_eq!(format!("{:?}", Duration::new(0, 999_123_456)), "999.123456ms"); + assert_eq!( + format!("{:?}", Duration::new(0, 999_000_001)), + "999.000001ms" + ); + assert_eq!( + format!("{:?}", Duration::new(0, 999_123_456)), + "999.123456ms" + ); } #[test] @@ -286,9 +363,18 @@ fn debug_formatting_precision_two() { #[test] fn debug_formatting_precision_high() { - assert_eq!(format!("{:.5?}", Duration::new(0, 23_678)), "23.67800µs"); + assert_eq!(format!("{:.5?}", Duration::new(0, 23_678)), "23.67800µs"); - assert_eq!(format!("{:.9?}", Duration::new(1, 000_000_000)), "1.000000000s"); - assert_eq!(format!("{:.10?}", Duration::new(4, 001_000_000)), "4.0010000000s"); - assert_eq!(format!("{:.20?}", Duration::new(4, 001_000_000)), "4.00100000000000000000s"); + assert_eq!( + format!("{:.9?}", Duration::new(1, 000_000_000)), + "1.000000000s" + ); + assert_eq!( + format!("{:.10?}", Duration::new(4, 001_000_000)), + "4.0010000000s" + ); + assert_eq!( + format!("{:.20?}", Duration::new(4, 001_000_000)), + "4.00100000000000000000s" + ); } diff --git a/src/libcore/tests/tuple.rs b/src/libcore/tests/tuple.rs index a4c171eb4243b..6a80888e543f0 100644 --- a/src/libcore/tests/tuple.rs +++ b/src/libcore/tests/tuple.rs @@ -1,4 +1,4 @@ -use std::cmp::Ordering::{Equal, Less, Greater}; +use std::cmp::Ordering::{Equal, Greater, Less}; #[test] fn test_clone() { @@ -11,7 +11,7 @@ fn test_clone() { fn test_tuple_cmp() { let (small, big) = ((1, 2, 3), (3, 2, 1)); - let nan = 0.0f64/0.0; + let nan = 0.0f64 / 0.0; // PartialEq assert_eq!(small, small); diff --git a/src/libcore/time.rs b/src/libcore/time.rs index b12ee0497d2c2..65af7667bd88e 100644 --- a/src/libcore/time.rs +++ b/src/libcore/time.rs @@ -12,16 +12,16 @@ //! assert_eq!(Duration::new(5, 0), Duration::from_secs(5)); //! 
``` -use {fmt, u64}; use iter::Sum; -use ops::{Add, Sub, Mul, Div, AddAssign, SubAssign, MulAssign, DivAssign}; +use ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Sub, SubAssign}; +use {fmt, u64}; const NANOS_PER_SEC: u32 = 1_000_000_000; const NANOS_PER_MILLI: u32 = 1_000_000; const NANOS_PER_MICRO: u32 = 1_000; const MILLIS_PER_SEC: u64 = 1_000; const MICROS_PER_SEC: u64 = 1_000_000; -const MAX_NANOS_F64: f64 = ((u64::MAX as u128 + 1)*(NANOS_PER_SEC as u128)) as f64; +const MAX_NANOS_F64: f64 = ((u64::MAX as u128 + 1) * (NANOS_PER_SEC as u128)) as f64; /// A `Duration` type to represent a span of time, typically used for system /// timeouts. @@ -80,7 +80,8 @@ impl Duration { #[stable(feature = "duration", since = "1.3.0")] #[inline] pub fn new(secs: u64, nanos: u32) -> Duration { - let secs = secs.checked_add((nanos / NANOS_PER_SEC) as u64) + let secs = secs + .checked_add((nanos / NANOS_PER_SEC) as u64) .expect("overflow in Duration::new"); let nanos = nanos % NANOS_PER_SEC; Duration { secs, nanos } @@ -201,7 +202,9 @@ impl Duration { /// [`subsec_nanos`]: #method.subsec_nanos #[stable(feature = "duration", since = "1.3.0")] #[inline] - pub const fn as_secs(&self) -> u64 { self.secs } + pub const fn as_secs(&self) -> u64 { + self.secs + } /// Returns the fractional part of this `Duration`, in whole milliseconds. /// @@ -220,7 +223,9 @@ impl Duration { /// ``` #[stable(feature = "duration_extras", since = "1.27.0")] #[inline] - pub const fn subsec_millis(&self) -> u32 { self.nanos / NANOS_PER_MILLI } + pub const fn subsec_millis(&self) -> u32 { + self.nanos / NANOS_PER_MILLI + } /// Returns the fractional part of this `Duration`, in whole microseconds. /// @@ -239,7 +244,9 @@ impl Duration { /// ``` #[stable(feature = "duration_extras", since = "1.27.0")] #[inline] - pub const fn subsec_micros(&self) -> u32 { self.nanos / NANOS_PER_MICRO } + pub const fn subsec_micros(&self) -> u32 { + self.nanos / NANOS_PER_MICRO + } /// Returns the fractional part of this `Duration`, in nanoseconds. /// @@ -258,7 +265,9 @@ impl Duration { /// ``` #[stable(feature = "duration", since = "1.3.0")] #[inline] - pub const fn subsec_nanos(&self) -> u32 { self.nanos } + pub const fn subsec_nanos(&self) -> u32 { + self.nanos + } /// Returns the total number of whole milliseconds contained by this `Duration`. 
/// @@ -337,10 +346,7 @@ impl Duration { } } debug_assert!(nanos < NANOS_PER_SEC); - Some(Duration { - secs, - nanos, - }) + Some(Duration { secs, nanos }) } else { None } @@ -404,14 +410,13 @@ impl Duration { let total_nanos = self.nanos as u64 * rhs as u64; let extra_secs = total_nanos / (NANOS_PER_SEC as u64); let nanos = (total_nanos % (NANOS_PER_SEC as u64)) as u32; - if let Some(secs) = self.secs + if let Some(secs) = self + .secs .checked_mul(rhs as u64) - .and_then(|s| s.checked_add(extra_secs)) { + .and_then(|s| s.checked_add(extra_secs)) + { debug_assert!(nanos < NANOS_PER_SEC); - Some(Duration { - secs, - nanos, - }) + Some(Duration { secs, nanos }) } else { None } @@ -482,7 +487,7 @@ impl Duration { #[unstable(feature = "duration_float", issue = "54361")] #[inline] pub fn from_float_secs(secs: f64) -> Duration { - let nanos = secs * (NANOS_PER_SEC as f64); + let nanos = secs * (NANOS_PER_SEC as f64); if !nanos.is_finite() { panic!("got non-finite value when converting float to duration"); } @@ -492,7 +497,7 @@ impl Duration { if nanos < 0.0 { panic!("underflow when converting float to duration"); } - let nanos = nanos as u128; + let nanos = nanos as u128; Duration { secs: (nanos / (NANOS_PER_SEC as u128)) as u64, nanos: (nanos % (NANOS_PER_SEC as u128)) as u32, @@ -563,7 +568,8 @@ impl Add for Duration { type Output = Duration; fn add(self, rhs: Duration) -> Duration { - self.checked_add(rhs).expect("overflow when adding durations") + self.checked_add(rhs) + .expect("overflow when adding durations") } } @@ -579,7 +585,8 @@ impl Sub for Duration { type Output = Duration; fn sub(self, rhs: Duration) -> Duration { - self.checked_sub(rhs).expect("overflow when subtracting durations") + self.checked_sub(rhs) + .expect("overflow when subtracting durations") } } @@ -595,7 +602,8 @@ impl Mul for Duration { type Output = Duration; fn mul(self, rhs: u32) -> Duration { - self.checked_mul(rhs).expect("overflow when multiplying duration by scalar") + self.checked_mul(rhs) + .expect("overflow when multiplying duration by scalar") } } @@ -620,7 +628,8 @@ impl Div for Duration { type Output = Duration; fn div(self, rhs: u32) -> Duration { - self.checked_div(rhs).expect("divide by zero error when dividing duration by scalar") + self.checked_div(rhs) + .expect("divide by zero error when dividing duration by scalar") } } @@ -663,14 +672,14 @@ macro_rules! sum_durations { #[stable(feature = "duration_sum", since = "1.16.0")] impl Sum for Duration { - fn sum>(iter: I) -> Duration { + fn sum>(iter: I) -> Duration { sum_durations!(iter) } } #[stable(feature = "duration_sum", since = "1.16.0")] impl<'a> Sum<&'a Duration> for Duration { - fn sum>(iter: I) -> Duration { + fn sum>(iter: I) -> Duration { sum_durations!(iter) } } @@ -760,9 +769,7 @@ impl fmt::Debug for Duration { } else { // We are only writing ASCII digits into the buffer and it was // initialized with '0's, so it contains valid UTF8. - let s = unsafe { - ::str::from_utf8_unchecked(&buf[..end]) - }; + let s = unsafe { ::str::from_utf8_unchecked(&buf[..end]) }; // If the user request a precision > 9, we pad '0's at the end. 
let w = f.precision().unwrap_or(pos); @@ -779,7 +786,12 @@ impl fmt::Debug for Duration { fmt_decimal(f, self.secs, self.nanos, 100_000_000)?; f.write_str("s") } else if self.nanos >= 1_000_000 { - fmt_decimal(f, self.nanos as u64 / 1_000_000, self.nanos % 1_000_000, 100_000)?; + fmt_decimal( + f, + self.nanos as u64 / 1_000_000, + self.nanos % 1_000_000, + 100_000, + )?; f.write_str("ms") } else if self.nanos >= 1_000 { fmt_decimal(f, self.nanos as u64 / 1_000, self.nanos % 1_000, 100)?; diff --git a/src/libcore/tuple.rs b/src/libcore/tuple.rs index a82666d8f70f8..1a0b6f5d00959 100644 --- a/src/libcore/tuple.rs +++ b/src/libcore/tuple.rs @@ -1,7 +1,7 @@ // See src/libstd/primitive_docs.rs for documentation. -use cmp::*; use cmp::Ordering::*; +use cmp::*; // macro for implementing n-ary tuple functions and operations macro_rules! tuple_impls { diff --git a/src/libcore/unicode/bool_trie.rs b/src/libcore/unicode/bool_trie.rs index 39584d346e4a8..b7fba88a540f9 100644 --- a/src/libcore/unicode/bool_trie.rs +++ b/src/libcore/unicode/bool_trie.rs @@ -19,16 +19,16 @@ /// non-BMP range of most Unicode sets. pub struct BoolTrie { // 0..0x800 (corresponding to 1 and 2 byte utf-8 sequences) - pub r1: [u64; 32], // leaves + pub r1: [u64; 32], // leaves // 0x800..0x10000 (corresponding to 3 byte utf-8 sequences) pub r2: [u8; 992], // first level - pub r3: &'static [u64], // leaves + pub r3: &'static [u64], // leaves // 0x10000..0x110000 (corresponding to 4 byte utf-8 sequences) - pub r4: [u8; 256], // first level - pub r5: &'static [u8], // second level - pub r6: &'static [u64], // leaves + pub r4: [u8; 256], // first level + pub r5: &'static [u8], // second level + pub r6: &'static [u64], // leaves } impl BoolTrie { pub fn lookup(&self, c: char) -> bool { @@ -48,7 +48,7 @@ impl BoolTrie { pub struct SmallBoolTrie { pub(crate) r1: &'static [u8], // first level - pub(crate) r2: &'static [u64], // leaves + pub(crate) r2: &'static [u64], // leaves } impl SmallBoolTrie { diff --git a/src/libcore/unicode/printable.rs b/src/libcore/unicode/printable.rs index a950e82cba241..8f270719e4c99 100644 --- a/src/libcore/unicode/printable.rs +++ b/src/libcore/unicode/printable.rs @@ -1,8 +1,7 @@ // NOTE: The following code was generated by "src/libcore/unicode/printable.py", // do not edit directly! 
-fn check(x: u16, singletonuppers: &[(u8, u8)], singletonlowers: &[u8], - normal: &[u8]) -> bool { +fn check(x: u16, singletonuppers: &[(u8, u8)], singletonlowers: &[u8], normal: &[u8]) -> bool { let xupper = (x >> 8) as u8; let mut lowerstart = 0; for &(upper, lowercount) in singletonuppers { @@ -113,44 +112,25 @@ const SINGLETONS0U: &[(u8, u8)] = &[ (0xff, 9), ]; const SINGLETONS0L: &[u8] = &[ - 0xad, 0x78, 0x79, 0x8b, 0x8d, 0xa2, 0x30, 0x57, - 0x58, 0x8b, 0x8c, 0x90, 0x1c, 0x1d, 0xdd, 0x0e, - 0x0f, 0x4b, 0x4c, 0xfb, 0xfc, 0x2e, 0x2f, 0x3f, - 0x5c, 0x5d, 0x5f, 0xb5, 0xe2, 0x84, 0x8d, 0x8e, - 0x91, 0x92, 0xa9, 0xb1, 0xba, 0xbb, 0xc5, 0xc6, - 0xc9, 0xca, 0xde, 0xe4, 0xe5, 0xff, 0x00, 0x04, - 0x11, 0x12, 0x29, 0x31, 0x34, 0x37, 0x3a, 0x3b, - 0x3d, 0x49, 0x4a, 0x5d, 0x84, 0x8e, 0x92, 0xa9, - 0xb1, 0xb4, 0xba, 0xbb, 0xc6, 0xca, 0xce, 0xcf, - 0xe4, 0xe5, 0x00, 0x04, 0x0d, 0x0e, 0x11, 0x12, - 0x29, 0x31, 0x34, 0x3a, 0x3b, 0x45, 0x46, 0x49, - 0x4a, 0x5e, 0x64, 0x65, 0x84, 0x91, 0x9b, 0x9d, - 0xc9, 0xce, 0xcf, 0x0d, 0x11, 0x29, 0x45, 0x49, - 0x57, 0x64, 0x65, 0x8d, 0x91, 0xa9, 0xb4, 0xba, - 0xbb, 0xc5, 0xc9, 0xdf, 0xe4, 0xe5, 0xf0, 0x04, - 0x0d, 0x11, 0x45, 0x49, 0x64, 0x65, 0x80, 0x81, - 0x84, 0xb2, 0xbc, 0xbe, 0xbf, 0xd5, 0xd7, 0xf0, - 0xf1, 0x83, 0x85, 0x86, 0x89, 0x8b, 0x8c, 0x98, - 0xa0, 0xa4, 0xa6, 0xa8, 0xa9, 0xac, 0xba, 0xbe, - 0xbf, 0xc5, 0xc7, 0xce, 0xcf, 0xda, 0xdb, 0x48, - 0x98, 0xbd, 0xcd, 0xc6, 0xce, 0xcf, 0x49, 0x4e, - 0x4f, 0x57, 0x59, 0x5e, 0x5f, 0x89, 0x8e, 0x8f, - 0xb1, 0xb6, 0xb7, 0xbf, 0xc1, 0xc6, 0xc7, 0xd7, - 0x11, 0x16, 0x17, 0x5b, 0x5c, 0xf6, 0xf7, 0xfe, - 0xff, 0x80, 0x0d, 0x6d, 0x71, 0xde, 0xdf, 0x0e, - 0x0f, 0x1f, 0x6e, 0x6f, 0x1c, 0x1d, 0x5f, 0x7d, - 0x7e, 0xae, 0xaf, 0xbb, 0xbc, 0xfa, 0x16, 0x17, - 0x1e, 0x1f, 0x46, 0x47, 0x4e, 0x4f, 0x58, 0x5a, - 0x5c, 0x5e, 0x7e, 0x7f, 0xb5, 0xc5, 0xd4, 0xd5, - 0xdc, 0xf0, 0xf1, 0xf5, 0x72, 0x73, 0x8f, 0x74, - 0x75, 0x96, 0x97, 0xc9, 0xff, 0x2f, 0x5f, 0x26, - 0x2e, 0x2f, 0xa7, 0xaf, 0xb7, 0xbf, 0xc7, 0xcf, - 0xd7, 0xdf, 0x9a, 0x40, 0x97, 0x98, 0x30, 0x8f, - 0x1f, 0xff, 0xce, 0xff, 0x4e, 0x4f, 0x5a, 0x5b, - 0x07, 0x08, 0x0f, 0x10, 0x27, 0x2f, 0xee, 0xef, - 0x6e, 0x6f, 0x37, 0x3d, 0x3f, 0x42, 0x45, 0x90, - 0x91, 0xfe, 0xff, 0x53, 0x67, 0x75, 0xc8, 0xc9, - 0xd0, 0xd1, 0xd8, 0xd9, 0xe7, 0xfe, 0xff, + 0xad, 0x78, 0x79, 0x8b, 0x8d, 0xa2, 0x30, 0x57, 0x58, 0x8b, 0x8c, 0x90, 0x1c, 0x1d, 0xdd, 0x0e, + 0x0f, 0x4b, 0x4c, 0xfb, 0xfc, 0x2e, 0x2f, 0x3f, 0x5c, 0x5d, 0x5f, 0xb5, 0xe2, 0x84, 0x8d, 0x8e, + 0x91, 0x92, 0xa9, 0xb1, 0xba, 0xbb, 0xc5, 0xc6, 0xc9, 0xca, 0xde, 0xe4, 0xe5, 0xff, 0x00, 0x04, + 0x11, 0x12, 0x29, 0x31, 0x34, 0x37, 0x3a, 0x3b, 0x3d, 0x49, 0x4a, 0x5d, 0x84, 0x8e, 0x92, 0xa9, + 0xb1, 0xb4, 0xba, 0xbb, 0xc6, 0xca, 0xce, 0xcf, 0xe4, 0xe5, 0x00, 0x04, 0x0d, 0x0e, 0x11, 0x12, + 0x29, 0x31, 0x34, 0x3a, 0x3b, 0x45, 0x46, 0x49, 0x4a, 0x5e, 0x64, 0x65, 0x84, 0x91, 0x9b, 0x9d, + 0xc9, 0xce, 0xcf, 0x0d, 0x11, 0x29, 0x45, 0x49, 0x57, 0x64, 0x65, 0x8d, 0x91, 0xa9, 0xb4, 0xba, + 0xbb, 0xc5, 0xc9, 0xdf, 0xe4, 0xe5, 0xf0, 0x04, 0x0d, 0x11, 0x45, 0x49, 0x64, 0x65, 0x80, 0x81, + 0x84, 0xb2, 0xbc, 0xbe, 0xbf, 0xd5, 0xd7, 0xf0, 0xf1, 0x83, 0x85, 0x86, 0x89, 0x8b, 0x8c, 0x98, + 0xa0, 0xa4, 0xa6, 0xa8, 0xa9, 0xac, 0xba, 0xbe, 0xbf, 0xc5, 0xc7, 0xce, 0xcf, 0xda, 0xdb, 0x48, + 0x98, 0xbd, 0xcd, 0xc6, 0xce, 0xcf, 0x49, 0x4e, 0x4f, 0x57, 0x59, 0x5e, 0x5f, 0x89, 0x8e, 0x8f, + 0xb1, 0xb6, 0xb7, 0xbf, 0xc1, 0xc6, 0xc7, 0xd7, 0x11, 0x16, 0x17, 0x5b, 0x5c, 0xf6, 0xf7, 0xfe, + 0xff, 0x80, 0x0d, 0x6d, 0x71, 0xde, 0xdf, 0x0e, 0x0f, 0x1f, 0x6e, 0x6f, 0x1c, 0x1d, 
0x5f, 0x7d, + 0x7e, 0xae, 0xaf, 0xbb, 0xbc, 0xfa, 0x16, 0x17, 0x1e, 0x1f, 0x46, 0x47, 0x4e, 0x4f, 0x58, 0x5a, + 0x5c, 0x5e, 0x7e, 0x7f, 0xb5, 0xc5, 0xd4, 0xd5, 0xdc, 0xf0, 0xf1, 0xf5, 0x72, 0x73, 0x8f, 0x74, + 0x75, 0x96, 0x97, 0xc9, 0xff, 0x2f, 0x5f, 0x26, 0x2e, 0x2f, 0xa7, 0xaf, 0xb7, 0xbf, 0xc7, 0xcf, + 0xd7, 0xdf, 0x9a, 0x40, 0x97, 0x98, 0x30, 0x8f, 0x1f, 0xff, 0xce, 0xff, 0x4e, 0x4f, 0x5a, 0x5b, + 0x07, 0x08, 0x0f, 0x10, 0x27, 0x2f, 0xee, 0xef, 0x6e, 0x6f, 0x37, 0x3d, 0x3f, 0x42, 0x45, 0x90, + 0x91, 0xfe, 0xff, 0x53, 0x67, 0x75, 0xc8, 0xc9, 0xd0, 0xd1, 0xd8, 0xd9, 0xe7, 0xfe, 0xff, ]; const SINGLETONS1U: &[(u8, u8)] = &[ (0x00, 6), @@ -188,338 +168,62 @@ const SINGLETONS1U: &[(u8, u8)] = &[ (0xf9, 4), ]; const SINGLETONS1L: &[u8] = &[ - 0x0c, 0x27, 0x3b, 0x3e, 0x4e, 0x4f, 0x8f, 0x9e, - 0x9e, 0x9f, 0x06, 0x07, 0x09, 0x36, 0x3d, 0x3e, - 0x56, 0xf3, 0xd0, 0xd1, 0x04, 0x14, 0x18, 0x36, - 0x37, 0x56, 0x57, 0xbd, 0x35, 0xce, 0xcf, 0xe0, - 0x12, 0x87, 0x89, 0x8e, 0x9e, 0x04, 0x0d, 0x0e, - 0x11, 0x12, 0x29, 0x31, 0x34, 0x3a, 0x45, 0x46, - 0x49, 0x4a, 0x4e, 0x4f, 0x64, 0x65, 0x5a, 0x5c, - 0xb6, 0xb7, 0x1b, 0x1c, 0x84, 0x85, 0x09, 0x37, - 0x90, 0x91, 0xa8, 0x07, 0x0a, 0x3b, 0x3e, 0x66, - 0x69, 0x8f, 0x92, 0x6f, 0x5f, 0xee, 0xef, 0x5a, - 0x62, 0x9a, 0x9b, 0x27, 0x28, 0x55, 0x9d, 0xa0, - 0xa1, 0xa3, 0xa4, 0xa7, 0xa8, 0xad, 0xba, 0xbc, - 0xc4, 0x06, 0x0b, 0x0c, 0x15, 0x1d, 0x3a, 0x3f, - 0x45, 0x51, 0xa6, 0xa7, 0xcc, 0xcd, 0xa0, 0x07, - 0x19, 0x1a, 0x22, 0x25, 0xc5, 0xc6, 0x04, 0x20, - 0x23, 0x25, 0x26, 0x28, 0x33, 0x38, 0x3a, 0x48, - 0x4a, 0x4c, 0x50, 0x53, 0x55, 0x56, 0x58, 0x5a, - 0x5c, 0x5e, 0x60, 0x63, 0x65, 0x66, 0x6b, 0x73, - 0x78, 0x7d, 0x7f, 0x8a, 0xa4, 0xaa, 0xaf, 0xb0, - 0xc0, 0xd0, 0x3f, 0x71, 0x72, 0x7b, + 0x0c, 0x27, 0x3b, 0x3e, 0x4e, 0x4f, 0x8f, 0x9e, 0x9e, 0x9f, 0x06, 0x07, 0x09, 0x36, 0x3d, 0x3e, + 0x56, 0xf3, 0xd0, 0xd1, 0x04, 0x14, 0x18, 0x36, 0x37, 0x56, 0x57, 0xbd, 0x35, 0xce, 0xcf, 0xe0, + 0x12, 0x87, 0x89, 0x8e, 0x9e, 0x04, 0x0d, 0x0e, 0x11, 0x12, 0x29, 0x31, 0x34, 0x3a, 0x45, 0x46, + 0x49, 0x4a, 0x4e, 0x4f, 0x64, 0x65, 0x5a, 0x5c, 0xb6, 0xb7, 0x1b, 0x1c, 0x84, 0x85, 0x09, 0x37, + 0x90, 0x91, 0xa8, 0x07, 0x0a, 0x3b, 0x3e, 0x66, 0x69, 0x8f, 0x92, 0x6f, 0x5f, 0xee, 0xef, 0x5a, + 0x62, 0x9a, 0x9b, 0x27, 0x28, 0x55, 0x9d, 0xa0, 0xa1, 0xa3, 0xa4, 0xa7, 0xa8, 0xad, 0xba, 0xbc, + 0xc4, 0x06, 0x0b, 0x0c, 0x15, 0x1d, 0x3a, 0x3f, 0x45, 0x51, 0xa6, 0xa7, 0xcc, 0xcd, 0xa0, 0x07, + 0x19, 0x1a, 0x22, 0x25, 0xc5, 0xc6, 0x04, 0x20, 0x23, 0x25, 0x26, 0x28, 0x33, 0x38, 0x3a, 0x48, + 0x4a, 0x4c, 0x50, 0x53, 0x55, 0x56, 0x58, 0x5a, 0x5c, 0x5e, 0x60, 0x63, 0x65, 0x66, 0x6b, 0x73, + 0x78, 0x7d, 0x7f, 0x8a, 0xa4, 0xaa, 0xaf, 0xb0, 0xc0, 0xd0, 0x3f, 0x71, 0x72, 0x7b, ]; const NORMAL0: &[u8] = &[ - 0x00, 0x20, - 0x5f, 0x22, - 0x82, 0xdf, 0x04, - 0x82, 0x44, 0x08, - 0x1b, 0x04, - 0x06, 0x11, - 0x81, 0xac, 0x0e, - 0x80, 0xab, 0x35, - 0x1e, 0x15, - 0x80, 0xe0, 0x03, - 0x19, 0x08, - 0x01, 0x04, - 0x2f, 0x04, - 0x34, 0x04, - 0x07, 0x03, - 0x01, 0x07, - 0x06, 0x07, - 0x11, 0x0a, - 0x50, 0x0f, - 0x12, 0x07, - 0x55, 0x08, - 0x02, 0x04, - 0x1c, 0x0a, - 0x09, 0x03, - 0x08, 0x03, - 0x07, 0x03, - 0x02, 0x03, - 0x03, 0x03, - 0x0c, 0x04, - 0x05, 0x03, - 0x0b, 0x06, - 0x01, 0x0e, - 0x15, 0x05, - 0x3a, 0x03, - 0x11, 0x07, - 0x06, 0x05, - 0x10, 0x08, - 0x56, 0x07, - 0x02, 0x07, - 0x15, 0x0d, - 0x50, 0x04, - 0x43, 0x03, - 0x2d, 0x03, - 0x01, 0x04, - 0x11, 0x06, - 0x0f, 0x0c, - 0x3a, 0x04, - 0x1d, 0x25, - 0x0d, 0x06, - 0x4c, 0x20, - 0x6d, 0x04, - 0x6a, 0x25, - 0x80, 0xc8, 0x05, - 0x82, 0xb0, 0x03, - 0x1a, 0x06, - 0x82, 
0xfd, 0x03, - 0x59, 0x07, - 0x15, 0x0b, - 0x17, 0x09, - 0x14, 0x0c, - 0x14, 0x0c, - 0x6a, 0x06, - 0x0a, 0x06, - 0x1a, 0x06, - 0x59, 0x07, - 0x2b, 0x05, - 0x46, 0x0a, - 0x2c, 0x04, - 0x0c, 0x04, - 0x01, 0x03, - 0x31, 0x0b, - 0x2c, 0x04, - 0x1a, 0x06, - 0x0b, 0x03, - 0x80, 0xac, 0x06, - 0x0a, 0x06, - 0x1f, 0x41, - 0x4c, 0x04, - 0x2d, 0x03, - 0x74, 0x08, - 0x3c, 0x03, - 0x0f, 0x03, - 0x3c, 0x07, - 0x38, 0x08, - 0x2a, 0x06, - 0x82, 0xff, 0x11, - 0x18, 0x08, - 0x2f, 0x11, - 0x2d, 0x03, - 0x20, 0x10, - 0x21, 0x0f, - 0x80, 0x8c, 0x04, - 0x82, 0x97, 0x19, - 0x0b, 0x15, - 0x88, 0x94, 0x05, - 0x2f, 0x05, - 0x3b, 0x07, - 0x02, 0x0e, - 0x18, 0x09, - 0x80, 0xaf, 0x31, - 0x74, 0x0c, - 0x80, 0xd6, 0x1a, - 0x0c, 0x05, - 0x80, 0xff, 0x05, - 0x80, 0xb6, 0x05, - 0x24, 0x0c, - 0x9b, 0xc6, 0x0a, - 0xd2, 0x30, 0x10, - 0x84, 0x8d, 0x03, - 0x37, 0x09, - 0x81, 0x5c, 0x14, - 0x80, 0xb8, 0x08, - 0x80, 0xba, 0x3d, - 0x35, 0x04, - 0x0a, 0x06, - 0x38, 0x08, - 0x46, 0x08, - 0x0c, 0x06, - 0x74, 0x0b, - 0x1e, 0x03, - 0x5a, 0x04, - 0x59, 0x09, - 0x80, 0x83, 0x18, - 0x1c, 0x0a, - 0x16, 0x09, - 0x46, 0x0a, - 0x80, 0x8a, 0x06, - 0xab, 0xa4, 0x0c, - 0x17, 0x04, - 0x31, 0xa1, 0x04, - 0x81, 0xda, 0x26, - 0x07, 0x0c, - 0x05, 0x05, - 0x80, 0xa5, 0x11, - 0x81, 0x6d, 0x10, - 0x78, 0x28, - 0x2a, 0x06, - 0x4c, 0x04, - 0x80, 0x8d, 0x04, - 0x80, 0xbe, 0x03, - 0x1b, 0x03, - 0x0f, 0x0d, + 0x00, 0x20, 0x5f, 0x22, 0x82, 0xdf, 0x04, 0x82, 0x44, 0x08, 0x1b, 0x04, 0x06, 0x11, 0x81, 0xac, + 0x0e, 0x80, 0xab, 0x35, 0x1e, 0x15, 0x80, 0xe0, 0x03, 0x19, 0x08, 0x01, 0x04, 0x2f, 0x04, 0x34, + 0x04, 0x07, 0x03, 0x01, 0x07, 0x06, 0x07, 0x11, 0x0a, 0x50, 0x0f, 0x12, 0x07, 0x55, 0x08, 0x02, + 0x04, 0x1c, 0x0a, 0x09, 0x03, 0x08, 0x03, 0x07, 0x03, 0x02, 0x03, 0x03, 0x03, 0x0c, 0x04, 0x05, + 0x03, 0x0b, 0x06, 0x01, 0x0e, 0x15, 0x05, 0x3a, 0x03, 0x11, 0x07, 0x06, 0x05, 0x10, 0x08, 0x56, + 0x07, 0x02, 0x07, 0x15, 0x0d, 0x50, 0x04, 0x43, 0x03, 0x2d, 0x03, 0x01, 0x04, 0x11, 0x06, 0x0f, + 0x0c, 0x3a, 0x04, 0x1d, 0x25, 0x0d, 0x06, 0x4c, 0x20, 0x6d, 0x04, 0x6a, 0x25, 0x80, 0xc8, 0x05, + 0x82, 0xb0, 0x03, 0x1a, 0x06, 0x82, 0xfd, 0x03, 0x59, 0x07, 0x15, 0x0b, 0x17, 0x09, 0x14, 0x0c, + 0x14, 0x0c, 0x6a, 0x06, 0x0a, 0x06, 0x1a, 0x06, 0x59, 0x07, 0x2b, 0x05, 0x46, 0x0a, 0x2c, 0x04, + 0x0c, 0x04, 0x01, 0x03, 0x31, 0x0b, 0x2c, 0x04, 0x1a, 0x06, 0x0b, 0x03, 0x80, 0xac, 0x06, 0x0a, + 0x06, 0x1f, 0x41, 0x4c, 0x04, 0x2d, 0x03, 0x74, 0x08, 0x3c, 0x03, 0x0f, 0x03, 0x3c, 0x07, 0x38, + 0x08, 0x2a, 0x06, 0x82, 0xff, 0x11, 0x18, 0x08, 0x2f, 0x11, 0x2d, 0x03, 0x20, 0x10, 0x21, 0x0f, + 0x80, 0x8c, 0x04, 0x82, 0x97, 0x19, 0x0b, 0x15, 0x88, 0x94, 0x05, 0x2f, 0x05, 0x3b, 0x07, 0x02, + 0x0e, 0x18, 0x09, 0x80, 0xaf, 0x31, 0x74, 0x0c, 0x80, 0xd6, 0x1a, 0x0c, 0x05, 0x80, 0xff, 0x05, + 0x80, 0xb6, 0x05, 0x24, 0x0c, 0x9b, 0xc6, 0x0a, 0xd2, 0x30, 0x10, 0x84, 0x8d, 0x03, 0x37, 0x09, + 0x81, 0x5c, 0x14, 0x80, 0xb8, 0x08, 0x80, 0xba, 0x3d, 0x35, 0x04, 0x0a, 0x06, 0x38, 0x08, 0x46, + 0x08, 0x0c, 0x06, 0x74, 0x0b, 0x1e, 0x03, 0x5a, 0x04, 0x59, 0x09, 0x80, 0x83, 0x18, 0x1c, 0x0a, + 0x16, 0x09, 0x46, 0x0a, 0x80, 0x8a, 0x06, 0xab, 0xa4, 0x0c, 0x17, 0x04, 0x31, 0xa1, 0x04, 0x81, + 0xda, 0x26, 0x07, 0x0c, 0x05, 0x05, 0x80, 0xa5, 0x11, 0x81, 0x6d, 0x10, 0x78, 0x28, 0x2a, 0x06, + 0x4c, 0x04, 0x80, 0x8d, 0x04, 0x80, 0xbe, 0x03, 0x1b, 0x03, 0x0f, 0x0d, ]; const NORMAL1: &[u8] = &[ - 0x5e, 0x22, - 0x7b, 0x05, - 0x03, 0x04, - 0x2d, 0x03, - 0x65, 0x04, - 0x01, 0x2f, - 0x2e, 0x80, 0x82, - 0x1d, 0x03, - 0x31, 0x0f, - 0x1c, 0x04, - 0x24, 0x09, - 0x1e, 0x05, - 0x2b, 0x05, - 0x44, 0x04, - 0x0e, 0x2a, - 0x80, 0xaa, 
0x06, - 0x24, 0x04, - 0x24, 0x04, - 0x28, 0x08, - 0x34, 0x0b, - 0x01, 0x80, 0x90, - 0x81, 0x37, 0x09, - 0x16, 0x0a, - 0x08, 0x80, 0x98, - 0x39, 0x03, - 0x63, 0x08, - 0x09, 0x30, - 0x16, 0x05, - 0x21, 0x03, - 0x1b, 0x05, - 0x01, 0x40, - 0x38, 0x04, - 0x4b, 0x05, - 0x2f, 0x04, - 0x0a, 0x07, - 0x09, 0x07, - 0x40, 0x20, - 0x27, 0x04, - 0x0c, 0x09, - 0x36, 0x03, - 0x3a, 0x05, - 0x1a, 0x07, - 0x04, 0x0c, - 0x07, 0x50, - 0x49, 0x37, - 0x33, 0x0d, - 0x33, 0x07, - 0x2e, 0x08, - 0x0a, 0x81, 0x26, - 0x1f, 0x80, 0x81, - 0x28, 0x08, - 0x2a, 0x80, 0xa6, - 0x4e, 0x04, - 0x1e, 0x0f, - 0x43, 0x0e, - 0x19, 0x07, - 0x0a, 0x06, - 0x47, 0x09, - 0x27, 0x09, - 0x75, 0x0b, - 0x3f, 0x41, - 0x2a, 0x06, - 0x3b, 0x05, - 0x0a, 0x06, - 0x51, 0x06, - 0x01, 0x05, - 0x10, 0x03, - 0x05, 0x80, 0x8b, - 0x5f, 0x21, - 0x48, 0x08, - 0x0a, 0x80, 0xa6, - 0x5e, 0x22, - 0x45, 0x0b, - 0x0a, 0x06, - 0x0d, 0x13, - 0x38, 0x08, - 0x0a, 0x36, - 0x2c, 0x04, - 0x10, 0x80, 0xc0, - 0x3c, 0x64, - 0x53, 0x0c, - 0x01, 0x81, 0x00, - 0x48, 0x08, - 0x53, 0x1d, - 0x39, 0x81, 0x07, - 0x46, 0x0a, - 0x1d, 0x03, - 0x47, 0x49, - 0x37, 0x03, - 0x0e, 0x08, - 0x0a, 0x06, - 0x39, 0x07, - 0x0a, 0x81, 0x36, - 0x19, 0x81, 0x07, - 0x83, 0x9a, 0x66, - 0x75, 0x0b, - 0x80, 0xc4, 0x8a, 0xbc, - 0x84, 0x2f, 0x8f, 0xd1, - 0x82, 0x47, 0xa1, 0xb9, - 0x82, 0x39, 0x07, - 0x2a, 0x04, - 0x02, 0x60, - 0x26, 0x0a, - 0x46, 0x0a, - 0x28, 0x05, - 0x13, 0x82, 0xb0, - 0x5b, 0x65, - 0x45, 0x0b, - 0x2f, 0x10, - 0x11, 0x40, - 0x02, 0x1e, - 0x97, 0xf2, 0x0e, - 0x82, 0xf3, 0xa5, 0x0d, - 0x81, 0x1f, 0x51, - 0x81, 0x8c, 0x89, 0x04, - 0x6b, 0x05, - 0x0d, 0x03, - 0x09, 0x07, - 0x10, 0x93, 0x60, - 0x80, 0xf6, 0x0a, - 0x73, 0x08, - 0x6e, 0x17, - 0x46, 0x80, 0x9a, - 0x14, 0x0c, - 0x57, 0x09, - 0x19, 0x80, 0x87, - 0x81, 0x47, 0x03, - 0x85, 0x42, 0x0f, - 0x15, 0x85, 0x50, - 0x2b, 0x87, 0xd5, - 0x80, 0xd7, 0x29, - 0x4b, 0x05, - 0x0a, 0x04, - 0x02, 0x83, 0x11, - 0x44, 0x81, 0x4b, - 0x3c, 0x06, - 0x01, 0x04, - 0x55, 0x05, - 0x1b, 0x34, - 0x02, 0x81, 0x0e, - 0x2c, 0x04, - 0x64, 0x0c, - 0x56, 0x0a, - 0x0d, 0x03, - 0x5c, 0x04, - 0x3d, 0x39, - 0x1d, 0x0d, - 0x2c, 0x04, - 0x09, 0x07, - 0x02, 0x0e, - 0x06, 0x80, 0x9a, - 0x83, 0xd5, 0x0b, - 0x0d, 0x03, - 0x0a, 0x06, - 0x74, 0x0c, - 0x59, 0x27, - 0x0c, 0x04, - 0x38, 0x08, - 0x0a, 0x06, - 0x28, 0x08, - 0x1e, 0x52, - 0x0c, 0x04, - 0x67, 0x03, - 0x29, 0x0d, - 0x0a, 0x06, - 0x03, 0x0d, - 0x30, 0x60, - 0x0e, 0x85, 0x92, + 0x5e, 0x22, 0x7b, 0x05, 0x03, 0x04, 0x2d, 0x03, 0x65, 0x04, 0x01, 0x2f, 0x2e, 0x80, 0x82, 0x1d, + 0x03, 0x31, 0x0f, 0x1c, 0x04, 0x24, 0x09, 0x1e, 0x05, 0x2b, 0x05, 0x44, 0x04, 0x0e, 0x2a, 0x80, + 0xaa, 0x06, 0x24, 0x04, 0x24, 0x04, 0x28, 0x08, 0x34, 0x0b, 0x01, 0x80, 0x90, 0x81, 0x37, 0x09, + 0x16, 0x0a, 0x08, 0x80, 0x98, 0x39, 0x03, 0x63, 0x08, 0x09, 0x30, 0x16, 0x05, 0x21, 0x03, 0x1b, + 0x05, 0x01, 0x40, 0x38, 0x04, 0x4b, 0x05, 0x2f, 0x04, 0x0a, 0x07, 0x09, 0x07, 0x40, 0x20, 0x27, + 0x04, 0x0c, 0x09, 0x36, 0x03, 0x3a, 0x05, 0x1a, 0x07, 0x04, 0x0c, 0x07, 0x50, 0x49, 0x37, 0x33, + 0x0d, 0x33, 0x07, 0x2e, 0x08, 0x0a, 0x81, 0x26, 0x1f, 0x80, 0x81, 0x28, 0x08, 0x2a, 0x80, 0xa6, + 0x4e, 0x04, 0x1e, 0x0f, 0x43, 0x0e, 0x19, 0x07, 0x0a, 0x06, 0x47, 0x09, 0x27, 0x09, 0x75, 0x0b, + 0x3f, 0x41, 0x2a, 0x06, 0x3b, 0x05, 0x0a, 0x06, 0x51, 0x06, 0x01, 0x05, 0x10, 0x03, 0x05, 0x80, + 0x8b, 0x5f, 0x21, 0x48, 0x08, 0x0a, 0x80, 0xa6, 0x5e, 0x22, 0x45, 0x0b, 0x0a, 0x06, 0x0d, 0x13, + 0x38, 0x08, 0x0a, 0x36, 0x2c, 0x04, 0x10, 0x80, 0xc0, 0x3c, 0x64, 0x53, 0x0c, 0x01, 0x81, 0x00, + 0x48, 0x08, 0x53, 0x1d, 0x39, 0x81, 0x07, 0x46, 0x0a, 0x1d, 0x03, 0x47, 0x49, 
0x37, 0x03, 0x0e, + 0x08, 0x0a, 0x06, 0x39, 0x07, 0x0a, 0x81, 0x36, 0x19, 0x81, 0x07, 0x83, 0x9a, 0x66, 0x75, 0x0b, + 0x80, 0xc4, 0x8a, 0xbc, 0x84, 0x2f, 0x8f, 0xd1, 0x82, 0x47, 0xa1, 0xb9, 0x82, 0x39, 0x07, 0x2a, + 0x04, 0x02, 0x60, 0x26, 0x0a, 0x46, 0x0a, 0x28, 0x05, 0x13, 0x82, 0xb0, 0x5b, 0x65, 0x45, 0x0b, + 0x2f, 0x10, 0x11, 0x40, 0x02, 0x1e, 0x97, 0xf2, 0x0e, 0x82, 0xf3, 0xa5, 0x0d, 0x81, 0x1f, 0x51, + 0x81, 0x8c, 0x89, 0x04, 0x6b, 0x05, 0x0d, 0x03, 0x09, 0x07, 0x10, 0x93, 0x60, 0x80, 0xf6, 0x0a, + 0x73, 0x08, 0x6e, 0x17, 0x46, 0x80, 0x9a, 0x14, 0x0c, 0x57, 0x09, 0x19, 0x80, 0x87, 0x81, 0x47, + 0x03, 0x85, 0x42, 0x0f, 0x15, 0x85, 0x50, 0x2b, 0x87, 0xd5, 0x80, 0xd7, 0x29, 0x4b, 0x05, 0x0a, + 0x04, 0x02, 0x83, 0x11, 0x44, 0x81, 0x4b, 0x3c, 0x06, 0x01, 0x04, 0x55, 0x05, 0x1b, 0x34, 0x02, + 0x81, 0x0e, 0x2c, 0x04, 0x64, 0x0c, 0x56, 0x0a, 0x0d, 0x03, 0x5c, 0x04, 0x3d, 0x39, 0x1d, 0x0d, + 0x2c, 0x04, 0x09, 0x07, 0x02, 0x0e, 0x06, 0x80, 0x9a, 0x83, 0xd5, 0x0b, 0x0d, 0x03, 0x0a, 0x06, + 0x74, 0x0c, 0x59, 0x27, 0x0c, 0x04, 0x38, 0x08, 0x0a, 0x06, 0x28, 0x08, 0x1e, 0x52, 0x0c, 0x04, + 0x67, 0x03, 0x29, 0x0d, 0x0a, 0x06, 0x03, 0x0d, 0x30, 0x60, 0x0e, 0x85, 0x92, ]; diff --git a/src/libcore/unicode/tables.rs b/src/libcore/unicode/tables.rs index edef4ca361e4f..4df6e823ec779 100644 --- a/src/libcore/unicode/tables.rs +++ b/src/libcore/unicode/tables.rs @@ -2,8 +2,8 @@ #![allow(missing_docs, non_upper_case_globals, non_snake_case)] -use unicode::version::UnicodeVersion; use unicode::bool_trie::{BoolTrie, SmallBoolTrie}; +use unicode::version::UnicodeVersion; /// The version of [Unicode](http://www.unicode.org/) that the Unicode parts of /// `char` and `str` methods are based on. @@ -16,12 +16,8 @@ pub const UNICODE_VERSION: UnicodeVersion = UnicodeVersion { }; pub mod general_category { pub const Cc_table: &super::SmallBoolTrie = &super::SmallBoolTrie { - r1: &[ - 0, 1, 0 - ], - r2: &[ - 0x00000000ffffffff, 0x8000000000000000 - ], + r1: &[0, 1, 0], + r2: &[0x00000000ffffffff, 0x8000000000000000], }; pub fn Cc(c: char) -> bool { @@ -30,23 +26,47 @@ pub mod general_category { pub const N_table: &super::BoolTrie = &super::BoolTrie { r1: [ - 0x03ff000000000000, 0x0000000000000000, 0x720c000000000000, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 0x000003ff00000000, 0x0000000000000000, 0x03ff000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x00000000000003ff + 0x03ff000000000000, + 0x0000000000000000, + 0x720c000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x000003ff00000000, + 0x0000000000000000, + 0x03ff000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x00000000000003ff, ], r2: [ 0, 0, 0, 0, 0, 1, 0, 
2, 0, 1, 0, 1, 0, 3, 0, 4, 0, 5, 0, 1, 0, 6, 0, 1, 0, 7, 0, 7, 8, 0, 0, 0, 0, 9, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 0, 0, 0, 12, 7, 0, 0, 0, 0, 13, 0, 14, 0, 0, 15, 0, 0, 7, 16, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 9, 0, 0, 18, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 20, 21, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 0, 0, 0, 0, 0, 27, 0, 28, - 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 20, 21, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 0, 0, 0, 0, 0, 27, 0, + 28, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -61,9 +81,8 @@ pub mod general_category { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, - 0, 0, 1, 0, 0, 0, 0, 31, 0, 0, 7, 9, 0, 0, 32, 0, 7, 0, 0, 0, 0, 0, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 28, 0, 0, 1, 0, 0, 0, 0, 31, 0, 0, 7, 9, 0, 0, 32, 0, 7, 0, 0, 0, 0, 0, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -74,18 +93,43 @@ pub mod general_category { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, ], r3: &[ - 0x0000000000000000, 0x0000ffc000000000, 0x03f0ffc000000000, 0x00fcffc000000000, - 0x0007ffc000000000, 0x7f00ffc000000000, 0x01ffffc07f000000, 0x0000000003ff0000, - 0x000fffff00000000, 0x00000000000003ff, 0x1ffffe0000000000, 0x0001c00000000000, - 0x03ff03ff00000000, 0x000000000000ffc0, 0x0000000007ff0000, 0x0000000003ff03ff, - 0x03ff000000000000, 0x03f1000000000000, 0xffffffffffff0000, 0x00000000000003e7, - 0xffffffff00000000, 0x000000000fffffff, 0xfffffc0000000000, 0xffc0000000000000, - 0x00000000000fffff, 0x2000000000000000, 0x070003fe00000080, 0x00000000003c0000, - 0x000003ff00000000, 0x00000000fffeff00, 0xfffe0000000003ff, 0x003f000000000000, - 0x03ff000003ff0000 + 0x0000000000000000, + 0x0000ffc000000000, + 0x03f0ffc000000000, + 0x00fcffc000000000, + 0x0007ffc000000000, + 0x7f00ffc000000000, + 0x01ffffc07f000000, + 0x0000000003ff0000, + 0x000fffff00000000, + 0x00000000000003ff, + 0x1ffffe0000000000, + 0x0001c00000000000, + 0x03ff03ff00000000, + 0x000000000000ffc0, + 0x0000000007ff0000, + 
0x0000000003ff03ff, + 0x03ff000000000000, + 0x03f1000000000000, + 0xffffffffffff0000, + 0x00000000000003e7, + 0xffffffff00000000, + 0x000000000fffffff, + 0xfffffc0000000000, + 0xffc0000000000000, + 0x00000000000fffff, + 0x2000000000000000, + 0x070003fe00000080, + 0x00000000003c0000, + 0x000003ff00000000, + 0x00000000fffeff00, + 0xfffe0000000003ff, + 0x003f000000000000, + 0x03ff000003ff0000, ], r4: [ 0, 1, 2, 3, 3, 3, 4, 3, 3, 3, 3, 3, 3, 5, 6, 7, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, @@ -96,40 +140,74 @@ pub mod general_category { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, ], r5: &[ 0, 0, 0, 0, 1, 2, 3, 0, 0, 0, 0, 4, 5, 6, 0, 7, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 10, 11, 12, 0, 13, 14, 0, 15, 16, 17, 0, 18, 19, 0, 0, 0, 0, 20, 21, 0, - 0, 0, 0, 22, 0, 0, 23, 24, 0, 0, 0, 25, 0, 21, 26, 0, 0, 27, 0, 0, 0, 21, 0, 0, 0, 0, 0, - 28, 0, 28, 0, 0, 0, 0, 0, 28, 0, 29, 30, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 32, 0, 0, 0, 28, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 33, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 22, 0, 0, 23, 24, 0, 0, 0, 25, 0, 21, 26, 0, 0, 27, 0, 0, 0, 21, 0, 0, 0, 0, + 0, 28, 0, 28, 0, 0, 0, 0, 0, 28, 0, 29, 30, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 28, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 35, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 37, 0, 38, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 35, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 37, 0, 38, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 40, 0, 28, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 41, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 40, 0, 28, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 41, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 43, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + 0, 0, 0, 0, ], r6: &[ - 0x0000000000000000, 0x000fffffffffff80, 0x01ffffffffffffff, 0x0000000000000c00, - 
0x0ffffffe00000000, 0x0000000f00000000, 0x0000000000000402, 0x00000000003e0000, - 0x000003ff00000000, 0xfe000000ff000000, 0x0000ff8000000000, 0xf800000000000000, - 0x000000000fc00000, 0x3000000000000000, 0xfffffffffffcffff, 0x60000000000001ff, - 0x00000000e0000000, 0x0000f80000000000, 0xff000000ff000000, 0x0000fe0000000000, - 0xfc00000000000000, 0x03ff000000000000, 0x7fffffff00000000, 0x0000007fe0000000, - 0x00000000001e0000, 0x0000fffffffc0000, 0xffc0000000000000, 0x001ffffe03ff0000, - 0x0000000003ff0000, 0x00000000000003ff, 0x0fff000000000000, 0x0007ffff00000000, - 0x00001fffffff0000, 0xffffffffffffffff, 0x00007fffffffffff, 0x00000003fbff0000, - 0x00000000007fffff, 0x000fffff00000000, 0x01ffffff00000000, 0xffffffffffffc000, - 0x000000000000ff80, 0xfffe000000000000, 0x001eefffffffffff, 0x0000000000001fff + 0x0000000000000000, + 0x000fffffffffff80, + 0x01ffffffffffffff, + 0x0000000000000c00, + 0x0ffffffe00000000, + 0x0000000f00000000, + 0x0000000000000402, + 0x00000000003e0000, + 0x000003ff00000000, + 0xfe000000ff000000, + 0x0000ff8000000000, + 0xf800000000000000, + 0x000000000fc00000, + 0x3000000000000000, + 0xfffffffffffcffff, + 0x60000000000001ff, + 0x00000000e0000000, + 0x0000f80000000000, + 0xff000000ff000000, + 0x0000fe0000000000, + 0xfc00000000000000, + 0x03ff000000000000, + 0x7fffffff00000000, + 0x0000007fe0000000, + 0x00000000001e0000, + 0x0000fffffffc0000, + 0xffc0000000000000, + 0x001ffffe03ff0000, + 0x0000000003ff0000, + 0x00000000000003ff, + 0x0fff000000000000, + 0x0007ffff00000000, + 0x00001fffffff0000, + 0xffffffffffffffff, + 0x00007fffffffffff, + 0x00000003fbff0000, + 0x00000000007fffff, + 0x000fffff00000000, + 0x01ffffff00000000, + 0xffffffffffffc000, + 0x000000000000ff80, + 0xfffe000000000000, + 0x001eefffffffffff, + 0x0000000000001fff, ], }; @@ -142,14 +220,38 @@ pub mod general_category { pub mod derived_property { pub const Alphabetic_table: &super::BoolTrie = &super::BoolTrie { r1: [ - 0x0000000000000000, 0x07fffffe07fffffe, 0x0420040000000000, 0xff7fffffff7fffff, - 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, - 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0x0000501f0003ffc3, - 0x0000000000000000, 0xbcdf000000000020, 0xfffffffbffffd740, 0xffbfffffffffffff, - 0xffffffffffffffff, 0xffffffffffffffff, 0xfffffffffffffc03, 0xffffffffffffffff, - 0xfffeffffffffffff, 0xffffffff027fffff, 0xbfff0000000001ff, 0x000787ffffff00b6, - 0xffffffff07ff0000, 0xffffc000feffffff, 0xffffffffffffffff, 0x9c00e1fe1fefffff, - 0xffffffffffff0000, 0xffffffffffffe000, 0x0003ffffffffffff, 0x043007fffffffc00 + 0x0000000000000000, + 0x07fffffe07fffffe, + 0x0420040000000000, + 0xff7fffffff7fffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0x0000501f0003ffc3, + 0x0000000000000000, + 0xbcdf000000000020, + 0xfffffffbffffd740, + 0xffbfffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xfffffffffffffc03, + 0xffffffffffffffff, + 0xfffeffffffffffff, + 0xffffffff027fffff, + 0xbfff0000000001ff, + 0x000787ffffff00b6, + 0xffffffff07ff0000, + 0xffffc000feffffff, + 0xffffffffffffffff, + 0x9c00e1fe1fefffff, + 0xffffffffffff0000, + 0xffffffffffffe000, + 0x0003ffffffffffff, + 0x043007fffffffc00, ], r2: [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, @@ -197,125 +299,303 @@ pub mod derived_property { 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 
31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 36, 36, 36, 36, 36, 120, 36, 121, 122, 123, 124, 125, 36, 36, 36, 36, 126, 127, 128, - 129, 31, 130, 36, 131, 132, 133, 113, 134 + 129, 31, 130, 36, 131, 132, 133, 113, 134, ], r3: &[ - 0x00001ffffcffffff, 0x000007ff01ffffff, 0x3fdfffff00000000, 0xffff03f8fff00000, - 0xefffffffffffffff, 0xfffe000fffe1dfff, 0xe3c5fdfffff99fef, 0x1003000fb080599f, - 0xc36dfdfffff987ee, 0x003f00005e021987, 0xe3edfdfffffbbfee, 0x1e00000f00011bbf, - 0xe3edfdfffff99fee, 0x0002000fb0c0199f, 0xc3ffc718d63dc7ec, 0x0000000000811dc7, - 0xe3fffdfffffddfef, 0x0000000f07601ddf, 0xe3effdfffffddfef, 0x0006000f40601ddf, - 0xe7fffffffffddfef, 0xfc00000f80f05ddf, 0x2ffbfffffc7fffec, 0x000c0000ff5f807f, - 0x07fffffffffffffe, 0x000000000000207f, 0x3bffecaefef02596, 0x00000000f000205f, - 0x0000000000000001, 0xfffe1ffffffffeff, 0x1ffffffffeffff03, 0x0000000000000000, - 0xf97fffffffffffff, 0xffffc1e7ffff0000, 0xffffffff3000407f, 0xf7ffffffffff20bf, - 0xffffffffffffffff, 0xffffffff3d7f3dff, 0x7f3dffffffff3dff, 0xffffffffff7fff3d, - 0xffffffffff3dffff, 0x0000000087ffffff, 0xffffffff0000ffff, 0x3f3fffffffffffff, - 0xfffffffffffffffe, 0xffff9fffffffffff, 0xffffffff07fffffe, 0x01ffc7ffffffffff, - 0x000fffff000fdfff, 0x000ddfff000fffff, 0xffcfffffffffffff, 0x00000000108001ff, - 0xffffffff00000000, 0x01ffffffffffffff, 0xffff07ffffffffff, 0x003fffffffffffff, - 0x01ff0fff7fffffff, 0x001f3fffffff0000, 0xffff0fffffffffff, 0x00000000000003ff, - 0xffffffff0fffffff, 0x001ffffe7fffffff, 0x0000008000000000, 0xffefffffffffffff, - 0x0000000000000fef, 0xfc00f3ffffffffff, 0x0003ffbfffffffff, 0x3ffffffffc00e000, - 0xe7ffffffffff01ff, 0x006fde0000000000, 0x001fff8000000000, 0xffffffff3f3fffff, - 0x3fffffffaaff3f3f, 0x5fdfffffffffffff, 0x1fdc1fff0fcf1fdc, 0x8002000000000000, - 0x000000001fff0000, 0xf3ffbd503e2ffc84, 0xffffffff000043e0, 0x00000000000001ff, - 0xffc0000000000000, 0x000003ffffffffff, 0xffff7fffffffffff, 0xffffffff7fffffff, - 0x000c781fffffffff, 0xffff20bfffffffff, 0x000080ffffffffff, 0x7f7f7f7f007fffff, - 0xffffffff7f7f7f7f, 0x0000800000000000, 0x1f3e03fe000000e0, 0xfffffffee07fffff, - 0xf7ffffffffffffff, 0xfffeffffffffffe0, 0x07ffffff00007fff, 0xffff000000000000, - 0x0000ffffffffffff, 0x0000000000001fff, 0x3fffffffffff0000, 0x00000c00ffff1fff, - 0x8ff07fffffffffff, 0xfffffffcff800000, 0x03fffffffffff9ff, 0xff80000000000000, - 0x000000fffffff7bb, 0x000fffffffffffff, 0x68fc00000000002f, 0xffff07fffffffc00, - 0x1fffffff0007ffff, 0xfff7ffffffffffff, 0x7c00ffdf00008000, 0x007fffffffffffff, - 0xc47fffff00003fff, 0x7fffffffffffffff, 0x003cffff38000005, 0xffff7f7f007e7e7e, - 0xffff003ff7ffffff, 0x000007ffffffffff, 0xffff000fffffffff, 0x0ffffffffffff87f, - 0xffff3fffffffffff, 0x0000000003ffffff, 0x5f7ffdffe0f8007f, 0xffffffffffffffdb, - 0x0003ffffffffffff, 0xfffffffffff80000, 0x3fffffffffffffff, 0xffffffffffff0000, - 0xfffffffffffcffff, 0x0fff0000000000ff, 0xffdf000000000000, 0x1fffffffffffffff, - 0x07fffffe00000000, 0xffffffc007fffffe, 0x000000001cfcfcfc + 0x00001ffffcffffff, + 0x000007ff01ffffff, + 0x3fdfffff00000000, + 0xffff03f8fff00000, + 0xefffffffffffffff, + 0xfffe000fffe1dfff, + 0xe3c5fdfffff99fef, + 0x1003000fb080599f, + 0xc36dfdfffff987ee, + 0x003f00005e021987, + 0xe3edfdfffffbbfee, + 0x1e00000f00011bbf, + 0xe3edfdfffff99fee, + 0x0002000fb0c0199f, + 0xc3ffc718d63dc7ec, + 0x0000000000811dc7, + 0xe3fffdfffffddfef, + 0x0000000f07601ddf, + 0xe3effdfffffddfef, + 0x0006000f40601ddf, + 0xe7fffffffffddfef, + 0xfc00000f80f05ddf, + 0x2ffbfffffc7fffec, + 0x000c0000ff5f807f, + 
0x07fffffffffffffe, + 0x000000000000207f, + 0x3bffecaefef02596, + 0x00000000f000205f, + 0x0000000000000001, + 0xfffe1ffffffffeff, + 0x1ffffffffeffff03, + 0x0000000000000000, + 0xf97fffffffffffff, + 0xffffc1e7ffff0000, + 0xffffffff3000407f, + 0xf7ffffffffff20bf, + 0xffffffffffffffff, + 0xffffffff3d7f3dff, + 0x7f3dffffffff3dff, + 0xffffffffff7fff3d, + 0xffffffffff3dffff, + 0x0000000087ffffff, + 0xffffffff0000ffff, + 0x3f3fffffffffffff, + 0xfffffffffffffffe, + 0xffff9fffffffffff, + 0xffffffff07fffffe, + 0x01ffc7ffffffffff, + 0x000fffff000fdfff, + 0x000ddfff000fffff, + 0xffcfffffffffffff, + 0x00000000108001ff, + 0xffffffff00000000, + 0x01ffffffffffffff, + 0xffff07ffffffffff, + 0x003fffffffffffff, + 0x01ff0fff7fffffff, + 0x001f3fffffff0000, + 0xffff0fffffffffff, + 0x00000000000003ff, + 0xffffffff0fffffff, + 0x001ffffe7fffffff, + 0x0000008000000000, + 0xffefffffffffffff, + 0x0000000000000fef, + 0xfc00f3ffffffffff, + 0x0003ffbfffffffff, + 0x3ffffffffc00e000, + 0xe7ffffffffff01ff, + 0x006fde0000000000, + 0x001fff8000000000, + 0xffffffff3f3fffff, + 0x3fffffffaaff3f3f, + 0x5fdfffffffffffff, + 0x1fdc1fff0fcf1fdc, + 0x8002000000000000, + 0x000000001fff0000, + 0xf3ffbd503e2ffc84, + 0xffffffff000043e0, + 0x00000000000001ff, + 0xffc0000000000000, + 0x000003ffffffffff, + 0xffff7fffffffffff, + 0xffffffff7fffffff, + 0x000c781fffffffff, + 0xffff20bfffffffff, + 0x000080ffffffffff, + 0x7f7f7f7f007fffff, + 0xffffffff7f7f7f7f, + 0x0000800000000000, + 0x1f3e03fe000000e0, + 0xfffffffee07fffff, + 0xf7ffffffffffffff, + 0xfffeffffffffffe0, + 0x07ffffff00007fff, + 0xffff000000000000, + 0x0000ffffffffffff, + 0x0000000000001fff, + 0x3fffffffffff0000, + 0x00000c00ffff1fff, + 0x8ff07fffffffffff, + 0xfffffffcff800000, + 0x03fffffffffff9ff, + 0xff80000000000000, + 0x000000fffffff7bb, + 0x000fffffffffffff, + 0x68fc00000000002f, + 0xffff07fffffffc00, + 0x1fffffff0007ffff, + 0xfff7ffffffffffff, + 0x7c00ffdf00008000, + 0x007fffffffffffff, + 0xc47fffff00003fff, + 0x7fffffffffffffff, + 0x003cffff38000005, + 0xffff7f7f007e7e7e, + 0xffff003ff7ffffff, + 0x000007ffffffffff, + 0xffff000fffffffff, + 0x0ffffffffffff87f, + 0xffff3fffffffffff, + 0x0000000003ffffff, + 0x5f7ffdffe0f8007f, + 0xffffffffffffffdb, + 0x0003ffffffffffff, + 0xfffffffffff80000, + 0x3fffffffffffffff, + 0xffffffffffff0000, + 0xfffffffffffcffff, + 0x0fff0000000000ff, + 0xffdf000000000000, + 0x1fffffffffffffff, + 0x07fffffe00000000, + 0xffffffc007fffffe, + 0x000000001cfcfcfc, ], r4: [ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 5, 5, 9, 5, 10, 11, 12, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 13, 14, - 15, 7, 16, 17, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 0, 1, 2, 3, 4, 5, 6, 7, 8, 5, 5, 9, 5, 10, 11, 12, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 13, + 14, 15, 7, 16, 17, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, - 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5 + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, ], r5: &[ 0, 1, 2, 3, 4, 5, 4, 4, 4, 4, 6, 7, 8, 9, 10, 11, 2, 
2, 12, 13, 14, 15, 4, 4, 2, 2, 2, - 2, 16, 17, 4, 4, 18, 19, 20, 21, 22, 4, 23, 4, 24, 25, 26, 27, 28, 29, 30, 4, 2, 31, 32, - 32, 33, 4, 4, 4, 4, 4, 4, 4, 34, 35, 4, 4, 2, 35, 36, 37, 32, 38, 2, 39, 40, 4, 41, 42, - 43, 44, 4, 4, 2, 45, 2, 46, 4, 4, 47, 48, 49, 50, 28, 4, 51, 4, 4, 4, 52, 4, 53, 54, 4, - 4, 4, 4, 55, 56, 57, 52, 4, 4, 4, 4, 58, 59, 60, 4, 61, 62, 63, 4, 4, 4, 4, 64, 4, 4, 4, - 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 65, 4, 2, 66, 2, 2, 2, 67, 4, 4, 4, 4, 4, + 2, 16, 17, 4, 4, 18, 19, 20, 21, 22, 4, 23, 4, 24, 25, 26, 27, 28, 29, 30, 4, 2, 31, + 32, 32, 33, 4, 4, 4, 4, 4, 4, 4, 34, 35, 4, 4, 2, 35, 36, 37, 32, 38, 2, 39, 40, 4, 41, + 42, 43, 44, 4, 4, 2, 45, 2, 46, 4, 4, 47, 48, 49, 50, 28, 4, 51, 4, 4, 4, 52, 4, 53, + 54, 4, 4, 4, 4, 55, 56, 57, 52, 4, 4, 4, 4, 58, 59, 60, 4, 61, 62, 63, 4, 4, 4, 4, 64, + 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 65, 4, 2, 66, 2, 2, 2, 67, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 66, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 66, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 68, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 68, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 2, 2, 2, 2, 2, 2, 2, 2, 52, 20, 4, 69, 16, 70, 71, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 4, - 4, 2, 72, 73, 74, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 75, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 32, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 20, 76, 2, 2, 2, 2, 2, - 77, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 2, 78, 79, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 80, 81, 82, 83, 84, 2, 2, 2, 2, 85, 86, 87, 88, 89, - 90, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 91, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 92, 2, 93, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 94, 95, 96, 4, 4, 4, 4, 4, 4, 4, 4, 4, 76, 97, 98, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 52, 20, 4, 69, 16, 70, 71, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 2, 4, 4, 2, 72, 73, 74, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 75, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 32, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 
4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 20, 76, 2, + 2, 2, 2, 2, 77, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 78, 79, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 80, 81, 82, 83, 84, 2, 2, 2, 2, 85, + 86, 87, 88, 89, 90, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 91, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 92, 2, 93, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 94, 95, 96, 4, 4, 4, 4, 4, 4, 4, 4, 4, 76, 97, 98, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 99, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 99, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 5, 2, 2, 2, 10, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 5, 2, 2, 2, 10, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 100, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 101, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 102, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4 + 2, 2, 2, 2, 2, 2, 2, 2, 100, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 101, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, + 2, 2, 2, 2, 102, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, ], r6: &[ - 0xb7ffff7fffffefff, 0x000000003fff3fff, 0xffffffffffffffff, 0x07ffffffffffffff, - 0x0000000000000000, 0x001fffffffffffff, 0xffffffff1fffffff, 0x000000000001ffff, - 0xffffe000ffffffff, 0x07ffffffffff07ff, 0xffffffff3fffffff, 0x00000000003eff0f, - 0xffff00003fffffff, 0x0fffffffff0fffff, 0xffff00ffffffffff, 0x0000000fffffffff, - 0x007fffffffffffff, 0x000000ff003fffff, 0x91bffffffffffd3f, 0x007fffff003fffff, - 0x000000007fffffff, 0x0037ffff00000000, 0x03ffffff003fffff, 0xc0ffffffffffffff, - 0x003ffffffeeff06f, 0x1fffffff00000000, 0x000000001fffffff, 0x0000001ffffffeff, - 0x003fffffffffffff, 0x0007ffff003fffff, 0x000000000003ffff, 0x00000000000001ff, - 0x0007ffffffffffff, 0x000000ffffffffff, 0xffff00801fffffff, 0x000000000000003f, - 0x01fffffffffffffc, 0x000001ffffff0000, 0x0047ffffffff0070, 0x000000001400001e, - 0x409ffffffffbffff, 0xffff01ffbfffbd7f, 0x000001ffffffffff, 0xe3edfdfffff99fef, - 0x0000000fe081199f, 0x00000000000007bb, 0x00000000000000b3, 0x7f3fffffffffffff, - 0x000000003f000000, 0x7fffffffffffffff, 
0x0000000000000011, 0x000007ffe7ffffff, - 0x01ffffffffffffff, 0xffffffff00000000, 0x80000000ffffffff, 0x7fe7ffffffffffff, - 0xffffffffffff0000, 0x0000000020ffffcf, 0x7f7ffffffffffdff, 0xfffc000000000001, - 0x007ffefffffcffff, 0xb47ffffffffffb7f, 0xfffffdbf000000cb, 0x00000000017b7fff, - 0x007fffff00000000, 0x0000000003ffffff, 0x00007fffffffffff, 0x000000000000000f, - 0x000000000000007f, 0x00003fffffff0000, 0xe0fffff80000000f, 0x000000000000ffff, - 0x7fffffffffff001f, 0x00000000fff80000, 0x0000000300000000, 0x0003ffffffffffff, - 0xffff000000000000, 0x0fffffffffffffff, 0x1fff07ffffffffff, 0x0000000043ff01ff, - 0xffffffffffdfffff, 0xebffde64dfffffff, 0xffffffffffffffef, 0x7bffffffdfdfe7bf, - 0xfffffffffffdfc5f, 0xffffff3fffffffff, 0xf7fffffff7fffffd, 0xffdfffffffdfffff, - 0xffff7fffffff7fff, 0xfffffdfffffffdff, 0x0000000000000ff7, 0x000007dbf9ffff7f, - 0x000000000000001f, 0x000000000000008f, 0x0af7fe96ffffffef, 0x5ef7f796aa96ea84, - 0x0ffffbee0ffffbff, 0xffff03ffffff03ff, 0x00000000000003ff, 0x00000000007fffff, - 0xffff0003ffffffff, 0x00000001ffffffff, 0x000000003fffffff + 0xb7ffff7fffffefff, + 0x000000003fff3fff, + 0xffffffffffffffff, + 0x07ffffffffffffff, + 0x0000000000000000, + 0x001fffffffffffff, + 0xffffffff1fffffff, + 0x000000000001ffff, + 0xffffe000ffffffff, + 0x07ffffffffff07ff, + 0xffffffff3fffffff, + 0x00000000003eff0f, + 0xffff00003fffffff, + 0x0fffffffff0fffff, + 0xffff00ffffffffff, + 0x0000000fffffffff, + 0x007fffffffffffff, + 0x000000ff003fffff, + 0x91bffffffffffd3f, + 0x007fffff003fffff, + 0x000000007fffffff, + 0x0037ffff00000000, + 0x03ffffff003fffff, + 0xc0ffffffffffffff, + 0x003ffffffeeff06f, + 0x1fffffff00000000, + 0x000000001fffffff, + 0x0000001ffffffeff, + 0x003fffffffffffff, + 0x0007ffff003fffff, + 0x000000000003ffff, + 0x00000000000001ff, + 0x0007ffffffffffff, + 0x000000ffffffffff, + 0xffff00801fffffff, + 0x000000000000003f, + 0x01fffffffffffffc, + 0x000001ffffff0000, + 0x0047ffffffff0070, + 0x000000001400001e, + 0x409ffffffffbffff, + 0xffff01ffbfffbd7f, + 0x000001ffffffffff, + 0xe3edfdfffff99fef, + 0x0000000fe081199f, + 0x00000000000007bb, + 0x00000000000000b3, + 0x7f3fffffffffffff, + 0x000000003f000000, + 0x7fffffffffffffff, + 0x0000000000000011, + 0x000007ffe7ffffff, + 0x01ffffffffffffff, + 0xffffffff00000000, + 0x80000000ffffffff, + 0x7fe7ffffffffffff, + 0xffffffffffff0000, + 0x0000000020ffffcf, + 0x7f7ffffffffffdff, + 0xfffc000000000001, + 0x007ffefffffcffff, + 0xb47ffffffffffb7f, + 0xfffffdbf000000cb, + 0x00000000017b7fff, + 0x007fffff00000000, + 0x0000000003ffffff, + 0x00007fffffffffff, + 0x000000000000000f, + 0x000000000000007f, + 0x00003fffffff0000, + 0xe0fffff80000000f, + 0x000000000000ffff, + 0x7fffffffffff001f, + 0x00000000fff80000, + 0x0000000300000000, + 0x0003ffffffffffff, + 0xffff000000000000, + 0x0fffffffffffffff, + 0x1fff07ffffffffff, + 0x0000000043ff01ff, + 0xffffffffffdfffff, + 0xebffde64dfffffff, + 0xffffffffffffffef, + 0x7bffffffdfdfe7bf, + 0xfffffffffffdfc5f, + 0xffffff3fffffffff, + 0xf7fffffff7fffffd, + 0xffdfffffffdfffff, + 0xffff7fffffff7fff, + 0xfffffdfffffffdff, + 0x0000000000000ff7, + 0x000007dbf9ffff7f, + 0x000000000000001f, + 0x000000000000008f, + 0x0af7fe96ffffffef, + 0x5ef7f796aa96ea84, + 0x0ffffbee0ffffbff, + 0xffff03ffffff03ff, + 0x00000000000003ff, + 0x00000000007fffff, + 0xffff0003ffffffff, + 0x00000001ffffffff, + 0x000000003fffffff, ], }; @@ -325,23 +605,47 @@ pub mod derived_property { pub const Case_Ignorable_table: &super::BoolTrie = &super::BoolTrie { r1: [ - 0x0400408000000000, 0x0000000140000000, 0x0190a10000000000, 
0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0xffff000000000000, 0xffffffffffffffff, - 0xffffffffffffffff, 0x0430ffffffffffff, 0x00000000000000b0, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x00000000000003f8, 0x0000000000000000, - 0x0000000000000000, 0x0000000002000000, 0xbffffffffffe0000, 0x00100000000000b6, - 0x0000000017ff003f, 0x00010000fffff801, 0x0000000000000000, 0x00003dffbfc00000, - 0xffff000000028000, 0x00000000000007ff, 0x0001ffc000000000, 0x243ff80000000000 + 0x0400408000000000, + 0x0000000140000000, + 0x0190a10000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0xffff000000000000, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0x0430ffffffffffff, + 0x00000000000000b0, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x00000000000003f8, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000002000000, + 0xbffffffffffe0000, + 0x00100000000000b6, + 0x0000000017ff003f, + 0x00010000fffff801, + 0x0000000000000000, + 0x00003dffbfc00000, + 0xffff000000028000, + 0x00000000000007ff, + 0x0001ffc000000000, + 0x243ff80000000000, ], r2: [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 8, 10, 11, 12, 13, 14, 15, 16, 11, 17, 18, 19, 2, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 2, 2, 2, 2, 2, 2, 2, 2, 2, 33, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 34, 35, 36, 37, 38, 39, 40, 2, 41, 2, 2, 2, 42, 43, 44, 2, - 45, 46, 47, 48, 49, 50, 2, 51, 52, 53, 54, 55, 2, 2, 2, 2, 2, 2, 56, 57, 58, 59, 60, 61, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 62, 2, 63, 2, 64, 2, 65, 66, 2, 2, 2, 2, - 2, 2, 2, 67, 2, 68, 69, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 34, 35, 36, 37, 38, 39, 40, 2, 41, 2, 2, 2, 42, 43, 44, + 2, 45, 46, 47, 48, 49, 50, 2, 51, 52, 53, 54, 55, 2, 2, 2, 2, 2, 2, 56, 57, 58, 59, 60, + 61, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 62, 2, 63, 2, 64, 2, 65, 66, 2, 2, + 2, 2, 2, 2, 2, 67, 2, 68, 69, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, @@ -356,9 +660,9 @@ pub mod derived_property { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 70, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 50, 2, 2, 2, 2, 71, 72, 73, 74, 75, 76, 77, 78, 79, 2, 2, 80, 81, - 82, 83, 84, 85, 86, 87, 88, 2, 89, 2, 90, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 70, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 50, 2, 2, 2, 2, 71, 72, 73, 74, 75, 76, 77, 78, 79, 2, 2, + 80, 81, 82, 83, 84, 85, 86, 87, 88, 2, 89, 2, 90, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, @@ -369,36 +673,111 @@ pub mod derived_property { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 91, 2, 92, 93, 2, 2, 2, 2, 2, 2, 2, 2, 94, 95, 2, 96, - 97, 98, 99, 100 + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 91, 2, 92, 93, 2, 2, 2, 2, 2, 2, 2, 2, 94, + 95, 2, 96, 97, 98, 99, 100, ], r3: &[ - 0x00003fffffc00000, 0x000000000e000000, 0x0000000000000000, 0xfffffffffff80000, - 0x1400000000000007, 0x0002000c00fe21fe, 0x1000000000000002, 0x4000000c0000201e, - 0x1000000000000006, 0x0023000000023986, 0xfc00000c000021be, 0x9000000000000002, - 0x0000000c0040201e, 0x0000000000000004, 0x0000000000002001, 0xc000000000000011, - 0x0000000c00603dc1, 0x0000000c00003040, 0x1800000000000003, 0x0000000c0000201e, - 0x00000000005c0400, 0x07f2000000000000, 0x0000000000007fc0, 0x1bf2000000000000, - 0x0000000000003f40, 0x02a0000003000000, 0x7ffe000000000000, 0x1ffffffffeffe0df, - 0x0000000000000040, 0x66fde00000000000, 0x001e0001c3000000, 0x0000000020002064, - 0x1000000000000000, 0x00000000e0000000, 0x001c0000001c0000, 0x000c0000000c0000, - 0x3fb0000000000000, 0x00000000208ffe40, 0x0000000000007800, 0x0000000000000008, - 0x0000020000000060, 0x0e04018700000000, 0x0000000009800000, 0x9ff81fe57f400000, - 0x7fff008000000000, 0x17d000000000000f, 0x000ff80000000004, 0x00003b3c00000003, - 0x0003a34000000000, 0x00cff00000000000, 0x3f00000000000000, 0x031021fdfff70000, - 0xfffff00000000000, 0x010007ffffffffff, 0xfffffffff8000000, 0xfbffffffffffffff, - 0xa000000000000000, 0x6000e000e000e003, 0x00007c900300f800, 0x8002ffdf00000000, - 0x000000001fff0000, 0x0001ffffffff0000, 0x3000000000000000, 0x0003800000000000, - 0x8000800000000000, 0xffffffff00000000, 0x0000800000000000, 0x083e3c0000000020, - 0x000000007e000000, 0x7000000000000000, 0x0000000000200000, 0x0000000000001000, - 0xbff7800000000000, 0x00000000f0000000, 0x0003000000000000, 0x00000003ffffffff, - 0x0001000000000000, 0x0000000000000700, 0x0300000000000000, 0x0000006000000844, - 0x8003ffff00000030, 0x00003fc000000000, 0x000000000003ff80, 0x13c8000000000007, - 0x0000006000008000, 0x00667e0000000000, 0x1001000000001008, 0xc19d000000000000, - 0x0058300020000002, 0x00000000f8000000, 0x0000212000000000, 0x0000000040000000, - 0xfffc000000000000, 0x0000000000000003, 0x0000ffff0008ffff, 0x0000000000240000, - 0x8000000000000000, 0x4000000004004080, 0x0001000000000001, 0x00000000c0000000, - 0x0e00000800000000 + 0x00003fffffc00000, + 0x000000000e000000, + 0x0000000000000000, + 0xfffffffffff80000, + 0x1400000000000007, + 0x0002000c00fe21fe, + 0x1000000000000002, + 0x4000000c0000201e, + 0x1000000000000006, + 0x0023000000023986, + 0xfc00000c000021be, + 0x9000000000000002, + 0x0000000c0040201e, + 0x0000000000000004, + 0x0000000000002001, + 0xc000000000000011, + 0x0000000c00603dc1, + 0x0000000c00003040, + 0x1800000000000003, + 0x0000000c0000201e, + 0x00000000005c0400, + 0x07f2000000000000, + 0x0000000000007fc0, + 0x1bf2000000000000, + 0x0000000000003f40, + 0x02a0000003000000, + 0x7ffe000000000000, + 0x1ffffffffeffe0df, + 0x0000000000000040, + 0x66fde00000000000, + 0x001e0001c3000000, + 0x0000000020002064, + 0x1000000000000000, + 0x00000000e0000000, + 
0x001c0000001c0000, + 0x000c0000000c0000, + 0x3fb0000000000000, + 0x00000000208ffe40, + 0x0000000000007800, + 0x0000000000000008, + 0x0000020000000060, + 0x0e04018700000000, + 0x0000000009800000, + 0x9ff81fe57f400000, + 0x7fff008000000000, + 0x17d000000000000f, + 0x000ff80000000004, + 0x00003b3c00000003, + 0x0003a34000000000, + 0x00cff00000000000, + 0x3f00000000000000, + 0x031021fdfff70000, + 0xfffff00000000000, + 0x010007ffffffffff, + 0xfffffffff8000000, + 0xfbffffffffffffff, + 0xa000000000000000, + 0x6000e000e000e003, + 0x00007c900300f800, + 0x8002ffdf00000000, + 0x000000001fff0000, + 0x0001ffffffff0000, + 0x3000000000000000, + 0x0003800000000000, + 0x8000800000000000, + 0xffffffff00000000, + 0x0000800000000000, + 0x083e3c0000000020, + 0x000000007e000000, + 0x7000000000000000, + 0x0000000000200000, + 0x0000000000001000, + 0xbff7800000000000, + 0x00000000f0000000, + 0x0003000000000000, + 0x00000003ffffffff, + 0x0001000000000000, + 0x0000000000000700, + 0x0300000000000000, + 0x0000006000000844, + 0x8003ffff00000030, + 0x00003fc000000000, + 0x000000000003ff80, + 0x13c8000000000007, + 0x0000006000008000, + 0x00667e0000000000, + 0x1001000000001008, + 0xc19d000000000000, + 0x0058300020000002, + 0x00000000f8000000, + 0x0000212000000000, + 0x0000000040000000, + 0xfffc000000000000, + 0x0000000000000003, + 0x0000ffff0008ffff, + 0x0000000000240000, + 0x8000000000000000, + 0x4000000004004080, + 0x0001000000000001, + 0x00000000c0000000, + 0x0e00000800000000, ], r4: [ 0, 1, 2, 2, 2, 2, 3, 2, 2, 2, 2, 4, 2, 5, 6, 7, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, @@ -409,47 +788,91 @@ pub mod derived_property { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 8, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, ], r5: &[ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0, 0, 17, 18, 19, 0, 0, 20, 21, 22, - 23, 0, 0, 24, 25, 26, 27, 28, 0, 29, 0, 0, 0, 30, 0, 0, 0, 0, 0, 0, 0, 31, 32, 33, 0, 0, - 0, 0, 0, 34, 0, 35, 0, 36, 37, 38, 0, 0, 0, 0, 39, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 23, 0, 0, 24, 25, 26, 27, 28, 0, 29, 0, 0, 0, 30, 0, 0, 0, 0, 0, 0, 0, 31, 32, 33, 0, + 0, 0, 0, 0, 34, 0, 35, 0, 36, 37, 38, 0, 0, 0, 0, 39, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 40, 41, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 43, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 45, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 46, 47, 0, 0, 48, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 50, 51, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 53, 0, 54, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 40, 41, 42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 43, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 45, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 46, 47, 0, 0, 48, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 50, 51, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 52, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 53, 0, + 54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 56, 57, 0, 0, 57, 57, 57, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 56, 57, 0, 0, 57, 57, 57, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ], r6: &[ - 0x0000000000000000, 0x2000000000000000, 0x0000000100000000, 0x07c0000000000000, - 0x870000000000f06e, 0x0000006000000000, 0x000000f000000000, 0x000000000001ffc0, - 0xff00000000000002, 0x800000000000007f, 0x2678000000000003, 0x0000000000002000, - 0x001fef8000000007, 0x0008000000000000, 0x7fc0000000000003, 0x0000000000001e00, - 0x40d3800000000000, 0x000007f880000000, 0x1800000000000003, 0x001f1fc000000001, - 0xff00000000000000, 0x000000004000005c, 0x85f8000000000000, 0x000000000000000d, - 0xb03c000000000000, 0x0000000030000001, 0xa7f8000000000000, 0x0000000000000001, - 0x00bf280000000000, 0x00000fbce0000000, 0x06ff800000000000, 0x79f80000000007fe, - 0x000000000e7e0080, 0x00000000037ffc00, 0xbf7f000000000000, 0x006dfcfffffc0000, - 0xb47e000000000000, 0x00000000000000bf, 0x0000000000a30000, 0x0018000000000000, - 0x001f000000000000, 0x007f000000000000, 0x000000000000000f, 0x00000000ffff8000, - 0x0000000300000000, 0x0000000f60000000, 0xfff8038000000000, 0x00003c0000000fe7, - 0x000000000000001c, 0xf87fffffffffffff, 0x00201fffffffffff, 0x0000fffef8000010, - 0x000007dbf9ffff7f, 0x00000000007f0000, 0x00000000000007f0, 0xf800000000000000, - 0xffffffff00000002, 0xffffffffffffffff, 0x0000ffffffffffff + 0x0000000000000000, + 0x2000000000000000, + 0x0000000100000000, + 0x07c0000000000000, + 0x870000000000f06e, + 0x0000006000000000, + 0x000000f000000000, + 0x000000000001ffc0, + 0xff00000000000002, + 0x800000000000007f, + 0x2678000000000003, + 0x0000000000002000, + 0x001fef8000000007, + 0x0008000000000000, + 0x7fc0000000000003, + 0x0000000000001e00, + 0x40d3800000000000, + 0x000007f880000000, + 0x1800000000000003, + 0x001f1fc000000001, + 0xff00000000000000, + 0x000000004000005c, + 0x85f8000000000000, + 0x000000000000000d, + 0xb03c000000000000, + 0x0000000030000001, + 0xa7f8000000000000, + 0x0000000000000001, + 0x00bf280000000000, + 0x00000fbce0000000, + 0x06ff800000000000, + 0x79f80000000007fe, + 0x000000000e7e0080, + 0x00000000037ffc00, + 0xbf7f000000000000, + 0x006dfcfffffc0000, + 0xb47e000000000000, + 0x00000000000000bf, + 0x0000000000a30000, + 0x0018000000000000, + 0x001f000000000000, + 0x007f000000000000, + 
0x000000000000000f, + 0x00000000ffff8000, + 0x0000000300000000, + 0x0000000f60000000, + 0xfff8038000000000, + 0x00003c0000000fe7, + 0x000000000000001c, + 0xf87fffffffffffff, + 0x00201fffffffffff, + 0x0000fffef8000010, + 0x000007dbf9ffff7f, + 0x00000000007f0000, + 0x00000000000007f0, + 0xf800000000000000, + 0xffffffff00000002, + 0xffffffffffffffff, + 0x0000ffffffffffff, ], }; @@ -459,22 +882,46 @@ pub mod derived_property { pub const Cased_table: &super::BoolTrie = &super::BoolTrie { r1: [ - 0x0000000000000000, 0x07fffffe07fffffe, 0x0420040000000000, 0xff7fffffff7fffff, - 0xffffffffffffffff, 0xffffffffffffffff, 0xf7ffffffffffffff, 0xfffffffffffffff0, - 0xffffffffffffffff, 0xffffffffffffffff, 0x01ffffffffefffff, 0x0000001f00000003, - 0x0000000000000000, 0xbccf000000000020, 0xfffffffbffffd740, 0xffbfffffffffffff, - 0xffffffffffffffff, 0xffffffffffffffff, 0xfffffffffffffc03, 0xffffffffffffffff, - 0xfffeffffffffffff, 0xffffffff007fffff, 0x00000000000001ff, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000 + 0x0000000000000000, + 0x07fffffe07fffffe, + 0x0420040000000000, + 0xff7fffffff7fffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xf7ffffffffffffff, + 0xfffffffffffffff0, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0x01ffffffffefffff, + 0x0000001f00000003, + 0x0000000000000000, + 0xbccf000000000020, + 0xfffffffbffffd740, + 0xffbfffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xfffffffffffffc03, + 0xffffffffffffffff, + 0xfffeffffffffffff, + 0xffffffff007fffff, + 0x00000000000001ff, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, ], r2: [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 5, 5, 5, 0, 5, 5, 5, 5, 6, 7, 8, 9, 0, 10, 11, 0, 12, 13, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 15, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 17, 18, 5, 19, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 17, 18, 5, 19, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -490,8 +937,8 @@ pub mod derived_property { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 22, - 0, 23, 5, 24, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 27, 5, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, + 22, 0, 23, 5, 24, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 27, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -502,18 +949,41 @@ pub mod derived_property { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 29, 30, 0, 0 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 29, 30, 0, 0, ], r3: &[ - 0x0000000000000000, 0xffffffff00000000, 0xe7ffffffffff20bf, 0x3f3fffffffffffff, - 0xe7ffffffffff01ff, 0xffffffffffffffff, 0xffffffff3f3fffff, 0x3fffffffaaff3f3f, - 0x5fdfffffffffffff, 0x1fdc1fff0fcf1fdc, 0x8002000000000000, 0x000000001fff0000, - 0xf21fbd503e2ffc84, 0xffffffff000043e0, 0x0000000000000018, 0xffc0000000000000, - 0x000003ffffffffff, 0xffff7fffffffffff, 0xffffffff7fffffff, 0x000c781fffffffff, - 0x000020bfffffffff, 0x00003fffffffffff, 0x000000003fffffff, 0xfffffffc00000000, - 0x03ffffffffff78ff, 0x0700000000000000, 0xffff000000000000, 0xffff003ff7ffffff, - 0x0000000000f8007f, 0x07fffffe00000000, 0x0000000007fffffe + 0x0000000000000000, + 0xffffffff00000000, + 0xe7ffffffffff20bf, + 0x3f3fffffffffffff, + 0xe7ffffffffff01ff, + 0xffffffffffffffff, + 0xffffffff3f3fffff, + 0x3fffffffaaff3f3f, + 0x5fdfffffffffffff, + 0x1fdc1fff0fcf1fdc, + 0x8002000000000000, + 0x000000001fff0000, + 0xf21fbd503e2ffc84, + 0xffffffff000043e0, + 0x0000000000000018, + 0xffc0000000000000, + 0x000003ffffffffff, + 0xffff7fffffffffff, + 0xffffffff7fffffff, + 0x000c781fffffffff, + 0x000020bfffffffff, + 0x00003fffffffffff, + 0x000000003fffffff, + 0xfffffffc00000000, + 0x03ffffffffff78ff, + 0x0700000000000000, + 0xffff000000000000, + 0xffff003ff7ffffff, + 0x0000000000f8007f, + 0x07fffffe00000000, + 0x0000000007fffffe, ], r4: [ 0, 1, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 4, 5, 6, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, @@ -524,7 +994,7 @@ pub mod derived_property { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, ], r5: &[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -540,17 +1010,33 @@ pub mod derived_property { 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 20, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 20, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ], r6: &[ - 0x0000000000000000, 0xffffffffffffffff, 0x000000000000ffff, 0xffff000000000000, - 0x0fffffffff0fffff, 
0x0007ffffffffffff, 0xffffffff00000000, 0x00000000ffffffff, - 0xffffffffffdfffff, 0xebffde64dfffffff, 0xffffffffffffffef, 0x7bffffffdfdfe7bf, - 0xfffffffffffdfc5f, 0xffffff3fffffffff, 0xf7fffffff7fffffd, 0xffdfffffffdfffff, - 0xffff7fffffff7fff, 0xfffffdfffffffdff, 0x0000000000000ff7, 0x000000000000000f, - 0xffff03ffffff03ff, 0x00000000000003ff + 0x0000000000000000, + 0xffffffffffffffff, + 0x000000000000ffff, + 0xffff000000000000, + 0x0fffffffff0fffff, + 0x0007ffffffffffff, + 0xffffffff00000000, + 0x00000000ffffffff, + 0xffffffffffdfffff, + 0xebffde64dfffffff, + 0xffffffffffffffef, + 0x7bffffffdfdfe7bf, + 0xfffffffffffdfc5f, + 0xffffff3fffffffff, + 0xf7fffffff7fffffd, + 0xffdfffffffdfffff, + 0xffff7fffffff7fff, + 0xfffffdfffffffdff, + 0x0000000000000ff7, + 0x000000000000000f, + 0xffff03ffffff03ff, + 0x00000000000003ff, ], }; @@ -560,23 +1046,47 @@ pub mod derived_property { pub const Grapheme_Extend_table: &super::BoolTrie = &super::BoolTrie { r1: [ - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0xffffffffffffffff, 0x0000ffffffffffff, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x00000000000003f8, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0xbffffffffffe0000, 0x00000000000000b6, - 0x0000000007ff0000, 0x00010000fffff800, 0x0000000000000000, 0x00003d9f9fc00000, - 0xffff000000020000, 0x00000000000007ff, 0x0001ffc000000000, 0x200ff80000000000 + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0xffffffffffffffff, + 0x0000ffffffffffff, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x00000000000003f8, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0xbffffffffffe0000, + 0x00000000000000b6, + 0x0000000007ff0000, + 0x00010000fffff800, + 0x0000000000000000, + 0x00003d9f9fc00000, + 0xffff000000020000, + 0x00000000000007ff, + 0x0001ffc000000000, + 0x200ff80000000000, ], r2: [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 2, 21, 22, - 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 33, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 34, 35, 36, 37, 38, 2, 39, 2, 40, 2, 2, 2, 41, 42, 43, 2, 44, - 45, 46, 47, 48, 2, 2, 49, 2, 2, 2, 50, 2, 2, 2, 2, 2, 2, 2, 2, 51, 2, 2, 52, 2, 2, 2, 2, + 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 33, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 34, 35, 36, 37, 38, 2, 39, 2, 40, 2, 2, 2, 41, 42, 43, 2, + 44, 45, 46, 47, 48, 2, 2, 49, 2, 2, 2, 50, 2, 2, 2, 2, 2, 2, 2, 2, 51, 2, 2, 52, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 53, 2, 54, 2, 55, 2, 2, 2, 2, 2, 2, 2, 2, 56, - 2, 57, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 53, 2, 54, 2, 55, 2, 2, 2, 2, 2, 2, 2, + 2, 56, 2, 57, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, @@ -592,8 +1102,8 @@ pub mod derived_property { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 58, 59, 60, 2, 2, 2, 2, 61, 2, 2, 62, 63, 64, 65, 66, 67, 68, - 69, 70, 2, 2, 2, 71, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 58, 59, 60, 2, 2, 2, 2, 61, 2, 2, 62, 63, 64, 65, 66, + 67, 68, 69, 70, 2, 2, 2, 71, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, @@ -604,28 +1114,83 @@ pub mod derived_property { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 72, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 73, 2, 2, 2, 2, 2, 59, 2 + 2, 2, 2, 2, 2, 2, 2, 72, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 73, 2, 2, 2, 2, 2, 59, 2, ], r3: &[ - 0x00003eeffbc00000, 0x000000000e000000, 0x0000000000000000, 0xfffffffbfff80000, - 0x1400000000000007, 0x0000000c00fe21fe, 0x5000000000000002, 0x4000000c0080201e, - 0x1000000000000006, 0x0023000000023986, 0xfc00000c000021be, 0xd000000000000002, - 0x0000000c00c0201e, 0x4000000000000004, 0x0000000000802001, 0xc000000000000011, - 0x0000000c00603dc1, 0x9000000000000002, 0x0000000c00603044, 0x5800000000000003, - 0x0000000c0080201e, 0x00000000805c8400, 0x07f2000000000000, 0x0000000000007f80, - 0x1bf2000000000000, 0x0000000000003f00, 0x02a0000003000000, 0x7ffe000000000000, - 0x1ffffffffeffe0df, 0x0000000000000040, 0x66fde00000000000, 0x001e0001c3000000, - 0x0000000020002064, 0x00000000e0000000, 0x001c0000001c0000, 0x000c0000000c0000, - 0x3fb0000000000000, 0x00000000200ffe40, 0x0000000000003800, 0x0000020000000060, - 0x0e04018700000000, 0x0000000009800000, 0x9ff81fe57f400000, 0x7fff000000000000, - 0x17d000000000000f, 0x000ff80000000004, 0x00003b3c00000003, 0x0003a34000000000, - 0x00cff00000000000, 0x031021fdfff70000, 0xfbffffffffffffff, 0x0000000000001000, - 0x0001ffffffff0000, 0x0003800000000000, 0x8000000000000000, 0xffffffff00000000, - 0x0000fc0000000000, 0x0000000006000000, 0x3ff7800000000000, 0x00000000c0000000, - 0x0003000000000000, 0x0000006000000844, 0x8003ffff00000030, 0x00003fc000000000, - 0x000000000003ff80, 0x13c8000000000007, 0x0000002000000000, 0x00667e0000000000, - 0x1000000000001008, 0xc19d000000000000, 0x0040300000000002, 0x0000212000000000, - 0x0000000040000000, 0x0000ffff0000ffff + 0x00003eeffbc00000, + 0x000000000e000000, + 0x0000000000000000, + 0xfffffffbfff80000, + 0x1400000000000007, + 0x0000000c00fe21fe, + 0x5000000000000002, + 0x4000000c0080201e, + 0x1000000000000006, + 0x0023000000023986, + 0xfc00000c000021be, + 0xd000000000000002, + 0x0000000c00c0201e, + 0x4000000000000004, + 0x0000000000802001, + 0xc000000000000011, + 0x0000000c00603dc1, + 0x9000000000000002, + 0x0000000c00603044, + 0x5800000000000003, + 0x0000000c0080201e, + 0x00000000805c8400, + 0x07f2000000000000, + 
0x0000000000007f80, + 0x1bf2000000000000, + 0x0000000000003f00, + 0x02a0000003000000, + 0x7ffe000000000000, + 0x1ffffffffeffe0df, + 0x0000000000000040, + 0x66fde00000000000, + 0x001e0001c3000000, + 0x0000000020002064, + 0x00000000e0000000, + 0x001c0000001c0000, + 0x000c0000000c0000, + 0x3fb0000000000000, + 0x00000000200ffe40, + 0x0000000000003800, + 0x0000020000000060, + 0x0e04018700000000, + 0x0000000009800000, + 0x9ff81fe57f400000, + 0x7fff000000000000, + 0x17d000000000000f, + 0x000ff80000000004, + 0x00003b3c00000003, + 0x0003a34000000000, + 0x00cff00000000000, + 0x031021fdfff70000, + 0xfbffffffffffffff, + 0x0000000000001000, + 0x0001ffffffff0000, + 0x0003800000000000, + 0x8000000000000000, + 0xffffffff00000000, + 0x0000fc0000000000, + 0x0000000006000000, + 0x3ff7800000000000, + 0x00000000c0000000, + 0x0003000000000000, + 0x0000006000000844, + 0x8003ffff00000030, + 0x00003fc000000000, + 0x000000000003ff80, + 0x13c8000000000007, + 0x0000002000000000, + 0x00667e0000000000, + 0x1000000000001008, + 0xc19d000000000000, + 0x0040300000000002, + 0x0000212000000000, + 0x0000000040000000, + 0x0000ffff0000ffff, ], r4: [ 0, 1, 2, 2, 2, 2, 3, 2, 2, 2, 2, 4, 2, 5, 6, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, @@ -636,44 +1201,85 @@ pub mod derived_property { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 7, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, ], r5: &[ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 8, 9, 10, 0, 11, 12, 13, 14, 15, 0, 0, 16, 17, 18, 0, 0, 19, 20, 21, - 22, 0, 0, 23, 24, 25, 26, 27, 0, 28, 0, 0, 0, 29, 0, 0, 0, 0, 0, 0, 0, 30, 31, 32, 0, 0, - 0, 0, 0, 33, 0, 34, 0, 35, 36, 37, 0, 0, 0, 0, 38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 22, 0, 0, 23, 24, 25, 26, 27, 0, 28, 0, 0, 0, 29, 0, 0, 0, 0, 0, 0, 0, 30, 31, 32, 0, + 0, 0, 0, 0, 33, 0, 34, 0, 35, 36, 37, 0, 0, 0, 0, 38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 44, 0, 0, 45, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 46, 47, 48, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 50, 0, 51, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 52, 53, 0, 0, - 53, 53, 53, 54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 44, 0, 0, 45, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 46, 47, 48, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 50, 0, + 51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 52, + 53, 0, 0, 53, 53, 53, 54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0 + 0, 0, 0, 0, 0, 0, 0, ], r6: &[ - 0x0000000000000000, 0x2000000000000000, 0x0000000100000000, 0x07c0000000000000, - 0x870000000000f06e, 0x0000006000000000, 0x000000f000000000, 0x000000000001ffc0, - 0xff00000000000002, 0x800000000000007f, 0x0678000000000003, 0x001fef8000000007, - 0x0008000000000000, 0x7fc0000000000003, 0x0000000000001e00, 0x40d3800000000000, - 0x000007f880000000, 0x5800000000000003, 0x001f1fc000800001, 0xff00000000000000, - 0x000000004000005c, 0xa5f9000000000000, 0x000000000000000d, 0xb03c800000000000, - 0x0000000030000001, 0xa7f8000000000000, 0x0000000000000001, 0x00bf280000000000, - 0x00000fbce0000000, 0x06ff800000000000, 0x79f80000000007fe, 0x000000000e7e0080, - 0x00000000037ffc00, 0xbf7f000000000000, 0x006dfcfffffc0000, 0xb47e000000000000, - 0x00000000000000bf, 0x0000000000a30000, 0x0018000000000000, 0x001f000000000000, - 0x007f000000000000, 0x0000000000078000, 0x0000000060000000, 0xf807c3a000000000, - 0x00003c0000000fe7, 0x000000000000001c, 0xf87fffffffffffff, 0x00201fffffffffff, - 0x0000fffef8000010, 0x000007dbf9ffff7f, 0x00000000007f0000, 0x00000000000007f0, - 0xffffffff00000000, 0xffffffffffffffff, 0x0000ffffffffffff + 0x0000000000000000, + 0x2000000000000000, + 0x0000000100000000, + 0x07c0000000000000, + 0x870000000000f06e, + 0x0000006000000000, + 0x000000f000000000, + 0x000000000001ffc0, + 0xff00000000000002, + 0x800000000000007f, + 0x0678000000000003, + 0x001fef8000000007, + 0x0008000000000000, + 0x7fc0000000000003, + 0x0000000000001e00, + 0x40d3800000000000, + 0x000007f880000000, + 0x5800000000000003, + 0x001f1fc000800001, + 0xff00000000000000, + 0x000000004000005c, + 0xa5f9000000000000, + 0x000000000000000d, + 0xb03c800000000000, + 0x0000000030000001, + 0xa7f8000000000000, + 0x0000000000000001, + 0x00bf280000000000, + 0x00000fbce0000000, + 0x06ff800000000000, + 0x79f80000000007fe, + 0x000000000e7e0080, + 0x00000000037ffc00, + 0xbf7f000000000000, + 0x006dfcfffffc0000, + 0xb47e000000000000, + 0x00000000000000bf, + 0x0000000000a30000, + 0x0018000000000000, + 0x001f000000000000, + 0x007f000000000000, + 0x0000000000078000, + 0x0000000060000000, + 0xf807c3a000000000, + 0x00003c0000000fe7, + 0x000000000000001c, + 0xf87fffffffffffff, + 0x00201fffffffffff, + 0x0000fffef8000010, + 0x000007dbf9ffff7f, + 0x00000000007f0000, + 0x00000000000007f0, + 0xffffffff00000000, + 0xffffffffffffffff, + 0x0000ffffffffffff, ], }; @@ -683,14 +1289,38 @@ pub mod derived_property { pub const Lowercase_table: &super::BoolTrie = &super::BoolTrie { r1: [ - 0x0000000000000000, 0x07fffffe00000000, 0x0420040000000000, 0xff7fffff80000000, - 0x55aaaaaaaaaaaaaa, 0xd4aaaaaaaaaaab55, 0xe6512d2a4e243129, 0xaa29aaaab5555240, - 0x93faaaaaaaaaaaaa, 0xffffffffffffaa85, 0x01ffffffffefffff, 0x0000001f00000003, - 0x0000000000000000, 0x3c8a000000000020, 0xfffff00000010000, 0x192faaaaaae37fff, - 
0xffff000000000000, 0xaaaaaaaaffffffff, 0xaaaaaaaaaaaaa802, 0xaaaaaaaaaaaad554, - 0x0000aaaaaaaaaaaa, 0xffffffff00000000, 0x00000000000001ff, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000 + 0x0000000000000000, + 0x07fffffe00000000, + 0x0420040000000000, + 0xff7fffff80000000, + 0x55aaaaaaaaaaaaaa, + 0xd4aaaaaaaaaaab55, + 0xe6512d2a4e243129, + 0xaa29aaaab5555240, + 0x93faaaaaaaaaaaaa, + 0xffffffffffffaa85, + 0x01ffffffffefffff, + 0x0000001f00000003, + 0x0000000000000000, + 0x3c8a000000000020, + 0xfffff00000010000, + 0x192faaaaaae37fff, + 0xffff000000000000, + 0xaaaaaaaaffffffff, + 0xaaaaaaaaaaaaa802, + 0xaaaaaaaaaaaad554, + 0x0000aaaaaaaaaaaa, + 0xffffffff00000000, + 0x00000000000001ff, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, ], r2: [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -714,8 +1344,8 @@ pub mod derived_property { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 22, - 0, 23, 24, 25, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 27, 4, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, + 22, 0, 23, 24, 25, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 27, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -726,18 +1356,40 @@ pub mod derived_property { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 29, 0, 0 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 0, 0, ], r3: &[ - 0x0000000000000000, 0xe7ffffffffff0000, 0x3f00000000000000, 0x00000000000001ff, - 0xffffffffffffffff, 0xaaaaaaaaaaaaaaaa, 0xaaaaaaaabfeaaaaa, 0x00ff00ff003f00ff, - 0x3fff00ff00ff003f, 0x40df00ff00ff00ff, 0x00dc00ff00cf00dc, 0x8002000000000000, - 0x000000001fff0000, 0x321080000008c400, 0xffff0000000043c0, 0x0000000000000010, - 0x000003ffffff0000, 0xffff000000000000, 0x3fda15627fffffff, 0x0008501aaaaaaaaa, - 0x000020bfffffffff, 0x00002aaaaaaaaaaa, 0x000000003aaaaaaa, 0xaaabaaa800000000, - 0x95ffaaaaaaaaaaaa, 0x02a082aaaaba50aa, 0x0700000000000000, 0xffff003ff7ffffff, - 0x0000000000f8007f, 0x0000000007fffffe + 0x0000000000000000, + 0xe7ffffffffff0000, + 0x3f00000000000000, + 0x00000000000001ff, + 0xffffffffffffffff, + 0xaaaaaaaaaaaaaaaa, + 0xaaaaaaaabfeaaaaa, + 0x00ff00ff003f00ff, + 0x3fff00ff00ff003f, + 0x40df00ff00ff00ff, + 0x00dc00ff00cf00dc, + 0x8002000000000000, + 0x000000001fff0000, + 0x321080000008c400, + 
0xffff0000000043c0, + 0x0000000000000010, + 0x000003ffffff0000, + 0xffff000000000000, + 0x3fda15627fffffff, + 0x0008501aaaaaaaaa, + 0x000020bfffffffff, + 0x00002aaaaaaaaaaa, + 0x000000003aaaaaaa, + 0xaaabaaa800000000, + 0x95ffaaaaaaaaaaaa, + 0x02a082aaaaba50aa, + 0x0700000000000000, + 0xffff003ff7ffffff, + 0x0000000000f8007f, + 0x0000000007fffffe, ], r4: [ 0, 1, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 4, 5, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, @@ -748,7 +1400,7 @@ pub mod derived_property { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, ], r5: &[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -761,19 +1413,37 @@ pub mod derived_property { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, - 21, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 21, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ], r6: &[ - 0x0000000000000000, 0xffffff0000000000, 0x000000000000ffff, 0x0fffffffff000000, - 0x0007ffffffffffff, 0x00000000ffffffff, 0xffffffff00000000, 0x000ffffffc000000, - 0x000000ffffdfc000, 0xebc000000ffffffc, 0xfffffc000000ffef, 0x00ffffffc000000f, - 0x00000ffffffc0000, 0xfc000000ffffffc0, 0xffffc000000fffff, 0x0ffffffc000000ff, - 0x0000ffffffc00000, 0x0000003ffffffc00, 0xf0000003f7fffffc, 0xffc000000fdfffff, - 0xffff0000003f7fff, 0xfffffc000000fdff, 0x0000000000000bf7, 0xfffffffc00000000, - 0x000000000000000f + 0x0000000000000000, + 0xffffff0000000000, + 0x000000000000ffff, + 0x0fffffffff000000, + 0x0007ffffffffffff, + 0x00000000ffffffff, + 0xffffffff00000000, + 0x000ffffffc000000, + 0x000000ffffdfc000, + 0xebc000000ffffffc, + 0xfffffc000000ffef, + 0x00ffffffc000000f, + 0x00000ffffffc0000, + 0xfc000000ffffffc0, + 0xffffc000000fffff, + 0x0ffffffc000000ff, + 0x0000ffffffc00000, + 0x0000003ffffffc00, + 0xf0000003f7fffffc, + 0xffc000000fdfffff, + 0xffff0000003f7fff, + 0xfffffc000000fdff, + 0x0000000000000bf7, + 0xfffffffc00000000, + 0x000000000000000f, ], }; @@ -783,22 +1453,46 @@ pub mod derived_property { pub const Uppercase_table: &super::BoolTrie = &super::BoolTrie { r1: [ - 0x0000000000000000, 0x0000000007fffffe, 0x0000000000000000, 0x000000007f7fffff, - 0xaa55555555555555, 0x2b555555555554aa, 0x11aed2d5b1dbced6, 0x55d255554aaaa490, - 0x6c05555555555555, 0x000000000000557a, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 0x8045000000000000, 0x00000ffbfffed740, 0xe6905555551c8000, - 0x0000ffffffffffff, 0x5555555500000000, 0x5555555555555401, 0x5555555555552aab, - 0xfffe555555555555, 0x00000000007fffff, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 
0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000 + 0x0000000000000000, + 0x0000000007fffffe, + 0x0000000000000000, + 0x000000007f7fffff, + 0xaa55555555555555, + 0x2b555555555554aa, + 0x11aed2d5b1dbced6, + 0x55d255554aaaa490, + 0x6c05555555555555, + 0x000000000000557a, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x8045000000000000, + 0x00000ffbfffed740, + 0xe6905555551c8000, + 0x0000ffffffffffff, + 0x5555555500000000, + 0x5555555555555401, + 0x5555555555552aab, + 0xfffe555555555555, + 0x00000000007fffff, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, + 0x0000000000000000, ], r2: [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, - 0, 5, 5, 6, 5, 7, 8, 9, 10, 0, 0, 0, 0, 11, 12, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, - 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 16, 17, 5, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 5, 5, 6, 5, 7, 8, 9, 10, 0, 0, 0, 0, 11, 12, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 16, 17, 5, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -814,8 +1508,8 @@ pub mod derived_property { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 20, 0, - 21, 22, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, + 20, 0, 21, 22, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -827,16 +1521,34 @@ pub mod derived_property { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 24, 0, 0, 0 + 0, 0, 0, 0, 0, 0, 0, 24, 0, 0, 0, ], r3: &[ - 0x0000000000000000, 0xffffffff00000000, 0x00000000000020bf, 0x003fffffffffffff, - 0xe7ffffffffff0000, 0x5555555555555555, 0x5555555540155555, 0xff00ff003f00ff00, - 0x0000ff00aa003f00, 0x0f00000000000000, 0x0f001f000f000f00, 0xc00f3d503e273884, - 0x0000ffff00000020, 0x0000000000000008, 0xffc0000000000000, 0x000000000000ffff, 
- 0x00007fffffffffff, 0xc025ea9d00000000, 0x0004280555555555, 0x0000155555555555, - 0x0000000005555555, 0x5554555400000000, 0x6a00555555555555, 0x015f7d5555452855, - 0x07fffffe00000000 + 0x0000000000000000, + 0xffffffff00000000, + 0x00000000000020bf, + 0x003fffffffffffff, + 0xe7ffffffffff0000, + 0x5555555555555555, + 0x5555555540155555, + 0xff00ff003f00ff00, + 0x0000ff00aa003f00, + 0x0f00000000000000, + 0x0f001f000f000f00, + 0xc00f3d503e273884, + 0x0000ffff00000020, + 0x0000000000000008, + 0xffc0000000000000, + 0x000000000000ffff, + 0x00007fffffffffff, + 0xc025ea9d00000000, + 0x0004280555555555, + 0x0000155555555555, + 0x0000000005555555, + 0x5554555400000000, + 0x6a00555555555555, + 0x015f7d5555452855, + 0x07fffffe00000000, ], r4: [ 0, 1, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 4, 5, 6, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, @@ -847,7 +1559,7 @@ pub mod derived_property { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, ], r5: &[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -860,21 +1572,40 @@ pub mod derived_property { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, - 21, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 21, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 24, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 24, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ], r6: &[ - 0x0000000000000000, 0x000000ffffffffff, 0xffff000000000000, 0x00000000000fffff, - 0x0007ffffffffffff, 0xffffffff00000000, 0x00000000ffffffff, 0xfff0000003ffffff, - 0xffffff0000003fff, 0x003fde64d0000003, 0x000003ffffff0000, 0x7b0000001fdfe7b0, - 0xfffff0000001fc5f, 0x03ffffff0000003f, 0x00003ffffff00000, 0xf0000003ffffff00, - 0xffff0000003fffff, 0xffffff00000003ff, 0x07fffffc00000001, 0x001ffffff0000000, - 0x00007fffffc00000, 0x000001ffffff0000, 0x0000000000000400, 0x00000003ffffffff, - 0xffff03ffffff03ff, 0x00000000000003ff + 0x0000000000000000, + 0x000000ffffffffff, + 0xffff000000000000, + 0x00000000000fffff, + 0x0007ffffffffffff, + 0xffffffff00000000, + 0x00000000ffffffff, + 0xfff0000003ffffff, + 0xffffff0000003fff, + 0x003fde64d0000003, + 0x000003ffffff0000, + 0x7b0000001fdfe7b0, + 0xfffff0000001fc5f, + 0x03ffffff0000003f, + 0x00003ffffff00000, + 0xf0000003ffffff00, + 0xffff0000003fffff, + 0xffffff00000003ff, + 0x07fffffc00000001, + 0x001ffffff0000000, + 0x00007fffffc00000, + 0x000001ffffff0000, + 
0x0000000000000400, + 0x00000003ffffffff, + 0xffff03ffffff03ff, + 0x00000000000003ff, ], }; @@ -884,14 +1615,38 @@ pub mod derived_property { pub const XID_Continue_table: &super::BoolTrie = &super::BoolTrie { r1: [ - 0x03ff000000000000, 0x07fffffe87fffffe, 0x04a0040000000000, 0xff7fffffff7fffff, - 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, - 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0x0000501f0003ffc3, - 0xffffffffffffffff, 0xb8dfffffffffffff, 0xfffffffbffffd7c0, 0xffbfffffffffffff, - 0xffffffffffffffff, 0xffffffffffffffff, 0xfffffffffffffcfb, 0xffffffffffffffff, - 0xfffeffffffffffff, 0xffffffff027fffff, 0xbffffffffffe01ff, 0x000787ffffff00b6, - 0xffffffff07ff0000, 0xffffc3ffffffffff, 0xffffffffffffffff, 0x9ffffdff9fefffff, - 0xffffffffffff0000, 0xffffffffffffe7ff, 0x0003ffffffffffff, 0x243fffffffffffff + 0x03ff000000000000, + 0x07fffffe87fffffe, + 0x04a0040000000000, + 0xff7fffffff7fffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0x0000501f0003ffc3, + 0xffffffffffffffff, + 0xb8dfffffffffffff, + 0xfffffffbffffd7c0, + 0xffbfffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xfffffffffffffcfb, + 0xffffffffffffffff, + 0xfffeffffffffffff, + 0xffffffff027fffff, + 0xbffffffffffe01ff, + 0x000787ffffff00b6, + 0xffffffff07ff0000, + 0xffffc3ffffffffff, + 0xffffffffffffffff, + 0x9ffffdff9fefffff, + 0xffffffffffff0000, + 0xffffffffffffe7ff, + 0x0003ffffffffffff, + 0x243fffffffffffff, ], r2: [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, @@ -901,11 +1656,11 @@ pub mod derived_property { 71, 72, 73, 74, 75, 76, 77, 78, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 79, 80, 4, 81, 82, 83, 84, 85, 60, 60, 60, 60, 60, 60, 60, 60, 86, - 42, 87, 88, 89, 4, 90, 91, 60, 60, 60, 60, 60, 60, 60, 60, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 42, 87, 88, 89, 4, 90, 91, 60, 60, 60, 60, 60, 60, 60, 60, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 52, 60, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 52, 60, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, @@ -916,58 +1671,155 @@ pub mod derived_property { 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 92, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 93, 94, 4, 4, 4, 4, 95, 96, 4, 97, 98, 4, 99, 100, 101, 62, 4, 102, 103, - 104, 4, 105, 106, 107, 4, 108, 109, 110, 4, 111, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 92, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 
4, 93, 94, 4, 4, 4, 4, 95, 96, 4, 97, 98, 4, 99, 100, 101, 62, 4, + 102, 103, 104, 4, 105, 106, 107, 4, 108, 109, 110, 4, 111, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 112, 113, 60, 60, 60, 60, 60, 60, 60, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 112, 113, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, - 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 4, 4, 4, 4, 4, 103, 4, 114, - 115, 116, 97, 117, 4, 118, 4, 4, 119, 120, 121, 122, 123, 124, 4, 125, 126, 127, 128, - 129 + 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 4, 4, 4, 4, 4, + 103, 4, 114, 115, 116, 97, 117, 4, 118, 4, 4, 119, 120, 121, 122, 123, 124, 4, 125, + 126, 127, 128, 129, ], r3: &[ - 0x00003fffffffffff, 0x000007ff0fffffff, 0x3fdfffff00000000, 0xfffffffbfff80000, - 0xffffffffffffffff, 0xfffeffcfffffffff, 0xf3c5fdfffff99fef, 0x5003ffcfb080799f, - 0xd36dfdfffff987ee, 0x003fffc05e023987, 0xf3edfdfffffbbfee, 0xfe00ffcf00013bbf, - 0xf3edfdfffff99fee, 0x0002ffcfb0c0399f, 0xc3ffc718d63dc7ec, 0x0000ffc000813dc7, - 0xe3fffdfffffddfff, 0x0000ffcf07603ddf, 0xf3effdfffffddfef, 0x0006ffcf40603ddf, - 0xfffffffffffddfef, 0xfc00ffcf80f07ddf, 0x2ffbfffffc7fffec, 0x000cffc0ff5f847f, - 0x07fffffffffffffe, 0x0000000003ff7fff, 0x3bffecaefef02596, 0x00000000f3ff3f5f, - 0xc2a003ff03000001, 0xfffe1ffffffffeff, 0x1ffffffffeffffdf, 0x0000000000000040, - 0xffffffffffff03ff, 0xffffffff3fffffff, 0xf7ffffffffff20bf, 0xffffffff3d7f3dff, - 0x7f3dffffffff3dff, 0xffffffffff7fff3d, 0xffffffffff3dffff, 0x0003fe00e7ffffff, - 0xffffffff0000ffff, 0x3f3fffffffffffff, 0xfffffffffffffffe, 0xffff9fffffffffff, - 0xffffffff07fffffe, 0x01ffc7ffffffffff, 0x001fffff001fdfff, 0x000ddfff000fffff, - 0x000003ff308fffff, 0xffffffff03ff3800, 0x01ffffffffffffff, 0xffff07ffffffffff, - 0x003fffffffffffff, 0x0fff0fff7fffffff, 0x001f3fffffffffc0, 0xffff0fffffffffff, - 0x0000000007ff03ff, 0xffffffff0fffffff, 0x9fffffff7fffffff, 0x3fff008003ff03ff, - 0x0000000000000000, 0x000ff80003ff0fff, 0x000fffffffffffff, 0x00ffffffffffffff, - 0x3fffffffffffe3ff, 0xe7ffffffffff01ff, 0x03fffffffff70000, 0xfbffffffffffffff, - 0xffffffff3f3fffff, 0x3fffffffaaff3f3f, 0x5fdfffffffffffff, 0x1fdc1fff0fcf1fdc, - 0x8000000000000000, 0x8002000000100001, 0x000000001fff0000, 0x0001ffe21fff0000, - 0xf3fffd503f2ffc84, 0xffffffff000043e0, 0x00000000000001ff, 0xffff7fffffffffff, - 0xffffffff7fffffff, 0x000ff81fffffffff, 0xffff20bfffffffff, 0x800080ffffffffff, - 0x7f7f7f7f007fffff, 0xffffffff7f7f7f7f, 0x1f3efffe000000e0, 0xfffffffee67fffff, - 0xf7ffffffffffffff, 0xfffeffffffffffe0, 0x07ffffff00007fff, 0xffff000000000000, - 0x0000ffffffffffff, 0x0000000000001fff, 0x3fffffffffff0000, 
0x00000fffffff1fff, - 0xbff0ffffffffffff, 0x0003ffffffffffff, 0xfffffffcff800000, 0x03fffffffffff9ff, - 0xff80000000000000, 0x000000ffffffffff, 0xe8ffffff03ff003f, 0xffff3fffffffffff, - 0x1fffffff000fffff, 0x7fffffff03ff8001, 0x007fffffffffffff, 0xfc7fffff03ff3fff, - 0x007cffff38000007, 0xffff7f7f007e7e7e, 0xffff003ff7ffffff, 0x03ff37ffffffffff, - 0xffff000fffffffff, 0x0ffffffffffff87f, 0x0000000003ffffff, 0x5f7ffdffe0f8007f, - 0xffffffffffffffdb, 0xfffffffffff80000, 0xfffffff03fffffff, 0x3fffffffffffffff, - 0xffffffffffff0000, 0xfffffffffffcffff, 0x03ff0000000000ff, 0x0018ffff0000ffff, - 0xaa8a00000000e000, 0x1fffffffffffffff, 0x87fffffe03ff0000, 0xffffffc007fffffe, - 0x7fffffffffffffff, 0x000000001cfcfcfc + 0x00003fffffffffff, + 0x000007ff0fffffff, + 0x3fdfffff00000000, + 0xfffffffbfff80000, + 0xffffffffffffffff, + 0xfffeffcfffffffff, + 0xf3c5fdfffff99fef, + 0x5003ffcfb080799f, + 0xd36dfdfffff987ee, + 0x003fffc05e023987, + 0xf3edfdfffffbbfee, + 0xfe00ffcf00013bbf, + 0xf3edfdfffff99fee, + 0x0002ffcfb0c0399f, + 0xc3ffc718d63dc7ec, + 0x0000ffc000813dc7, + 0xe3fffdfffffddfff, + 0x0000ffcf07603ddf, + 0xf3effdfffffddfef, + 0x0006ffcf40603ddf, + 0xfffffffffffddfef, + 0xfc00ffcf80f07ddf, + 0x2ffbfffffc7fffec, + 0x000cffc0ff5f847f, + 0x07fffffffffffffe, + 0x0000000003ff7fff, + 0x3bffecaefef02596, + 0x00000000f3ff3f5f, + 0xc2a003ff03000001, + 0xfffe1ffffffffeff, + 0x1ffffffffeffffdf, + 0x0000000000000040, + 0xffffffffffff03ff, + 0xffffffff3fffffff, + 0xf7ffffffffff20bf, + 0xffffffff3d7f3dff, + 0x7f3dffffffff3dff, + 0xffffffffff7fff3d, + 0xffffffffff3dffff, + 0x0003fe00e7ffffff, + 0xffffffff0000ffff, + 0x3f3fffffffffffff, + 0xfffffffffffffffe, + 0xffff9fffffffffff, + 0xffffffff07fffffe, + 0x01ffc7ffffffffff, + 0x001fffff001fdfff, + 0x000ddfff000fffff, + 0x000003ff308fffff, + 0xffffffff03ff3800, + 0x01ffffffffffffff, + 0xffff07ffffffffff, + 0x003fffffffffffff, + 0x0fff0fff7fffffff, + 0x001f3fffffffffc0, + 0xffff0fffffffffff, + 0x0000000007ff03ff, + 0xffffffff0fffffff, + 0x9fffffff7fffffff, + 0x3fff008003ff03ff, + 0x0000000000000000, + 0x000ff80003ff0fff, + 0x000fffffffffffff, + 0x00ffffffffffffff, + 0x3fffffffffffe3ff, + 0xe7ffffffffff01ff, + 0x03fffffffff70000, + 0xfbffffffffffffff, + 0xffffffff3f3fffff, + 0x3fffffffaaff3f3f, + 0x5fdfffffffffffff, + 0x1fdc1fff0fcf1fdc, + 0x8000000000000000, + 0x8002000000100001, + 0x000000001fff0000, + 0x0001ffe21fff0000, + 0xf3fffd503f2ffc84, + 0xffffffff000043e0, + 0x00000000000001ff, + 0xffff7fffffffffff, + 0xffffffff7fffffff, + 0x000ff81fffffffff, + 0xffff20bfffffffff, + 0x800080ffffffffff, + 0x7f7f7f7f007fffff, + 0xffffffff7f7f7f7f, + 0x1f3efffe000000e0, + 0xfffffffee67fffff, + 0xf7ffffffffffffff, + 0xfffeffffffffffe0, + 0x07ffffff00007fff, + 0xffff000000000000, + 0x0000ffffffffffff, + 0x0000000000001fff, + 0x3fffffffffff0000, + 0x00000fffffff1fff, + 0xbff0ffffffffffff, + 0x0003ffffffffffff, + 0xfffffffcff800000, + 0x03fffffffffff9ff, + 0xff80000000000000, + 0x000000ffffffffff, + 0xe8ffffff03ff003f, + 0xffff3fffffffffff, + 0x1fffffff000fffff, + 0x7fffffff03ff8001, + 0x007fffffffffffff, + 0xfc7fffff03ff3fff, + 0x007cffff38000007, + 0xffff7f7f007e7e7e, + 0xffff003ff7ffffff, + 0x03ff37ffffffffff, + 0xffff000fffffffff, + 0x0ffffffffffff87f, + 0x0000000003ffffff, + 0x5f7ffdffe0f8007f, + 0xffffffffffffffdb, + 0xfffffffffff80000, + 0xfffffff03fffffff, + 0x3fffffffffffffff, + 0xffffffffffff0000, + 0xfffffffffffcffff, + 0x03ff0000000000ff, + 0x0018ffff0000ffff, + 0xaa8a00000000e000, + 0x1fffffffffffffff, + 0x87fffffe03ff0000, + 0xffffffc007fffffe, + 0x7fffffffffffffff, 
+ 0x000000001cfcfcfc, ], r4: [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 5, 5, 9, 5, 10, 11, 5, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 12, 13, @@ -978,80 +1830,165 @@ pub mod derived_property { 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 17, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, - 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5 + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, ], r5: &[ 0, 1, 2, 3, 4, 5, 4, 6, 4, 4, 7, 8, 9, 10, 11, 12, 2, 2, 13, 14, 15, 16, 4, 4, 2, 2, 2, - 2, 17, 18, 4, 4, 19, 20, 21, 22, 23, 4, 24, 4, 25, 26, 27, 28, 29, 30, 31, 4, 2, 32, 33, - 33, 34, 4, 4, 4, 4, 4, 4, 4, 35, 36, 4, 4, 2, 37, 3, 38, 39, 40, 2, 41, 42, 4, 43, 44, - 45, 46, 4, 4, 2, 47, 2, 48, 4, 4, 49, 50, 2, 51, 52, 53, 54, 4, 4, 4, 3, 4, 55, 56, 4, - 4, 4, 4, 57, 58, 59, 60, 4, 4, 4, 4, 61, 62, 63, 4, 64, 65, 66, 4, 4, 4, 4, 67, 4, 4, 4, - 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 68, 4, 2, 69, 2, 2, 2, 70, 4, 4, 4, 4, 4, + 2, 17, 18, 4, 4, 19, 20, 21, 22, 23, 4, 24, 4, 25, 26, 27, 28, 29, 30, 31, 4, 2, 32, + 33, 33, 34, 4, 4, 4, 4, 4, 4, 4, 35, 36, 4, 4, 2, 37, 3, 38, 39, 40, 2, 41, 42, 4, 43, + 44, 45, 46, 4, 4, 2, 47, 2, 48, 4, 4, 49, 50, 2, 51, 52, 53, 54, 4, 4, 4, 3, 4, 55, 56, + 4, 4, 4, 4, 57, 58, 59, 60, 4, 4, 4, 4, 61, 62, 63, 4, 64, 65, 66, 4, 4, 4, 4, 67, 4, + 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 68, 4, 2, 69, 2, 2, 2, 70, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 69, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 69, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 71, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 71, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 2, 2, 2, 2, 2, 2, 2, 2, 60, 72, 4, 73, 17, 74, 75, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 4, - 4, 2, 76, 77, 78, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 60, 72, 4, 73, 17, 74, 75, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 2, 4, 4, 2, 76, 77, 78, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 79, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 33, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 21, 80, 2, 2, 2, 2, 2, - 81, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 2, 82, 83, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 84, 85, 4, 4, 86, 4, 4, 4, 4, 4, 4, 2, 87, 88, 89, 90, 91, 2, 2, 2, 2, 92, 93, 94, 95, - 96, 97, 4, 4, 4, 4, 
4, 4, 4, 4, 98, 99, 100, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 101, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 102, 2, 103, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 104, 105, 106, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 107, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 79, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 33, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 21, 80, 2, + 2, 2, 2, 2, 81, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 82, 83, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 84, 85, 4, 4, 86, 4, 4, 4, 4, 4, 4, 2, 87, 88, 89, 90, 91, 2, 2, 2, 2, + 92, 93, 94, 95, 96, 97, 4, 4, 4, 4, 4, 4, 4, 4, 98, 99, 100, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 101, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 102, 2, 103, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 104, 105, 106, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 107, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 5, 2, 2, 2, 11, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 5, + 2, 2, 2, 11, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 108, + 2, 2, 2, 2, 2, 2, 108, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 109, 4, 4, 4, 4, 4, + 2, 2, 109, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, + 2, 2, 110, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 2, 2, 2, 111, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 110, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 111, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4 + 4, 4, 4, 4, 4, ], r6: &[ - 0xb7ffff7fffffefff, 0x000000003fff3fff, 0xffffffffffffffff, 0x07ffffffffffffff, - 0x0000000000000000, 0x001fffffffffffff, 0x2000000000000000, 0xffffffff1fffffff, - 0x000000010001ffff, 0xffffe000ffffffff, 0x07ffffffffff07ff, 0xffffffff3fffffff, - 0x00000000003eff0f, 0xffff03ff3fffffff, 0x0fffffffff0fffff, 0xffff00ffffffffff, - 0x0000000fffffffff, 0x007fffffffffffff, 0x000000ff003fffff, 0x91bffffffffffd3f, - 0x007fffff003fffff, 0x000000007fffffff, 0x0037ffff00000000, 0x03ffffff003fffff, - 0xc0ffffffffffffff, 0x873ffffffeeff06f, 
0x1fffffff00000000, 0x000000001fffffff, - 0x0000007ffffffeff, 0x003fffffffffffff, 0x0007ffff003fffff, 0x000000000003ffff, - 0x00000000000001ff, 0x0007ffffffffffff, 0x03ff00ffffffffff, 0xffff00801fffffff, - 0x000000000001ffff, 0x8000ffc00000007f, 0x03ff01ffffff0000, 0xffdfffffffffffff, - 0x004fffffffff0070, 0x0000000017ff1e1f, 0x40fffffffffbffff, 0xffff01ffbfffbd7f, - 0x03ff07ffffffffff, 0xfbedfdfffff99fef, 0x001f1fcfe081399f, 0x0000000043ff07ff, - 0x0000000003ff00bf, 0xff3fffffffffffff, 0x000000003f000001, 0x0000000003ff0011, - 0x00ffffffffffffff, 0x00000000000003ff, 0x03ff0fffe7ffffff, 0xffffffff00000000, - 0x800003ffffffffff, 0x7fffffffffffffff, 0xffffffffffff0080, 0x0000000023ffffcf, - 0x01ffffffffffffff, 0xff7ffffffffffdff, 0xfffc000003ff0001, 0x007ffefffffcffff, - 0xb47ffffffffffb7f, 0xfffffdbf03ff00ff, 0x000003ff01fb7fff, 0x007fffff00000000, - 0x0000000003ffffff, 0x00007fffffffffff, 0x000000000000000f, 0x000000000000007f, - 0x000003ff7fffffff, 0x001f3fffffff0000, 0xe0fffff803ff000f, 0x000000000000ffff, - 0x7fffffffffff001f, 0x00000000ffff8000, 0x0000000300000000, 0x0003ffffffffffff, - 0xffff000000000000, 0x0fffffffffffffff, 0x1fff07ffffffffff, 0x0000000063ff01ff, - 0xf807e3e000000000, 0x00003c0000000fe7, 0x000000000000001c, 0xffffffffffdfffff, - 0xebffde64dfffffff, 0xffffffffffffffef, 0x7bffffffdfdfe7bf, 0xfffffffffffdfc5f, - 0xffffff3fffffffff, 0xf7fffffff7fffffd, 0xffdfffffffdfffff, 0xffff7fffffff7fff, - 0xfffffdfffffffdff, 0xffffffffffffcff7, 0xf87fffffffffffff, 0x00201fffffffffff, - 0x0000fffef8000010, 0x000007dbf9ffff7f, 0x00000000007f001f, 0x0000000003ff07ff, - 0x0af7fe96ffffffef, 0x5ef7f796aa96ea84, 0x0ffffbee0ffffbff, 0x00000000007fffff, - 0xffff0003ffffffff, 0x00000001ffffffff, 0x000000003fffffff, 0x0000ffffffffffff + 0xb7ffff7fffffefff, + 0x000000003fff3fff, + 0xffffffffffffffff, + 0x07ffffffffffffff, + 0x0000000000000000, + 0x001fffffffffffff, + 0x2000000000000000, + 0xffffffff1fffffff, + 0x000000010001ffff, + 0xffffe000ffffffff, + 0x07ffffffffff07ff, + 0xffffffff3fffffff, + 0x00000000003eff0f, + 0xffff03ff3fffffff, + 0x0fffffffff0fffff, + 0xffff00ffffffffff, + 0x0000000fffffffff, + 0x007fffffffffffff, + 0x000000ff003fffff, + 0x91bffffffffffd3f, + 0x007fffff003fffff, + 0x000000007fffffff, + 0x0037ffff00000000, + 0x03ffffff003fffff, + 0xc0ffffffffffffff, + 0x873ffffffeeff06f, + 0x1fffffff00000000, + 0x000000001fffffff, + 0x0000007ffffffeff, + 0x003fffffffffffff, + 0x0007ffff003fffff, + 0x000000000003ffff, + 0x00000000000001ff, + 0x0007ffffffffffff, + 0x03ff00ffffffffff, + 0xffff00801fffffff, + 0x000000000001ffff, + 0x8000ffc00000007f, + 0x03ff01ffffff0000, + 0xffdfffffffffffff, + 0x004fffffffff0070, + 0x0000000017ff1e1f, + 0x40fffffffffbffff, + 0xffff01ffbfffbd7f, + 0x03ff07ffffffffff, + 0xfbedfdfffff99fef, + 0x001f1fcfe081399f, + 0x0000000043ff07ff, + 0x0000000003ff00bf, + 0xff3fffffffffffff, + 0x000000003f000001, + 0x0000000003ff0011, + 0x00ffffffffffffff, + 0x00000000000003ff, + 0x03ff0fffe7ffffff, + 0xffffffff00000000, + 0x800003ffffffffff, + 0x7fffffffffffffff, + 0xffffffffffff0080, + 0x0000000023ffffcf, + 0x01ffffffffffffff, + 0xff7ffffffffffdff, + 0xfffc000003ff0001, + 0x007ffefffffcffff, + 0xb47ffffffffffb7f, + 0xfffffdbf03ff00ff, + 0x000003ff01fb7fff, + 0x007fffff00000000, + 0x0000000003ffffff, + 0x00007fffffffffff, + 0x000000000000000f, + 0x000000000000007f, + 0x000003ff7fffffff, + 0x001f3fffffff0000, + 0xe0fffff803ff000f, + 0x000000000000ffff, + 0x7fffffffffff001f, + 0x00000000ffff8000, + 0x0000000300000000, + 0x0003ffffffffffff, + 0xffff000000000000, + 
0x0fffffffffffffff, + 0x1fff07ffffffffff, + 0x0000000063ff01ff, + 0xf807e3e000000000, + 0x00003c0000000fe7, + 0x000000000000001c, + 0xffffffffffdfffff, + 0xebffde64dfffffff, + 0xffffffffffffffef, + 0x7bffffffdfdfe7bf, + 0xfffffffffffdfc5f, + 0xffffff3fffffffff, + 0xf7fffffff7fffffd, + 0xffdfffffffdfffff, + 0xffff7fffffff7fff, + 0xfffffdfffffffdff, + 0xffffffffffffcff7, + 0xf87fffffffffffff, + 0x00201fffffffffff, + 0x0000fffef8000010, + 0x000007dbf9ffff7f, + 0x00000000007f001f, + 0x0000000003ff07ff, + 0x0af7fe96ffffffef, + 0x5ef7f796aa96ea84, + 0x0ffffbee0ffffbff, + 0x00000000007fffff, + 0xffff0003ffffffff, + 0x00000001ffffffff, + 0x000000003fffffff, + 0x0000ffffffffffff, ], }; @@ -1061,28 +1998,52 @@ pub mod derived_property { pub const XID_Start_table: &super::BoolTrie = &super::BoolTrie { r1: [ - 0x0000000000000000, 0x07fffffe07fffffe, 0x0420040000000000, 0xff7fffffff7fffff, - 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, - 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0x0000501f0003ffc3, - 0x0000000000000000, 0xb8df000000000000, 0xfffffffbffffd740, 0xffbfffffffffffff, - 0xffffffffffffffff, 0xffffffffffffffff, 0xfffffffffffffc03, 0xffffffffffffffff, - 0xfffeffffffffffff, 0xffffffff027fffff, 0x00000000000001ff, 0x000787ffffff0000, - 0xffffffff00000000, 0xfffec000000007ff, 0xffffffffffffffff, 0x9c00c060002fffff, - 0x0000fffffffd0000, 0xffffffffffffe000, 0x0002003fffffffff, 0x043007fffffffc00 + 0x0000000000000000, + 0x07fffffe07fffffe, + 0x0420040000000000, + 0xff7fffffff7fffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0x0000501f0003ffc3, + 0x0000000000000000, + 0xb8df000000000000, + 0xfffffffbffffd740, + 0xffbfffffffffffff, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0xfffffffffffffc03, + 0xffffffffffffffff, + 0xfffeffffffffffff, + 0xffffffff027fffff, + 0x00000000000001ff, + 0x000787ffffff0000, + 0xffffffff00000000, + 0xfffec000000007ff, + 0xffffffffffffffff, + 0x9c00c060002fffff, + 0x0000fffffffd0000, + 0xffffffffffffe000, + 0x0002003fffffffff, + 0x043007fffffffc00, ], r2: [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 23, 25, 26, 27, 28, 29, 3, 30, 31, 32, 33, 34, 34, 34, 34, 34, 35, 36, 37, 38, 39, 40, 41, 42, 34, 34, 34, 34, 34, 34, 34, 34, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 3, 61, 62, 63, 64, 65, 66, 67, 68, 34, 34, 34, 3, 34, 34, - 34, 34, 69, 70, 71, 72, 3, 73, 74, 3, 75, 76, 77, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 78, - 79, 34, 80, 81, 82, 83, 84, 3, 3, 3, 3, 3, 3, 3, 3, 85, 42, 86, 87, 88, 34, 89, 90, 3, - 3, 3, 3, 3, 3, 3, 3, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 69, 70, 71, 72, 3, 73, 74, 3, 75, 76, 77, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 78, 79, 34, 80, 81, 82, 83, 84, 3, 3, 3, 3, 3, 3, 3, 3, 85, 42, 86, 87, 88, 34, 89, 90, + 3, 3, 3, 3, 3, 3, 3, 3, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, - 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 
34, 34, 34, 34, 34, 34, 34, 53, 3, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 53, 3, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, @@ -1097,10 +2058,10 @@ pub mod derived_property { 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, - 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 91, 34, 34, 34, - 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 92, 93, 34, 34, 34, 34, 94, - 95, 96, 91, 97, 34, 98, 99, 100, 48, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, - 111, 112, 34, 113, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 91, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 92, 93, 34, 34, 34, 34, + 94, 95, 96, 91, 97, 34, 98, 99, 100, 48, 101, 102, 103, 104, 105, 106, 107, 108, 109, + 110, 111, 112, 34, 113, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, @@ -1108,49 +2069,148 @@ pub mod derived_property { 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, - 34, 34, 34, 114, 115, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 34, 34, 34, 34, 114, 115, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 34, 34, 34, 34, 34, - 116, 34, 117, 118, 119, 120, 121, 34, 122, 34, 34, 123, 124, 125, 126, 3, 127, 34, 128, - 129, 130, 131, 132 + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 34, 34, 34, + 34, 34, 116, 34, 117, 118, 119, 120, 121, 34, 122, 34, 34, 123, 124, 125, 126, 3, 127, + 34, 128, 129, 130, 131, 132, ], r3: &[ - 0x00000110043fffff, 0x000007ff01ffffff, 0x3fdfffff00000000, 0x0000000000000000, - 0x23fffffffffffff0, 0xfffe0003ff010000, 0x23c5fdfffff99fe1, 0x10030003b0004000, - 0x036dfdfffff987e0, 0x001c00005e000000, 0x23edfdfffffbbfe0, 0x0200000300010000, - 0x23edfdfffff99fe0, 0x00020003b0000000, 0x03ffc718d63dc7e8, 0x0000000000010000, - 0x23fffdfffffddfe0, 0x0000000307000000, 0x23effdfffffddfe1, 0x0006000340000000, - 0x27fffffffffddfe0, 0xfc00000380704000, 0x2ffbfffffc7fffe0, 0x000000000000007f, - 0x0005fffffffffffe, 0x2005ecaefef02596, 0x00000000f000005f, 0x0000000000000001, - 0x00001ffffffffeff, 0x0000000000001f00, 0x800007ffffffffff, 0xffe1c0623c3f0000, - 0xffffffff00004003, 
0xf7ffffffffff20bf, 0xffffffffffffffff, 0xffffffff3d7f3dff, - 0x7f3dffffffff3dff, 0xffffffffff7fff3d, 0xffffffffff3dffff, 0x0000000007ffffff, - 0xffffffff0000ffff, 0x3f3fffffffffffff, 0xfffffffffffffffe, 0xffff9fffffffffff, - 0xffffffff07fffffe, 0x01ffc7ffffffffff, 0x0003ffff0003dfff, 0x0001dfff0003ffff, - 0x000fffffffffffff, 0x0000000010800000, 0xffffffff00000000, 0x01ffffffffffffff, - 0xffff05ffffffffff, 0x003fffffffffffff, 0x000000007fffffff, 0x001f3fffffff0000, - 0xffff0fffffffffff, 0x00000000000003ff, 0xffffffff007fffff, 0x00000000001fffff, - 0x0000008000000000, 0x000fffffffffffe0, 0x0000000000000fe0, 0xfc00c001fffffff8, - 0x0000003fffffffff, 0x0000000fffffffff, 0x3ffffffffc00e000, 0xe7ffffffffff01ff, - 0x0063de0000000000, 0xffffffff3f3fffff, 0x3fffffffaaff3f3f, 0x5fdfffffffffffff, - 0x1fdc1fff0fcf1fdc, 0x8002000000000000, 0x000000001fff0000, 0xf3fffd503f2ffc84, - 0xffffffff000043e0, 0x00000000000001ff, 0xffff7fffffffffff, 0xffffffff7fffffff, - 0x000c781fffffffff, 0xffff20bfffffffff, 0x000080ffffffffff, 0x7f7f7f7f007fffff, - 0x000000007f7f7f7f, 0x1f3e03fe000000e0, 0xfffffffee07fffff, 0xf7ffffffffffffff, - 0xfffeffffffffffe0, 0x07ffffff00007fff, 0xffff000000000000, 0x0000ffffffffffff, - 0x0000000000001fff, 0x3fffffffffff0000, 0x00000c00ffff1fff, 0x80007fffffffffff, - 0xffffffff3fffffff, 0xfffffffcff800000, 0x03fffffffffff9ff, 0xff80000000000000, - 0x00000007fffff7bb, 0x000ffffffffffffc, 0x68fc000000000000, 0xffff003ffffffc00, - 0x1fffffff0000007f, 0x0007fffffffffff0, 0x7c00ffdf00008000, 0x000001ffffffffff, - 0xc47fffff00000ff7, 0x3e62ffffffffffff, 0x001c07ff38000005, 0xffff7f7f007e7e7e, - 0xffff003ff7ffffff, 0x00000007ffffffff, 0xffff000fffffffff, 0x0ffffffffffff87f, - 0xffff3fffffffffff, 0x0000000003ffffff, 0x5f7ffdffa0f8007f, 0xffffffffffffffdb, - 0x0003ffffffffffff, 0xfffffffffff80000, 0xfffffff03fffffff, 0x3fffffffffffffff, - 0xffffffffffff0000, 0xfffffffffffcffff, 0x03ff0000000000ff, 0xaa8a000000000000, - 0x1fffffffffffffff, 0x07fffffe00000000, 0xffffffc007fffffe, 0x7fffffff3fffffff, - 0x000000001cfcfcfc + 0x00000110043fffff, + 0x000007ff01ffffff, + 0x3fdfffff00000000, + 0x0000000000000000, + 0x23fffffffffffff0, + 0xfffe0003ff010000, + 0x23c5fdfffff99fe1, + 0x10030003b0004000, + 0x036dfdfffff987e0, + 0x001c00005e000000, + 0x23edfdfffffbbfe0, + 0x0200000300010000, + 0x23edfdfffff99fe0, + 0x00020003b0000000, + 0x03ffc718d63dc7e8, + 0x0000000000010000, + 0x23fffdfffffddfe0, + 0x0000000307000000, + 0x23effdfffffddfe1, + 0x0006000340000000, + 0x27fffffffffddfe0, + 0xfc00000380704000, + 0x2ffbfffffc7fffe0, + 0x000000000000007f, + 0x0005fffffffffffe, + 0x2005ecaefef02596, + 0x00000000f000005f, + 0x0000000000000001, + 0x00001ffffffffeff, + 0x0000000000001f00, + 0x800007ffffffffff, + 0xffe1c0623c3f0000, + 0xffffffff00004003, + 0xf7ffffffffff20bf, + 0xffffffffffffffff, + 0xffffffff3d7f3dff, + 0x7f3dffffffff3dff, + 0xffffffffff7fff3d, + 0xffffffffff3dffff, + 0x0000000007ffffff, + 0xffffffff0000ffff, + 0x3f3fffffffffffff, + 0xfffffffffffffffe, + 0xffff9fffffffffff, + 0xffffffff07fffffe, + 0x01ffc7ffffffffff, + 0x0003ffff0003dfff, + 0x0001dfff0003ffff, + 0x000fffffffffffff, + 0x0000000010800000, + 0xffffffff00000000, + 0x01ffffffffffffff, + 0xffff05ffffffffff, + 0x003fffffffffffff, + 0x000000007fffffff, + 0x001f3fffffff0000, + 0xffff0fffffffffff, + 0x00000000000003ff, + 0xffffffff007fffff, + 0x00000000001fffff, + 0x0000008000000000, + 0x000fffffffffffe0, + 0x0000000000000fe0, + 0xfc00c001fffffff8, + 0x0000003fffffffff, + 0x0000000fffffffff, + 0x3ffffffffc00e000, + 0xe7ffffffffff01ff, + 
0x0063de0000000000, + 0xffffffff3f3fffff, + 0x3fffffffaaff3f3f, + 0x5fdfffffffffffff, + 0x1fdc1fff0fcf1fdc, + 0x8002000000000000, + 0x000000001fff0000, + 0xf3fffd503f2ffc84, + 0xffffffff000043e0, + 0x00000000000001ff, + 0xffff7fffffffffff, + 0xffffffff7fffffff, + 0x000c781fffffffff, + 0xffff20bfffffffff, + 0x000080ffffffffff, + 0x7f7f7f7f007fffff, + 0x000000007f7f7f7f, + 0x1f3e03fe000000e0, + 0xfffffffee07fffff, + 0xf7ffffffffffffff, + 0xfffeffffffffffe0, + 0x07ffffff00007fff, + 0xffff000000000000, + 0x0000ffffffffffff, + 0x0000000000001fff, + 0x3fffffffffff0000, + 0x00000c00ffff1fff, + 0x80007fffffffffff, + 0xffffffff3fffffff, + 0xfffffffcff800000, + 0x03fffffffffff9ff, + 0xff80000000000000, + 0x00000007fffff7bb, + 0x000ffffffffffffc, + 0x68fc000000000000, + 0xffff003ffffffc00, + 0x1fffffff0000007f, + 0x0007fffffffffff0, + 0x7c00ffdf00008000, + 0x000001ffffffffff, + 0xc47fffff00000ff7, + 0x3e62ffffffffffff, + 0x001c07ff38000005, + 0xffff7f7f007e7e7e, + 0xffff003ff7ffffff, + 0x00000007ffffffff, + 0xffff000fffffffff, + 0x0ffffffffffff87f, + 0xffff3fffffffffff, + 0x0000000003ffffff, + 0x5f7ffdffa0f8007f, + 0xffffffffffffffdb, + 0x0003ffffffffffff, + 0xfffffffffff80000, + 0xfffffff03fffffff, + 0x3fffffffffffffff, + 0xffffffffffff0000, + 0xfffffffffffcffff, + 0x03ff0000000000ff, + 0xaa8a000000000000, + 0x1fffffffffffffff, + 0x07fffffe00000000, + 0xffffffc007fffffe, + 0x7fffffff3fffffff, + 0x000000001cfcfcfc, ], r4: [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 5, 5, 9, 5, 10, 11, 5, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 12, 13, @@ -1161,75 +2221,150 @@ pub mod derived_property { 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, - 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5 + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, ], r5: &[ 0, 1, 2, 3, 4, 5, 4, 4, 4, 4, 6, 7, 8, 9, 10, 11, 2, 2, 12, 13, 14, 15, 4, 4, 2, 2, 2, - 2, 16, 17, 4, 4, 18, 19, 20, 21, 22, 4, 23, 4, 24, 25, 26, 27, 28, 29, 30, 4, 2, 31, 32, - 32, 15, 4, 4, 4, 4, 4, 4, 4, 33, 34, 4, 4, 35, 4, 36, 37, 38, 39, 40, 41, 42, 4, 43, 20, - 44, 45, 4, 4, 5, 46, 47, 48, 4, 4, 49, 50, 47, 51, 52, 4, 53, 4, 4, 4, 54, 4, 55, 56, 4, - 4, 4, 4, 57, 58, 59, 60, 4, 4, 4, 4, 61, 62, 63, 4, 64, 65, 66, 4, 4, 4, 4, 67, 4, 4, 4, - 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 68, 4, 2, 49, 2, 2, 2, 69, 4, 4, 4, 4, 4, + 2, 16, 17, 4, 4, 18, 19, 20, 21, 22, 4, 23, 4, 24, 25, 26, 27, 28, 29, 30, 4, 2, 31, + 32, 32, 15, 4, 4, 4, 4, 4, 4, 4, 33, 34, 4, 4, 35, 4, 36, 37, 38, 39, 40, 41, 42, 4, + 43, 20, 44, 45, 4, 4, 5, 46, 47, 48, 4, 4, 49, 50, 47, 51, 52, 4, 53, 4, 4, 4, 54, 4, + 55, 56, 4, 4, 4, 4, 57, 58, 59, 60, 4, 4, 4, 4, 61, 62, 63, 4, 64, 65, 66, 4, 4, 4, 4, + 67, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 68, 4, 2, 49, 2, 2, 2, 69, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 49, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 49, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 70, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 
2, 2, 2, 70, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 2, 2, 2, 2, 2, 2, 2, 2, 60, 20, 4, 71, 47, 72, 63, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 4, - 4, 2, 73, 74, 75, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 60, 20, 4, 71, 47, 72, 63, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 2, 4, 4, 2, 73, 74, 75, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 76, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 32, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 20, 77, 2, 2, 2, 2, 2, - 78, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 2, 79, 80, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 81, 82, 83, 84, 85, 2, 2, 2, 2, 86, 87, 88, 89, 90, - 91, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 76, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 32, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 20, 77, + 2, 2, 2, 2, 2, 78, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 79, 80, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 81, 82, 83, 84, 85, 2, 2, 2, 2, + 86, 87, 88, 89, 90, 91, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 92, 2, 69, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 93, 94, 95, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 96, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 92, 2, 69, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 93, 94, 95, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 96, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 5, 2, 2, 2, 10, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 5, 2, 2, 2, 10, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 97, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 98, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 2, 2, 97, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 98, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 99, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4 + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 99, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, ], r6: &[ - 0xb7ffff7fffffefff, 0x000000003fff3fff, 0xffffffffffffffff, 0x07ffffffffffffff, - 0x0000000000000000, 0x001fffffffffffff, 0xffffffff1fffffff, 0x000000000001ffff, - 0xffffe000ffffffff, 0x003fffffffff07ff, 0xffffffff3fffffff, 0x00000000003eff0f, - 0xffff00003fffffff, 0x0fffffffff0fffff, 0xffff00ffffffffff, 0x0000000fffffffff, - 0x007fffffffffffff, 0x000000ff003fffff, 0x91bffffffffffd3f, 0x007fffff003fffff, - 0x000000007fffffff, 0x0037ffff00000000, 0x03ffffff003fffff, 0xc0ffffffffffffff, - 0x003ffffffeef0001, 0x1fffffff00000000, 0x000000001fffffff, 0x0000001ffffffeff, - 0x003fffffffffffff, 0x0007ffff003fffff, 0x000000000003ffff, 0x00000000000001ff, - 0x0007ffffffffffff, 0xffff00801fffffff, 0x000000000000003f, 0x00fffffffffffff8, - 0x0000fffffffffff8, 0x000001ffffff0000, 0x0000007ffffffff8, 0x0047ffffffff0010, - 0x0007fffffffffff8, 0x000000001400001e, 0x00000ffffffbffff, 0xffff01ffbfffbd7f, - 0x23edfdfffff99fe0, 0x00000003e0010000, 0x0000000000000780, 0x0000ffffffffffff, - 0x00000000000000b0, 0x00007fffffffffff, 0x000000000f000000, 0x0000000000000010, - 0x000007ffffffffff, 0x0000000007ffffff, 0x00000fffffffffff, 0xffffffff00000000, - 0x80000000ffffffff, 0x0407fffffffff801, 0xfffffffff0010000, 0x00000000200003cf, - 0x01ffffffffffffff, 0x00007ffffffffdff, 0xfffc000000000001, 0x000000000000ffff, - 0x0001fffffffffb7f, 0xfffffdbf00000040, 0x00000000010003ff, 0x0007ffff00000000, - 0x0000000003ffffff, 0x000000000000000f, 0x000000000000007f, 0x00003fffffff0000, - 0xe0fffff80000000f, 0x000000000001001f, 0x00000000fff80000, 0x0000000300000000, - 0x0003ffffffffffff, 0xffff000000000000, 0x0fffffffffffffff, 0x1fff07ffffffffff, - 0x0000000003ff01ff, 0xffffffffffdfffff, 0xebffde64dfffffff, 0xffffffffffffffef, - 0x7bffffffdfdfe7bf, 0xfffffffffffdfc5f, 0xffffff3fffffffff, 0xf7fffffff7fffffd, - 0xffdfffffffdfffff, 0xffff7fffffff7fff, 0xfffffdfffffffdff, 0x0000000000000ff7, - 0x000000000000001f, 0x0af7fe96ffffffef, 0x5ef7f796aa96ea84, 0x0ffffbee0ffffbff, - 0x00000000007fffff, 0xffff0003ffffffff, 0x00000001ffffffff, 0x000000003fffffff + 0xb7ffff7fffffefff, + 0x000000003fff3fff, + 0xffffffffffffffff, + 0x07ffffffffffffff, + 0x0000000000000000, + 0x001fffffffffffff, + 0xffffffff1fffffff, + 0x000000000001ffff, + 0xffffe000ffffffff, + 0x003fffffffff07ff, + 0xffffffff3fffffff, + 0x00000000003eff0f, + 0xffff00003fffffff, + 0x0fffffffff0fffff, + 0xffff00ffffffffff, + 0x0000000fffffffff, + 0x007fffffffffffff, + 0x000000ff003fffff, + 0x91bffffffffffd3f, + 0x007fffff003fffff, + 0x000000007fffffff, + 0x0037ffff00000000, + 0x03ffffff003fffff, + 0xc0ffffffffffffff, + 0x003ffffffeef0001, + 0x1fffffff00000000, + 0x000000001fffffff, + 0x0000001ffffffeff, + 0x003fffffffffffff, + 0x0007ffff003fffff, + 0x000000000003ffff, + 0x00000000000001ff, + 0x0007ffffffffffff, + 0xffff00801fffffff, + 0x000000000000003f, + 0x00fffffffffffff8, + 0x0000fffffffffff8, + 0x000001ffffff0000, + 0x0000007ffffffff8, + 0x0047ffffffff0010, + 0x0007fffffffffff8, + 0x000000001400001e, + 0x00000ffffffbffff, + 0xffff01ffbfffbd7f, + 0x23edfdfffff99fe0, + 0x00000003e0010000, + 0x0000000000000780, + 0x0000ffffffffffff, + 0x00000000000000b0, + 0x00007fffffffffff, + 0x000000000f000000, + 0x0000000000000010, + 
0x000007ffffffffff, + 0x0000000007ffffff, + 0x00000fffffffffff, + 0xffffffff00000000, + 0x80000000ffffffff, + 0x0407fffffffff801, + 0xfffffffff0010000, + 0x00000000200003cf, + 0x01ffffffffffffff, + 0x00007ffffffffdff, + 0xfffc000000000001, + 0x000000000000ffff, + 0x0001fffffffffb7f, + 0xfffffdbf00000040, + 0x00000000010003ff, + 0x0007ffff00000000, + 0x0000000003ffffff, + 0x000000000000000f, + 0x000000000000007f, + 0x00003fffffff0000, + 0xe0fffff80000000f, + 0x000000000001001f, + 0x00000000fff80000, + 0x0000000300000000, + 0x0003ffffffffffff, + 0xffff000000000000, + 0x0fffffffffffffff, + 0x1fff07ffffffffff, + 0x0000000003ff01ff, + 0xffffffffffdfffff, + 0xebffde64dfffffff, + 0xffffffffffffffef, + 0x7bffffffdfdfe7bf, + 0xfffffffffffdfc5f, + 0xffffff3fffffffff, + 0xf7fffffff7fffffd, + 0xffdfffffffdfffff, + 0xffff7fffffff7fff, + 0xfffffdfffffffdff, + 0x0000000000000ff7, + 0x000000000000001f, + 0x0af7fe96ffffffef, + 0x5ef7f796aa96ea84, + 0x0ffffbee0ffffbff, + 0x00000000007fffff, + 0xffff0003ffffffff, + 0x00000001ffffffff, + 0x000000003fffffff, ], }; @@ -1246,10 +2381,13 @@ pub mod property { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, ], r2: &[ - 0x0000000100003e00, 0x0000000000000000, 0x0000000000000020, 0x000003000000c000 + 0x0000000100003e00, + 0x0000000000000000, + 0x0000000000000020, + 0x000003000000c000, ], }; @@ -1265,11 +2403,15 @@ pub mod property { 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 4, 5, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, ], r2: &[ - 0x0000000100003e00, 0x0000000000000000, 0x0000000100000020, 0x0000000000000001, - 0x00008300000007ff, 0x0000000080000000 + 0x0000000100003e00, + 0x0000000000000000, + 0x0000000100000020, + 0x0000000000000001, + 0x00008300000007ff, + 0x0000000080000000, ], }; @@ -1282,14 +2424,14 @@ pub mod property { pub mod conversions { pub fn to_lower(c: char) -> [char; 3] { match bsearch_case_table(c, to_lowercase_table) { - None => [c, '\0', '\0'], + None => [c, '\0', '\0'], Some(index) => to_lowercase_table[index].1, } } pub fn to_upper(c: char) -> [char; 3] { match bsearch_case_table(c, to_uppercase_table) { - None => [c, '\0', '\0'], + None => [c, '\0', '\0'], Some(index) => to_uppercase_table[index].1, } } @@ -1299,1292 +2441,2867 @@ pub mod conversions { } const to_lowercase_table: &[(char, [char; 3])] = &[ - ('\u{41}', ['\u{61}', '\0', '\0']), ('\u{42}', ['\u{62}', '\0', '\0']), ('\u{43}', - ['\u{63}', '\0', '\0']), ('\u{44}', ['\u{64}', '\0', '\0']), ('\u{45}', ['\u{65}', '\0', - '\0']), ('\u{46}', ['\u{66}', '\0', '\0']), ('\u{47}', ['\u{67}', '\0', '\0']), ('\u{48}', - ['\u{68}', '\0', '\0']), ('\u{49}', ['\u{69}', '\0', '\0']), ('\u{4a}', ['\u{6a}', '\0', - '\0']), ('\u{4b}', ['\u{6b}', '\0', '\0']), ('\u{4c}', ['\u{6c}', '\0', '\0']), ('\u{4d}', - ['\u{6d}', '\0', '\0']), ('\u{4e}', ['\u{6e}', '\0', '\0']), ('\u{4f}', ['\u{6f}', '\0', - '\0']), ('\u{50}', ['\u{70}', '\0', '\0']), ('\u{51}', ['\u{71}', '\0', '\0']), ('\u{52}', - ['\u{72}', '\0', '\0']), ('\u{53}', 
['\u{73}', '\0', '\0']), ('\u{54}', ['\u{74}', '\0', - '\0']), ('\u{55}', ['\u{75}', '\0', '\0']), ('\u{56}', ['\u{76}', '\0', '\0']), ('\u{57}', - ['\u{77}', '\0', '\0']), ('\u{58}', ['\u{78}', '\0', '\0']), ('\u{59}', ['\u{79}', '\0', - '\0']), ('\u{5a}', ['\u{7a}', '\0', '\0']), ('\u{c0}', ['\u{e0}', '\0', '\0']), ('\u{c1}', - ['\u{e1}', '\0', '\0']), ('\u{c2}', ['\u{e2}', '\0', '\0']), ('\u{c3}', ['\u{e3}', '\0', - '\0']), ('\u{c4}', ['\u{e4}', '\0', '\0']), ('\u{c5}', ['\u{e5}', '\0', '\0']), ('\u{c6}', - ['\u{e6}', '\0', '\0']), ('\u{c7}', ['\u{e7}', '\0', '\0']), ('\u{c8}', ['\u{e8}', '\0', - '\0']), ('\u{c9}', ['\u{e9}', '\0', '\0']), ('\u{ca}', ['\u{ea}', '\0', '\0']), ('\u{cb}', - ['\u{eb}', '\0', '\0']), ('\u{cc}', ['\u{ec}', '\0', '\0']), ('\u{cd}', ['\u{ed}', '\0', - '\0']), ('\u{ce}', ['\u{ee}', '\0', '\0']), ('\u{cf}', ['\u{ef}', '\0', '\0']), ('\u{d0}', - ['\u{f0}', '\0', '\0']), ('\u{d1}', ['\u{f1}', '\0', '\0']), ('\u{d2}', ['\u{f2}', '\0', - '\0']), ('\u{d3}', ['\u{f3}', '\0', '\0']), ('\u{d4}', ['\u{f4}', '\0', '\0']), ('\u{d5}', - ['\u{f5}', '\0', '\0']), ('\u{d6}', ['\u{f6}', '\0', '\0']), ('\u{d8}', ['\u{f8}', '\0', - '\0']), ('\u{d9}', ['\u{f9}', '\0', '\0']), ('\u{da}', ['\u{fa}', '\0', '\0']), ('\u{db}', - ['\u{fb}', '\0', '\0']), ('\u{dc}', ['\u{fc}', '\0', '\0']), ('\u{dd}', ['\u{fd}', '\0', - '\0']), ('\u{de}', ['\u{fe}', '\0', '\0']), ('\u{100}', ['\u{101}', '\0', '\0']), - ('\u{102}', ['\u{103}', '\0', '\0']), ('\u{104}', ['\u{105}', '\0', '\0']), ('\u{106}', - ['\u{107}', '\0', '\0']), ('\u{108}', ['\u{109}', '\0', '\0']), ('\u{10a}', ['\u{10b}', - '\0', '\0']), ('\u{10c}', ['\u{10d}', '\0', '\0']), ('\u{10e}', ['\u{10f}', '\0', '\0']), - ('\u{110}', ['\u{111}', '\0', '\0']), ('\u{112}', ['\u{113}', '\0', '\0']), ('\u{114}', - ['\u{115}', '\0', '\0']), ('\u{116}', ['\u{117}', '\0', '\0']), ('\u{118}', ['\u{119}', - '\0', '\0']), ('\u{11a}', ['\u{11b}', '\0', '\0']), ('\u{11c}', ['\u{11d}', '\0', '\0']), - ('\u{11e}', ['\u{11f}', '\0', '\0']), ('\u{120}', ['\u{121}', '\0', '\0']), ('\u{122}', - ['\u{123}', '\0', '\0']), ('\u{124}', ['\u{125}', '\0', '\0']), ('\u{126}', ['\u{127}', - '\0', '\0']), ('\u{128}', ['\u{129}', '\0', '\0']), ('\u{12a}', ['\u{12b}', '\0', '\0']), - ('\u{12c}', ['\u{12d}', '\0', '\0']), ('\u{12e}', ['\u{12f}', '\0', '\0']), ('\u{130}', - ['\u{69}', '\u{307}', '\0']), ('\u{132}', ['\u{133}', '\0', '\0']), ('\u{134}', ['\u{135}', - '\0', '\0']), ('\u{136}', ['\u{137}', '\0', '\0']), ('\u{139}', ['\u{13a}', '\0', '\0']), - ('\u{13b}', ['\u{13c}', '\0', '\0']), ('\u{13d}', ['\u{13e}', '\0', '\0']), ('\u{13f}', - ['\u{140}', '\0', '\0']), ('\u{141}', ['\u{142}', '\0', '\0']), ('\u{143}', ['\u{144}', - '\0', '\0']), ('\u{145}', ['\u{146}', '\0', '\0']), ('\u{147}', ['\u{148}', '\0', '\0']), - ('\u{14a}', ['\u{14b}', '\0', '\0']), ('\u{14c}', ['\u{14d}', '\0', '\0']), ('\u{14e}', - ['\u{14f}', '\0', '\0']), ('\u{150}', ['\u{151}', '\0', '\0']), ('\u{152}', ['\u{153}', - '\0', '\0']), ('\u{154}', ['\u{155}', '\0', '\0']), ('\u{156}', ['\u{157}', '\0', '\0']), - ('\u{158}', ['\u{159}', '\0', '\0']), ('\u{15a}', ['\u{15b}', '\0', '\0']), ('\u{15c}', - ['\u{15d}', '\0', '\0']), ('\u{15e}', ['\u{15f}', '\0', '\0']), ('\u{160}', ['\u{161}', - '\0', '\0']), ('\u{162}', ['\u{163}', '\0', '\0']), ('\u{164}', ['\u{165}', '\0', '\0']), - ('\u{166}', ['\u{167}', '\0', '\0']), ('\u{168}', ['\u{169}', '\0', '\0']), ('\u{16a}', - ['\u{16b}', '\0', '\0']), ('\u{16c}', ['\u{16d}', '\0', '\0']), ('\u{16e}', ['\u{16f}', - '\0', '\0']), ('\u{170}', ['\u{171}', 
'\0', '\0']), ('\u{172}', ['\u{173}', '\0', '\0']), - ('\u{174}', ['\u{175}', '\0', '\0']), ('\u{176}', ['\u{177}', '\0', '\0']), ('\u{178}', - ['\u{ff}', '\0', '\0']), ('\u{179}', ['\u{17a}', '\0', '\0']), ('\u{17b}', ['\u{17c}', '\0', - '\0']), ('\u{17d}', ['\u{17e}', '\0', '\0']), ('\u{181}', ['\u{253}', '\0', '\0']), - ('\u{182}', ['\u{183}', '\0', '\0']), ('\u{184}', ['\u{185}', '\0', '\0']), ('\u{186}', - ['\u{254}', '\0', '\0']), ('\u{187}', ['\u{188}', '\0', '\0']), ('\u{189}', ['\u{256}', - '\0', '\0']), ('\u{18a}', ['\u{257}', '\0', '\0']), ('\u{18b}', ['\u{18c}', '\0', '\0']), - ('\u{18e}', ['\u{1dd}', '\0', '\0']), ('\u{18f}', ['\u{259}', '\0', '\0']), ('\u{190}', - ['\u{25b}', '\0', '\0']), ('\u{191}', ['\u{192}', '\0', '\0']), ('\u{193}', ['\u{260}', - '\0', '\0']), ('\u{194}', ['\u{263}', '\0', '\0']), ('\u{196}', ['\u{269}', '\0', '\0']), - ('\u{197}', ['\u{268}', '\0', '\0']), ('\u{198}', ['\u{199}', '\0', '\0']), ('\u{19c}', - ['\u{26f}', '\0', '\0']), ('\u{19d}', ['\u{272}', '\0', '\0']), ('\u{19f}', ['\u{275}', - '\0', '\0']), ('\u{1a0}', ['\u{1a1}', '\0', '\0']), ('\u{1a2}', ['\u{1a3}', '\0', '\0']), - ('\u{1a4}', ['\u{1a5}', '\0', '\0']), ('\u{1a6}', ['\u{280}', '\0', '\0']), ('\u{1a7}', - ['\u{1a8}', '\0', '\0']), ('\u{1a9}', ['\u{283}', '\0', '\0']), ('\u{1ac}', ['\u{1ad}', - '\0', '\0']), ('\u{1ae}', ['\u{288}', '\0', '\0']), ('\u{1af}', ['\u{1b0}', '\0', '\0']), - ('\u{1b1}', ['\u{28a}', '\0', '\0']), ('\u{1b2}', ['\u{28b}', '\0', '\0']), ('\u{1b3}', - ['\u{1b4}', '\0', '\0']), ('\u{1b5}', ['\u{1b6}', '\0', '\0']), ('\u{1b7}', ['\u{292}', - '\0', '\0']), ('\u{1b8}', ['\u{1b9}', '\0', '\0']), ('\u{1bc}', ['\u{1bd}', '\0', '\0']), - ('\u{1c4}', ['\u{1c6}', '\0', '\0']), ('\u{1c5}', ['\u{1c6}', '\0', '\0']), ('\u{1c7}', - ['\u{1c9}', '\0', '\0']), ('\u{1c8}', ['\u{1c9}', '\0', '\0']), ('\u{1ca}', ['\u{1cc}', - '\0', '\0']), ('\u{1cb}', ['\u{1cc}', '\0', '\0']), ('\u{1cd}', ['\u{1ce}', '\0', '\0']), - ('\u{1cf}', ['\u{1d0}', '\0', '\0']), ('\u{1d1}', ['\u{1d2}', '\0', '\0']), ('\u{1d3}', - ['\u{1d4}', '\0', '\0']), ('\u{1d5}', ['\u{1d6}', '\0', '\0']), ('\u{1d7}', ['\u{1d8}', - '\0', '\0']), ('\u{1d9}', ['\u{1da}', '\0', '\0']), ('\u{1db}', ['\u{1dc}', '\0', '\0']), - ('\u{1de}', ['\u{1df}', '\0', '\0']), ('\u{1e0}', ['\u{1e1}', '\0', '\0']), ('\u{1e2}', - ['\u{1e3}', '\0', '\0']), ('\u{1e4}', ['\u{1e5}', '\0', '\0']), ('\u{1e6}', ['\u{1e7}', - '\0', '\0']), ('\u{1e8}', ['\u{1e9}', '\0', '\0']), ('\u{1ea}', ['\u{1eb}', '\0', '\0']), - ('\u{1ec}', ['\u{1ed}', '\0', '\0']), ('\u{1ee}', ['\u{1ef}', '\0', '\0']), ('\u{1f1}', - ['\u{1f3}', '\0', '\0']), ('\u{1f2}', ['\u{1f3}', '\0', '\0']), ('\u{1f4}', ['\u{1f5}', - '\0', '\0']), ('\u{1f6}', ['\u{195}', '\0', '\0']), ('\u{1f7}', ['\u{1bf}', '\0', '\0']), - ('\u{1f8}', ['\u{1f9}', '\0', '\0']), ('\u{1fa}', ['\u{1fb}', '\0', '\0']), ('\u{1fc}', - ['\u{1fd}', '\0', '\0']), ('\u{1fe}', ['\u{1ff}', '\0', '\0']), ('\u{200}', ['\u{201}', - '\0', '\0']), ('\u{202}', ['\u{203}', '\0', '\0']), ('\u{204}', ['\u{205}', '\0', '\0']), - ('\u{206}', ['\u{207}', '\0', '\0']), ('\u{208}', ['\u{209}', '\0', '\0']), ('\u{20a}', - ['\u{20b}', '\0', '\0']), ('\u{20c}', ['\u{20d}', '\0', '\0']), ('\u{20e}', ['\u{20f}', - '\0', '\0']), ('\u{210}', ['\u{211}', '\0', '\0']), ('\u{212}', ['\u{213}', '\0', '\0']), - ('\u{214}', ['\u{215}', '\0', '\0']), ('\u{216}', ['\u{217}', '\0', '\0']), ('\u{218}', - ['\u{219}', '\0', '\0']), ('\u{21a}', ['\u{21b}', '\0', '\0']), ('\u{21c}', ['\u{21d}', - '\0', '\0']), ('\u{21e}', ['\u{21f}', '\0', '\0']), 
('\u{220}', ['\u{19e}', '\0', '\0']), - ('\u{222}', ['\u{223}', '\0', '\0']), ('\u{224}', ['\u{225}', '\0', '\0']), ('\u{226}', - ['\u{227}', '\0', '\0']), ('\u{228}', ['\u{229}', '\0', '\0']), ('\u{22a}', ['\u{22b}', - '\0', '\0']), ('\u{22c}', ['\u{22d}', '\0', '\0']), ('\u{22e}', ['\u{22f}', '\0', '\0']), - ('\u{230}', ['\u{231}', '\0', '\0']), ('\u{232}', ['\u{233}', '\0', '\0']), ('\u{23a}', - ['\u{2c65}', '\0', '\0']), ('\u{23b}', ['\u{23c}', '\0', '\0']), ('\u{23d}', ['\u{19a}', - '\0', '\0']), ('\u{23e}', ['\u{2c66}', '\0', '\0']), ('\u{241}', ['\u{242}', '\0', '\0']), - ('\u{243}', ['\u{180}', '\0', '\0']), ('\u{244}', ['\u{289}', '\0', '\0']), ('\u{245}', - ['\u{28c}', '\0', '\0']), ('\u{246}', ['\u{247}', '\0', '\0']), ('\u{248}', ['\u{249}', - '\0', '\0']), ('\u{24a}', ['\u{24b}', '\0', '\0']), ('\u{24c}', ['\u{24d}', '\0', '\0']), - ('\u{24e}', ['\u{24f}', '\0', '\0']), ('\u{370}', ['\u{371}', '\0', '\0']), ('\u{372}', - ['\u{373}', '\0', '\0']), ('\u{376}', ['\u{377}', '\0', '\0']), ('\u{37f}', ['\u{3f3}', - '\0', '\0']), ('\u{386}', ['\u{3ac}', '\0', '\0']), ('\u{388}', ['\u{3ad}', '\0', '\0']), - ('\u{389}', ['\u{3ae}', '\0', '\0']), ('\u{38a}', ['\u{3af}', '\0', '\0']), ('\u{38c}', - ['\u{3cc}', '\0', '\0']), ('\u{38e}', ['\u{3cd}', '\0', '\0']), ('\u{38f}', ['\u{3ce}', - '\0', '\0']), ('\u{391}', ['\u{3b1}', '\0', '\0']), ('\u{392}', ['\u{3b2}', '\0', '\0']), - ('\u{393}', ['\u{3b3}', '\0', '\0']), ('\u{394}', ['\u{3b4}', '\0', '\0']), ('\u{395}', - ['\u{3b5}', '\0', '\0']), ('\u{396}', ['\u{3b6}', '\0', '\0']), ('\u{397}', ['\u{3b7}', - '\0', '\0']), ('\u{398}', ['\u{3b8}', '\0', '\0']), ('\u{399}', ['\u{3b9}', '\0', '\0']), - ('\u{39a}', ['\u{3ba}', '\0', '\0']), ('\u{39b}', ['\u{3bb}', '\0', '\0']), ('\u{39c}', - ['\u{3bc}', '\0', '\0']), ('\u{39d}', ['\u{3bd}', '\0', '\0']), ('\u{39e}', ['\u{3be}', - '\0', '\0']), ('\u{39f}', ['\u{3bf}', '\0', '\0']), ('\u{3a0}', ['\u{3c0}', '\0', '\0']), - ('\u{3a1}', ['\u{3c1}', '\0', '\0']), ('\u{3a3}', ['\u{3c3}', '\0', '\0']), ('\u{3a4}', - ['\u{3c4}', '\0', '\0']), ('\u{3a5}', ['\u{3c5}', '\0', '\0']), ('\u{3a6}', ['\u{3c6}', - '\0', '\0']), ('\u{3a7}', ['\u{3c7}', '\0', '\0']), ('\u{3a8}', ['\u{3c8}', '\0', '\0']), - ('\u{3a9}', ['\u{3c9}', '\0', '\0']), ('\u{3aa}', ['\u{3ca}', '\0', '\0']), ('\u{3ab}', - ['\u{3cb}', '\0', '\0']), ('\u{3cf}', ['\u{3d7}', '\0', '\0']), ('\u{3d8}', ['\u{3d9}', - '\0', '\0']), ('\u{3da}', ['\u{3db}', '\0', '\0']), ('\u{3dc}', ['\u{3dd}', '\0', '\0']), - ('\u{3de}', ['\u{3df}', '\0', '\0']), ('\u{3e0}', ['\u{3e1}', '\0', '\0']), ('\u{3e2}', - ['\u{3e3}', '\0', '\0']), ('\u{3e4}', ['\u{3e5}', '\0', '\0']), ('\u{3e6}', ['\u{3e7}', - '\0', '\0']), ('\u{3e8}', ['\u{3e9}', '\0', '\0']), ('\u{3ea}', ['\u{3eb}', '\0', '\0']), - ('\u{3ec}', ['\u{3ed}', '\0', '\0']), ('\u{3ee}', ['\u{3ef}', '\0', '\0']), ('\u{3f4}', - ['\u{3b8}', '\0', '\0']), ('\u{3f7}', ['\u{3f8}', '\0', '\0']), ('\u{3f9}', ['\u{3f2}', - '\0', '\0']), ('\u{3fa}', ['\u{3fb}', '\0', '\0']), ('\u{3fd}', ['\u{37b}', '\0', '\0']), - ('\u{3fe}', ['\u{37c}', '\0', '\0']), ('\u{3ff}', ['\u{37d}', '\0', '\0']), ('\u{400}', - ['\u{450}', '\0', '\0']), ('\u{401}', ['\u{451}', '\0', '\0']), ('\u{402}', ['\u{452}', - '\0', '\0']), ('\u{403}', ['\u{453}', '\0', '\0']), ('\u{404}', ['\u{454}', '\0', '\0']), - ('\u{405}', ['\u{455}', '\0', '\0']), ('\u{406}', ['\u{456}', '\0', '\0']), ('\u{407}', - ['\u{457}', '\0', '\0']), ('\u{408}', ['\u{458}', '\0', '\0']), ('\u{409}', ['\u{459}', - '\0', '\0']), ('\u{40a}', ['\u{45a}', '\0', '\0']), ('\u{40b}', 
['\u{45b}', '\0', '\0']), - ('\u{40c}', ['\u{45c}', '\0', '\0']), ('\u{40d}', ['\u{45d}', '\0', '\0']), ('\u{40e}', - ['\u{45e}', '\0', '\0']), ('\u{40f}', ['\u{45f}', '\0', '\0']), ('\u{410}', ['\u{430}', - '\0', '\0']), ('\u{411}', ['\u{431}', '\0', '\0']), ('\u{412}', ['\u{432}', '\0', '\0']), - ('\u{413}', ['\u{433}', '\0', '\0']), ('\u{414}', ['\u{434}', '\0', '\0']), ('\u{415}', - ['\u{435}', '\0', '\0']), ('\u{416}', ['\u{436}', '\0', '\0']), ('\u{417}', ['\u{437}', - '\0', '\0']), ('\u{418}', ['\u{438}', '\0', '\0']), ('\u{419}', ['\u{439}', '\0', '\0']), - ('\u{41a}', ['\u{43a}', '\0', '\0']), ('\u{41b}', ['\u{43b}', '\0', '\0']), ('\u{41c}', - ['\u{43c}', '\0', '\0']), ('\u{41d}', ['\u{43d}', '\0', '\0']), ('\u{41e}', ['\u{43e}', - '\0', '\0']), ('\u{41f}', ['\u{43f}', '\0', '\0']), ('\u{420}', ['\u{440}', '\0', '\0']), - ('\u{421}', ['\u{441}', '\0', '\0']), ('\u{422}', ['\u{442}', '\0', '\0']), ('\u{423}', - ['\u{443}', '\0', '\0']), ('\u{424}', ['\u{444}', '\0', '\0']), ('\u{425}', ['\u{445}', - '\0', '\0']), ('\u{426}', ['\u{446}', '\0', '\0']), ('\u{427}', ['\u{447}', '\0', '\0']), - ('\u{428}', ['\u{448}', '\0', '\0']), ('\u{429}', ['\u{449}', '\0', '\0']), ('\u{42a}', - ['\u{44a}', '\0', '\0']), ('\u{42b}', ['\u{44b}', '\0', '\0']), ('\u{42c}', ['\u{44c}', - '\0', '\0']), ('\u{42d}', ['\u{44d}', '\0', '\0']), ('\u{42e}', ['\u{44e}', '\0', '\0']), - ('\u{42f}', ['\u{44f}', '\0', '\0']), ('\u{460}', ['\u{461}', '\0', '\0']), ('\u{462}', - ['\u{463}', '\0', '\0']), ('\u{464}', ['\u{465}', '\0', '\0']), ('\u{466}', ['\u{467}', - '\0', '\0']), ('\u{468}', ['\u{469}', '\0', '\0']), ('\u{46a}', ['\u{46b}', '\0', '\0']), - ('\u{46c}', ['\u{46d}', '\0', '\0']), ('\u{46e}', ['\u{46f}', '\0', '\0']), ('\u{470}', - ['\u{471}', '\0', '\0']), ('\u{472}', ['\u{473}', '\0', '\0']), ('\u{474}', ['\u{475}', - '\0', '\0']), ('\u{476}', ['\u{477}', '\0', '\0']), ('\u{478}', ['\u{479}', '\0', '\0']), - ('\u{47a}', ['\u{47b}', '\0', '\0']), ('\u{47c}', ['\u{47d}', '\0', '\0']), ('\u{47e}', - ['\u{47f}', '\0', '\0']), ('\u{480}', ['\u{481}', '\0', '\0']), ('\u{48a}', ['\u{48b}', - '\0', '\0']), ('\u{48c}', ['\u{48d}', '\0', '\0']), ('\u{48e}', ['\u{48f}', '\0', '\0']), - ('\u{490}', ['\u{491}', '\0', '\0']), ('\u{492}', ['\u{493}', '\0', '\0']), ('\u{494}', - ['\u{495}', '\0', '\0']), ('\u{496}', ['\u{497}', '\0', '\0']), ('\u{498}', ['\u{499}', - '\0', '\0']), ('\u{49a}', ['\u{49b}', '\0', '\0']), ('\u{49c}', ['\u{49d}', '\0', '\0']), - ('\u{49e}', ['\u{49f}', '\0', '\0']), ('\u{4a0}', ['\u{4a1}', '\0', '\0']), ('\u{4a2}', - ['\u{4a3}', '\0', '\0']), ('\u{4a4}', ['\u{4a5}', '\0', '\0']), ('\u{4a6}', ['\u{4a7}', - '\0', '\0']), ('\u{4a8}', ['\u{4a9}', '\0', '\0']), ('\u{4aa}', ['\u{4ab}', '\0', '\0']), - ('\u{4ac}', ['\u{4ad}', '\0', '\0']), ('\u{4ae}', ['\u{4af}', '\0', '\0']), ('\u{4b0}', - ['\u{4b1}', '\0', '\0']), ('\u{4b2}', ['\u{4b3}', '\0', '\0']), ('\u{4b4}', ['\u{4b5}', - '\0', '\0']), ('\u{4b6}', ['\u{4b7}', '\0', '\0']), ('\u{4b8}', ['\u{4b9}', '\0', '\0']), - ('\u{4ba}', ['\u{4bb}', '\0', '\0']), ('\u{4bc}', ['\u{4bd}', '\0', '\0']), ('\u{4be}', - ['\u{4bf}', '\0', '\0']), ('\u{4c0}', ['\u{4cf}', '\0', '\0']), ('\u{4c1}', ['\u{4c2}', - '\0', '\0']), ('\u{4c3}', ['\u{4c4}', '\0', '\0']), ('\u{4c5}', ['\u{4c6}', '\0', '\0']), - ('\u{4c7}', ['\u{4c8}', '\0', '\0']), ('\u{4c9}', ['\u{4ca}', '\0', '\0']), ('\u{4cb}', - ['\u{4cc}', '\0', '\0']), ('\u{4cd}', ['\u{4ce}', '\0', '\0']), ('\u{4d0}', ['\u{4d1}', - '\0', '\0']), ('\u{4d2}', ['\u{4d3}', '\0', '\0']), ('\u{4d4}', ['\u{4d5}', '\0', 
'\0']), - ('\u{4d6}', ['\u{4d7}', '\0', '\0']), ('\u{4d8}', ['\u{4d9}', '\0', '\0']), ('\u{4da}', - ['\u{4db}', '\0', '\0']), ('\u{4dc}', ['\u{4dd}', '\0', '\0']), ('\u{4de}', ['\u{4df}', - '\0', '\0']), ('\u{4e0}', ['\u{4e1}', '\0', '\0']), ('\u{4e2}', ['\u{4e3}', '\0', '\0']), - ('\u{4e4}', ['\u{4e5}', '\0', '\0']), ('\u{4e6}', ['\u{4e7}', '\0', '\0']), ('\u{4e8}', - ['\u{4e9}', '\0', '\0']), ('\u{4ea}', ['\u{4eb}', '\0', '\0']), ('\u{4ec}', ['\u{4ed}', - '\0', '\0']), ('\u{4ee}', ['\u{4ef}', '\0', '\0']), ('\u{4f0}', ['\u{4f1}', '\0', '\0']), - ('\u{4f2}', ['\u{4f3}', '\0', '\0']), ('\u{4f4}', ['\u{4f5}', '\0', '\0']), ('\u{4f6}', - ['\u{4f7}', '\0', '\0']), ('\u{4f8}', ['\u{4f9}', '\0', '\0']), ('\u{4fa}', ['\u{4fb}', - '\0', '\0']), ('\u{4fc}', ['\u{4fd}', '\0', '\0']), ('\u{4fe}', ['\u{4ff}', '\0', '\0']), - ('\u{500}', ['\u{501}', '\0', '\0']), ('\u{502}', ['\u{503}', '\0', '\0']), ('\u{504}', - ['\u{505}', '\0', '\0']), ('\u{506}', ['\u{507}', '\0', '\0']), ('\u{508}', ['\u{509}', - '\0', '\0']), ('\u{50a}', ['\u{50b}', '\0', '\0']), ('\u{50c}', ['\u{50d}', '\0', '\0']), - ('\u{50e}', ['\u{50f}', '\0', '\0']), ('\u{510}', ['\u{511}', '\0', '\0']), ('\u{512}', - ['\u{513}', '\0', '\0']), ('\u{514}', ['\u{515}', '\0', '\0']), ('\u{516}', ['\u{517}', - '\0', '\0']), ('\u{518}', ['\u{519}', '\0', '\0']), ('\u{51a}', ['\u{51b}', '\0', '\0']), - ('\u{51c}', ['\u{51d}', '\0', '\0']), ('\u{51e}', ['\u{51f}', '\0', '\0']), ('\u{520}', - ['\u{521}', '\0', '\0']), ('\u{522}', ['\u{523}', '\0', '\0']), ('\u{524}', ['\u{525}', - '\0', '\0']), ('\u{526}', ['\u{527}', '\0', '\0']), ('\u{528}', ['\u{529}', '\0', '\0']), - ('\u{52a}', ['\u{52b}', '\0', '\0']), ('\u{52c}', ['\u{52d}', '\0', '\0']), ('\u{52e}', - ['\u{52f}', '\0', '\0']), ('\u{531}', ['\u{561}', '\0', '\0']), ('\u{532}', ['\u{562}', - '\0', '\0']), ('\u{533}', ['\u{563}', '\0', '\0']), ('\u{534}', ['\u{564}', '\0', '\0']), - ('\u{535}', ['\u{565}', '\0', '\0']), ('\u{536}', ['\u{566}', '\0', '\0']), ('\u{537}', - ['\u{567}', '\0', '\0']), ('\u{538}', ['\u{568}', '\0', '\0']), ('\u{539}', ['\u{569}', - '\0', '\0']), ('\u{53a}', ['\u{56a}', '\0', '\0']), ('\u{53b}', ['\u{56b}', '\0', '\0']), - ('\u{53c}', ['\u{56c}', '\0', '\0']), ('\u{53d}', ['\u{56d}', '\0', '\0']), ('\u{53e}', - ['\u{56e}', '\0', '\0']), ('\u{53f}', ['\u{56f}', '\0', '\0']), ('\u{540}', ['\u{570}', - '\0', '\0']), ('\u{541}', ['\u{571}', '\0', '\0']), ('\u{542}', ['\u{572}', '\0', '\0']), - ('\u{543}', ['\u{573}', '\0', '\0']), ('\u{544}', ['\u{574}', '\0', '\0']), ('\u{545}', - ['\u{575}', '\0', '\0']), ('\u{546}', ['\u{576}', '\0', '\0']), ('\u{547}', ['\u{577}', - '\0', '\0']), ('\u{548}', ['\u{578}', '\0', '\0']), ('\u{549}', ['\u{579}', '\0', '\0']), - ('\u{54a}', ['\u{57a}', '\0', '\0']), ('\u{54b}', ['\u{57b}', '\0', '\0']), ('\u{54c}', - ['\u{57c}', '\0', '\0']), ('\u{54d}', ['\u{57d}', '\0', '\0']), ('\u{54e}', ['\u{57e}', - '\0', '\0']), ('\u{54f}', ['\u{57f}', '\0', '\0']), ('\u{550}', ['\u{580}', '\0', '\0']), - ('\u{551}', ['\u{581}', '\0', '\0']), ('\u{552}', ['\u{582}', '\0', '\0']), ('\u{553}', - ['\u{583}', '\0', '\0']), ('\u{554}', ['\u{584}', '\0', '\0']), ('\u{555}', ['\u{585}', - '\0', '\0']), ('\u{556}', ['\u{586}', '\0', '\0']), ('\u{10a0}', ['\u{2d00}', '\0', '\0']), - ('\u{10a1}', ['\u{2d01}', '\0', '\0']), ('\u{10a2}', ['\u{2d02}', '\0', '\0']), ('\u{10a3}', - ['\u{2d03}', '\0', '\0']), ('\u{10a4}', ['\u{2d04}', '\0', '\0']), ('\u{10a5}', ['\u{2d05}', - '\0', '\0']), ('\u{10a6}', ['\u{2d06}', '\0', '\0']), ('\u{10a7}', ['\u{2d07}', '\0', - 
'\0']), ('\u{10a8}', ['\u{2d08}', '\0', '\0']), ('\u{10a9}', ['\u{2d09}', '\0', '\0']), - ('\u{10aa}', ['\u{2d0a}', '\0', '\0']), ('\u{10ab}', ['\u{2d0b}', '\0', '\0']), ('\u{10ac}', - ['\u{2d0c}', '\0', '\0']), ('\u{10ad}', ['\u{2d0d}', '\0', '\0']), ('\u{10ae}', ['\u{2d0e}', - '\0', '\0']), ('\u{10af}', ['\u{2d0f}', '\0', '\0']), ('\u{10b0}', ['\u{2d10}', '\0', - '\0']), ('\u{10b1}', ['\u{2d11}', '\0', '\0']), ('\u{10b2}', ['\u{2d12}', '\0', '\0']), - ('\u{10b3}', ['\u{2d13}', '\0', '\0']), ('\u{10b4}', ['\u{2d14}', '\0', '\0']), ('\u{10b5}', - ['\u{2d15}', '\0', '\0']), ('\u{10b6}', ['\u{2d16}', '\0', '\0']), ('\u{10b7}', ['\u{2d17}', - '\0', '\0']), ('\u{10b8}', ['\u{2d18}', '\0', '\0']), ('\u{10b9}', ['\u{2d19}', '\0', - '\0']), ('\u{10ba}', ['\u{2d1a}', '\0', '\0']), ('\u{10bb}', ['\u{2d1b}', '\0', '\0']), - ('\u{10bc}', ['\u{2d1c}', '\0', '\0']), ('\u{10bd}', ['\u{2d1d}', '\0', '\0']), ('\u{10be}', - ['\u{2d1e}', '\0', '\0']), ('\u{10bf}', ['\u{2d1f}', '\0', '\0']), ('\u{10c0}', ['\u{2d20}', - '\0', '\0']), ('\u{10c1}', ['\u{2d21}', '\0', '\0']), ('\u{10c2}', ['\u{2d22}', '\0', - '\0']), ('\u{10c3}', ['\u{2d23}', '\0', '\0']), ('\u{10c4}', ['\u{2d24}', '\0', '\0']), - ('\u{10c5}', ['\u{2d25}', '\0', '\0']), ('\u{10c7}', ['\u{2d27}', '\0', '\0']), ('\u{10cd}', - ['\u{2d2d}', '\0', '\0']), ('\u{13a0}', ['\u{ab70}', '\0', '\0']), ('\u{13a1}', ['\u{ab71}', - '\0', '\0']), ('\u{13a2}', ['\u{ab72}', '\0', '\0']), ('\u{13a3}', ['\u{ab73}', '\0', - '\0']), ('\u{13a4}', ['\u{ab74}', '\0', '\0']), ('\u{13a5}', ['\u{ab75}', '\0', '\0']), - ('\u{13a6}', ['\u{ab76}', '\0', '\0']), ('\u{13a7}', ['\u{ab77}', '\0', '\0']), ('\u{13a8}', - ['\u{ab78}', '\0', '\0']), ('\u{13a9}', ['\u{ab79}', '\0', '\0']), ('\u{13aa}', ['\u{ab7a}', - '\0', '\0']), ('\u{13ab}', ['\u{ab7b}', '\0', '\0']), ('\u{13ac}', ['\u{ab7c}', '\0', - '\0']), ('\u{13ad}', ['\u{ab7d}', '\0', '\0']), ('\u{13ae}', ['\u{ab7e}', '\0', '\0']), - ('\u{13af}', ['\u{ab7f}', '\0', '\0']), ('\u{13b0}', ['\u{ab80}', '\0', '\0']), ('\u{13b1}', - ['\u{ab81}', '\0', '\0']), ('\u{13b2}', ['\u{ab82}', '\0', '\0']), ('\u{13b3}', ['\u{ab83}', - '\0', '\0']), ('\u{13b4}', ['\u{ab84}', '\0', '\0']), ('\u{13b5}', ['\u{ab85}', '\0', - '\0']), ('\u{13b6}', ['\u{ab86}', '\0', '\0']), ('\u{13b7}', ['\u{ab87}', '\0', '\0']), - ('\u{13b8}', ['\u{ab88}', '\0', '\0']), ('\u{13b9}', ['\u{ab89}', '\0', '\0']), ('\u{13ba}', - ['\u{ab8a}', '\0', '\0']), ('\u{13bb}', ['\u{ab8b}', '\0', '\0']), ('\u{13bc}', ['\u{ab8c}', - '\0', '\0']), ('\u{13bd}', ['\u{ab8d}', '\0', '\0']), ('\u{13be}', ['\u{ab8e}', '\0', - '\0']), ('\u{13bf}', ['\u{ab8f}', '\0', '\0']), ('\u{13c0}', ['\u{ab90}', '\0', '\0']), - ('\u{13c1}', ['\u{ab91}', '\0', '\0']), ('\u{13c2}', ['\u{ab92}', '\0', '\0']), ('\u{13c3}', - ['\u{ab93}', '\0', '\0']), ('\u{13c4}', ['\u{ab94}', '\0', '\0']), ('\u{13c5}', ['\u{ab95}', - '\0', '\0']), ('\u{13c6}', ['\u{ab96}', '\0', '\0']), ('\u{13c7}', ['\u{ab97}', '\0', - '\0']), ('\u{13c8}', ['\u{ab98}', '\0', '\0']), ('\u{13c9}', ['\u{ab99}', '\0', '\0']), - ('\u{13ca}', ['\u{ab9a}', '\0', '\0']), ('\u{13cb}', ['\u{ab9b}', '\0', '\0']), ('\u{13cc}', - ['\u{ab9c}', '\0', '\0']), ('\u{13cd}', ['\u{ab9d}', '\0', '\0']), ('\u{13ce}', ['\u{ab9e}', - '\0', '\0']), ('\u{13cf}', ['\u{ab9f}', '\0', '\0']), ('\u{13d0}', ['\u{aba0}', '\0', - '\0']), ('\u{13d1}', ['\u{aba1}', '\0', '\0']), ('\u{13d2}', ['\u{aba2}', '\0', '\0']), - ('\u{13d3}', ['\u{aba3}', '\0', '\0']), ('\u{13d4}', ['\u{aba4}', '\0', '\0']), ('\u{13d5}', - ['\u{aba5}', '\0', '\0']), ('\u{13d6}', ['\u{aba6}', 
'\0', '\0']), ('\u{13d7}', ['\u{aba7}', - '\0', '\0']), ('\u{13d8}', ['\u{aba8}', '\0', '\0']), ('\u{13d9}', ['\u{aba9}', '\0', - '\0']), ('\u{13da}', ['\u{abaa}', '\0', '\0']), ('\u{13db}', ['\u{abab}', '\0', '\0']), - ('\u{13dc}', ['\u{abac}', '\0', '\0']), ('\u{13dd}', ['\u{abad}', '\0', '\0']), ('\u{13de}', - ['\u{abae}', '\0', '\0']), ('\u{13df}', ['\u{abaf}', '\0', '\0']), ('\u{13e0}', ['\u{abb0}', - '\0', '\0']), ('\u{13e1}', ['\u{abb1}', '\0', '\0']), ('\u{13e2}', ['\u{abb2}', '\0', - '\0']), ('\u{13e3}', ['\u{abb3}', '\0', '\0']), ('\u{13e4}', ['\u{abb4}', '\0', '\0']), - ('\u{13e5}', ['\u{abb5}', '\0', '\0']), ('\u{13e6}', ['\u{abb6}', '\0', '\0']), ('\u{13e7}', - ['\u{abb7}', '\0', '\0']), ('\u{13e8}', ['\u{abb8}', '\0', '\0']), ('\u{13e9}', ['\u{abb9}', - '\0', '\0']), ('\u{13ea}', ['\u{abba}', '\0', '\0']), ('\u{13eb}', ['\u{abbb}', '\0', - '\0']), ('\u{13ec}', ['\u{abbc}', '\0', '\0']), ('\u{13ed}', ['\u{abbd}', '\0', '\0']), - ('\u{13ee}', ['\u{abbe}', '\0', '\0']), ('\u{13ef}', ['\u{abbf}', '\0', '\0']), ('\u{13f0}', - ['\u{13f8}', '\0', '\0']), ('\u{13f1}', ['\u{13f9}', '\0', '\0']), ('\u{13f2}', ['\u{13fa}', - '\0', '\0']), ('\u{13f3}', ['\u{13fb}', '\0', '\0']), ('\u{13f4}', ['\u{13fc}', '\0', - '\0']), ('\u{13f5}', ['\u{13fd}', '\0', '\0']), ('\u{1c90}', ['\u{10d0}', '\0', '\0']), - ('\u{1c91}', ['\u{10d1}', '\0', '\0']), ('\u{1c92}', ['\u{10d2}', '\0', '\0']), ('\u{1c93}', - ['\u{10d3}', '\0', '\0']), ('\u{1c94}', ['\u{10d4}', '\0', '\0']), ('\u{1c95}', ['\u{10d5}', - '\0', '\0']), ('\u{1c96}', ['\u{10d6}', '\0', '\0']), ('\u{1c97}', ['\u{10d7}', '\0', - '\0']), ('\u{1c98}', ['\u{10d8}', '\0', '\0']), ('\u{1c99}', ['\u{10d9}', '\0', '\0']), - ('\u{1c9a}', ['\u{10da}', '\0', '\0']), ('\u{1c9b}', ['\u{10db}', '\0', '\0']), ('\u{1c9c}', - ['\u{10dc}', '\0', '\0']), ('\u{1c9d}', ['\u{10dd}', '\0', '\0']), ('\u{1c9e}', ['\u{10de}', - '\0', '\0']), ('\u{1c9f}', ['\u{10df}', '\0', '\0']), ('\u{1ca0}', ['\u{10e0}', '\0', - '\0']), ('\u{1ca1}', ['\u{10e1}', '\0', '\0']), ('\u{1ca2}', ['\u{10e2}', '\0', '\0']), - ('\u{1ca3}', ['\u{10e3}', '\0', '\0']), ('\u{1ca4}', ['\u{10e4}', '\0', '\0']), ('\u{1ca5}', - ['\u{10e5}', '\0', '\0']), ('\u{1ca6}', ['\u{10e6}', '\0', '\0']), ('\u{1ca7}', ['\u{10e7}', - '\0', '\0']), ('\u{1ca8}', ['\u{10e8}', '\0', '\0']), ('\u{1ca9}', ['\u{10e9}', '\0', - '\0']), ('\u{1caa}', ['\u{10ea}', '\0', '\0']), ('\u{1cab}', ['\u{10eb}', '\0', '\0']), - ('\u{1cac}', ['\u{10ec}', '\0', '\0']), ('\u{1cad}', ['\u{10ed}', '\0', '\0']), ('\u{1cae}', - ['\u{10ee}', '\0', '\0']), ('\u{1caf}', ['\u{10ef}', '\0', '\0']), ('\u{1cb0}', ['\u{10f0}', - '\0', '\0']), ('\u{1cb1}', ['\u{10f1}', '\0', '\0']), ('\u{1cb2}', ['\u{10f2}', '\0', - '\0']), ('\u{1cb3}', ['\u{10f3}', '\0', '\0']), ('\u{1cb4}', ['\u{10f4}', '\0', '\0']), - ('\u{1cb5}', ['\u{10f5}', '\0', '\0']), ('\u{1cb6}', ['\u{10f6}', '\0', '\0']), ('\u{1cb7}', - ['\u{10f7}', '\0', '\0']), ('\u{1cb8}', ['\u{10f8}', '\0', '\0']), ('\u{1cb9}', ['\u{10f9}', - '\0', '\0']), ('\u{1cba}', ['\u{10fa}', '\0', '\0']), ('\u{1cbd}', ['\u{10fd}', '\0', - '\0']), ('\u{1cbe}', ['\u{10fe}', '\0', '\0']), ('\u{1cbf}', ['\u{10ff}', '\0', '\0']), - ('\u{1e00}', ['\u{1e01}', '\0', '\0']), ('\u{1e02}', ['\u{1e03}', '\0', '\0']), ('\u{1e04}', - ['\u{1e05}', '\0', '\0']), ('\u{1e06}', ['\u{1e07}', '\0', '\0']), ('\u{1e08}', ['\u{1e09}', - '\0', '\0']), ('\u{1e0a}', ['\u{1e0b}', '\0', '\0']), ('\u{1e0c}', ['\u{1e0d}', '\0', - '\0']), ('\u{1e0e}', ['\u{1e0f}', '\0', '\0']), ('\u{1e10}', ['\u{1e11}', '\0', '\0']), - ('\u{1e12}', 
['\u{1e13}', '\0', '\0']), ('\u{1e14}', ['\u{1e15}', '\0', '\0']), ('\u{1e16}', - ['\u{1e17}', '\0', '\0']), ('\u{1e18}', ['\u{1e19}', '\0', '\0']), ('\u{1e1a}', ['\u{1e1b}', - '\0', '\0']), ('\u{1e1c}', ['\u{1e1d}', '\0', '\0']), ('\u{1e1e}', ['\u{1e1f}', '\0', - '\0']), ('\u{1e20}', ['\u{1e21}', '\0', '\0']), ('\u{1e22}', ['\u{1e23}', '\0', '\0']), - ('\u{1e24}', ['\u{1e25}', '\0', '\0']), ('\u{1e26}', ['\u{1e27}', '\0', '\0']), ('\u{1e28}', - ['\u{1e29}', '\0', '\0']), ('\u{1e2a}', ['\u{1e2b}', '\0', '\0']), ('\u{1e2c}', ['\u{1e2d}', - '\0', '\0']), ('\u{1e2e}', ['\u{1e2f}', '\0', '\0']), ('\u{1e30}', ['\u{1e31}', '\0', - '\0']), ('\u{1e32}', ['\u{1e33}', '\0', '\0']), ('\u{1e34}', ['\u{1e35}', '\0', '\0']), - ('\u{1e36}', ['\u{1e37}', '\0', '\0']), ('\u{1e38}', ['\u{1e39}', '\0', '\0']), ('\u{1e3a}', - ['\u{1e3b}', '\0', '\0']), ('\u{1e3c}', ['\u{1e3d}', '\0', '\0']), ('\u{1e3e}', ['\u{1e3f}', - '\0', '\0']), ('\u{1e40}', ['\u{1e41}', '\0', '\0']), ('\u{1e42}', ['\u{1e43}', '\0', - '\0']), ('\u{1e44}', ['\u{1e45}', '\0', '\0']), ('\u{1e46}', ['\u{1e47}', '\0', '\0']), - ('\u{1e48}', ['\u{1e49}', '\0', '\0']), ('\u{1e4a}', ['\u{1e4b}', '\0', '\0']), ('\u{1e4c}', - ['\u{1e4d}', '\0', '\0']), ('\u{1e4e}', ['\u{1e4f}', '\0', '\0']), ('\u{1e50}', ['\u{1e51}', - '\0', '\0']), ('\u{1e52}', ['\u{1e53}', '\0', '\0']), ('\u{1e54}', ['\u{1e55}', '\0', - '\0']), ('\u{1e56}', ['\u{1e57}', '\0', '\0']), ('\u{1e58}', ['\u{1e59}', '\0', '\0']), - ('\u{1e5a}', ['\u{1e5b}', '\0', '\0']), ('\u{1e5c}', ['\u{1e5d}', '\0', '\0']), ('\u{1e5e}', - ['\u{1e5f}', '\0', '\0']), ('\u{1e60}', ['\u{1e61}', '\0', '\0']), ('\u{1e62}', ['\u{1e63}', - '\0', '\0']), ('\u{1e64}', ['\u{1e65}', '\0', '\0']), ('\u{1e66}', ['\u{1e67}', '\0', - '\0']), ('\u{1e68}', ['\u{1e69}', '\0', '\0']), ('\u{1e6a}', ['\u{1e6b}', '\0', '\0']), - ('\u{1e6c}', ['\u{1e6d}', '\0', '\0']), ('\u{1e6e}', ['\u{1e6f}', '\0', '\0']), ('\u{1e70}', - ['\u{1e71}', '\0', '\0']), ('\u{1e72}', ['\u{1e73}', '\0', '\0']), ('\u{1e74}', ['\u{1e75}', - '\0', '\0']), ('\u{1e76}', ['\u{1e77}', '\0', '\0']), ('\u{1e78}', ['\u{1e79}', '\0', - '\0']), ('\u{1e7a}', ['\u{1e7b}', '\0', '\0']), ('\u{1e7c}', ['\u{1e7d}', '\0', '\0']), - ('\u{1e7e}', ['\u{1e7f}', '\0', '\0']), ('\u{1e80}', ['\u{1e81}', '\0', '\0']), ('\u{1e82}', - ['\u{1e83}', '\0', '\0']), ('\u{1e84}', ['\u{1e85}', '\0', '\0']), ('\u{1e86}', ['\u{1e87}', - '\0', '\0']), ('\u{1e88}', ['\u{1e89}', '\0', '\0']), ('\u{1e8a}', ['\u{1e8b}', '\0', - '\0']), ('\u{1e8c}', ['\u{1e8d}', '\0', '\0']), ('\u{1e8e}', ['\u{1e8f}', '\0', '\0']), - ('\u{1e90}', ['\u{1e91}', '\0', '\0']), ('\u{1e92}', ['\u{1e93}', '\0', '\0']), ('\u{1e94}', - ['\u{1e95}', '\0', '\0']), ('\u{1e9e}', ['\u{df}', '\0', '\0']), ('\u{1ea0}', ['\u{1ea1}', - '\0', '\0']), ('\u{1ea2}', ['\u{1ea3}', '\0', '\0']), ('\u{1ea4}', ['\u{1ea5}', '\0', - '\0']), ('\u{1ea6}', ['\u{1ea7}', '\0', '\0']), ('\u{1ea8}', ['\u{1ea9}', '\0', '\0']), - ('\u{1eaa}', ['\u{1eab}', '\0', '\0']), ('\u{1eac}', ['\u{1ead}', '\0', '\0']), ('\u{1eae}', - ['\u{1eaf}', '\0', '\0']), ('\u{1eb0}', ['\u{1eb1}', '\0', '\0']), ('\u{1eb2}', ['\u{1eb3}', - '\0', '\0']), ('\u{1eb4}', ['\u{1eb5}', '\0', '\0']), ('\u{1eb6}', ['\u{1eb7}', '\0', - '\0']), ('\u{1eb8}', ['\u{1eb9}', '\0', '\0']), ('\u{1eba}', ['\u{1ebb}', '\0', '\0']), - ('\u{1ebc}', ['\u{1ebd}', '\0', '\0']), ('\u{1ebe}', ['\u{1ebf}', '\0', '\0']), ('\u{1ec0}', - ['\u{1ec1}', '\0', '\0']), ('\u{1ec2}', ['\u{1ec3}', '\0', '\0']), ('\u{1ec4}', ['\u{1ec5}', - '\0', '\0']), ('\u{1ec6}', ['\u{1ec7}', '\0', '\0']), ('\u{1ec8}', 
['\u{1ec9}', '\0', - '\0']), ('\u{1eca}', ['\u{1ecb}', '\0', '\0']), ('\u{1ecc}', ['\u{1ecd}', '\0', '\0']), - ('\u{1ece}', ['\u{1ecf}', '\0', '\0']), ('\u{1ed0}', ['\u{1ed1}', '\0', '\0']), ('\u{1ed2}', - ['\u{1ed3}', '\0', '\0']), ('\u{1ed4}', ['\u{1ed5}', '\0', '\0']), ('\u{1ed6}', ['\u{1ed7}', - '\0', '\0']), ('\u{1ed8}', ['\u{1ed9}', '\0', '\0']), ('\u{1eda}', ['\u{1edb}', '\0', - '\0']), ('\u{1edc}', ['\u{1edd}', '\0', '\0']), ('\u{1ede}', ['\u{1edf}', '\0', '\0']), - ('\u{1ee0}', ['\u{1ee1}', '\0', '\0']), ('\u{1ee2}', ['\u{1ee3}', '\0', '\0']), ('\u{1ee4}', - ['\u{1ee5}', '\0', '\0']), ('\u{1ee6}', ['\u{1ee7}', '\0', '\0']), ('\u{1ee8}', ['\u{1ee9}', - '\0', '\0']), ('\u{1eea}', ['\u{1eeb}', '\0', '\0']), ('\u{1eec}', ['\u{1eed}', '\0', - '\0']), ('\u{1eee}', ['\u{1eef}', '\0', '\0']), ('\u{1ef0}', ['\u{1ef1}', '\0', '\0']), - ('\u{1ef2}', ['\u{1ef3}', '\0', '\0']), ('\u{1ef4}', ['\u{1ef5}', '\0', '\0']), ('\u{1ef6}', - ['\u{1ef7}', '\0', '\0']), ('\u{1ef8}', ['\u{1ef9}', '\0', '\0']), ('\u{1efa}', ['\u{1efb}', - '\0', '\0']), ('\u{1efc}', ['\u{1efd}', '\0', '\0']), ('\u{1efe}', ['\u{1eff}', '\0', - '\0']), ('\u{1f08}', ['\u{1f00}', '\0', '\0']), ('\u{1f09}', ['\u{1f01}', '\0', '\0']), - ('\u{1f0a}', ['\u{1f02}', '\0', '\0']), ('\u{1f0b}', ['\u{1f03}', '\0', '\0']), ('\u{1f0c}', - ['\u{1f04}', '\0', '\0']), ('\u{1f0d}', ['\u{1f05}', '\0', '\0']), ('\u{1f0e}', ['\u{1f06}', - '\0', '\0']), ('\u{1f0f}', ['\u{1f07}', '\0', '\0']), ('\u{1f18}', ['\u{1f10}', '\0', - '\0']), ('\u{1f19}', ['\u{1f11}', '\0', '\0']), ('\u{1f1a}', ['\u{1f12}', '\0', '\0']), - ('\u{1f1b}', ['\u{1f13}', '\0', '\0']), ('\u{1f1c}', ['\u{1f14}', '\0', '\0']), ('\u{1f1d}', - ['\u{1f15}', '\0', '\0']), ('\u{1f28}', ['\u{1f20}', '\0', '\0']), ('\u{1f29}', ['\u{1f21}', - '\0', '\0']), ('\u{1f2a}', ['\u{1f22}', '\0', '\0']), ('\u{1f2b}', ['\u{1f23}', '\0', - '\0']), ('\u{1f2c}', ['\u{1f24}', '\0', '\0']), ('\u{1f2d}', ['\u{1f25}', '\0', '\0']), - ('\u{1f2e}', ['\u{1f26}', '\0', '\0']), ('\u{1f2f}', ['\u{1f27}', '\0', '\0']), ('\u{1f38}', - ['\u{1f30}', '\0', '\0']), ('\u{1f39}', ['\u{1f31}', '\0', '\0']), ('\u{1f3a}', ['\u{1f32}', - '\0', '\0']), ('\u{1f3b}', ['\u{1f33}', '\0', '\0']), ('\u{1f3c}', ['\u{1f34}', '\0', - '\0']), ('\u{1f3d}', ['\u{1f35}', '\0', '\0']), ('\u{1f3e}', ['\u{1f36}', '\0', '\0']), - ('\u{1f3f}', ['\u{1f37}', '\0', '\0']), ('\u{1f48}', ['\u{1f40}', '\0', '\0']), ('\u{1f49}', - ['\u{1f41}', '\0', '\0']), ('\u{1f4a}', ['\u{1f42}', '\0', '\0']), ('\u{1f4b}', ['\u{1f43}', - '\0', '\0']), ('\u{1f4c}', ['\u{1f44}', '\0', '\0']), ('\u{1f4d}', ['\u{1f45}', '\0', - '\0']), ('\u{1f59}', ['\u{1f51}', '\0', '\0']), ('\u{1f5b}', ['\u{1f53}', '\0', '\0']), - ('\u{1f5d}', ['\u{1f55}', '\0', '\0']), ('\u{1f5f}', ['\u{1f57}', '\0', '\0']), ('\u{1f68}', - ['\u{1f60}', '\0', '\0']), ('\u{1f69}', ['\u{1f61}', '\0', '\0']), ('\u{1f6a}', ['\u{1f62}', - '\0', '\0']), ('\u{1f6b}', ['\u{1f63}', '\0', '\0']), ('\u{1f6c}', ['\u{1f64}', '\0', - '\0']), ('\u{1f6d}', ['\u{1f65}', '\0', '\0']), ('\u{1f6e}', ['\u{1f66}', '\0', '\0']), - ('\u{1f6f}', ['\u{1f67}', '\0', '\0']), ('\u{1f88}', ['\u{1f80}', '\0', '\0']), ('\u{1f89}', - ['\u{1f81}', '\0', '\0']), ('\u{1f8a}', ['\u{1f82}', '\0', '\0']), ('\u{1f8b}', ['\u{1f83}', - '\0', '\0']), ('\u{1f8c}', ['\u{1f84}', '\0', '\0']), ('\u{1f8d}', ['\u{1f85}', '\0', - '\0']), ('\u{1f8e}', ['\u{1f86}', '\0', '\0']), ('\u{1f8f}', ['\u{1f87}', '\0', '\0']), - ('\u{1f98}', ['\u{1f90}', '\0', '\0']), ('\u{1f99}', ['\u{1f91}', '\0', '\0']), ('\u{1f9a}', - ['\u{1f92}', '\0', '\0']), 
('\u{1f9b}', ['\u{1f93}', '\0', '\0']), ('\u{1f9c}', ['\u{1f94}', - '\0', '\0']), ('\u{1f9d}', ['\u{1f95}', '\0', '\0']), ('\u{1f9e}', ['\u{1f96}', '\0', - '\0']), ('\u{1f9f}', ['\u{1f97}', '\0', '\0']), ('\u{1fa8}', ['\u{1fa0}', '\0', '\0']), - ('\u{1fa9}', ['\u{1fa1}', '\0', '\0']), ('\u{1faa}', ['\u{1fa2}', '\0', '\0']), ('\u{1fab}', - ['\u{1fa3}', '\0', '\0']), ('\u{1fac}', ['\u{1fa4}', '\0', '\0']), ('\u{1fad}', ['\u{1fa5}', - '\0', '\0']), ('\u{1fae}', ['\u{1fa6}', '\0', '\0']), ('\u{1faf}', ['\u{1fa7}', '\0', - '\0']), ('\u{1fb8}', ['\u{1fb0}', '\0', '\0']), ('\u{1fb9}', ['\u{1fb1}', '\0', '\0']), - ('\u{1fba}', ['\u{1f70}', '\0', '\0']), ('\u{1fbb}', ['\u{1f71}', '\0', '\0']), ('\u{1fbc}', - ['\u{1fb3}', '\0', '\0']), ('\u{1fc8}', ['\u{1f72}', '\0', '\0']), ('\u{1fc9}', ['\u{1f73}', - '\0', '\0']), ('\u{1fca}', ['\u{1f74}', '\0', '\0']), ('\u{1fcb}', ['\u{1f75}', '\0', - '\0']), ('\u{1fcc}', ['\u{1fc3}', '\0', '\0']), ('\u{1fd8}', ['\u{1fd0}', '\0', '\0']), - ('\u{1fd9}', ['\u{1fd1}', '\0', '\0']), ('\u{1fda}', ['\u{1f76}', '\0', '\0']), ('\u{1fdb}', - ['\u{1f77}', '\0', '\0']), ('\u{1fe8}', ['\u{1fe0}', '\0', '\0']), ('\u{1fe9}', ['\u{1fe1}', - '\0', '\0']), ('\u{1fea}', ['\u{1f7a}', '\0', '\0']), ('\u{1feb}', ['\u{1f7b}', '\0', - '\0']), ('\u{1fec}', ['\u{1fe5}', '\0', '\0']), ('\u{1ff8}', ['\u{1f78}', '\0', '\0']), - ('\u{1ff9}', ['\u{1f79}', '\0', '\0']), ('\u{1ffa}', ['\u{1f7c}', '\0', '\0']), ('\u{1ffb}', - ['\u{1f7d}', '\0', '\0']), ('\u{1ffc}', ['\u{1ff3}', '\0', '\0']), ('\u{2126}', ['\u{3c9}', - '\0', '\0']), ('\u{212a}', ['\u{6b}', '\0', '\0']), ('\u{212b}', ['\u{e5}', '\0', '\0']), - ('\u{2132}', ['\u{214e}', '\0', '\0']), ('\u{2160}', ['\u{2170}', '\0', '\0']), ('\u{2161}', - ['\u{2171}', '\0', '\0']), ('\u{2162}', ['\u{2172}', '\0', '\0']), ('\u{2163}', ['\u{2173}', - '\0', '\0']), ('\u{2164}', ['\u{2174}', '\0', '\0']), ('\u{2165}', ['\u{2175}', '\0', - '\0']), ('\u{2166}', ['\u{2176}', '\0', '\0']), ('\u{2167}', ['\u{2177}', '\0', '\0']), - ('\u{2168}', ['\u{2178}', '\0', '\0']), ('\u{2169}', ['\u{2179}', '\0', '\0']), ('\u{216a}', - ['\u{217a}', '\0', '\0']), ('\u{216b}', ['\u{217b}', '\0', '\0']), ('\u{216c}', ['\u{217c}', - '\0', '\0']), ('\u{216d}', ['\u{217d}', '\0', '\0']), ('\u{216e}', ['\u{217e}', '\0', - '\0']), ('\u{216f}', ['\u{217f}', '\0', '\0']), ('\u{2183}', ['\u{2184}', '\0', '\0']), - ('\u{24b6}', ['\u{24d0}', '\0', '\0']), ('\u{24b7}', ['\u{24d1}', '\0', '\0']), ('\u{24b8}', - ['\u{24d2}', '\0', '\0']), ('\u{24b9}', ['\u{24d3}', '\0', '\0']), ('\u{24ba}', ['\u{24d4}', - '\0', '\0']), ('\u{24bb}', ['\u{24d5}', '\0', '\0']), ('\u{24bc}', ['\u{24d6}', '\0', - '\0']), ('\u{24bd}', ['\u{24d7}', '\0', '\0']), ('\u{24be}', ['\u{24d8}', '\0', '\0']), - ('\u{24bf}', ['\u{24d9}', '\0', '\0']), ('\u{24c0}', ['\u{24da}', '\0', '\0']), ('\u{24c1}', - ['\u{24db}', '\0', '\0']), ('\u{24c2}', ['\u{24dc}', '\0', '\0']), ('\u{24c3}', ['\u{24dd}', - '\0', '\0']), ('\u{24c4}', ['\u{24de}', '\0', '\0']), ('\u{24c5}', ['\u{24df}', '\0', - '\0']), ('\u{24c6}', ['\u{24e0}', '\0', '\0']), ('\u{24c7}', ['\u{24e1}', '\0', '\0']), - ('\u{24c8}', ['\u{24e2}', '\0', '\0']), ('\u{24c9}', ['\u{24e3}', '\0', '\0']), ('\u{24ca}', - ['\u{24e4}', '\0', '\0']), ('\u{24cb}', ['\u{24e5}', '\0', '\0']), ('\u{24cc}', ['\u{24e6}', - '\0', '\0']), ('\u{24cd}', ['\u{24e7}', '\0', '\0']), ('\u{24ce}', ['\u{24e8}', '\0', - '\0']), ('\u{24cf}', ['\u{24e9}', '\0', '\0']), ('\u{2c00}', ['\u{2c30}', '\0', '\0']), - ('\u{2c01}', ['\u{2c31}', '\0', '\0']), ('\u{2c02}', ['\u{2c32}', '\0', '\0']), 
('\u{2c03}', - ['\u{2c33}', '\0', '\0']), ('\u{2c04}', ['\u{2c34}', '\0', '\0']), ('\u{2c05}', ['\u{2c35}', - '\0', '\0']), ('\u{2c06}', ['\u{2c36}', '\0', '\0']), ('\u{2c07}', ['\u{2c37}', '\0', - '\0']), ('\u{2c08}', ['\u{2c38}', '\0', '\0']), ('\u{2c09}', ['\u{2c39}', '\0', '\0']), - ('\u{2c0a}', ['\u{2c3a}', '\0', '\0']), ('\u{2c0b}', ['\u{2c3b}', '\0', '\0']), ('\u{2c0c}', - ['\u{2c3c}', '\0', '\0']), ('\u{2c0d}', ['\u{2c3d}', '\0', '\0']), ('\u{2c0e}', ['\u{2c3e}', - '\0', '\0']), ('\u{2c0f}', ['\u{2c3f}', '\0', '\0']), ('\u{2c10}', ['\u{2c40}', '\0', - '\0']), ('\u{2c11}', ['\u{2c41}', '\0', '\0']), ('\u{2c12}', ['\u{2c42}', '\0', '\0']), - ('\u{2c13}', ['\u{2c43}', '\0', '\0']), ('\u{2c14}', ['\u{2c44}', '\0', '\0']), ('\u{2c15}', - ['\u{2c45}', '\0', '\0']), ('\u{2c16}', ['\u{2c46}', '\0', '\0']), ('\u{2c17}', ['\u{2c47}', - '\0', '\0']), ('\u{2c18}', ['\u{2c48}', '\0', '\0']), ('\u{2c19}', ['\u{2c49}', '\0', - '\0']), ('\u{2c1a}', ['\u{2c4a}', '\0', '\0']), ('\u{2c1b}', ['\u{2c4b}', '\0', '\0']), - ('\u{2c1c}', ['\u{2c4c}', '\0', '\0']), ('\u{2c1d}', ['\u{2c4d}', '\0', '\0']), ('\u{2c1e}', - ['\u{2c4e}', '\0', '\0']), ('\u{2c1f}', ['\u{2c4f}', '\0', '\0']), ('\u{2c20}', ['\u{2c50}', - '\0', '\0']), ('\u{2c21}', ['\u{2c51}', '\0', '\0']), ('\u{2c22}', ['\u{2c52}', '\0', - '\0']), ('\u{2c23}', ['\u{2c53}', '\0', '\0']), ('\u{2c24}', ['\u{2c54}', '\0', '\0']), - ('\u{2c25}', ['\u{2c55}', '\0', '\0']), ('\u{2c26}', ['\u{2c56}', '\0', '\0']), ('\u{2c27}', - ['\u{2c57}', '\0', '\0']), ('\u{2c28}', ['\u{2c58}', '\0', '\0']), ('\u{2c29}', ['\u{2c59}', - '\0', '\0']), ('\u{2c2a}', ['\u{2c5a}', '\0', '\0']), ('\u{2c2b}', ['\u{2c5b}', '\0', - '\0']), ('\u{2c2c}', ['\u{2c5c}', '\0', '\0']), ('\u{2c2d}', ['\u{2c5d}', '\0', '\0']), - ('\u{2c2e}', ['\u{2c5e}', '\0', '\0']), ('\u{2c60}', ['\u{2c61}', '\0', '\0']), ('\u{2c62}', - ['\u{26b}', '\0', '\0']), ('\u{2c63}', ['\u{1d7d}', '\0', '\0']), ('\u{2c64}', ['\u{27d}', - '\0', '\0']), ('\u{2c67}', ['\u{2c68}', '\0', '\0']), ('\u{2c69}', ['\u{2c6a}', '\0', - '\0']), ('\u{2c6b}', ['\u{2c6c}', '\0', '\0']), ('\u{2c6d}', ['\u{251}', '\0', '\0']), - ('\u{2c6e}', ['\u{271}', '\0', '\0']), ('\u{2c6f}', ['\u{250}', '\0', '\0']), ('\u{2c70}', - ['\u{252}', '\0', '\0']), ('\u{2c72}', ['\u{2c73}', '\0', '\0']), ('\u{2c75}', ['\u{2c76}', - '\0', '\0']), ('\u{2c7e}', ['\u{23f}', '\0', '\0']), ('\u{2c7f}', ['\u{240}', '\0', '\0']), - ('\u{2c80}', ['\u{2c81}', '\0', '\0']), ('\u{2c82}', ['\u{2c83}', '\0', '\0']), ('\u{2c84}', - ['\u{2c85}', '\0', '\0']), ('\u{2c86}', ['\u{2c87}', '\0', '\0']), ('\u{2c88}', ['\u{2c89}', - '\0', '\0']), ('\u{2c8a}', ['\u{2c8b}', '\0', '\0']), ('\u{2c8c}', ['\u{2c8d}', '\0', - '\0']), ('\u{2c8e}', ['\u{2c8f}', '\0', '\0']), ('\u{2c90}', ['\u{2c91}', '\0', '\0']), - ('\u{2c92}', ['\u{2c93}', '\0', '\0']), ('\u{2c94}', ['\u{2c95}', '\0', '\0']), ('\u{2c96}', - ['\u{2c97}', '\0', '\0']), ('\u{2c98}', ['\u{2c99}', '\0', '\0']), ('\u{2c9a}', ['\u{2c9b}', - '\0', '\0']), ('\u{2c9c}', ['\u{2c9d}', '\0', '\0']), ('\u{2c9e}', ['\u{2c9f}', '\0', - '\0']), ('\u{2ca0}', ['\u{2ca1}', '\0', '\0']), ('\u{2ca2}', ['\u{2ca3}', '\0', '\0']), - ('\u{2ca4}', ['\u{2ca5}', '\0', '\0']), ('\u{2ca6}', ['\u{2ca7}', '\0', '\0']), ('\u{2ca8}', - ['\u{2ca9}', '\0', '\0']), ('\u{2caa}', ['\u{2cab}', '\0', '\0']), ('\u{2cac}', ['\u{2cad}', - '\0', '\0']), ('\u{2cae}', ['\u{2caf}', '\0', '\0']), ('\u{2cb0}', ['\u{2cb1}', '\0', - '\0']), ('\u{2cb2}', ['\u{2cb3}', '\0', '\0']), ('\u{2cb4}', ['\u{2cb5}', '\0', '\0']), - ('\u{2cb6}', ['\u{2cb7}', '\0', '\0']), 
('\u{2cb8}', ['\u{2cb9}', '\0', '\0']), ('\u{2cba}', - ['\u{2cbb}', '\0', '\0']), ('\u{2cbc}', ['\u{2cbd}', '\0', '\0']), ('\u{2cbe}', ['\u{2cbf}', - '\0', '\0']), ('\u{2cc0}', ['\u{2cc1}', '\0', '\0']), ('\u{2cc2}', ['\u{2cc3}', '\0', - '\0']), ('\u{2cc4}', ['\u{2cc5}', '\0', '\0']), ('\u{2cc6}', ['\u{2cc7}', '\0', '\0']), - ('\u{2cc8}', ['\u{2cc9}', '\0', '\0']), ('\u{2cca}', ['\u{2ccb}', '\0', '\0']), ('\u{2ccc}', - ['\u{2ccd}', '\0', '\0']), ('\u{2cce}', ['\u{2ccf}', '\0', '\0']), ('\u{2cd0}', ['\u{2cd1}', - '\0', '\0']), ('\u{2cd2}', ['\u{2cd3}', '\0', '\0']), ('\u{2cd4}', ['\u{2cd5}', '\0', - '\0']), ('\u{2cd6}', ['\u{2cd7}', '\0', '\0']), ('\u{2cd8}', ['\u{2cd9}', '\0', '\0']), - ('\u{2cda}', ['\u{2cdb}', '\0', '\0']), ('\u{2cdc}', ['\u{2cdd}', '\0', '\0']), ('\u{2cde}', - ['\u{2cdf}', '\0', '\0']), ('\u{2ce0}', ['\u{2ce1}', '\0', '\0']), ('\u{2ce2}', ['\u{2ce3}', - '\0', '\0']), ('\u{2ceb}', ['\u{2cec}', '\0', '\0']), ('\u{2ced}', ['\u{2cee}', '\0', - '\0']), ('\u{2cf2}', ['\u{2cf3}', '\0', '\0']), ('\u{a640}', ['\u{a641}', '\0', '\0']), - ('\u{a642}', ['\u{a643}', '\0', '\0']), ('\u{a644}', ['\u{a645}', '\0', '\0']), ('\u{a646}', - ['\u{a647}', '\0', '\0']), ('\u{a648}', ['\u{a649}', '\0', '\0']), ('\u{a64a}', ['\u{a64b}', - '\0', '\0']), ('\u{a64c}', ['\u{a64d}', '\0', '\0']), ('\u{a64e}', ['\u{a64f}', '\0', - '\0']), ('\u{a650}', ['\u{a651}', '\0', '\0']), ('\u{a652}', ['\u{a653}', '\0', '\0']), - ('\u{a654}', ['\u{a655}', '\0', '\0']), ('\u{a656}', ['\u{a657}', '\0', '\0']), ('\u{a658}', - ['\u{a659}', '\0', '\0']), ('\u{a65a}', ['\u{a65b}', '\0', '\0']), ('\u{a65c}', ['\u{a65d}', - '\0', '\0']), ('\u{a65e}', ['\u{a65f}', '\0', '\0']), ('\u{a660}', ['\u{a661}', '\0', - '\0']), ('\u{a662}', ['\u{a663}', '\0', '\0']), ('\u{a664}', ['\u{a665}', '\0', '\0']), - ('\u{a666}', ['\u{a667}', '\0', '\0']), ('\u{a668}', ['\u{a669}', '\0', '\0']), ('\u{a66a}', - ['\u{a66b}', '\0', '\0']), ('\u{a66c}', ['\u{a66d}', '\0', '\0']), ('\u{a680}', ['\u{a681}', - '\0', '\0']), ('\u{a682}', ['\u{a683}', '\0', '\0']), ('\u{a684}', ['\u{a685}', '\0', - '\0']), ('\u{a686}', ['\u{a687}', '\0', '\0']), ('\u{a688}', ['\u{a689}', '\0', '\0']), - ('\u{a68a}', ['\u{a68b}', '\0', '\0']), ('\u{a68c}', ['\u{a68d}', '\0', '\0']), ('\u{a68e}', - ['\u{a68f}', '\0', '\0']), ('\u{a690}', ['\u{a691}', '\0', '\0']), ('\u{a692}', ['\u{a693}', - '\0', '\0']), ('\u{a694}', ['\u{a695}', '\0', '\0']), ('\u{a696}', ['\u{a697}', '\0', - '\0']), ('\u{a698}', ['\u{a699}', '\0', '\0']), ('\u{a69a}', ['\u{a69b}', '\0', '\0']), - ('\u{a722}', ['\u{a723}', '\0', '\0']), ('\u{a724}', ['\u{a725}', '\0', '\0']), ('\u{a726}', - ['\u{a727}', '\0', '\0']), ('\u{a728}', ['\u{a729}', '\0', '\0']), ('\u{a72a}', ['\u{a72b}', - '\0', '\0']), ('\u{a72c}', ['\u{a72d}', '\0', '\0']), ('\u{a72e}', ['\u{a72f}', '\0', - '\0']), ('\u{a732}', ['\u{a733}', '\0', '\0']), ('\u{a734}', ['\u{a735}', '\0', '\0']), - ('\u{a736}', ['\u{a737}', '\0', '\0']), ('\u{a738}', ['\u{a739}', '\0', '\0']), ('\u{a73a}', - ['\u{a73b}', '\0', '\0']), ('\u{a73c}', ['\u{a73d}', '\0', '\0']), ('\u{a73e}', ['\u{a73f}', - '\0', '\0']), ('\u{a740}', ['\u{a741}', '\0', '\0']), ('\u{a742}', ['\u{a743}', '\0', - '\0']), ('\u{a744}', ['\u{a745}', '\0', '\0']), ('\u{a746}', ['\u{a747}', '\0', '\0']), - ('\u{a748}', ['\u{a749}', '\0', '\0']), ('\u{a74a}', ['\u{a74b}', '\0', '\0']), ('\u{a74c}', - ['\u{a74d}', '\0', '\0']), ('\u{a74e}', ['\u{a74f}', '\0', '\0']), ('\u{a750}', ['\u{a751}', - '\0', '\0']), ('\u{a752}', ['\u{a753}', '\0', '\0']), ('\u{a754}', ['\u{a755}', '\0', - 
'\0']), ('\u{a756}', ['\u{a757}', '\0', '\0']), ('\u{a758}', ['\u{a759}', '\0', '\0']), - ('\u{a75a}', ['\u{a75b}', '\0', '\0']), ('\u{a75c}', ['\u{a75d}', '\0', '\0']), ('\u{a75e}', - ['\u{a75f}', '\0', '\0']), ('\u{a760}', ['\u{a761}', '\0', '\0']), ('\u{a762}', ['\u{a763}', - '\0', '\0']), ('\u{a764}', ['\u{a765}', '\0', '\0']), ('\u{a766}', ['\u{a767}', '\0', - '\0']), ('\u{a768}', ['\u{a769}', '\0', '\0']), ('\u{a76a}', ['\u{a76b}', '\0', '\0']), - ('\u{a76c}', ['\u{a76d}', '\0', '\0']), ('\u{a76e}', ['\u{a76f}', '\0', '\0']), ('\u{a779}', - ['\u{a77a}', '\0', '\0']), ('\u{a77b}', ['\u{a77c}', '\0', '\0']), ('\u{a77d}', ['\u{1d79}', - '\0', '\0']), ('\u{a77e}', ['\u{a77f}', '\0', '\0']), ('\u{a780}', ['\u{a781}', '\0', - '\0']), ('\u{a782}', ['\u{a783}', '\0', '\0']), ('\u{a784}', ['\u{a785}', '\0', '\0']), - ('\u{a786}', ['\u{a787}', '\0', '\0']), ('\u{a78b}', ['\u{a78c}', '\0', '\0']), ('\u{a78d}', - ['\u{265}', '\0', '\0']), ('\u{a790}', ['\u{a791}', '\0', '\0']), ('\u{a792}', ['\u{a793}', - '\0', '\0']), ('\u{a796}', ['\u{a797}', '\0', '\0']), ('\u{a798}', ['\u{a799}', '\0', - '\0']), ('\u{a79a}', ['\u{a79b}', '\0', '\0']), ('\u{a79c}', ['\u{a79d}', '\0', '\0']), - ('\u{a79e}', ['\u{a79f}', '\0', '\0']), ('\u{a7a0}', ['\u{a7a1}', '\0', '\0']), ('\u{a7a2}', - ['\u{a7a3}', '\0', '\0']), ('\u{a7a4}', ['\u{a7a5}', '\0', '\0']), ('\u{a7a6}', ['\u{a7a7}', - '\0', '\0']), ('\u{a7a8}', ['\u{a7a9}', '\0', '\0']), ('\u{a7aa}', ['\u{266}', '\0', '\0']), - ('\u{a7ab}', ['\u{25c}', '\0', '\0']), ('\u{a7ac}', ['\u{261}', '\0', '\0']), ('\u{a7ad}', - ['\u{26c}', '\0', '\0']), ('\u{a7ae}', ['\u{26a}', '\0', '\0']), ('\u{a7b0}', ['\u{29e}', - '\0', '\0']), ('\u{a7b1}', ['\u{287}', '\0', '\0']), ('\u{a7b2}', ['\u{29d}', '\0', '\0']), - ('\u{a7b3}', ['\u{ab53}', '\0', '\0']), ('\u{a7b4}', ['\u{a7b5}', '\0', '\0']), ('\u{a7b6}', - ['\u{a7b7}', '\0', '\0']), ('\u{a7b8}', ['\u{a7b9}', '\0', '\0']), ('\u{ff21}', ['\u{ff41}', - '\0', '\0']), ('\u{ff22}', ['\u{ff42}', '\0', '\0']), ('\u{ff23}', ['\u{ff43}', '\0', - '\0']), ('\u{ff24}', ['\u{ff44}', '\0', '\0']), ('\u{ff25}', ['\u{ff45}', '\0', '\0']), - ('\u{ff26}', ['\u{ff46}', '\0', '\0']), ('\u{ff27}', ['\u{ff47}', '\0', '\0']), ('\u{ff28}', - ['\u{ff48}', '\0', '\0']), ('\u{ff29}', ['\u{ff49}', '\0', '\0']), ('\u{ff2a}', ['\u{ff4a}', - '\0', '\0']), ('\u{ff2b}', ['\u{ff4b}', '\0', '\0']), ('\u{ff2c}', ['\u{ff4c}', '\0', - '\0']), ('\u{ff2d}', ['\u{ff4d}', '\0', '\0']), ('\u{ff2e}', ['\u{ff4e}', '\0', '\0']), - ('\u{ff2f}', ['\u{ff4f}', '\0', '\0']), ('\u{ff30}', ['\u{ff50}', '\0', '\0']), ('\u{ff31}', - ['\u{ff51}', '\0', '\0']), ('\u{ff32}', ['\u{ff52}', '\0', '\0']), ('\u{ff33}', ['\u{ff53}', - '\0', '\0']), ('\u{ff34}', ['\u{ff54}', '\0', '\0']), ('\u{ff35}', ['\u{ff55}', '\0', - '\0']), ('\u{ff36}', ['\u{ff56}', '\0', '\0']), ('\u{ff37}', ['\u{ff57}', '\0', '\0']), - ('\u{ff38}', ['\u{ff58}', '\0', '\0']), ('\u{ff39}', ['\u{ff59}', '\0', '\0']), ('\u{ff3a}', - ['\u{ff5a}', '\0', '\0']), ('\u{10400}', ['\u{10428}', '\0', '\0']), ('\u{10401}', - ['\u{10429}', '\0', '\0']), ('\u{10402}', ['\u{1042a}', '\0', '\0']), ('\u{10403}', - ['\u{1042b}', '\0', '\0']), ('\u{10404}', ['\u{1042c}', '\0', '\0']), ('\u{10405}', - ['\u{1042d}', '\0', '\0']), ('\u{10406}', ['\u{1042e}', '\0', '\0']), ('\u{10407}', - ['\u{1042f}', '\0', '\0']), ('\u{10408}', ['\u{10430}', '\0', '\0']), ('\u{10409}', - ['\u{10431}', '\0', '\0']), ('\u{1040a}', ['\u{10432}', '\0', '\0']), ('\u{1040b}', - ['\u{10433}', '\0', '\0']), ('\u{1040c}', ['\u{10434}', '\0', '\0']), ('\u{1040d}', 
- ['\u{10435}', '\0', '\0']), ('\u{1040e}', ['\u{10436}', '\0', '\0']), ('\u{1040f}', - ['\u{10437}', '\0', '\0']), ('\u{10410}', ['\u{10438}', '\0', '\0']), ('\u{10411}', - ['\u{10439}', '\0', '\0']), ('\u{10412}', ['\u{1043a}', '\0', '\0']), ('\u{10413}', - ['\u{1043b}', '\0', '\0']), ('\u{10414}', ['\u{1043c}', '\0', '\0']), ('\u{10415}', - ['\u{1043d}', '\0', '\0']), ('\u{10416}', ['\u{1043e}', '\0', '\0']), ('\u{10417}', - ['\u{1043f}', '\0', '\0']), ('\u{10418}', ['\u{10440}', '\0', '\0']), ('\u{10419}', - ['\u{10441}', '\0', '\0']), ('\u{1041a}', ['\u{10442}', '\0', '\0']), ('\u{1041b}', - ['\u{10443}', '\0', '\0']), ('\u{1041c}', ['\u{10444}', '\0', '\0']), ('\u{1041d}', - ['\u{10445}', '\0', '\0']), ('\u{1041e}', ['\u{10446}', '\0', '\0']), ('\u{1041f}', - ['\u{10447}', '\0', '\0']), ('\u{10420}', ['\u{10448}', '\0', '\0']), ('\u{10421}', - ['\u{10449}', '\0', '\0']), ('\u{10422}', ['\u{1044a}', '\0', '\0']), ('\u{10423}', - ['\u{1044b}', '\0', '\0']), ('\u{10424}', ['\u{1044c}', '\0', '\0']), ('\u{10425}', - ['\u{1044d}', '\0', '\0']), ('\u{10426}', ['\u{1044e}', '\0', '\0']), ('\u{10427}', - ['\u{1044f}', '\0', '\0']), ('\u{104b0}', ['\u{104d8}', '\0', '\0']), ('\u{104b1}', - ['\u{104d9}', '\0', '\0']), ('\u{104b2}', ['\u{104da}', '\0', '\0']), ('\u{104b3}', - ['\u{104db}', '\0', '\0']), ('\u{104b4}', ['\u{104dc}', '\0', '\0']), ('\u{104b5}', - ['\u{104dd}', '\0', '\0']), ('\u{104b6}', ['\u{104de}', '\0', '\0']), ('\u{104b7}', - ['\u{104df}', '\0', '\0']), ('\u{104b8}', ['\u{104e0}', '\0', '\0']), ('\u{104b9}', - ['\u{104e1}', '\0', '\0']), ('\u{104ba}', ['\u{104e2}', '\0', '\0']), ('\u{104bb}', - ['\u{104e3}', '\0', '\0']), ('\u{104bc}', ['\u{104e4}', '\0', '\0']), ('\u{104bd}', - ['\u{104e5}', '\0', '\0']), ('\u{104be}', ['\u{104e6}', '\0', '\0']), ('\u{104bf}', - ['\u{104e7}', '\0', '\0']), ('\u{104c0}', ['\u{104e8}', '\0', '\0']), ('\u{104c1}', - ['\u{104e9}', '\0', '\0']), ('\u{104c2}', ['\u{104ea}', '\0', '\0']), ('\u{104c3}', - ['\u{104eb}', '\0', '\0']), ('\u{104c4}', ['\u{104ec}', '\0', '\0']), ('\u{104c5}', - ['\u{104ed}', '\0', '\0']), ('\u{104c6}', ['\u{104ee}', '\0', '\0']), ('\u{104c7}', - ['\u{104ef}', '\0', '\0']), ('\u{104c8}', ['\u{104f0}', '\0', '\0']), ('\u{104c9}', - ['\u{104f1}', '\0', '\0']), ('\u{104ca}', ['\u{104f2}', '\0', '\0']), ('\u{104cb}', - ['\u{104f3}', '\0', '\0']), ('\u{104cc}', ['\u{104f4}', '\0', '\0']), ('\u{104cd}', - ['\u{104f5}', '\0', '\0']), ('\u{104ce}', ['\u{104f6}', '\0', '\0']), ('\u{104cf}', - ['\u{104f7}', '\0', '\0']), ('\u{104d0}', ['\u{104f8}', '\0', '\0']), ('\u{104d1}', - ['\u{104f9}', '\0', '\0']), ('\u{104d2}', ['\u{104fa}', '\0', '\0']), ('\u{104d3}', - ['\u{104fb}', '\0', '\0']), ('\u{10c80}', ['\u{10cc0}', '\0', '\0']), ('\u{10c81}', - ['\u{10cc1}', '\0', '\0']), ('\u{10c82}', ['\u{10cc2}', '\0', '\0']), ('\u{10c83}', - ['\u{10cc3}', '\0', '\0']), ('\u{10c84}', ['\u{10cc4}', '\0', '\0']), ('\u{10c85}', - ['\u{10cc5}', '\0', '\0']), ('\u{10c86}', ['\u{10cc6}', '\0', '\0']), ('\u{10c87}', - ['\u{10cc7}', '\0', '\0']), ('\u{10c88}', ['\u{10cc8}', '\0', '\0']), ('\u{10c89}', - ['\u{10cc9}', '\0', '\0']), ('\u{10c8a}', ['\u{10cca}', '\0', '\0']), ('\u{10c8b}', - ['\u{10ccb}', '\0', '\0']), ('\u{10c8c}', ['\u{10ccc}', '\0', '\0']), ('\u{10c8d}', - ['\u{10ccd}', '\0', '\0']), ('\u{10c8e}', ['\u{10cce}', '\0', '\0']), ('\u{10c8f}', - ['\u{10ccf}', '\0', '\0']), ('\u{10c90}', ['\u{10cd0}', '\0', '\0']), ('\u{10c91}', - ['\u{10cd1}', '\0', '\0']), ('\u{10c92}', ['\u{10cd2}', '\0', '\0']), ('\u{10c93}', - ['\u{10cd3}', '\0', 
'\0']), ('\u{10c94}', ['\u{10cd4}', '\0', '\0']), ('\u{10c95}', - ['\u{10cd5}', '\0', '\0']), ('\u{10c96}', ['\u{10cd6}', '\0', '\0']), ('\u{10c97}', - ['\u{10cd7}', '\0', '\0']), ('\u{10c98}', ['\u{10cd8}', '\0', '\0']), ('\u{10c99}', - ['\u{10cd9}', '\0', '\0']), ('\u{10c9a}', ['\u{10cda}', '\0', '\0']), ('\u{10c9b}', - ['\u{10cdb}', '\0', '\0']), ('\u{10c9c}', ['\u{10cdc}', '\0', '\0']), ('\u{10c9d}', - ['\u{10cdd}', '\0', '\0']), ('\u{10c9e}', ['\u{10cde}', '\0', '\0']), ('\u{10c9f}', - ['\u{10cdf}', '\0', '\0']), ('\u{10ca0}', ['\u{10ce0}', '\0', '\0']), ('\u{10ca1}', - ['\u{10ce1}', '\0', '\0']), ('\u{10ca2}', ['\u{10ce2}', '\0', '\0']), ('\u{10ca3}', - ['\u{10ce3}', '\0', '\0']), ('\u{10ca4}', ['\u{10ce4}', '\0', '\0']), ('\u{10ca5}', - ['\u{10ce5}', '\0', '\0']), ('\u{10ca6}', ['\u{10ce6}', '\0', '\0']), ('\u{10ca7}', - ['\u{10ce7}', '\0', '\0']), ('\u{10ca8}', ['\u{10ce8}', '\0', '\0']), ('\u{10ca9}', - ['\u{10ce9}', '\0', '\0']), ('\u{10caa}', ['\u{10cea}', '\0', '\0']), ('\u{10cab}', - ['\u{10ceb}', '\0', '\0']), ('\u{10cac}', ['\u{10cec}', '\0', '\0']), ('\u{10cad}', - ['\u{10ced}', '\0', '\0']), ('\u{10cae}', ['\u{10cee}', '\0', '\0']), ('\u{10caf}', - ['\u{10cef}', '\0', '\0']), ('\u{10cb0}', ['\u{10cf0}', '\0', '\0']), ('\u{10cb1}', - ['\u{10cf1}', '\0', '\0']), ('\u{10cb2}', ['\u{10cf2}', '\0', '\0']), ('\u{118a0}', - ['\u{118c0}', '\0', '\0']), ('\u{118a1}', ['\u{118c1}', '\0', '\0']), ('\u{118a2}', - ['\u{118c2}', '\0', '\0']), ('\u{118a3}', ['\u{118c3}', '\0', '\0']), ('\u{118a4}', - ['\u{118c4}', '\0', '\0']), ('\u{118a5}', ['\u{118c5}', '\0', '\0']), ('\u{118a6}', - ['\u{118c6}', '\0', '\0']), ('\u{118a7}', ['\u{118c7}', '\0', '\0']), ('\u{118a8}', - ['\u{118c8}', '\0', '\0']), ('\u{118a9}', ['\u{118c9}', '\0', '\0']), ('\u{118aa}', - ['\u{118ca}', '\0', '\0']), ('\u{118ab}', ['\u{118cb}', '\0', '\0']), ('\u{118ac}', - ['\u{118cc}', '\0', '\0']), ('\u{118ad}', ['\u{118cd}', '\0', '\0']), ('\u{118ae}', - ['\u{118ce}', '\0', '\0']), ('\u{118af}', ['\u{118cf}', '\0', '\0']), ('\u{118b0}', - ['\u{118d0}', '\0', '\0']), ('\u{118b1}', ['\u{118d1}', '\0', '\0']), ('\u{118b2}', - ['\u{118d2}', '\0', '\0']), ('\u{118b3}', ['\u{118d3}', '\0', '\0']), ('\u{118b4}', - ['\u{118d4}', '\0', '\0']), ('\u{118b5}', ['\u{118d5}', '\0', '\0']), ('\u{118b6}', - ['\u{118d6}', '\0', '\0']), ('\u{118b7}', ['\u{118d7}', '\0', '\0']), ('\u{118b8}', - ['\u{118d8}', '\0', '\0']), ('\u{118b9}', ['\u{118d9}', '\0', '\0']), ('\u{118ba}', - ['\u{118da}', '\0', '\0']), ('\u{118bb}', ['\u{118db}', '\0', '\0']), ('\u{118bc}', - ['\u{118dc}', '\0', '\0']), ('\u{118bd}', ['\u{118dd}', '\0', '\0']), ('\u{118be}', - ['\u{118de}', '\0', '\0']), ('\u{118bf}', ['\u{118df}', '\0', '\0']), ('\u{16e40}', - ['\u{16e60}', '\0', '\0']), ('\u{16e41}', ['\u{16e61}', '\0', '\0']), ('\u{16e42}', - ['\u{16e62}', '\0', '\0']), ('\u{16e43}', ['\u{16e63}', '\0', '\0']), ('\u{16e44}', - ['\u{16e64}', '\0', '\0']), ('\u{16e45}', ['\u{16e65}', '\0', '\0']), ('\u{16e46}', - ['\u{16e66}', '\0', '\0']), ('\u{16e47}', ['\u{16e67}', '\0', '\0']), ('\u{16e48}', - ['\u{16e68}', '\0', '\0']), ('\u{16e49}', ['\u{16e69}', '\0', '\0']), ('\u{16e4a}', - ['\u{16e6a}', '\0', '\0']), ('\u{16e4b}', ['\u{16e6b}', '\0', '\0']), ('\u{16e4c}', - ['\u{16e6c}', '\0', '\0']), ('\u{16e4d}', ['\u{16e6d}', '\0', '\0']), ('\u{16e4e}', - ['\u{16e6e}', '\0', '\0']), ('\u{16e4f}', ['\u{16e6f}', '\0', '\0']), ('\u{16e50}', - ['\u{16e70}', '\0', '\0']), ('\u{16e51}', ['\u{16e71}', '\0', '\0']), ('\u{16e52}', - ['\u{16e72}', '\0', '\0']), ('\u{16e53}', 
['\u{16e73}', '\0', '\0']), ('\u{16e54}', - ['\u{16e74}', '\0', '\0']), ('\u{16e55}', ['\u{16e75}', '\0', '\0']), ('\u{16e56}', - ['\u{16e76}', '\0', '\0']), ('\u{16e57}', ['\u{16e77}', '\0', '\0']), ('\u{16e58}', - ['\u{16e78}', '\0', '\0']), ('\u{16e59}', ['\u{16e79}', '\0', '\0']), ('\u{16e5a}', - ['\u{16e7a}', '\0', '\0']), ('\u{16e5b}', ['\u{16e7b}', '\0', '\0']), ('\u{16e5c}', - ['\u{16e7c}', '\0', '\0']), ('\u{16e5d}', ['\u{16e7d}', '\0', '\0']), ('\u{16e5e}', - ['\u{16e7e}', '\0', '\0']), ('\u{16e5f}', ['\u{16e7f}', '\0', '\0']), ('\u{1e900}', - ['\u{1e922}', '\0', '\0']), ('\u{1e901}', ['\u{1e923}', '\0', '\0']), ('\u{1e902}', - ['\u{1e924}', '\0', '\0']), ('\u{1e903}', ['\u{1e925}', '\0', '\0']), ('\u{1e904}', - ['\u{1e926}', '\0', '\0']), ('\u{1e905}', ['\u{1e927}', '\0', '\0']), ('\u{1e906}', - ['\u{1e928}', '\0', '\0']), ('\u{1e907}', ['\u{1e929}', '\0', '\0']), ('\u{1e908}', - ['\u{1e92a}', '\0', '\0']), ('\u{1e909}', ['\u{1e92b}', '\0', '\0']), ('\u{1e90a}', - ['\u{1e92c}', '\0', '\0']), ('\u{1e90b}', ['\u{1e92d}', '\0', '\0']), ('\u{1e90c}', - ['\u{1e92e}', '\0', '\0']), ('\u{1e90d}', ['\u{1e92f}', '\0', '\0']), ('\u{1e90e}', - ['\u{1e930}', '\0', '\0']), ('\u{1e90f}', ['\u{1e931}', '\0', '\0']), ('\u{1e910}', - ['\u{1e932}', '\0', '\0']), ('\u{1e911}', ['\u{1e933}', '\0', '\0']), ('\u{1e912}', - ['\u{1e934}', '\0', '\0']), ('\u{1e913}', ['\u{1e935}', '\0', '\0']), ('\u{1e914}', - ['\u{1e936}', '\0', '\0']), ('\u{1e915}', ['\u{1e937}', '\0', '\0']), ('\u{1e916}', - ['\u{1e938}', '\0', '\0']), ('\u{1e917}', ['\u{1e939}', '\0', '\0']), ('\u{1e918}', - ['\u{1e93a}', '\0', '\0']), ('\u{1e919}', ['\u{1e93b}', '\0', '\0']), ('\u{1e91a}', - ['\u{1e93c}', '\0', '\0']), ('\u{1e91b}', ['\u{1e93d}', '\0', '\0']), ('\u{1e91c}', - ['\u{1e93e}', '\0', '\0']), ('\u{1e91d}', ['\u{1e93f}', '\0', '\0']), ('\u{1e91e}', - ['\u{1e940}', '\0', '\0']), ('\u{1e91f}', ['\u{1e941}', '\0', '\0']), ('\u{1e920}', - ['\u{1e942}', '\0', '\0']), ('\u{1e921}', ['\u{1e943}', '\0', '\0']) + ('\u{41}', ['\u{61}', '\0', '\0']), + ('\u{42}', ['\u{62}', '\0', '\0']), + ('\u{43}', ['\u{63}', '\0', '\0']), + ('\u{44}', ['\u{64}', '\0', '\0']), + ('\u{45}', ['\u{65}', '\0', '\0']), + ('\u{46}', ['\u{66}', '\0', '\0']), + ('\u{47}', ['\u{67}', '\0', '\0']), + ('\u{48}', ['\u{68}', '\0', '\0']), + ('\u{49}', ['\u{69}', '\0', '\0']), + ('\u{4a}', ['\u{6a}', '\0', '\0']), + ('\u{4b}', ['\u{6b}', '\0', '\0']), + ('\u{4c}', ['\u{6c}', '\0', '\0']), + ('\u{4d}', ['\u{6d}', '\0', '\0']), + ('\u{4e}', ['\u{6e}', '\0', '\0']), + ('\u{4f}', ['\u{6f}', '\0', '\0']), + ('\u{50}', ['\u{70}', '\0', '\0']), + ('\u{51}', ['\u{71}', '\0', '\0']), + ('\u{52}', ['\u{72}', '\0', '\0']), + ('\u{53}', ['\u{73}', '\0', '\0']), + ('\u{54}', ['\u{74}', '\0', '\0']), + ('\u{55}', ['\u{75}', '\0', '\0']), + ('\u{56}', ['\u{76}', '\0', '\0']), + ('\u{57}', ['\u{77}', '\0', '\0']), + ('\u{58}', ['\u{78}', '\0', '\0']), + ('\u{59}', ['\u{79}', '\0', '\0']), + ('\u{5a}', ['\u{7a}', '\0', '\0']), + ('\u{c0}', ['\u{e0}', '\0', '\0']), + ('\u{c1}', ['\u{e1}', '\0', '\0']), + ('\u{c2}', ['\u{e2}', '\0', '\0']), + ('\u{c3}', ['\u{e3}', '\0', '\0']), + ('\u{c4}', ['\u{e4}', '\0', '\0']), + ('\u{c5}', ['\u{e5}', '\0', '\0']), + ('\u{c6}', ['\u{e6}', '\0', '\0']), + ('\u{c7}', ['\u{e7}', '\0', '\0']), + ('\u{c8}', ['\u{e8}', '\0', '\0']), + ('\u{c9}', ['\u{e9}', '\0', '\0']), + ('\u{ca}', ['\u{ea}', '\0', '\0']), + ('\u{cb}', ['\u{eb}', '\0', '\0']), + ('\u{cc}', ['\u{ec}', '\0', '\0']), + ('\u{cd}', ['\u{ed}', '\0', '\0']), + ('\u{ce}', ['\u{ee}', '\0', 
'\0']), + ('\u{cf}', ['\u{ef}', '\0', '\0']), + ('\u{d0}', ['\u{f0}', '\0', '\0']), + ('\u{d1}', ['\u{f1}', '\0', '\0']), + ('\u{d2}', ['\u{f2}', '\0', '\0']), + ('\u{d3}', ['\u{f3}', '\0', '\0']), + ('\u{d4}', ['\u{f4}', '\0', '\0']), + ('\u{d5}', ['\u{f5}', '\0', '\0']), + ('\u{d6}', ['\u{f6}', '\0', '\0']), + ('\u{d8}', ['\u{f8}', '\0', '\0']), + ('\u{d9}', ['\u{f9}', '\0', '\0']), + ('\u{da}', ['\u{fa}', '\0', '\0']), + ('\u{db}', ['\u{fb}', '\0', '\0']), + ('\u{dc}', ['\u{fc}', '\0', '\0']), + ('\u{dd}', ['\u{fd}', '\0', '\0']), + ('\u{de}', ['\u{fe}', '\0', '\0']), + ('\u{100}', ['\u{101}', '\0', '\0']), + ('\u{102}', ['\u{103}', '\0', '\0']), + ('\u{104}', ['\u{105}', '\0', '\0']), + ('\u{106}', ['\u{107}', '\0', '\0']), + ('\u{108}', ['\u{109}', '\0', '\0']), + ('\u{10a}', ['\u{10b}', '\0', '\0']), + ('\u{10c}', ['\u{10d}', '\0', '\0']), + ('\u{10e}', ['\u{10f}', '\0', '\0']), + ('\u{110}', ['\u{111}', '\0', '\0']), + ('\u{112}', ['\u{113}', '\0', '\0']), + ('\u{114}', ['\u{115}', '\0', '\0']), + ('\u{116}', ['\u{117}', '\0', '\0']), + ('\u{118}', ['\u{119}', '\0', '\0']), + ('\u{11a}', ['\u{11b}', '\0', '\0']), + ('\u{11c}', ['\u{11d}', '\0', '\0']), + ('\u{11e}', ['\u{11f}', '\0', '\0']), + ('\u{120}', ['\u{121}', '\0', '\0']), + ('\u{122}', ['\u{123}', '\0', '\0']), + ('\u{124}', ['\u{125}', '\0', '\0']), + ('\u{126}', ['\u{127}', '\0', '\0']), + ('\u{128}', ['\u{129}', '\0', '\0']), + ('\u{12a}', ['\u{12b}', '\0', '\0']), + ('\u{12c}', ['\u{12d}', '\0', '\0']), + ('\u{12e}', ['\u{12f}', '\0', '\0']), + ('\u{130}', ['\u{69}', '\u{307}', '\0']), + ('\u{132}', ['\u{133}', '\0', '\0']), + ('\u{134}', ['\u{135}', '\0', '\0']), + ('\u{136}', ['\u{137}', '\0', '\0']), + ('\u{139}', ['\u{13a}', '\0', '\0']), + ('\u{13b}', ['\u{13c}', '\0', '\0']), + ('\u{13d}', ['\u{13e}', '\0', '\0']), + ('\u{13f}', ['\u{140}', '\0', '\0']), + ('\u{141}', ['\u{142}', '\0', '\0']), + ('\u{143}', ['\u{144}', '\0', '\0']), + ('\u{145}', ['\u{146}', '\0', '\0']), + ('\u{147}', ['\u{148}', '\0', '\0']), + ('\u{14a}', ['\u{14b}', '\0', '\0']), + ('\u{14c}', ['\u{14d}', '\0', '\0']), + ('\u{14e}', ['\u{14f}', '\0', '\0']), + ('\u{150}', ['\u{151}', '\0', '\0']), + ('\u{152}', ['\u{153}', '\0', '\0']), + ('\u{154}', ['\u{155}', '\0', '\0']), + ('\u{156}', ['\u{157}', '\0', '\0']), + ('\u{158}', ['\u{159}', '\0', '\0']), + ('\u{15a}', ['\u{15b}', '\0', '\0']), + ('\u{15c}', ['\u{15d}', '\0', '\0']), + ('\u{15e}', ['\u{15f}', '\0', '\0']), + ('\u{160}', ['\u{161}', '\0', '\0']), + ('\u{162}', ['\u{163}', '\0', '\0']), + ('\u{164}', ['\u{165}', '\0', '\0']), + ('\u{166}', ['\u{167}', '\0', '\0']), + ('\u{168}', ['\u{169}', '\0', '\0']), + ('\u{16a}', ['\u{16b}', '\0', '\0']), + ('\u{16c}', ['\u{16d}', '\0', '\0']), + ('\u{16e}', ['\u{16f}', '\0', '\0']), + ('\u{170}', ['\u{171}', '\0', '\0']), + ('\u{172}', ['\u{173}', '\0', '\0']), + ('\u{174}', ['\u{175}', '\0', '\0']), + ('\u{176}', ['\u{177}', '\0', '\0']), + ('\u{178}', ['\u{ff}', '\0', '\0']), + ('\u{179}', ['\u{17a}', '\0', '\0']), + ('\u{17b}', ['\u{17c}', '\0', '\0']), + ('\u{17d}', ['\u{17e}', '\0', '\0']), + ('\u{181}', ['\u{253}', '\0', '\0']), + ('\u{182}', ['\u{183}', '\0', '\0']), + ('\u{184}', ['\u{185}', '\0', '\0']), + ('\u{186}', ['\u{254}', '\0', '\0']), + ('\u{187}', ['\u{188}', '\0', '\0']), + ('\u{189}', ['\u{256}', '\0', '\0']), + ('\u{18a}', ['\u{257}', '\0', '\0']), + ('\u{18b}', ['\u{18c}', '\0', '\0']), + ('\u{18e}', ['\u{1dd}', '\0', '\0']), + ('\u{18f}', ['\u{259}', '\0', '\0']), + ('\u{190}', ['\u{25b}', '\0', '\0']), + ('\u{191}', 
['\u{192}', '\0', '\0']), + ('\u{193}', ['\u{260}', '\0', '\0']), + ('\u{194}', ['\u{263}', '\0', '\0']), + ('\u{196}', ['\u{269}', '\0', '\0']), + ('\u{197}', ['\u{268}', '\0', '\0']), + ('\u{198}', ['\u{199}', '\0', '\0']), + ('\u{19c}', ['\u{26f}', '\0', '\0']), + ('\u{19d}', ['\u{272}', '\0', '\0']), + ('\u{19f}', ['\u{275}', '\0', '\0']), + ('\u{1a0}', ['\u{1a1}', '\0', '\0']), + ('\u{1a2}', ['\u{1a3}', '\0', '\0']), + ('\u{1a4}', ['\u{1a5}', '\0', '\0']), + ('\u{1a6}', ['\u{280}', '\0', '\0']), + ('\u{1a7}', ['\u{1a8}', '\0', '\0']), + ('\u{1a9}', ['\u{283}', '\0', '\0']), + ('\u{1ac}', ['\u{1ad}', '\0', '\0']), + ('\u{1ae}', ['\u{288}', '\0', '\0']), + ('\u{1af}', ['\u{1b0}', '\0', '\0']), + ('\u{1b1}', ['\u{28a}', '\0', '\0']), + ('\u{1b2}', ['\u{28b}', '\0', '\0']), + ('\u{1b3}', ['\u{1b4}', '\0', '\0']), + ('\u{1b5}', ['\u{1b6}', '\0', '\0']), + ('\u{1b7}', ['\u{292}', '\0', '\0']), + ('\u{1b8}', ['\u{1b9}', '\0', '\0']), + ('\u{1bc}', ['\u{1bd}', '\0', '\0']), + ('\u{1c4}', ['\u{1c6}', '\0', '\0']), + ('\u{1c5}', ['\u{1c6}', '\0', '\0']), + ('\u{1c7}', ['\u{1c9}', '\0', '\0']), + ('\u{1c8}', ['\u{1c9}', '\0', '\0']), + ('\u{1ca}', ['\u{1cc}', '\0', '\0']), + ('\u{1cb}', ['\u{1cc}', '\0', '\0']), + ('\u{1cd}', ['\u{1ce}', '\0', '\0']), + ('\u{1cf}', ['\u{1d0}', '\0', '\0']), + ('\u{1d1}', ['\u{1d2}', '\0', '\0']), + ('\u{1d3}', ['\u{1d4}', '\0', '\0']), + ('\u{1d5}', ['\u{1d6}', '\0', '\0']), + ('\u{1d7}', ['\u{1d8}', '\0', '\0']), + ('\u{1d9}', ['\u{1da}', '\0', '\0']), + ('\u{1db}', ['\u{1dc}', '\0', '\0']), + ('\u{1de}', ['\u{1df}', '\0', '\0']), + ('\u{1e0}', ['\u{1e1}', '\0', '\0']), + ('\u{1e2}', ['\u{1e3}', '\0', '\0']), + ('\u{1e4}', ['\u{1e5}', '\0', '\0']), + ('\u{1e6}', ['\u{1e7}', '\0', '\0']), + ('\u{1e8}', ['\u{1e9}', '\0', '\0']), + ('\u{1ea}', ['\u{1eb}', '\0', '\0']), + ('\u{1ec}', ['\u{1ed}', '\0', '\0']), + ('\u{1ee}', ['\u{1ef}', '\0', '\0']), + ('\u{1f1}', ['\u{1f3}', '\0', '\0']), + ('\u{1f2}', ['\u{1f3}', '\0', '\0']), + ('\u{1f4}', ['\u{1f5}', '\0', '\0']), + ('\u{1f6}', ['\u{195}', '\0', '\0']), + ('\u{1f7}', ['\u{1bf}', '\0', '\0']), + ('\u{1f8}', ['\u{1f9}', '\0', '\0']), + ('\u{1fa}', ['\u{1fb}', '\0', '\0']), + ('\u{1fc}', ['\u{1fd}', '\0', '\0']), + ('\u{1fe}', ['\u{1ff}', '\0', '\0']), + ('\u{200}', ['\u{201}', '\0', '\0']), + ('\u{202}', ['\u{203}', '\0', '\0']), + ('\u{204}', ['\u{205}', '\0', '\0']), + ('\u{206}', ['\u{207}', '\0', '\0']), + ('\u{208}', ['\u{209}', '\0', '\0']), + ('\u{20a}', ['\u{20b}', '\0', '\0']), + ('\u{20c}', ['\u{20d}', '\0', '\0']), + ('\u{20e}', ['\u{20f}', '\0', '\0']), + ('\u{210}', ['\u{211}', '\0', '\0']), + ('\u{212}', ['\u{213}', '\0', '\0']), + ('\u{214}', ['\u{215}', '\0', '\0']), + ('\u{216}', ['\u{217}', '\0', '\0']), + ('\u{218}', ['\u{219}', '\0', '\0']), + ('\u{21a}', ['\u{21b}', '\0', '\0']), + ('\u{21c}', ['\u{21d}', '\0', '\0']), + ('\u{21e}', ['\u{21f}', '\0', '\0']), + ('\u{220}', ['\u{19e}', '\0', '\0']), + ('\u{222}', ['\u{223}', '\0', '\0']), + ('\u{224}', ['\u{225}', '\0', '\0']), + ('\u{226}', ['\u{227}', '\0', '\0']), + ('\u{228}', ['\u{229}', '\0', '\0']), + ('\u{22a}', ['\u{22b}', '\0', '\0']), + ('\u{22c}', ['\u{22d}', '\0', '\0']), + ('\u{22e}', ['\u{22f}', '\0', '\0']), + ('\u{230}', ['\u{231}', '\0', '\0']), + ('\u{232}', ['\u{233}', '\0', '\0']), + ('\u{23a}', ['\u{2c65}', '\0', '\0']), + ('\u{23b}', ['\u{23c}', '\0', '\0']), + ('\u{23d}', ['\u{19a}', '\0', '\0']), + ('\u{23e}', ['\u{2c66}', '\0', '\0']), + ('\u{241}', ['\u{242}', '\0', '\0']), + ('\u{243}', ['\u{180}', '\0', '\0']), + 
('\u{244}', ['\u{289}', '\0', '\0']), + ('\u{245}', ['\u{28c}', '\0', '\0']), + ('\u{246}', ['\u{247}', '\0', '\0']), + ('\u{248}', ['\u{249}', '\0', '\0']), + ('\u{24a}', ['\u{24b}', '\0', '\0']), + ('\u{24c}', ['\u{24d}', '\0', '\0']), + ('\u{24e}', ['\u{24f}', '\0', '\0']), + ('\u{370}', ['\u{371}', '\0', '\0']), + ('\u{372}', ['\u{373}', '\0', '\0']), + ('\u{376}', ['\u{377}', '\0', '\0']), + ('\u{37f}', ['\u{3f3}', '\0', '\0']), + ('\u{386}', ['\u{3ac}', '\0', '\0']), + ('\u{388}', ['\u{3ad}', '\0', '\0']), + ('\u{389}', ['\u{3ae}', '\0', '\0']), + ('\u{38a}', ['\u{3af}', '\0', '\0']), + ('\u{38c}', ['\u{3cc}', '\0', '\0']), + ('\u{38e}', ['\u{3cd}', '\0', '\0']), + ('\u{38f}', ['\u{3ce}', '\0', '\0']), + ('\u{391}', ['\u{3b1}', '\0', '\0']), + ('\u{392}', ['\u{3b2}', '\0', '\0']), + ('\u{393}', ['\u{3b3}', '\0', '\0']), + ('\u{394}', ['\u{3b4}', '\0', '\0']), + ('\u{395}', ['\u{3b5}', '\0', '\0']), + ('\u{396}', ['\u{3b6}', '\0', '\0']), + ('\u{397}', ['\u{3b7}', '\0', '\0']), + ('\u{398}', ['\u{3b8}', '\0', '\0']), + ('\u{399}', ['\u{3b9}', '\0', '\0']), + ('\u{39a}', ['\u{3ba}', '\0', '\0']), + ('\u{39b}', ['\u{3bb}', '\0', '\0']), + ('\u{39c}', ['\u{3bc}', '\0', '\0']), + ('\u{39d}', ['\u{3bd}', '\0', '\0']), + ('\u{39e}', ['\u{3be}', '\0', '\0']), + ('\u{39f}', ['\u{3bf}', '\0', '\0']), + ('\u{3a0}', ['\u{3c0}', '\0', '\0']), + ('\u{3a1}', ['\u{3c1}', '\0', '\0']), + ('\u{3a3}', ['\u{3c3}', '\0', '\0']), + ('\u{3a4}', ['\u{3c4}', '\0', '\0']), + ('\u{3a5}', ['\u{3c5}', '\0', '\0']), + ('\u{3a6}', ['\u{3c6}', '\0', '\0']), + ('\u{3a7}', ['\u{3c7}', '\0', '\0']), + ('\u{3a8}', ['\u{3c8}', '\0', '\0']), + ('\u{3a9}', ['\u{3c9}', '\0', '\0']), + ('\u{3aa}', ['\u{3ca}', '\0', '\0']), + ('\u{3ab}', ['\u{3cb}', '\0', '\0']), + ('\u{3cf}', ['\u{3d7}', '\0', '\0']), + ('\u{3d8}', ['\u{3d9}', '\0', '\0']), + ('\u{3da}', ['\u{3db}', '\0', '\0']), + ('\u{3dc}', ['\u{3dd}', '\0', '\0']), + ('\u{3de}', ['\u{3df}', '\0', '\0']), + ('\u{3e0}', ['\u{3e1}', '\0', '\0']), + ('\u{3e2}', ['\u{3e3}', '\0', '\0']), + ('\u{3e4}', ['\u{3e5}', '\0', '\0']), + ('\u{3e6}', ['\u{3e7}', '\0', '\0']), + ('\u{3e8}', ['\u{3e9}', '\0', '\0']), + ('\u{3ea}', ['\u{3eb}', '\0', '\0']), + ('\u{3ec}', ['\u{3ed}', '\0', '\0']), + ('\u{3ee}', ['\u{3ef}', '\0', '\0']), + ('\u{3f4}', ['\u{3b8}', '\0', '\0']), + ('\u{3f7}', ['\u{3f8}', '\0', '\0']), + ('\u{3f9}', ['\u{3f2}', '\0', '\0']), + ('\u{3fa}', ['\u{3fb}', '\0', '\0']), + ('\u{3fd}', ['\u{37b}', '\0', '\0']), + ('\u{3fe}', ['\u{37c}', '\0', '\0']), + ('\u{3ff}', ['\u{37d}', '\0', '\0']), + ('\u{400}', ['\u{450}', '\0', '\0']), + ('\u{401}', ['\u{451}', '\0', '\0']), + ('\u{402}', ['\u{452}', '\0', '\0']), + ('\u{403}', ['\u{453}', '\0', '\0']), + ('\u{404}', ['\u{454}', '\0', '\0']), + ('\u{405}', ['\u{455}', '\0', '\0']), + ('\u{406}', ['\u{456}', '\0', '\0']), + ('\u{407}', ['\u{457}', '\0', '\0']), + ('\u{408}', ['\u{458}', '\0', '\0']), + ('\u{409}', ['\u{459}', '\0', '\0']), + ('\u{40a}', ['\u{45a}', '\0', '\0']), + ('\u{40b}', ['\u{45b}', '\0', '\0']), + ('\u{40c}', ['\u{45c}', '\0', '\0']), + ('\u{40d}', ['\u{45d}', '\0', '\0']), + ('\u{40e}', ['\u{45e}', '\0', '\0']), + ('\u{40f}', ['\u{45f}', '\0', '\0']), + ('\u{410}', ['\u{430}', '\0', '\0']), + ('\u{411}', ['\u{431}', '\0', '\0']), + ('\u{412}', ['\u{432}', '\0', '\0']), + ('\u{413}', ['\u{433}', '\0', '\0']), + ('\u{414}', ['\u{434}', '\0', '\0']), + ('\u{415}', ['\u{435}', '\0', '\0']), + ('\u{416}', ['\u{436}', '\0', '\0']), + ('\u{417}', ['\u{437}', '\0', '\0']), + ('\u{418}', ['\u{438}', '\0', 
'\0']), + ('\u{419}', ['\u{439}', '\0', '\0']), + ('\u{41a}', ['\u{43a}', '\0', '\0']), + ('\u{41b}', ['\u{43b}', '\0', '\0']), + ('\u{41c}', ['\u{43c}', '\0', '\0']), + ('\u{41d}', ['\u{43d}', '\0', '\0']), + ('\u{41e}', ['\u{43e}', '\0', '\0']), + ('\u{41f}', ['\u{43f}', '\0', '\0']), + ('\u{420}', ['\u{440}', '\0', '\0']), + ('\u{421}', ['\u{441}', '\0', '\0']), + ('\u{422}', ['\u{442}', '\0', '\0']), + ('\u{423}', ['\u{443}', '\0', '\0']), + ('\u{424}', ['\u{444}', '\0', '\0']), + ('\u{425}', ['\u{445}', '\0', '\0']), + ('\u{426}', ['\u{446}', '\0', '\0']), + ('\u{427}', ['\u{447}', '\0', '\0']), + ('\u{428}', ['\u{448}', '\0', '\0']), + ('\u{429}', ['\u{449}', '\0', '\0']), + ('\u{42a}', ['\u{44a}', '\0', '\0']), + ('\u{42b}', ['\u{44b}', '\0', '\0']), + ('\u{42c}', ['\u{44c}', '\0', '\0']), + ('\u{42d}', ['\u{44d}', '\0', '\0']), + ('\u{42e}', ['\u{44e}', '\0', '\0']), + ('\u{42f}', ['\u{44f}', '\0', '\0']), + ('\u{460}', ['\u{461}', '\0', '\0']), + ('\u{462}', ['\u{463}', '\0', '\0']), + ('\u{464}', ['\u{465}', '\0', '\0']), + ('\u{466}', ['\u{467}', '\0', '\0']), + ('\u{468}', ['\u{469}', '\0', '\0']), + ('\u{46a}', ['\u{46b}', '\0', '\0']), + ('\u{46c}', ['\u{46d}', '\0', '\0']), + ('\u{46e}', ['\u{46f}', '\0', '\0']), + ('\u{470}', ['\u{471}', '\0', '\0']), + ('\u{472}', ['\u{473}', '\0', '\0']), + ('\u{474}', ['\u{475}', '\0', '\0']), + ('\u{476}', ['\u{477}', '\0', '\0']), + ('\u{478}', ['\u{479}', '\0', '\0']), + ('\u{47a}', ['\u{47b}', '\0', '\0']), + ('\u{47c}', ['\u{47d}', '\0', '\0']), + ('\u{47e}', ['\u{47f}', '\0', '\0']), + ('\u{480}', ['\u{481}', '\0', '\0']), + ('\u{48a}', ['\u{48b}', '\0', '\0']), + ('\u{48c}', ['\u{48d}', '\0', '\0']), + ('\u{48e}', ['\u{48f}', '\0', '\0']), + ('\u{490}', ['\u{491}', '\0', '\0']), + ('\u{492}', ['\u{493}', '\0', '\0']), + ('\u{494}', ['\u{495}', '\0', '\0']), + ('\u{496}', ['\u{497}', '\0', '\0']), + ('\u{498}', ['\u{499}', '\0', '\0']), + ('\u{49a}', ['\u{49b}', '\0', '\0']), + ('\u{49c}', ['\u{49d}', '\0', '\0']), + ('\u{49e}', ['\u{49f}', '\0', '\0']), + ('\u{4a0}', ['\u{4a1}', '\0', '\0']), + ('\u{4a2}', ['\u{4a3}', '\0', '\0']), + ('\u{4a4}', ['\u{4a5}', '\0', '\0']), + ('\u{4a6}', ['\u{4a7}', '\0', '\0']), + ('\u{4a8}', ['\u{4a9}', '\0', '\0']), + ('\u{4aa}', ['\u{4ab}', '\0', '\0']), + ('\u{4ac}', ['\u{4ad}', '\0', '\0']), + ('\u{4ae}', ['\u{4af}', '\0', '\0']), + ('\u{4b0}', ['\u{4b1}', '\0', '\0']), + ('\u{4b2}', ['\u{4b3}', '\0', '\0']), + ('\u{4b4}', ['\u{4b5}', '\0', '\0']), + ('\u{4b6}', ['\u{4b7}', '\0', '\0']), + ('\u{4b8}', ['\u{4b9}', '\0', '\0']), + ('\u{4ba}', ['\u{4bb}', '\0', '\0']), + ('\u{4bc}', ['\u{4bd}', '\0', '\0']), + ('\u{4be}', ['\u{4bf}', '\0', '\0']), + ('\u{4c0}', ['\u{4cf}', '\0', '\0']), + ('\u{4c1}', ['\u{4c2}', '\0', '\0']), + ('\u{4c3}', ['\u{4c4}', '\0', '\0']), + ('\u{4c5}', ['\u{4c6}', '\0', '\0']), + ('\u{4c7}', ['\u{4c8}', '\0', '\0']), + ('\u{4c9}', ['\u{4ca}', '\0', '\0']), + ('\u{4cb}', ['\u{4cc}', '\0', '\0']), + ('\u{4cd}', ['\u{4ce}', '\0', '\0']), + ('\u{4d0}', ['\u{4d1}', '\0', '\0']), + ('\u{4d2}', ['\u{4d3}', '\0', '\0']), + ('\u{4d4}', ['\u{4d5}', '\0', '\0']), + ('\u{4d6}', ['\u{4d7}', '\0', '\0']), + ('\u{4d8}', ['\u{4d9}', '\0', '\0']), + ('\u{4da}', ['\u{4db}', '\0', '\0']), + ('\u{4dc}', ['\u{4dd}', '\0', '\0']), + ('\u{4de}', ['\u{4df}', '\0', '\0']), + ('\u{4e0}', ['\u{4e1}', '\0', '\0']), + ('\u{4e2}', ['\u{4e3}', '\0', '\0']), + ('\u{4e4}', ['\u{4e5}', '\0', '\0']), + ('\u{4e6}', ['\u{4e7}', '\0', '\0']), + ('\u{4e8}', ['\u{4e9}', '\0', '\0']), + ('\u{4ea}', ['\u{4eb}', 
'\0', '\0']), + ('\u{4ec}', ['\u{4ed}', '\0', '\0']), + ('\u{4ee}', ['\u{4ef}', '\0', '\0']), + ('\u{4f0}', ['\u{4f1}', '\0', '\0']), + ('\u{4f2}', ['\u{4f3}', '\0', '\0']), + ('\u{4f4}', ['\u{4f5}', '\0', '\0']), + ('\u{4f6}', ['\u{4f7}', '\0', '\0']), + ('\u{4f8}', ['\u{4f9}', '\0', '\0']), + ('\u{4fa}', ['\u{4fb}', '\0', '\0']), + ('\u{4fc}', ['\u{4fd}', '\0', '\0']), + ('\u{4fe}', ['\u{4ff}', '\0', '\0']), + ('\u{500}', ['\u{501}', '\0', '\0']), + ('\u{502}', ['\u{503}', '\0', '\0']), + ('\u{504}', ['\u{505}', '\0', '\0']), + ('\u{506}', ['\u{507}', '\0', '\0']), + ('\u{508}', ['\u{509}', '\0', '\0']), + ('\u{50a}', ['\u{50b}', '\0', '\0']), + ('\u{50c}', ['\u{50d}', '\0', '\0']), + ('\u{50e}', ['\u{50f}', '\0', '\0']), + ('\u{510}', ['\u{511}', '\0', '\0']), + ('\u{512}', ['\u{513}', '\0', '\0']), + ('\u{514}', ['\u{515}', '\0', '\0']), + ('\u{516}', ['\u{517}', '\0', '\0']), + ('\u{518}', ['\u{519}', '\0', '\0']), + ('\u{51a}', ['\u{51b}', '\0', '\0']), + ('\u{51c}', ['\u{51d}', '\0', '\0']), + ('\u{51e}', ['\u{51f}', '\0', '\0']), + ('\u{520}', ['\u{521}', '\0', '\0']), + ('\u{522}', ['\u{523}', '\0', '\0']), + ('\u{524}', ['\u{525}', '\0', '\0']), + ('\u{526}', ['\u{527}', '\0', '\0']), + ('\u{528}', ['\u{529}', '\0', '\0']), + ('\u{52a}', ['\u{52b}', '\0', '\0']), + ('\u{52c}', ['\u{52d}', '\0', '\0']), + ('\u{52e}', ['\u{52f}', '\0', '\0']), + ('\u{531}', ['\u{561}', '\0', '\0']), + ('\u{532}', ['\u{562}', '\0', '\0']), + ('\u{533}', ['\u{563}', '\0', '\0']), + ('\u{534}', ['\u{564}', '\0', '\0']), + ('\u{535}', ['\u{565}', '\0', '\0']), + ('\u{536}', ['\u{566}', '\0', '\0']), + ('\u{537}', ['\u{567}', '\0', '\0']), + ('\u{538}', ['\u{568}', '\0', '\0']), + ('\u{539}', ['\u{569}', '\0', '\0']), + ('\u{53a}', ['\u{56a}', '\0', '\0']), + ('\u{53b}', ['\u{56b}', '\0', '\0']), + ('\u{53c}', ['\u{56c}', '\0', '\0']), + ('\u{53d}', ['\u{56d}', '\0', '\0']), + ('\u{53e}', ['\u{56e}', '\0', '\0']), + ('\u{53f}', ['\u{56f}', '\0', '\0']), + ('\u{540}', ['\u{570}', '\0', '\0']), + ('\u{541}', ['\u{571}', '\0', '\0']), + ('\u{542}', ['\u{572}', '\0', '\0']), + ('\u{543}', ['\u{573}', '\0', '\0']), + ('\u{544}', ['\u{574}', '\0', '\0']), + ('\u{545}', ['\u{575}', '\0', '\0']), + ('\u{546}', ['\u{576}', '\0', '\0']), + ('\u{547}', ['\u{577}', '\0', '\0']), + ('\u{548}', ['\u{578}', '\0', '\0']), + ('\u{549}', ['\u{579}', '\0', '\0']), + ('\u{54a}', ['\u{57a}', '\0', '\0']), + ('\u{54b}', ['\u{57b}', '\0', '\0']), + ('\u{54c}', ['\u{57c}', '\0', '\0']), + ('\u{54d}', ['\u{57d}', '\0', '\0']), + ('\u{54e}', ['\u{57e}', '\0', '\0']), + ('\u{54f}', ['\u{57f}', '\0', '\0']), + ('\u{550}', ['\u{580}', '\0', '\0']), + ('\u{551}', ['\u{581}', '\0', '\0']), + ('\u{552}', ['\u{582}', '\0', '\0']), + ('\u{553}', ['\u{583}', '\0', '\0']), + ('\u{554}', ['\u{584}', '\0', '\0']), + ('\u{555}', ['\u{585}', '\0', '\0']), + ('\u{556}', ['\u{586}', '\0', '\0']), + ('\u{10a0}', ['\u{2d00}', '\0', '\0']), + ('\u{10a1}', ['\u{2d01}', '\0', '\0']), + ('\u{10a2}', ['\u{2d02}', '\0', '\0']), + ('\u{10a3}', ['\u{2d03}', '\0', '\0']), + ('\u{10a4}', ['\u{2d04}', '\0', '\0']), + ('\u{10a5}', ['\u{2d05}', '\0', '\0']), + ('\u{10a6}', ['\u{2d06}', '\0', '\0']), + ('\u{10a7}', ['\u{2d07}', '\0', '\0']), + ('\u{10a8}', ['\u{2d08}', '\0', '\0']), + ('\u{10a9}', ['\u{2d09}', '\0', '\0']), + ('\u{10aa}', ['\u{2d0a}', '\0', '\0']), + ('\u{10ab}', ['\u{2d0b}', '\0', '\0']), + ('\u{10ac}', ['\u{2d0c}', '\0', '\0']), + ('\u{10ad}', ['\u{2d0d}', '\0', '\0']), + ('\u{10ae}', ['\u{2d0e}', '\0', '\0']), + ('\u{10af}', ['\u{2d0f}', 
'\0', '\0']), + ('\u{10b0}', ['\u{2d10}', '\0', '\0']), + ('\u{10b1}', ['\u{2d11}', '\0', '\0']), + ('\u{10b2}', ['\u{2d12}', '\0', '\0']), + ('\u{10b3}', ['\u{2d13}', '\0', '\0']), + ('\u{10b4}', ['\u{2d14}', '\0', '\0']), + ('\u{10b5}', ['\u{2d15}', '\0', '\0']), + ('\u{10b6}', ['\u{2d16}', '\0', '\0']), + ('\u{10b7}', ['\u{2d17}', '\0', '\0']), + ('\u{10b8}', ['\u{2d18}', '\0', '\0']), + ('\u{10b9}', ['\u{2d19}', '\0', '\0']), + ('\u{10ba}', ['\u{2d1a}', '\0', '\0']), + ('\u{10bb}', ['\u{2d1b}', '\0', '\0']), + ('\u{10bc}', ['\u{2d1c}', '\0', '\0']), + ('\u{10bd}', ['\u{2d1d}', '\0', '\0']), + ('\u{10be}', ['\u{2d1e}', '\0', '\0']), + ('\u{10bf}', ['\u{2d1f}', '\0', '\0']), + ('\u{10c0}', ['\u{2d20}', '\0', '\0']), + ('\u{10c1}', ['\u{2d21}', '\0', '\0']), + ('\u{10c2}', ['\u{2d22}', '\0', '\0']), + ('\u{10c3}', ['\u{2d23}', '\0', '\0']), + ('\u{10c4}', ['\u{2d24}', '\0', '\0']), + ('\u{10c5}', ['\u{2d25}', '\0', '\0']), + ('\u{10c7}', ['\u{2d27}', '\0', '\0']), + ('\u{10cd}', ['\u{2d2d}', '\0', '\0']), + ('\u{13a0}', ['\u{ab70}', '\0', '\0']), + ('\u{13a1}', ['\u{ab71}', '\0', '\0']), + ('\u{13a2}', ['\u{ab72}', '\0', '\0']), + ('\u{13a3}', ['\u{ab73}', '\0', '\0']), + ('\u{13a4}', ['\u{ab74}', '\0', '\0']), + ('\u{13a5}', ['\u{ab75}', '\0', '\0']), + ('\u{13a6}', ['\u{ab76}', '\0', '\0']), + ('\u{13a7}', ['\u{ab77}', '\0', '\0']), + ('\u{13a8}', ['\u{ab78}', '\0', '\0']), + ('\u{13a9}', ['\u{ab79}', '\0', '\0']), + ('\u{13aa}', ['\u{ab7a}', '\0', '\0']), + ('\u{13ab}', ['\u{ab7b}', '\0', '\0']), + ('\u{13ac}', ['\u{ab7c}', '\0', '\0']), + ('\u{13ad}', ['\u{ab7d}', '\0', '\0']), + ('\u{13ae}', ['\u{ab7e}', '\0', '\0']), + ('\u{13af}', ['\u{ab7f}', '\0', '\0']), + ('\u{13b0}', ['\u{ab80}', '\0', '\0']), + ('\u{13b1}', ['\u{ab81}', '\0', '\0']), + ('\u{13b2}', ['\u{ab82}', '\0', '\0']), + ('\u{13b3}', ['\u{ab83}', '\0', '\0']), + ('\u{13b4}', ['\u{ab84}', '\0', '\0']), + ('\u{13b5}', ['\u{ab85}', '\0', '\0']), + ('\u{13b6}', ['\u{ab86}', '\0', '\0']), + ('\u{13b7}', ['\u{ab87}', '\0', '\0']), + ('\u{13b8}', ['\u{ab88}', '\0', '\0']), + ('\u{13b9}', ['\u{ab89}', '\0', '\0']), + ('\u{13ba}', ['\u{ab8a}', '\0', '\0']), + ('\u{13bb}', ['\u{ab8b}', '\0', '\0']), + ('\u{13bc}', ['\u{ab8c}', '\0', '\0']), + ('\u{13bd}', ['\u{ab8d}', '\0', '\0']), + ('\u{13be}', ['\u{ab8e}', '\0', '\0']), + ('\u{13bf}', ['\u{ab8f}', '\0', '\0']), + ('\u{13c0}', ['\u{ab90}', '\0', '\0']), + ('\u{13c1}', ['\u{ab91}', '\0', '\0']), + ('\u{13c2}', ['\u{ab92}', '\0', '\0']), + ('\u{13c3}', ['\u{ab93}', '\0', '\0']), + ('\u{13c4}', ['\u{ab94}', '\0', '\0']), + ('\u{13c5}', ['\u{ab95}', '\0', '\0']), + ('\u{13c6}', ['\u{ab96}', '\0', '\0']), + ('\u{13c7}', ['\u{ab97}', '\0', '\0']), + ('\u{13c8}', ['\u{ab98}', '\0', '\0']), + ('\u{13c9}', ['\u{ab99}', '\0', '\0']), + ('\u{13ca}', ['\u{ab9a}', '\0', '\0']), + ('\u{13cb}', ['\u{ab9b}', '\0', '\0']), + ('\u{13cc}', ['\u{ab9c}', '\0', '\0']), + ('\u{13cd}', ['\u{ab9d}', '\0', '\0']), + ('\u{13ce}', ['\u{ab9e}', '\0', '\0']), + ('\u{13cf}', ['\u{ab9f}', '\0', '\0']), + ('\u{13d0}', ['\u{aba0}', '\0', '\0']), + ('\u{13d1}', ['\u{aba1}', '\0', '\0']), + ('\u{13d2}', ['\u{aba2}', '\0', '\0']), + ('\u{13d3}', ['\u{aba3}', '\0', '\0']), + ('\u{13d4}', ['\u{aba4}', '\0', '\0']), + ('\u{13d5}', ['\u{aba5}', '\0', '\0']), + ('\u{13d6}', ['\u{aba6}', '\0', '\0']), + ('\u{13d7}', ['\u{aba7}', '\0', '\0']), + ('\u{13d8}', ['\u{aba8}', '\0', '\0']), + ('\u{13d9}', ['\u{aba9}', '\0', '\0']), + ('\u{13da}', ['\u{abaa}', '\0', '\0']), + ('\u{13db}', ['\u{abab}', '\0', '\0']), + 
('\u{13dc}', ['\u{abac}', '\0', '\0']), + ('\u{13dd}', ['\u{abad}', '\0', '\0']), + ('\u{13de}', ['\u{abae}', '\0', '\0']), + ('\u{13df}', ['\u{abaf}', '\0', '\0']), + ('\u{13e0}', ['\u{abb0}', '\0', '\0']), + ('\u{13e1}', ['\u{abb1}', '\0', '\0']), + ('\u{13e2}', ['\u{abb2}', '\0', '\0']), + ('\u{13e3}', ['\u{abb3}', '\0', '\0']), + ('\u{13e4}', ['\u{abb4}', '\0', '\0']), + ('\u{13e5}', ['\u{abb5}', '\0', '\0']), + ('\u{13e6}', ['\u{abb6}', '\0', '\0']), + ('\u{13e7}', ['\u{abb7}', '\0', '\0']), + ('\u{13e8}', ['\u{abb8}', '\0', '\0']), + ('\u{13e9}', ['\u{abb9}', '\0', '\0']), + ('\u{13ea}', ['\u{abba}', '\0', '\0']), + ('\u{13eb}', ['\u{abbb}', '\0', '\0']), + ('\u{13ec}', ['\u{abbc}', '\0', '\0']), + ('\u{13ed}', ['\u{abbd}', '\0', '\0']), + ('\u{13ee}', ['\u{abbe}', '\0', '\0']), + ('\u{13ef}', ['\u{abbf}', '\0', '\0']), + ('\u{13f0}', ['\u{13f8}', '\0', '\0']), + ('\u{13f1}', ['\u{13f9}', '\0', '\0']), + ('\u{13f2}', ['\u{13fa}', '\0', '\0']), + ('\u{13f3}', ['\u{13fb}', '\0', '\0']), + ('\u{13f4}', ['\u{13fc}', '\0', '\0']), + ('\u{13f5}', ['\u{13fd}', '\0', '\0']), + ('\u{1c90}', ['\u{10d0}', '\0', '\0']), + ('\u{1c91}', ['\u{10d1}', '\0', '\0']), + ('\u{1c92}', ['\u{10d2}', '\0', '\0']), + ('\u{1c93}', ['\u{10d3}', '\0', '\0']), + ('\u{1c94}', ['\u{10d4}', '\0', '\0']), + ('\u{1c95}', ['\u{10d5}', '\0', '\0']), + ('\u{1c96}', ['\u{10d6}', '\0', '\0']), + ('\u{1c97}', ['\u{10d7}', '\0', '\0']), + ('\u{1c98}', ['\u{10d8}', '\0', '\0']), + ('\u{1c99}', ['\u{10d9}', '\0', '\0']), + ('\u{1c9a}', ['\u{10da}', '\0', '\0']), + ('\u{1c9b}', ['\u{10db}', '\0', '\0']), + ('\u{1c9c}', ['\u{10dc}', '\0', '\0']), + ('\u{1c9d}', ['\u{10dd}', '\0', '\0']), + ('\u{1c9e}', ['\u{10de}', '\0', '\0']), + ('\u{1c9f}', ['\u{10df}', '\0', '\0']), + ('\u{1ca0}', ['\u{10e0}', '\0', '\0']), + ('\u{1ca1}', ['\u{10e1}', '\0', '\0']), + ('\u{1ca2}', ['\u{10e2}', '\0', '\0']), + ('\u{1ca3}', ['\u{10e3}', '\0', '\0']), + ('\u{1ca4}', ['\u{10e4}', '\0', '\0']), + ('\u{1ca5}', ['\u{10e5}', '\0', '\0']), + ('\u{1ca6}', ['\u{10e6}', '\0', '\0']), + ('\u{1ca7}', ['\u{10e7}', '\0', '\0']), + ('\u{1ca8}', ['\u{10e8}', '\0', '\0']), + ('\u{1ca9}', ['\u{10e9}', '\0', '\0']), + ('\u{1caa}', ['\u{10ea}', '\0', '\0']), + ('\u{1cab}', ['\u{10eb}', '\0', '\0']), + ('\u{1cac}', ['\u{10ec}', '\0', '\0']), + ('\u{1cad}', ['\u{10ed}', '\0', '\0']), + ('\u{1cae}', ['\u{10ee}', '\0', '\0']), + ('\u{1caf}', ['\u{10ef}', '\0', '\0']), + ('\u{1cb0}', ['\u{10f0}', '\0', '\0']), + ('\u{1cb1}', ['\u{10f1}', '\0', '\0']), + ('\u{1cb2}', ['\u{10f2}', '\0', '\0']), + ('\u{1cb3}', ['\u{10f3}', '\0', '\0']), + ('\u{1cb4}', ['\u{10f4}', '\0', '\0']), + ('\u{1cb5}', ['\u{10f5}', '\0', '\0']), + ('\u{1cb6}', ['\u{10f6}', '\0', '\0']), + ('\u{1cb7}', ['\u{10f7}', '\0', '\0']), + ('\u{1cb8}', ['\u{10f8}', '\0', '\0']), + ('\u{1cb9}', ['\u{10f9}', '\0', '\0']), + ('\u{1cba}', ['\u{10fa}', '\0', '\0']), + ('\u{1cbd}', ['\u{10fd}', '\0', '\0']), + ('\u{1cbe}', ['\u{10fe}', '\0', '\0']), + ('\u{1cbf}', ['\u{10ff}', '\0', '\0']), + ('\u{1e00}', ['\u{1e01}', '\0', '\0']), + ('\u{1e02}', ['\u{1e03}', '\0', '\0']), + ('\u{1e04}', ['\u{1e05}', '\0', '\0']), + ('\u{1e06}', ['\u{1e07}', '\0', '\0']), + ('\u{1e08}', ['\u{1e09}', '\0', '\0']), + ('\u{1e0a}', ['\u{1e0b}', '\0', '\0']), + ('\u{1e0c}', ['\u{1e0d}', '\0', '\0']), + ('\u{1e0e}', ['\u{1e0f}', '\0', '\0']), + ('\u{1e10}', ['\u{1e11}', '\0', '\0']), + ('\u{1e12}', ['\u{1e13}', '\0', '\0']), + ('\u{1e14}', ['\u{1e15}', '\0', '\0']), + ('\u{1e16}', ['\u{1e17}', '\0', '\0']), + ('\u{1e18}', ['\u{1e19}', 
'\0', '\0']), + ('\u{1e1a}', ['\u{1e1b}', '\0', '\0']), + ('\u{1e1c}', ['\u{1e1d}', '\0', '\0']), + ('\u{1e1e}', ['\u{1e1f}', '\0', '\0']), + ('\u{1e20}', ['\u{1e21}', '\0', '\0']), + ('\u{1e22}', ['\u{1e23}', '\0', '\0']), + ('\u{1e24}', ['\u{1e25}', '\0', '\0']), + ('\u{1e26}', ['\u{1e27}', '\0', '\0']), + ('\u{1e28}', ['\u{1e29}', '\0', '\0']), + ('\u{1e2a}', ['\u{1e2b}', '\0', '\0']), + ('\u{1e2c}', ['\u{1e2d}', '\0', '\0']), + ('\u{1e2e}', ['\u{1e2f}', '\0', '\0']), + ('\u{1e30}', ['\u{1e31}', '\0', '\0']), + ('\u{1e32}', ['\u{1e33}', '\0', '\0']), + ('\u{1e34}', ['\u{1e35}', '\0', '\0']), + ('\u{1e36}', ['\u{1e37}', '\0', '\0']), + ('\u{1e38}', ['\u{1e39}', '\0', '\0']), + ('\u{1e3a}', ['\u{1e3b}', '\0', '\0']), + ('\u{1e3c}', ['\u{1e3d}', '\0', '\0']), + ('\u{1e3e}', ['\u{1e3f}', '\0', '\0']), + ('\u{1e40}', ['\u{1e41}', '\0', '\0']), + ('\u{1e42}', ['\u{1e43}', '\0', '\0']), + ('\u{1e44}', ['\u{1e45}', '\0', '\0']), + ('\u{1e46}', ['\u{1e47}', '\0', '\0']), + ('\u{1e48}', ['\u{1e49}', '\0', '\0']), + ('\u{1e4a}', ['\u{1e4b}', '\0', '\0']), + ('\u{1e4c}', ['\u{1e4d}', '\0', '\0']), + ('\u{1e4e}', ['\u{1e4f}', '\0', '\0']), + ('\u{1e50}', ['\u{1e51}', '\0', '\0']), + ('\u{1e52}', ['\u{1e53}', '\0', '\0']), + ('\u{1e54}', ['\u{1e55}', '\0', '\0']), + ('\u{1e56}', ['\u{1e57}', '\0', '\0']), + ('\u{1e58}', ['\u{1e59}', '\0', '\0']), + ('\u{1e5a}', ['\u{1e5b}', '\0', '\0']), + ('\u{1e5c}', ['\u{1e5d}', '\0', '\0']), + ('\u{1e5e}', ['\u{1e5f}', '\0', '\0']), + ('\u{1e60}', ['\u{1e61}', '\0', '\0']), + ('\u{1e62}', ['\u{1e63}', '\0', '\0']), + ('\u{1e64}', ['\u{1e65}', '\0', '\0']), + ('\u{1e66}', ['\u{1e67}', '\0', '\0']), + ('\u{1e68}', ['\u{1e69}', '\0', '\0']), + ('\u{1e6a}', ['\u{1e6b}', '\0', '\0']), + ('\u{1e6c}', ['\u{1e6d}', '\0', '\0']), + ('\u{1e6e}', ['\u{1e6f}', '\0', '\0']), + ('\u{1e70}', ['\u{1e71}', '\0', '\0']), + ('\u{1e72}', ['\u{1e73}', '\0', '\0']), + ('\u{1e74}', ['\u{1e75}', '\0', '\0']), + ('\u{1e76}', ['\u{1e77}', '\0', '\0']), + ('\u{1e78}', ['\u{1e79}', '\0', '\0']), + ('\u{1e7a}', ['\u{1e7b}', '\0', '\0']), + ('\u{1e7c}', ['\u{1e7d}', '\0', '\0']), + ('\u{1e7e}', ['\u{1e7f}', '\0', '\0']), + ('\u{1e80}', ['\u{1e81}', '\0', '\0']), + ('\u{1e82}', ['\u{1e83}', '\0', '\0']), + ('\u{1e84}', ['\u{1e85}', '\0', '\0']), + ('\u{1e86}', ['\u{1e87}', '\0', '\0']), + ('\u{1e88}', ['\u{1e89}', '\0', '\0']), + ('\u{1e8a}', ['\u{1e8b}', '\0', '\0']), + ('\u{1e8c}', ['\u{1e8d}', '\0', '\0']), + ('\u{1e8e}', ['\u{1e8f}', '\0', '\0']), + ('\u{1e90}', ['\u{1e91}', '\0', '\0']), + ('\u{1e92}', ['\u{1e93}', '\0', '\0']), + ('\u{1e94}', ['\u{1e95}', '\0', '\0']), + ('\u{1e9e}', ['\u{df}', '\0', '\0']), + ('\u{1ea0}', ['\u{1ea1}', '\0', '\0']), + ('\u{1ea2}', ['\u{1ea3}', '\0', '\0']), + ('\u{1ea4}', ['\u{1ea5}', '\0', '\0']), + ('\u{1ea6}', ['\u{1ea7}', '\0', '\0']), + ('\u{1ea8}', ['\u{1ea9}', '\0', '\0']), + ('\u{1eaa}', ['\u{1eab}', '\0', '\0']), + ('\u{1eac}', ['\u{1ead}', '\0', '\0']), + ('\u{1eae}', ['\u{1eaf}', '\0', '\0']), + ('\u{1eb0}', ['\u{1eb1}', '\0', '\0']), + ('\u{1eb2}', ['\u{1eb3}', '\0', '\0']), + ('\u{1eb4}', ['\u{1eb5}', '\0', '\0']), + ('\u{1eb6}', ['\u{1eb7}', '\0', '\0']), + ('\u{1eb8}', ['\u{1eb9}', '\0', '\0']), + ('\u{1eba}', ['\u{1ebb}', '\0', '\0']), + ('\u{1ebc}', ['\u{1ebd}', '\0', '\0']), + ('\u{1ebe}', ['\u{1ebf}', '\0', '\0']), + ('\u{1ec0}', ['\u{1ec1}', '\0', '\0']), + ('\u{1ec2}', ['\u{1ec3}', '\0', '\0']), + ('\u{1ec4}', ['\u{1ec5}', '\0', '\0']), + ('\u{1ec6}', ['\u{1ec7}', '\0', '\0']), + ('\u{1ec8}', ['\u{1ec9}', '\0', '\0']), + ('\u{1eca}', 
['\u{1ecb}', '\0', '\0']), + ('\u{1ecc}', ['\u{1ecd}', '\0', '\0']), + ('\u{1ece}', ['\u{1ecf}', '\0', '\0']), + ('\u{1ed0}', ['\u{1ed1}', '\0', '\0']), + ('\u{1ed2}', ['\u{1ed3}', '\0', '\0']), + ('\u{1ed4}', ['\u{1ed5}', '\0', '\0']), + ('\u{1ed6}', ['\u{1ed7}', '\0', '\0']), + ('\u{1ed8}', ['\u{1ed9}', '\0', '\0']), + ('\u{1eda}', ['\u{1edb}', '\0', '\0']), + ('\u{1edc}', ['\u{1edd}', '\0', '\0']), + ('\u{1ede}', ['\u{1edf}', '\0', '\0']), + ('\u{1ee0}', ['\u{1ee1}', '\0', '\0']), + ('\u{1ee2}', ['\u{1ee3}', '\0', '\0']), + ('\u{1ee4}', ['\u{1ee5}', '\0', '\0']), + ('\u{1ee6}', ['\u{1ee7}', '\0', '\0']), + ('\u{1ee8}', ['\u{1ee9}', '\0', '\0']), + ('\u{1eea}', ['\u{1eeb}', '\0', '\0']), + ('\u{1eec}', ['\u{1eed}', '\0', '\0']), + ('\u{1eee}', ['\u{1eef}', '\0', '\0']), + ('\u{1ef0}', ['\u{1ef1}', '\0', '\0']), + ('\u{1ef2}', ['\u{1ef3}', '\0', '\0']), + ('\u{1ef4}', ['\u{1ef5}', '\0', '\0']), + ('\u{1ef6}', ['\u{1ef7}', '\0', '\0']), + ('\u{1ef8}', ['\u{1ef9}', '\0', '\0']), + ('\u{1efa}', ['\u{1efb}', '\0', '\0']), + ('\u{1efc}', ['\u{1efd}', '\0', '\0']), + ('\u{1efe}', ['\u{1eff}', '\0', '\0']), + ('\u{1f08}', ['\u{1f00}', '\0', '\0']), + ('\u{1f09}', ['\u{1f01}', '\0', '\0']), + ('\u{1f0a}', ['\u{1f02}', '\0', '\0']), + ('\u{1f0b}', ['\u{1f03}', '\0', '\0']), + ('\u{1f0c}', ['\u{1f04}', '\0', '\0']), + ('\u{1f0d}', ['\u{1f05}', '\0', '\0']), + ('\u{1f0e}', ['\u{1f06}', '\0', '\0']), + ('\u{1f0f}', ['\u{1f07}', '\0', '\0']), + ('\u{1f18}', ['\u{1f10}', '\0', '\0']), + ('\u{1f19}', ['\u{1f11}', '\0', '\0']), + ('\u{1f1a}', ['\u{1f12}', '\0', '\0']), + ('\u{1f1b}', ['\u{1f13}', '\0', '\0']), + ('\u{1f1c}', ['\u{1f14}', '\0', '\0']), + ('\u{1f1d}', ['\u{1f15}', '\0', '\0']), + ('\u{1f28}', ['\u{1f20}', '\0', '\0']), + ('\u{1f29}', ['\u{1f21}', '\0', '\0']), + ('\u{1f2a}', ['\u{1f22}', '\0', '\0']), + ('\u{1f2b}', ['\u{1f23}', '\0', '\0']), + ('\u{1f2c}', ['\u{1f24}', '\0', '\0']), + ('\u{1f2d}', ['\u{1f25}', '\0', '\0']), + ('\u{1f2e}', ['\u{1f26}', '\0', '\0']), + ('\u{1f2f}', ['\u{1f27}', '\0', '\0']), + ('\u{1f38}', ['\u{1f30}', '\0', '\0']), + ('\u{1f39}', ['\u{1f31}', '\0', '\0']), + ('\u{1f3a}', ['\u{1f32}', '\0', '\0']), + ('\u{1f3b}', ['\u{1f33}', '\0', '\0']), + ('\u{1f3c}', ['\u{1f34}', '\0', '\0']), + ('\u{1f3d}', ['\u{1f35}', '\0', '\0']), + ('\u{1f3e}', ['\u{1f36}', '\0', '\0']), + ('\u{1f3f}', ['\u{1f37}', '\0', '\0']), + ('\u{1f48}', ['\u{1f40}', '\0', '\0']), + ('\u{1f49}', ['\u{1f41}', '\0', '\0']), + ('\u{1f4a}', ['\u{1f42}', '\0', '\0']), + ('\u{1f4b}', ['\u{1f43}', '\0', '\0']), + ('\u{1f4c}', ['\u{1f44}', '\0', '\0']), + ('\u{1f4d}', ['\u{1f45}', '\0', '\0']), + ('\u{1f59}', ['\u{1f51}', '\0', '\0']), + ('\u{1f5b}', ['\u{1f53}', '\0', '\0']), + ('\u{1f5d}', ['\u{1f55}', '\0', '\0']), + ('\u{1f5f}', ['\u{1f57}', '\0', '\0']), + ('\u{1f68}', ['\u{1f60}', '\0', '\0']), + ('\u{1f69}', ['\u{1f61}', '\0', '\0']), + ('\u{1f6a}', ['\u{1f62}', '\0', '\0']), + ('\u{1f6b}', ['\u{1f63}', '\0', '\0']), + ('\u{1f6c}', ['\u{1f64}', '\0', '\0']), + ('\u{1f6d}', ['\u{1f65}', '\0', '\0']), + ('\u{1f6e}', ['\u{1f66}', '\0', '\0']), + ('\u{1f6f}', ['\u{1f67}', '\0', '\0']), + ('\u{1f88}', ['\u{1f80}', '\0', '\0']), + ('\u{1f89}', ['\u{1f81}', '\0', '\0']), + ('\u{1f8a}', ['\u{1f82}', '\0', '\0']), + ('\u{1f8b}', ['\u{1f83}', '\0', '\0']), + ('\u{1f8c}', ['\u{1f84}', '\0', '\0']), + ('\u{1f8d}', ['\u{1f85}', '\0', '\0']), + ('\u{1f8e}', ['\u{1f86}', '\0', '\0']), + ('\u{1f8f}', ['\u{1f87}', '\0', '\0']), + ('\u{1f98}', ['\u{1f90}', '\0', '\0']), + ('\u{1f99}', ['\u{1f91}', '\0', '\0']), 
+ ('\u{1f9a}', ['\u{1f92}', '\0', '\0']), + ('\u{1f9b}', ['\u{1f93}', '\0', '\0']), + ('\u{1f9c}', ['\u{1f94}', '\0', '\0']), + ('\u{1f9d}', ['\u{1f95}', '\0', '\0']), + ('\u{1f9e}', ['\u{1f96}', '\0', '\0']), + ('\u{1f9f}', ['\u{1f97}', '\0', '\0']), + ('\u{1fa8}', ['\u{1fa0}', '\0', '\0']), + ('\u{1fa9}', ['\u{1fa1}', '\0', '\0']), + ('\u{1faa}', ['\u{1fa2}', '\0', '\0']), + ('\u{1fab}', ['\u{1fa3}', '\0', '\0']), + ('\u{1fac}', ['\u{1fa4}', '\0', '\0']), + ('\u{1fad}', ['\u{1fa5}', '\0', '\0']), + ('\u{1fae}', ['\u{1fa6}', '\0', '\0']), + ('\u{1faf}', ['\u{1fa7}', '\0', '\0']), + ('\u{1fb8}', ['\u{1fb0}', '\0', '\0']), + ('\u{1fb9}', ['\u{1fb1}', '\0', '\0']), + ('\u{1fba}', ['\u{1f70}', '\0', '\0']), + ('\u{1fbb}', ['\u{1f71}', '\0', '\0']), + ('\u{1fbc}', ['\u{1fb3}', '\0', '\0']), + ('\u{1fc8}', ['\u{1f72}', '\0', '\0']), + ('\u{1fc9}', ['\u{1f73}', '\0', '\0']), + ('\u{1fca}', ['\u{1f74}', '\0', '\0']), + ('\u{1fcb}', ['\u{1f75}', '\0', '\0']), + ('\u{1fcc}', ['\u{1fc3}', '\0', '\0']), + ('\u{1fd8}', ['\u{1fd0}', '\0', '\0']), + ('\u{1fd9}', ['\u{1fd1}', '\0', '\0']), + ('\u{1fda}', ['\u{1f76}', '\0', '\0']), + ('\u{1fdb}', ['\u{1f77}', '\0', '\0']), + ('\u{1fe8}', ['\u{1fe0}', '\0', '\0']), + ('\u{1fe9}', ['\u{1fe1}', '\0', '\0']), + ('\u{1fea}', ['\u{1f7a}', '\0', '\0']), + ('\u{1feb}', ['\u{1f7b}', '\0', '\0']), + ('\u{1fec}', ['\u{1fe5}', '\0', '\0']), + ('\u{1ff8}', ['\u{1f78}', '\0', '\0']), + ('\u{1ff9}', ['\u{1f79}', '\0', '\0']), + ('\u{1ffa}', ['\u{1f7c}', '\0', '\0']), + ('\u{1ffb}', ['\u{1f7d}', '\0', '\0']), + ('\u{1ffc}', ['\u{1ff3}', '\0', '\0']), + ('\u{2126}', ['\u{3c9}', '\0', '\0']), + ('\u{212a}', ['\u{6b}', '\0', '\0']), + ('\u{212b}', ['\u{e5}', '\0', '\0']), + ('\u{2132}', ['\u{214e}', '\0', '\0']), + ('\u{2160}', ['\u{2170}', '\0', '\0']), + ('\u{2161}', ['\u{2171}', '\0', '\0']), + ('\u{2162}', ['\u{2172}', '\0', '\0']), + ('\u{2163}', ['\u{2173}', '\0', '\0']), + ('\u{2164}', ['\u{2174}', '\0', '\0']), + ('\u{2165}', ['\u{2175}', '\0', '\0']), + ('\u{2166}', ['\u{2176}', '\0', '\0']), + ('\u{2167}', ['\u{2177}', '\0', '\0']), + ('\u{2168}', ['\u{2178}', '\0', '\0']), + ('\u{2169}', ['\u{2179}', '\0', '\0']), + ('\u{216a}', ['\u{217a}', '\0', '\0']), + ('\u{216b}', ['\u{217b}', '\0', '\0']), + ('\u{216c}', ['\u{217c}', '\0', '\0']), + ('\u{216d}', ['\u{217d}', '\0', '\0']), + ('\u{216e}', ['\u{217e}', '\0', '\0']), + ('\u{216f}', ['\u{217f}', '\0', '\0']), + ('\u{2183}', ['\u{2184}', '\0', '\0']), + ('\u{24b6}', ['\u{24d0}', '\0', '\0']), + ('\u{24b7}', ['\u{24d1}', '\0', '\0']), + ('\u{24b8}', ['\u{24d2}', '\0', '\0']), + ('\u{24b9}', ['\u{24d3}', '\0', '\0']), + ('\u{24ba}', ['\u{24d4}', '\0', '\0']), + ('\u{24bb}', ['\u{24d5}', '\0', '\0']), + ('\u{24bc}', ['\u{24d6}', '\0', '\0']), + ('\u{24bd}', ['\u{24d7}', '\0', '\0']), + ('\u{24be}', ['\u{24d8}', '\0', '\0']), + ('\u{24bf}', ['\u{24d9}', '\0', '\0']), + ('\u{24c0}', ['\u{24da}', '\0', '\0']), + ('\u{24c1}', ['\u{24db}', '\0', '\0']), + ('\u{24c2}', ['\u{24dc}', '\0', '\0']), + ('\u{24c3}', ['\u{24dd}', '\0', '\0']), + ('\u{24c4}', ['\u{24de}', '\0', '\0']), + ('\u{24c5}', ['\u{24df}', '\0', '\0']), + ('\u{24c6}', ['\u{24e0}', '\0', '\0']), + ('\u{24c7}', ['\u{24e1}', '\0', '\0']), + ('\u{24c8}', ['\u{24e2}', '\0', '\0']), + ('\u{24c9}', ['\u{24e3}', '\0', '\0']), + ('\u{24ca}', ['\u{24e4}', '\0', '\0']), + ('\u{24cb}', ['\u{24e5}', '\0', '\0']), + ('\u{24cc}', ['\u{24e6}', '\0', '\0']), + ('\u{24cd}', ['\u{24e7}', '\0', '\0']), + ('\u{24ce}', ['\u{24e8}', '\0', '\0']), + ('\u{24cf}', ['\u{24e9}', 
'\0', '\0']), + ('\u{2c00}', ['\u{2c30}', '\0', '\0']), + ('\u{2c01}', ['\u{2c31}', '\0', '\0']), + ('\u{2c02}', ['\u{2c32}', '\0', '\0']), + ('\u{2c03}', ['\u{2c33}', '\0', '\0']), + ('\u{2c04}', ['\u{2c34}', '\0', '\0']), + ('\u{2c05}', ['\u{2c35}', '\0', '\0']), + ('\u{2c06}', ['\u{2c36}', '\0', '\0']), + ('\u{2c07}', ['\u{2c37}', '\0', '\0']), + ('\u{2c08}', ['\u{2c38}', '\0', '\0']), + ('\u{2c09}', ['\u{2c39}', '\0', '\0']), + ('\u{2c0a}', ['\u{2c3a}', '\0', '\0']), + ('\u{2c0b}', ['\u{2c3b}', '\0', '\0']), + ('\u{2c0c}', ['\u{2c3c}', '\0', '\0']), + ('\u{2c0d}', ['\u{2c3d}', '\0', '\0']), + ('\u{2c0e}', ['\u{2c3e}', '\0', '\0']), + ('\u{2c0f}', ['\u{2c3f}', '\0', '\0']), + ('\u{2c10}', ['\u{2c40}', '\0', '\0']), + ('\u{2c11}', ['\u{2c41}', '\0', '\0']), + ('\u{2c12}', ['\u{2c42}', '\0', '\0']), + ('\u{2c13}', ['\u{2c43}', '\0', '\0']), + ('\u{2c14}', ['\u{2c44}', '\0', '\0']), + ('\u{2c15}', ['\u{2c45}', '\0', '\0']), + ('\u{2c16}', ['\u{2c46}', '\0', '\0']), + ('\u{2c17}', ['\u{2c47}', '\0', '\0']), + ('\u{2c18}', ['\u{2c48}', '\0', '\0']), + ('\u{2c19}', ['\u{2c49}', '\0', '\0']), + ('\u{2c1a}', ['\u{2c4a}', '\0', '\0']), + ('\u{2c1b}', ['\u{2c4b}', '\0', '\0']), + ('\u{2c1c}', ['\u{2c4c}', '\0', '\0']), + ('\u{2c1d}', ['\u{2c4d}', '\0', '\0']), + ('\u{2c1e}', ['\u{2c4e}', '\0', '\0']), + ('\u{2c1f}', ['\u{2c4f}', '\0', '\0']), + ('\u{2c20}', ['\u{2c50}', '\0', '\0']), + ('\u{2c21}', ['\u{2c51}', '\0', '\0']), + ('\u{2c22}', ['\u{2c52}', '\0', '\0']), + ('\u{2c23}', ['\u{2c53}', '\0', '\0']), + ('\u{2c24}', ['\u{2c54}', '\0', '\0']), + ('\u{2c25}', ['\u{2c55}', '\0', '\0']), + ('\u{2c26}', ['\u{2c56}', '\0', '\0']), + ('\u{2c27}', ['\u{2c57}', '\0', '\0']), + ('\u{2c28}', ['\u{2c58}', '\0', '\0']), + ('\u{2c29}', ['\u{2c59}', '\0', '\0']), + ('\u{2c2a}', ['\u{2c5a}', '\0', '\0']), + ('\u{2c2b}', ['\u{2c5b}', '\0', '\0']), + ('\u{2c2c}', ['\u{2c5c}', '\0', '\0']), + ('\u{2c2d}', ['\u{2c5d}', '\0', '\0']), + ('\u{2c2e}', ['\u{2c5e}', '\0', '\0']), + ('\u{2c60}', ['\u{2c61}', '\0', '\0']), + ('\u{2c62}', ['\u{26b}', '\0', '\0']), + ('\u{2c63}', ['\u{1d7d}', '\0', '\0']), + ('\u{2c64}', ['\u{27d}', '\0', '\0']), + ('\u{2c67}', ['\u{2c68}', '\0', '\0']), + ('\u{2c69}', ['\u{2c6a}', '\0', '\0']), + ('\u{2c6b}', ['\u{2c6c}', '\0', '\0']), + ('\u{2c6d}', ['\u{251}', '\0', '\0']), + ('\u{2c6e}', ['\u{271}', '\0', '\0']), + ('\u{2c6f}', ['\u{250}', '\0', '\0']), + ('\u{2c70}', ['\u{252}', '\0', '\0']), + ('\u{2c72}', ['\u{2c73}', '\0', '\0']), + ('\u{2c75}', ['\u{2c76}', '\0', '\0']), + ('\u{2c7e}', ['\u{23f}', '\0', '\0']), + ('\u{2c7f}', ['\u{240}', '\0', '\0']), + ('\u{2c80}', ['\u{2c81}', '\0', '\0']), + ('\u{2c82}', ['\u{2c83}', '\0', '\0']), + ('\u{2c84}', ['\u{2c85}', '\0', '\0']), + ('\u{2c86}', ['\u{2c87}', '\0', '\0']), + ('\u{2c88}', ['\u{2c89}', '\0', '\0']), + ('\u{2c8a}', ['\u{2c8b}', '\0', '\0']), + ('\u{2c8c}', ['\u{2c8d}', '\0', '\0']), + ('\u{2c8e}', ['\u{2c8f}', '\0', '\0']), + ('\u{2c90}', ['\u{2c91}', '\0', '\0']), + ('\u{2c92}', ['\u{2c93}', '\0', '\0']), + ('\u{2c94}', ['\u{2c95}', '\0', '\0']), + ('\u{2c96}', ['\u{2c97}', '\0', '\0']), + ('\u{2c98}', ['\u{2c99}', '\0', '\0']), + ('\u{2c9a}', ['\u{2c9b}', '\0', '\0']), + ('\u{2c9c}', ['\u{2c9d}', '\0', '\0']), + ('\u{2c9e}', ['\u{2c9f}', '\0', '\0']), + ('\u{2ca0}', ['\u{2ca1}', '\0', '\0']), + ('\u{2ca2}', ['\u{2ca3}', '\0', '\0']), + ('\u{2ca4}', ['\u{2ca5}', '\0', '\0']), + ('\u{2ca6}', ['\u{2ca7}', '\0', '\0']), + ('\u{2ca8}', ['\u{2ca9}', '\0', '\0']), + ('\u{2caa}', ['\u{2cab}', '\0', '\0']), + ('\u{2cac}', 
['\u{2cad}', '\0', '\0']), + ('\u{2cae}', ['\u{2caf}', '\0', '\0']), + ('\u{2cb0}', ['\u{2cb1}', '\0', '\0']), + ('\u{2cb2}', ['\u{2cb3}', '\0', '\0']), + ('\u{2cb4}', ['\u{2cb5}', '\0', '\0']), + ('\u{2cb6}', ['\u{2cb7}', '\0', '\0']), + ('\u{2cb8}', ['\u{2cb9}', '\0', '\0']), + ('\u{2cba}', ['\u{2cbb}', '\0', '\0']), + ('\u{2cbc}', ['\u{2cbd}', '\0', '\0']), + ('\u{2cbe}', ['\u{2cbf}', '\0', '\0']), + ('\u{2cc0}', ['\u{2cc1}', '\0', '\0']), + ('\u{2cc2}', ['\u{2cc3}', '\0', '\0']), + ('\u{2cc4}', ['\u{2cc5}', '\0', '\0']), + ('\u{2cc6}', ['\u{2cc7}', '\0', '\0']), + ('\u{2cc8}', ['\u{2cc9}', '\0', '\0']), + ('\u{2cca}', ['\u{2ccb}', '\0', '\0']), + ('\u{2ccc}', ['\u{2ccd}', '\0', '\0']), + ('\u{2cce}', ['\u{2ccf}', '\0', '\0']), + ('\u{2cd0}', ['\u{2cd1}', '\0', '\0']), + ('\u{2cd2}', ['\u{2cd3}', '\0', '\0']), + ('\u{2cd4}', ['\u{2cd5}', '\0', '\0']), + ('\u{2cd6}', ['\u{2cd7}', '\0', '\0']), + ('\u{2cd8}', ['\u{2cd9}', '\0', '\0']), + ('\u{2cda}', ['\u{2cdb}', '\0', '\0']), + ('\u{2cdc}', ['\u{2cdd}', '\0', '\0']), + ('\u{2cde}', ['\u{2cdf}', '\0', '\0']), + ('\u{2ce0}', ['\u{2ce1}', '\0', '\0']), + ('\u{2ce2}', ['\u{2ce3}', '\0', '\0']), + ('\u{2ceb}', ['\u{2cec}', '\0', '\0']), + ('\u{2ced}', ['\u{2cee}', '\0', '\0']), + ('\u{2cf2}', ['\u{2cf3}', '\0', '\0']), + ('\u{a640}', ['\u{a641}', '\0', '\0']), + ('\u{a642}', ['\u{a643}', '\0', '\0']), + ('\u{a644}', ['\u{a645}', '\0', '\0']), + ('\u{a646}', ['\u{a647}', '\0', '\0']), + ('\u{a648}', ['\u{a649}', '\0', '\0']), + ('\u{a64a}', ['\u{a64b}', '\0', '\0']), + ('\u{a64c}', ['\u{a64d}', '\0', '\0']), + ('\u{a64e}', ['\u{a64f}', '\0', '\0']), + ('\u{a650}', ['\u{a651}', '\0', '\0']), + ('\u{a652}', ['\u{a653}', '\0', '\0']), + ('\u{a654}', ['\u{a655}', '\0', '\0']), + ('\u{a656}', ['\u{a657}', '\0', '\0']), + ('\u{a658}', ['\u{a659}', '\0', '\0']), + ('\u{a65a}', ['\u{a65b}', '\0', '\0']), + ('\u{a65c}', ['\u{a65d}', '\0', '\0']), + ('\u{a65e}', ['\u{a65f}', '\0', '\0']), + ('\u{a660}', ['\u{a661}', '\0', '\0']), + ('\u{a662}', ['\u{a663}', '\0', '\0']), + ('\u{a664}', ['\u{a665}', '\0', '\0']), + ('\u{a666}', ['\u{a667}', '\0', '\0']), + ('\u{a668}', ['\u{a669}', '\0', '\0']), + ('\u{a66a}', ['\u{a66b}', '\0', '\0']), + ('\u{a66c}', ['\u{a66d}', '\0', '\0']), + ('\u{a680}', ['\u{a681}', '\0', '\0']), + ('\u{a682}', ['\u{a683}', '\0', '\0']), + ('\u{a684}', ['\u{a685}', '\0', '\0']), + ('\u{a686}', ['\u{a687}', '\0', '\0']), + ('\u{a688}', ['\u{a689}', '\0', '\0']), + ('\u{a68a}', ['\u{a68b}', '\0', '\0']), + ('\u{a68c}', ['\u{a68d}', '\0', '\0']), + ('\u{a68e}', ['\u{a68f}', '\0', '\0']), + ('\u{a690}', ['\u{a691}', '\0', '\0']), + ('\u{a692}', ['\u{a693}', '\0', '\0']), + ('\u{a694}', ['\u{a695}', '\0', '\0']), + ('\u{a696}', ['\u{a697}', '\0', '\0']), + ('\u{a698}', ['\u{a699}', '\0', '\0']), + ('\u{a69a}', ['\u{a69b}', '\0', '\0']), + ('\u{a722}', ['\u{a723}', '\0', '\0']), + ('\u{a724}', ['\u{a725}', '\0', '\0']), + ('\u{a726}', ['\u{a727}', '\0', '\0']), + ('\u{a728}', ['\u{a729}', '\0', '\0']), + ('\u{a72a}', ['\u{a72b}', '\0', '\0']), + ('\u{a72c}', ['\u{a72d}', '\0', '\0']), + ('\u{a72e}', ['\u{a72f}', '\0', '\0']), + ('\u{a732}', ['\u{a733}', '\0', '\0']), + ('\u{a734}', ['\u{a735}', '\0', '\0']), + ('\u{a736}', ['\u{a737}', '\0', '\0']), + ('\u{a738}', ['\u{a739}', '\0', '\0']), + ('\u{a73a}', ['\u{a73b}', '\0', '\0']), + ('\u{a73c}', ['\u{a73d}', '\0', '\0']), + ('\u{a73e}', ['\u{a73f}', '\0', '\0']), + ('\u{a740}', ['\u{a741}', '\0', '\0']), + ('\u{a742}', ['\u{a743}', '\0', '\0']), + ('\u{a744}', ['\u{a745}', '\0', '\0']), 
+ ('\u{a746}', ['\u{a747}', '\0', '\0']), + ('\u{a748}', ['\u{a749}', '\0', '\0']), + ('\u{a74a}', ['\u{a74b}', '\0', '\0']), + ('\u{a74c}', ['\u{a74d}', '\0', '\0']), + ('\u{a74e}', ['\u{a74f}', '\0', '\0']), + ('\u{a750}', ['\u{a751}', '\0', '\0']), + ('\u{a752}', ['\u{a753}', '\0', '\0']), + ('\u{a754}', ['\u{a755}', '\0', '\0']), + ('\u{a756}', ['\u{a757}', '\0', '\0']), + ('\u{a758}', ['\u{a759}', '\0', '\0']), + ('\u{a75a}', ['\u{a75b}', '\0', '\0']), + ('\u{a75c}', ['\u{a75d}', '\0', '\0']), + ('\u{a75e}', ['\u{a75f}', '\0', '\0']), + ('\u{a760}', ['\u{a761}', '\0', '\0']), + ('\u{a762}', ['\u{a763}', '\0', '\0']), + ('\u{a764}', ['\u{a765}', '\0', '\0']), + ('\u{a766}', ['\u{a767}', '\0', '\0']), + ('\u{a768}', ['\u{a769}', '\0', '\0']), + ('\u{a76a}', ['\u{a76b}', '\0', '\0']), + ('\u{a76c}', ['\u{a76d}', '\0', '\0']), + ('\u{a76e}', ['\u{a76f}', '\0', '\0']), + ('\u{a779}', ['\u{a77a}', '\0', '\0']), + ('\u{a77b}', ['\u{a77c}', '\0', '\0']), + ('\u{a77d}', ['\u{1d79}', '\0', '\0']), + ('\u{a77e}', ['\u{a77f}', '\0', '\0']), + ('\u{a780}', ['\u{a781}', '\0', '\0']), + ('\u{a782}', ['\u{a783}', '\0', '\0']), + ('\u{a784}', ['\u{a785}', '\0', '\0']), + ('\u{a786}', ['\u{a787}', '\0', '\0']), + ('\u{a78b}', ['\u{a78c}', '\0', '\0']), + ('\u{a78d}', ['\u{265}', '\0', '\0']), + ('\u{a790}', ['\u{a791}', '\0', '\0']), + ('\u{a792}', ['\u{a793}', '\0', '\0']), + ('\u{a796}', ['\u{a797}', '\0', '\0']), + ('\u{a798}', ['\u{a799}', '\0', '\0']), + ('\u{a79a}', ['\u{a79b}', '\0', '\0']), + ('\u{a79c}', ['\u{a79d}', '\0', '\0']), + ('\u{a79e}', ['\u{a79f}', '\0', '\0']), + ('\u{a7a0}', ['\u{a7a1}', '\0', '\0']), + ('\u{a7a2}', ['\u{a7a3}', '\0', '\0']), + ('\u{a7a4}', ['\u{a7a5}', '\0', '\0']), + ('\u{a7a6}', ['\u{a7a7}', '\0', '\0']), + ('\u{a7a8}', ['\u{a7a9}', '\0', '\0']), + ('\u{a7aa}', ['\u{266}', '\0', '\0']), + ('\u{a7ab}', ['\u{25c}', '\0', '\0']), + ('\u{a7ac}', ['\u{261}', '\0', '\0']), + ('\u{a7ad}', ['\u{26c}', '\0', '\0']), + ('\u{a7ae}', ['\u{26a}', '\0', '\0']), + ('\u{a7b0}', ['\u{29e}', '\0', '\0']), + ('\u{a7b1}', ['\u{287}', '\0', '\0']), + ('\u{a7b2}', ['\u{29d}', '\0', '\0']), + ('\u{a7b3}', ['\u{ab53}', '\0', '\0']), + ('\u{a7b4}', ['\u{a7b5}', '\0', '\0']), + ('\u{a7b6}', ['\u{a7b7}', '\0', '\0']), + ('\u{a7b8}', ['\u{a7b9}', '\0', '\0']), + ('\u{ff21}', ['\u{ff41}', '\0', '\0']), + ('\u{ff22}', ['\u{ff42}', '\0', '\0']), + ('\u{ff23}', ['\u{ff43}', '\0', '\0']), + ('\u{ff24}', ['\u{ff44}', '\0', '\0']), + ('\u{ff25}', ['\u{ff45}', '\0', '\0']), + ('\u{ff26}', ['\u{ff46}', '\0', '\0']), + ('\u{ff27}', ['\u{ff47}', '\0', '\0']), + ('\u{ff28}', ['\u{ff48}', '\0', '\0']), + ('\u{ff29}', ['\u{ff49}', '\0', '\0']), + ('\u{ff2a}', ['\u{ff4a}', '\0', '\0']), + ('\u{ff2b}', ['\u{ff4b}', '\0', '\0']), + ('\u{ff2c}', ['\u{ff4c}', '\0', '\0']), + ('\u{ff2d}', ['\u{ff4d}', '\0', '\0']), + ('\u{ff2e}', ['\u{ff4e}', '\0', '\0']), + ('\u{ff2f}', ['\u{ff4f}', '\0', '\0']), + ('\u{ff30}', ['\u{ff50}', '\0', '\0']), + ('\u{ff31}', ['\u{ff51}', '\0', '\0']), + ('\u{ff32}', ['\u{ff52}', '\0', '\0']), + ('\u{ff33}', ['\u{ff53}', '\0', '\0']), + ('\u{ff34}', ['\u{ff54}', '\0', '\0']), + ('\u{ff35}', ['\u{ff55}', '\0', '\0']), + ('\u{ff36}', ['\u{ff56}', '\0', '\0']), + ('\u{ff37}', ['\u{ff57}', '\0', '\0']), + ('\u{ff38}', ['\u{ff58}', '\0', '\0']), + ('\u{ff39}', ['\u{ff59}', '\0', '\0']), + ('\u{ff3a}', ['\u{ff5a}', '\0', '\0']), + ('\u{10400}', ['\u{10428}', '\0', '\0']), + ('\u{10401}', ['\u{10429}', '\0', '\0']), + ('\u{10402}', ['\u{1042a}', '\0', '\0']), + ('\u{10403}', ['\u{1042b}', 
'\0', '\0']), + ('\u{10404}', ['\u{1042c}', '\0', '\0']), + ('\u{10405}', ['\u{1042d}', '\0', '\0']), + ('\u{10406}', ['\u{1042e}', '\0', '\0']), + ('\u{10407}', ['\u{1042f}', '\0', '\0']), + ('\u{10408}', ['\u{10430}', '\0', '\0']), + ('\u{10409}', ['\u{10431}', '\0', '\0']), + ('\u{1040a}', ['\u{10432}', '\0', '\0']), + ('\u{1040b}', ['\u{10433}', '\0', '\0']), + ('\u{1040c}', ['\u{10434}', '\0', '\0']), + ('\u{1040d}', ['\u{10435}', '\0', '\0']), + ('\u{1040e}', ['\u{10436}', '\0', '\0']), + ('\u{1040f}', ['\u{10437}', '\0', '\0']), + ('\u{10410}', ['\u{10438}', '\0', '\0']), + ('\u{10411}', ['\u{10439}', '\0', '\0']), + ('\u{10412}', ['\u{1043a}', '\0', '\0']), + ('\u{10413}', ['\u{1043b}', '\0', '\0']), + ('\u{10414}', ['\u{1043c}', '\0', '\0']), + ('\u{10415}', ['\u{1043d}', '\0', '\0']), + ('\u{10416}', ['\u{1043e}', '\0', '\0']), + ('\u{10417}', ['\u{1043f}', '\0', '\0']), + ('\u{10418}', ['\u{10440}', '\0', '\0']), + ('\u{10419}', ['\u{10441}', '\0', '\0']), + ('\u{1041a}', ['\u{10442}', '\0', '\0']), + ('\u{1041b}', ['\u{10443}', '\0', '\0']), + ('\u{1041c}', ['\u{10444}', '\0', '\0']), + ('\u{1041d}', ['\u{10445}', '\0', '\0']), + ('\u{1041e}', ['\u{10446}', '\0', '\0']), + ('\u{1041f}', ['\u{10447}', '\0', '\0']), + ('\u{10420}', ['\u{10448}', '\0', '\0']), + ('\u{10421}', ['\u{10449}', '\0', '\0']), + ('\u{10422}', ['\u{1044a}', '\0', '\0']), + ('\u{10423}', ['\u{1044b}', '\0', '\0']), + ('\u{10424}', ['\u{1044c}', '\0', '\0']), + ('\u{10425}', ['\u{1044d}', '\0', '\0']), + ('\u{10426}', ['\u{1044e}', '\0', '\0']), + ('\u{10427}', ['\u{1044f}', '\0', '\0']), + ('\u{104b0}', ['\u{104d8}', '\0', '\0']), + ('\u{104b1}', ['\u{104d9}', '\0', '\0']), + ('\u{104b2}', ['\u{104da}', '\0', '\0']), + ('\u{104b3}', ['\u{104db}', '\0', '\0']), + ('\u{104b4}', ['\u{104dc}', '\0', '\0']), + ('\u{104b5}', ['\u{104dd}', '\0', '\0']), + ('\u{104b6}', ['\u{104de}', '\0', '\0']), + ('\u{104b7}', ['\u{104df}', '\0', '\0']), + ('\u{104b8}', ['\u{104e0}', '\0', '\0']), + ('\u{104b9}', ['\u{104e1}', '\0', '\0']), + ('\u{104ba}', ['\u{104e2}', '\0', '\0']), + ('\u{104bb}', ['\u{104e3}', '\0', '\0']), + ('\u{104bc}', ['\u{104e4}', '\0', '\0']), + ('\u{104bd}', ['\u{104e5}', '\0', '\0']), + ('\u{104be}', ['\u{104e6}', '\0', '\0']), + ('\u{104bf}', ['\u{104e7}', '\0', '\0']), + ('\u{104c0}', ['\u{104e8}', '\0', '\0']), + ('\u{104c1}', ['\u{104e9}', '\0', '\0']), + ('\u{104c2}', ['\u{104ea}', '\0', '\0']), + ('\u{104c3}', ['\u{104eb}', '\0', '\0']), + ('\u{104c4}', ['\u{104ec}', '\0', '\0']), + ('\u{104c5}', ['\u{104ed}', '\0', '\0']), + ('\u{104c6}', ['\u{104ee}', '\0', '\0']), + ('\u{104c7}', ['\u{104ef}', '\0', '\0']), + ('\u{104c8}', ['\u{104f0}', '\0', '\0']), + ('\u{104c9}', ['\u{104f1}', '\0', '\0']), + ('\u{104ca}', ['\u{104f2}', '\0', '\0']), + ('\u{104cb}', ['\u{104f3}', '\0', '\0']), + ('\u{104cc}', ['\u{104f4}', '\0', '\0']), + ('\u{104cd}', ['\u{104f5}', '\0', '\0']), + ('\u{104ce}', ['\u{104f6}', '\0', '\0']), + ('\u{104cf}', ['\u{104f7}', '\0', '\0']), + ('\u{104d0}', ['\u{104f8}', '\0', '\0']), + ('\u{104d1}', ['\u{104f9}', '\0', '\0']), + ('\u{104d2}', ['\u{104fa}', '\0', '\0']), + ('\u{104d3}', ['\u{104fb}', '\0', '\0']), + ('\u{10c80}', ['\u{10cc0}', '\0', '\0']), + ('\u{10c81}', ['\u{10cc1}', '\0', '\0']), + ('\u{10c82}', ['\u{10cc2}', '\0', '\0']), + ('\u{10c83}', ['\u{10cc3}', '\0', '\0']), + ('\u{10c84}', ['\u{10cc4}', '\0', '\0']), + ('\u{10c85}', ['\u{10cc5}', '\0', '\0']), + ('\u{10c86}', ['\u{10cc6}', '\0', '\0']), + ('\u{10c87}', ['\u{10cc7}', '\0', '\0']), + ('\u{10c88}', 
['\u{10cc8}', '\0', '\0']), + ('\u{10c89}', ['\u{10cc9}', '\0', '\0']), + ('\u{10c8a}', ['\u{10cca}', '\0', '\0']), + ('\u{10c8b}', ['\u{10ccb}', '\0', '\0']), + ('\u{10c8c}', ['\u{10ccc}', '\0', '\0']), + ('\u{10c8d}', ['\u{10ccd}', '\0', '\0']), + ('\u{10c8e}', ['\u{10cce}', '\0', '\0']), + ('\u{10c8f}', ['\u{10ccf}', '\0', '\0']), + ('\u{10c90}', ['\u{10cd0}', '\0', '\0']), + ('\u{10c91}', ['\u{10cd1}', '\0', '\0']), + ('\u{10c92}', ['\u{10cd2}', '\0', '\0']), + ('\u{10c93}', ['\u{10cd3}', '\0', '\0']), + ('\u{10c94}', ['\u{10cd4}', '\0', '\0']), + ('\u{10c95}', ['\u{10cd5}', '\0', '\0']), + ('\u{10c96}', ['\u{10cd6}', '\0', '\0']), + ('\u{10c97}', ['\u{10cd7}', '\0', '\0']), + ('\u{10c98}', ['\u{10cd8}', '\0', '\0']), + ('\u{10c99}', ['\u{10cd9}', '\0', '\0']), + ('\u{10c9a}', ['\u{10cda}', '\0', '\0']), + ('\u{10c9b}', ['\u{10cdb}', '\0', '\0']), + ('\u{10c9c}', ['\u{10cdc}', '\0', '\0']), + ('\u{10c9d}', ['\u{10cdd}', '\0', '\0']), + ('\u{10c9e}', ['\u{10cde}', '\0', '\0']), + ('\u{10c9f}', ['\u{10cdf}', '\0', '\0']), + ('\u{10ca0}', ['\u{10ce0}', '\0', '\0']), + ('\u{10ca1}', ['\u{10ce1}', '\0', '\0']), + ('\u{10ca2}', ['\u{10ce2}', '\0', '\0']), + ('\u{10ca3}', ['\u{10ce3}', '\0', '\0']), + ('\u{10ca4}', ['\u{10ce4}', '\0', '\0']), + ('\u{10ca5}', ['\u{10ce5}', '\0', '\0']), + ('\u{10ca6}', ['\u{10ce6}', '\0', '\0']), + ('\u{10ca7}', ['\u{10ce7}', '\0', '\0']), + ('\u{10ca8}', ['\u{10ce8}', '\0', '\0']), + ('\u{10ca9}', ['\u{10ce9}', '\0', '\0']), + ('\u{10caa}', ['\u{10cea}', '\0', '\0']), + ('\u{10cab}', ['\u{10ceb}', '\0', '\0']), + ('\u{10cac}', ['\u{10cec}', '\0', '\0']), + ('\u{10cad}', ['\u{10ced}', '\0', '\0']), + ('\u{10cae}', ['\u{10cee}', '\0', '\0']), + ('\u{10caf}', ['\u{10cef}', '\0', '\0']), + ('\u{10cb0}', ['\u{10cf0}', '\0', '\0']), + ('\u{10cb1}', ['\u{10cf1}', '\0', '\0']), + ('\u{10cb2}', ['\u{10cf2}', '\0', '\0']), + ('\u{118a0}', ['\u{118c0}', '\0', '\0']), + ('\u{118a1}', ['\u{118c1}', '\0', '\0']), + ('\u{118a2}', ['\u{118c2}', '\0', '\0']), + ('\u{118a3}', ['\u{118c3}', '\0', '\0']), + ('\u{118a4}', ['\u{118c4}', '\0', '\0']), + ('\u{118a5}', ['\u{118c5}', '\0', '\0']), + ('\u{118a6}', ['\u{118c6}', '\0', '\0']), + ('\u{118a7}', ['\u{118c7}', '\0', '\0']), + ('\u{118a8}', ['\u{118c8}', '\0', '\0']), + ('\u{118a9}', ['\u{118c9}', '\0', '\0']), + ('\u{118aa}', ['\u{118ca}', '\0', '\0']), + ('\u{118ab}', ['\u{118cb}', '\0', '\0']), + ('\u{118ac}', ['\u{118cc}', '\0', '\0']), + ('\u{118ad}', ['\u{118cd}', '\0', '\0']), + ('\u{118ae}', ['\u{118ce}', '\0', '\0']), + ('\u{118af}', ['\u{118cf}', '\0', '\0']), + ('\u{118b0}', ['\u{118d0}', '\0', '\0']), + ('\u{118b1}', ['\u{118d1}', '\0', '\0']), + ('\u{118b2}', ['\u{118d2}', '\0', '\0']), + ('\u{118b3}', ['\u{118d3}', '\0', '\0']), + ('\u{118b4}', ['\u{118d4}', '\0', '\0']), + ('\u{118b5}', ['\u{118d5}', '\0', '\0']), + ('\u{118b6}', ['\u{118d6}', '\0', '\0']), + ('\u{118b7}', ['\u{118d7}', '\0', '\0']), + ('\u{118b8}', ['\u{118d8}', '\0', '\0']), + ('\u{118b9}', ['\u{118d9}', '\0', '\0']), + ('\u{118ba}', ['\u{118da}', '\0', '\0']), + ('\u{118bb}', ['\u{118db}', '\0', '\0']), + ('\u{118bc}', ['\u{118dc}', '\0', '\0']), + ('\u{118bd}', ['\u{118dd}', '\0', '\0']), + ('\u{118be}', ['\u{118de}', '\0', '\0']), + ('\u{118bf}', ['\u{118df}', '\0', '\0']), + ('\u{16e40}', ['\u{16e60}', '\0', '\0']), + ('\u{16e41}', ['\u{16e61}', '\0', '\0']), + ('\u{16e42}', ['\u{16e62}', '\0', '\0']), + ('\u{16e43}', ['\u{16e63}', '\0', '\0']), + ('\u{16e44}', ['\u{16e64}', '\0', '\0']), + ('\u{16e45}', ['\u{16e65}', '\0', '\0']), + 
('\u{16e46}', ['\u{16e66}', '\0', '\0']), + ('\u{16e47}', ['\u{16e67}', '\0', '\0']), + ('\u{16e48}', ['\u{16e68}', '\0', '\0']), + ('\u{16e49}', ['\u{16e69}', '\0', '\0']), + ('\u{16e4a}', ['\u{16e6a}', '\0', '\0']), + ('\u{16e4b}', ['\u{16e6b}', '\0', '\0']), + ('\u{16e4c}', ['\u{16e6c}', '\0', '\0']), + ('\u{16e4d}', ['\u{16e6d}', '\0', '\0']), + ('\u{16e4e}', ['\u{16e6e}', '\0', '\0']), + ('\u{16e4f}', ['\u{16e6f}', '\0', '\0']), + ('\u{16e50}', ['\u{16e70}', '\0', '\0']), + ('\u{16e51}', ['\u{16e71}', '\0', '\0']), + ('\u{16e52}', ['\u{16e72}', '\0', '\0']), + ('\u{16e53}', ['\u{16e73}', '\0', '\0']), + ('\u{16e54}', ['\u{16e74}', '\0', '\0']), + ('\u{16e55}', ['\u{16e75}', '\0', '\0']), + ('\u{16e56}', ['\u{16e76}', '\0', '\0']), + ('\u{16e57}', ['\u{16e77}', '\0', '\0']), + ('\u{16e58}', ['\u{16e78}', '\0', '\0']), + ('\u{16e59}', ['\u{16e79}', '\0', '\0']), + ('\u{16e5a}', ['\u{16e7a}', '\0', '\0']), + ('\u{16e5b}', ['\u{16e7b}', '\0', '\0']), + ('\u{16e5c}', ['\u{16e7c}', '\0', '\0']), + ('\u{16e5d}', ['\u{16e7d}', '\0', '\0']), + ('\u{16e5e}', ['\u{16e7e}', '\0', '\0']), + ('\u{16e5f}', ['\u{16e7f}', '\0', '\0']), + ('\u{1e900}', ['\u{1e922}', '\0', '\0']), + ('\u{1e901}', ['\u{1e923}', '\0', '\0']), + ('\u{1e902}', ['\u{1e924}', '\0', '\0']), + ('\u{1e903}', ['\u{1e925}', '\0', '\0']), + ('\u{1e904}', ['\u{1e926}', '\0', '\0']), + ('\u{1e905}', ['\u{1e927}', '\0', '\0']), + ('\u{1e906}', ['\u{1e928}', '\0', '\0']), + ('\u{1e907}', ['\u{1e929}', '\0', '\0']), + ('\u{1e908}', ['\u{1e92a}', '\0', '\0']), + ('\u{1e909}', ['\u{1e92b}', '\0', '\0']), + ('\u{1e90a}', ['\u{1e92c}', '\0', '\0']), + ('\u{1e90b}', ['\u{1e92d}', '\0', '\0']), + ('\u{1e90c}', ['\u{1e92e}', '\0', '\0']), + ('\u{1e90d}', ['\u{1e92f}', '\0', '\0']), + ('\u{1e90e}', ['\u{1e930}', '\0', '\0']), + ('\u{1e90f}', ['\u{1e931}', '\0', '\0']), + ('\u{1e910}', ['\u{1e932}', '\0', '\0']), + ('\u{1e911}', ['\u{1e933}', '\0', '\0']), + ('\u{1e912}', ['\u{1e934}', '\0', '\0']), + ('\u{1e913}', ['\u{1e935}', '\0', '\0']), + ('\u{1e914}', ['\u{1e936}', '\0', '\0']), + ('\u{1e915}', ['\u{1e937}', '\0', '\0']), + ('\u{1e916}', ['\u{1e938}', '\0', '\0']), + ('\u{1e917}', ['\u{1e939}', '\0', '\0']), + ('\u{1e918}', ['\u{1e93a}', '\0', '\0']), + ('\u{1e919}', ['\u{1e93b}', '\0', '\0']), + ('\u{1e91a}', ['\u{1e93c}', '\0', '\0']), + ('\u{1e91b}', ['\u{1e93d}', '\0', '\0']), + ('\u{1e91c}', ['\u{1e93e}', '\0', '\0']), + ('\u{1e91d}', ['\u{1e93f}', '\0', '\0']), + ('\u{1e91e}', ['\u{1e940}', '\0', '\0']), + ('\u{1e91f}', ['\u{1e941}', '\0', '\0']), + ('\u{1e920}', ['\u{1e942}', '\0', '\0']), + ('\u{1e921}', ['\u{1e943}', '\0', '\0']), ]; const to_uppercase_table: &[(char, [char; 3])] = &[ - ('\u{61}', ['\u{41}', '\0', '\0']), ('\u{62}', ['\u{42}', '\0', '\0']), ('\u{63}', - ['\u{43}', '\0', '\0']), ('\u{64}', ['\u{44}', '\0', '\0']), ('\u{65}', ['\u{45}', '\0', - '\0']), ('\u{66}', ['\u{46}', '\0', '\0']), ('\u{67}', ['\u{47}', '\0', '\0']), ('\u{68}', - ['\u{48}', '\0', '\0']), ('\u{69}', ['\u{49}', '\0', '\0']), ('\u{6a}', ['\u{4a}', '\0', - '\0']), ('\u{6b}', ['\u{4b}', '\0', '\0']), ('\u{6c}', ['\u{4c}', '\0', '\0']), ('\u{6d}', - ['\u{4d}', '\0', '\0']), ('\u{6e}', ['\u{4e}', '\0', '\0']), ('\u{6f}', ['\u{4f}', '\0', - '\0']), ('\u{70}', ['\u{50}', '\0', '\0']), ('\u{71}', ['\u{51}', '\0', '\0']), ('\u{72}', - ['\u{52}', '\0', '\0']), ('\u{73}', ['\u{53}', '\0', '\0']), ('\u{74}', ['\u{54}', '\0', - '\0']), ('\u{75}', ['\u{55}', '\0', '\0']), ('\u{76}', ['\u{56}', '\0', '\0']), ('\u{77}', - ['\u{57}', '\0', '\0']), ('\u{78}', 
['\u{58}', '\0', '\0']), ('\u{79}', ['\u{59}', '\0', - '\0']), ('\u{7a}', ['\u{5a}', '\0', '\0']), ('\u{b5}', ['\u{39c}', '\0', '\0']), ('\u{df}', - ['\u{53}', '\u{53}', '\0']), ('\u{e0}', ['\u{c0}', '\0', '\0']), ('\u{e1}', ['\u{c1}', '\0', - '\0']), ('\u{e2}', ['\u{c2}', '\0', '\0']), ('\u{e3}', ['\u{c3}', '\0', '\0']), ('\u{e4}', - ['\u{c4}', '\0', '\0']), ('\u{e5}', ['\u{c5}', '\0', '\0']), ('\u{e6}', ['\u{c6}', '\0', - '\0']), ('\u{e7}', ['\u{c7}', '\0', '\0']), ('\u{e8}', ['\u{c8}', '\0', '\0']), ('\u{e9}', - ['\u{c9}', '\0', '\0']), ('\u{ea}', ['\u{ca}', '\0', '\0']), ('\u{eb}', ['\u{cb}', '\0', - '\0']), ('\u{ec}', ['\u{cc}', '\0', '\0']), ('\u{ed}', ['\u{cd}', '\0', '\0']), ('\u{ee}', - ['\u{ce}', '\0', '\0']), ('\u{ef}', ['\u{cf}', '\0', '\0']), ('\u{f0}', ['\u{d0}', '\0', - '\0']), ('\u{f1}', ['\u{d1}', '\0', '\0']), ('\u{f2}', ['\u{d2}', '\0', '\0']), ('\u{f3}', - ['\u{d3}', '\0', '\0']), ('\u{f4}', ['\u{d4}', '\0', '\0']), ('\u{f5}', ['\u{d5}', '\0', - '\0']), ('\u{f6}', ['\u{d6}', '\0', '\0']), ('\u{f8}', ['\u{d8}', '\0', '\0']), ('\u{f9}', - ['\u{d9}', '\0', '\0']), ('\u{fa}', ['\u{da}', '\0', '\0']), ('\u{fb}', ['\u{db}', '\0', - '\0']), ('\u{fc}', ['\u{dc}', '\0', '\0']), ('\u{fd}', ['\u{dd}', '\0', '\0']), ('\u{fe}', - ['\u{de}', '\0', '\0']), ('\u{ff}', ['\u{178}', '\0', '\0']), ('\u{101}', ['\u{100}', '\0', - '\0']), ('\u{103}', ['\u{102}', '\0', '\0']), ('\u{105}', ['\u{104}', '\0', '\0']), - ('\u{107}', ['\u{106}', '\0', '\0']), ('\u{109}', ['\u{108}', '\0', '\0']), ('\u{10b}', - ['\u{10a}', '\0', '\0']), ('\u{10d}', ['\u{10c}', '\0', '\0']), ('\u{10f}', ['\u{10e}', - '\0', '\0']), ('\u{111}', ['\u{110}', '\0', '\0']), ('\u{113}', ['\u{112}', '\0', '\0']), - ('\u{115}', ['\u{114}', '\0', '\0']), ('\u{117}', ['\u{116}', '\0', '\0']), ('\u{119}', - ['\u{118}', '\0', '\0']), ('\u{11b}', ['\u{11a}', '\0', '\0']), ('\u{11d}', ['\u{11c}', - '\0', '\0']), ('\u{11f}', ['\u{11e}', '\0', '\0']), ('\u{121}', ['\u{120}', '\0', '\0']), - ('\u{123}', ['\u{122}', '\0', '\0']), ('\u{125}', ['\u{124}', '\0', '\0']), ('\u{127}', - ['\u{126}', '\0', '\0']), ('\u{129}', ['\u{128}', '\0', '\0']), ('\u{12b}', ['\u{12a}', - '\0', '\0']), ('\u{12d}', ['\u{12c}', '\0', '\0']), ('\u{12f}', ['\u{12e}', '\0', '\0']), - ('\u{131}', ['\u{49}', '\0', '\0']), ('\u{133}', ['\u{132}', '\0', '\0']), ('\u{135}', - ['\u{134}', '\0', '\0']), ('\u{137}', ['\u{136}', '\0', '\0']), ('\u{13a}', ['\u{139}', - '\0', '\0']), ('\u{13c}', ['\u{13b}', '\0', '\0']), ('\u{13e}', ['\u{13d}', '\0', '\0']), - ('\u{140}', ['\u{13f}', '\0', '\0']), ('\u{142}', ['\u{141}', '\0', '\0']), ('\u{144}', - ['\u{143}', '\0', '\0']), ('\u{146}', ['\u{145}', '\0', '\0']), ('\u{148}', ['\u{147}', - '\0', '\0']), ('\u{149}', ['\u{2bc}', '\u{4e}', '\0']), ('\u{14b}', ['\u{14a}', '\0', - '\0']), ('\u{14d}', ['\u{14c}', '\0', '\0']), ('\u{14f}', ['\u{14e}', '\0', '\0']), - ('\u{151}', ['\u{150}', '\0', '\0']), ('\u{153}', ['\u{152}', '\0', '\0']), ('\u{155}', - ['\u{154}', '\0', '\0']), ('\u{157}', ['\u{156}', '\0', '\0']), ('\u{159}', ['\u{158}', - '\0', '\0']), ('\u{15b}', ['\u{15a}', '\0', '\0']), ('\u{15d}', ['\u{15c}', '\0', '\0']), - ('\u{15f}', ['\u{15e}', '\0', '\0']), ('\u{161}', ['\u{160}', '\0', '\0']), ('\u{163}', - ['\u{162}', '\0', '\0']), ('\u{165}', ['\u{164}', '\0', '\0']), ('\u{167}', ['\u{166}', - '\0', '\0']), ('\u{169}', ['\u{168}', '\0', '\0']), ('\u{16b}', ['\u{16a}', '\0', '\0']), - ('\u{16d}', ['\u{16c}', '\0', '\0']), ('\u{16f}', ['\u{16e}', '\0', '\0']), ('\u{171}', - ['\u{170}', '\0', '\0']), ('\u{173}', 
['\u{172}', '\0', '\0']), ('\u{175}', ['\u{174}', - '\0', '\0']), ('\u{177}', ['\u{176}', '\0', '\0']), ('\u{17a}', ['\u{179}', '\0', '\0']), - ('\u{17c}', ['\u{17b}', '\0', '\0']), ('\u{17e}', ['\u{17d}', '\0', '\0']), ('\u{17f}', - ['\u{53}', '\0', '\0']), ('\u{180}', ['\u{243}', '\0', '\0']), ('\u{183}', ['\u{182}', '\0', - '\0']), ('\u{185}', ['\u{184}', '\0', '\0']), ('\u{188}', ['\u{187}', '\0', '\0']), - ('\u{18c}', ['\u{18b}', '\0', '\0']), ('\u{192}', ['\u{191}', '\0', '\0']), ('\u{195}', - ['\u{1f6}', '\0', '\0']), ('\u{199}', ['\u{198}', '\0', '\0']), ('\u{19a}', ['\u{23d}', - '\0', '\0']), ('\u{19e}', ['\u{220}', '\0', '\0']), ('\u{1a1}', ['\u{1a0}', '\0', '\0']), - ('\u{1a3}', ['\u{1a2}', '\0', '\0']), ('\u{1a5}', ['\u{1a4}', '\0', '\0']), ('\u{1a8}', - ['\u{1a7}', '\0', '\0']), ('\u{1ad}', ['\u{1ac}', '\0', '\0']), ('\u{1b0}', ['\u{1af}', - '\0', '\0']), ('\u{1b4}', ['\u{1b3}', '\0', '\0']), ('\u{1b6}', ['\u{1b5}', '\0', '\0']), - ('\u{1b9}', ['\u{1b8}', '\0', '\0']), ('\u{1bd}', ['\u{1bc}', '\0', '\0']), ('\u{1bf}', - ['\u{1f7}', '\0', '\0']), ('\u{1c5}', ['\u{1c4}', '\0', '\0']), ('\u{1c6}', ['\u{1c4}', - '\0', '\0']), ('\u{1c8}', ['\u{1c7}', '\0', '\0']), ('\u{1c9}', ['\u{1c7}', '\0', '\0']), - ('\u{1cb}', ['\u{1ca}', '\0', '\0']), ('\u{1cc}', ['\u{1ca}', '\0', '\0']), ('\u{1ce}', - ['\u{1cd}', '\0', '\0']), ('\u{1d0}', ['\u{1cf}', '\0', '\0']), ('\u{1d2}', ['\u{1d1}', - '\0', '\0']), ('\u{1d4}', ['\u{1d3}', '\0', '\0']), ('\u{1d6}', ['\u{1d5}', '\0', '\0']), - ('\u{1d8}', ['\u{1d7}', '\0', '\0']), ('\u{1da}', ['\u{1d9}', '\0', '\0']), ('\u{1dc}', - ['\u{1db}', '\0', '\0']), ('\u{1dd}', ['\u{18e}', '\0', '\0']), ('\u{1df}', ['\u{1de}', - '\0', '\0']), ('\u{1e1}', ['\u{1e0}', '\0', '\0']), ('\u{1e3}', ['\u{1e2}', '\0', '\0']), - ('\u{1e5}', ['\u{1e4}', '\0', '\0']), ('\u{1e7}', ['\u{1e6}', '\0', '\0']), ('\u{1e9}', - ['\u{1e8}', '\0', '\0']), ('\u{1eb}', ['\u{1ea}', '\0', '\0']), ('\u{1ed}', ['\u{1ec}', - '\0', '\0']), ('\u{1ef}', ['\u{1ee}', '\0', '\0']), ('\u{1f0}', ['\u{4a}', '\u{30c}', - '\0']), ('\u{1f2}', ['\u{1f1}', '\0', '\0']), ('\u{1f3}', ['\u{1f1}', '\0', '\0']), - ('\u{1f5}', ['\u{1f4}', '\0', '\0']), ('\u{1f9}', ['\u{1f8}', '\0', '\0']), ('\u{1fb}', - ['\u{1fa}', '\0', '\0']), ('\u{1fd}', ['\u{1fc}', '\0', '\0']), ('\u{1ff}', ['\u{1fe}', - '\0', '\0']), ('\u{201}', ['\u{200}', '\0', '\0']), ('\u{203}', ['\u{202}', '\0', '\0']), - ('\u{205}', ['\u{204}', '\0', '\0']), ('\u{207}', ['\u{206}', '\0', '\0']), ('\u{209}', - ['\u{208}', '\0', '\0']), ('\u{20b}', ['\u{20a}', '\0', '\0']), ('\u{20d}', ['\u{20c}', - '\0', '\0']), ('\u{20f}', ['\u{20e}', '\0', '\0']), ('\u{211}', ['\u{210}', '\0', '\0']), - ('\u{213}', ['\u{212}', '\0', '\0']), ('\u{215}', ['\u{214}', '\0', '\0']), ('\u{217}', - ['\u{216}', '\0', '\0']), ('\u{219}', ['\u{218}', '\0', '\0']), ('\u{21b}', ['\u{21a}', - '\0', '\0']), ('\u{21d}', ['\u{21c}', '\0', '\0']), ('\u{21f}', ['\u{21e}', '\0', '\0']), - ('\u{223}', ['\u{222}', '\0', '\0']), ('\u{225}', ['\u{224}', '\0', '\0']), ('\u{227}', - ['\u{226}', '\0', '\0']), ('\u{229}', ['\u{228}', '\0', '\0']), ('\u{22b}', ['\u{22a}', - '\0', '\0']), ('\u{22d}', ['\u{22c}', '\0', '\0']), ('\u{22f}', ['\u{22e}', '\0', '\0']), - ('\u{231}', ['\u{230}', '\0', '\0']), ('\u{233}', ['\u{232}', '\0', '\0']), ('\u{23c}', - ['\u{23b}', '\0', '\0']), ('\u{23f}', ['\u{2c7e}', '\0', '\0']), ('\u{240}', ['\u{2c7f}', - '\0', '\0']), ('\u{242}', ['\u{241}', '\0', '\0']), ('\u{247}', ['\u{246}', '\0', '\0']), - ('\u{249}', ['\u{248}', '\0', '\0']), ('\u{24b}', ['\u{24a}', 
'\0', '\0']), ('\u{24d}', - ['\u{24c}', '\0', '\0']), ('\u{24f}', ['\u{24e}', '\0', '\0']), ('\u{250}', ['\u{2c6f}', - '\0', '\0']), ('\u{251}', ['\u{2c6d}', '\0', '\0']), ('\u{252}', ['\u{2c70}', '\0', '\0']), - ('\u{253}', ['\u{181}', '\0', '\0']), ('\u{254}', ['\u{186}', '\0', '\0']), ('\u{256}', - ['\u{189}', '\0', '\0']), ('\u{257}', ['\u{18a}', '\0', '\0']), ('\u{259}', ['\u{18f}', - '\0', '\0']), ('\u{25b}', ['\u{190}', '\0', '\0']), ('\u{25c}', ['\u{a7ab}', '\0', '\0']), - ('\u{260}', ['\u{193}', '\0', '\0']), ('\u{261}', ['\u{a7ac}', '\0', '\0']), ('\u{263}', - ['\u{194}', '\0', '\0']), ('\u{265}', ['\u{a78d}', '\0', '\0']), ('\u{266}', ['\u{a7aa}', - '\0', '\0']), ('\u{268}', ['\u{197}', '\0', '\0']), ('\u{269}', ['\u{196}', '\0', '\0']), - ('\u{26a}', ['\u{a7ae}', '\0', '\0']), ('\u{26b}', ['\u{2c62}', '\0', '\0']), ('\u{26c}', - ['\u{a7ad}', '\0', '\0']), ('\u{26f}', ['\u{19c}', '\0', '\0']), ('\u{271}', ['\u{2c6e}', - '\0', '\0']), ('\u{272}', ['\u{19d}', '\0', '\0']), ('\u{275}', ['\u{19f}', '\0', '\0']), - ('\u{27d}', ['\u{2c64}', '\0', '\0']), ('\u{280}', ['\u{1a6}', '\0', '\0']), ('\u{283}', - ['\u{1a9}', '\0', '\0']), ('\u{287}', ['\u{a7b1}', '\0', '\0']), ('\u{288}', ['\u{1ae}', - '\0', '\0']), ('\u{289}', ['\u{244}', '\0', '\0']), ('\u{28a}', ['\u{1b1}', '\0', '\0']), - ('\u{28b}', ['\u{1b2}', '\0', '\0']), ('\u{28c}', ['\u{245}', '\0', '\0']), ('\u{292}', - ['\u{1b7}', '\0', '\0']), ('\u{29d}', ['\u{a7b2}', '\0', '\0']), ('\u{29e}', ['\u{a7b0}', - '\0', '\0']), ('\u{345}', ['\u{399}', '\0', '\0']), ('\u{371}', ['\u{370}', '\0', '\0']), - ('\u{373}', ['\u{372}', '\0', '\0']), ('\u{377}', ['\u{376}', '\0', '\0']), ('\u{37b}', - ['\u{3fd}', '\0', '\0']), ('\u{37c}', ['\u{3fe}', '\0', '\0']), ('\u{37d}', ['\u{3ff}', - '\0', '\0']), ('\u{390}', ['\u{399}', '\u{308}', '\u{301}']), ('\u{3ac}', ['\u{386}', '\0', - '\0']), ('\u{3ad}', ['\u{388}', '\0', '\0']), ('\u{3ae}', ['\u{389}', '\0', '\0']), - ('\u{3af}', ['\u{38a}', '\0', '\0']), ('\u{3b0}', ['\u{3a5}', '\u{308}', '\u{301}']), - ('\u{3b1}', ['\u{391}', '\0', '\0']), ('\u{3b2}', ['\u{392}', '\0', '\0']), ('\u{3b3}', - ['\u{393}', '\0', '\0']), ('\u{3b4}', ['\u{394}', '\0', '\0']), ('\u{3b5}', ['\u{395}', - '\0', '\0']), ('\u{3b6}', ['\u{396}', '\0', '\0']), ('\u{3b7}', ['\u{397}', '\0', '\0']), - ('\u{3b8}', ['\u{398}', '\0', '\0']), ('\u{3b9}', ['\u{399}', '\0', '\0']), ('\u{3ba}', - ['\u{39a}', '\0', '\0']), ('\u{3bb}', ['\u{39b}', '\0', '\0']), ('\u{3bc}', ['\u{39c}', - '\0', '\0']), ('\u{3bd}', ['\u{39d}', '\0', '\0']), ('\u{3be}', ['\u{39e}', '\0', '\0']), - ('\u{3bf}', ['\u{39f}', '\0', '\0']), ('\u{3c0}', ['\u{3a0}', '\0', '\0']), ('\u{3c1}', - ['\u{3a1}', '\0', '\0']), ('\u{3c2}', ['\u{3a3}', '\0', '\0']), ('\u{3c3}', ['\u{3a3}', - '\0', '\0']), ('\u{3c4}', ['\u{3a4}', '\0', '\0']), ('\u{3c5}', ['\u{3a5}', '\0', '\0']), - ('\u{3c6}', ['\u{3a6}', '\0', '\0']), ('\u{3c7}', ['\u{3a7}', '\0', '\0']), ('\u{3c8}', - ['\u{3a8}', '\0', '\0']), ('\u{3c9}', ['\u{3a9}', '\0', '\0']), ('\u{3ca}', ['\u{3aa}', - '\0', '\0']), ('\u{3cb}', ['\u{3ab}', '\0', '\0']), ('\u{3cc}', ['\u{38c}', '\0', '\0']), - ('\u{3cd}', ['\u{38e}', '\0', '\0']), ('\u{3ce}', ['\u{38f}', '\0', '\0']), ('\u{3d0}', - ['\u{392}', '\0', '\0']), ('\u{3d1}', ['\u{398}', '\0', '\0']), ('\u{3d5}', ['\u{3a6}', - '\0', '\0']), ('\u{3d6}', ['\u{3a0}', '\0', '\0']), ('\u{3d7}', ['\u{3cf}', '\0', '\0']), - ('\u{3d9}', ['\u{3d8}', '\0', '\0']), ('\u{3db}', ['\u{3da}', '\0', '\0']), ('\u{3dd}', - ['\u{3dc}', '\0', '\0']), ('\u{3df}', ['\u{3de}', '\0', '\0']), 
('\u{3e1}', ['\u{3e0}', - '\0', '\0']), ('\u{3e3}', ['\u{3e2}', '\0', '\0']), ('\u{3e5}', ['\u{3e4}', '\0', '\0']), - ('\u{3e7}', ['\u{3e6}', '\0', '\0']), ('\u{3e9}', ['\u{3e8}', '\0', '\0']), ('\u{3eb}', - ['\u{3ea}', '\0', '\0']), ('\u{3ed}', ['\u{3ec}', '\0', '\0']), ('\u{3ef}', ['\u{3ee}', - '\0', '\0']), ('\u{3f0}', ['\u{39a}', '\0', '\0']), ('\u{3f1}', ['\u{3a1}', '\0', '\0']), - ('\u{3f2}', ['\u{3f9}', '\0', '\0']), ('\u{3f3}', ['\u{37f}', '\0', '\0']), ('\u{3f5}', - ['\u{395}', '\0', '\0']), ('\u{3f8}', ['\u{3f7}', '\0', '\0']), ('\u{3fb}', ['\u{3fa}', - '\0', '\0']), ('\u{430}', ['\u{410}', '\0', '\0']), ('\u{431}', ['\u{411}', '\0', '\0']), - ('\u{432}', ['\u{412}', '\0', '\0']), ('\u{433}', ['\u{413}', '\0', '\0']), ('\u{434}', - ['\u{414}', '\0', '\0']), ('\u{435}', ['\u{415}', '\0', '\0']), ('\u{436}', ['\u{416}', - '\0', '\0']), ('\u{437}', ['\u{417}', '\0', '\0']), ('\u{438}', ['\u{418}', '\0', '\0']), - ('\u{439}', ['\u{419}', '\0', '\0']), ('\u{43a}', ['\u{41a}', '\0', '\0']), ('\u{43b}', - ['\u{41b}', '\0', '\0']), ('\u{43c}', ['\u{41c}', '\0', '\0']), ('\u{43d}', ['\u{41d}', - '\0', '\0']), ('\u{43e}', ['\u{41e}', '\0', '\0']), ('\u{43f}', ['\u{41f}', '\0', '\0']), - ('\u{440}', ['\u{420}', '\0', '\0']), ('\u{441}', ['\u{421}', '\0', '\0']), ('\u{442}', - ['\u{422}', '\0', '\0']), ('\u{443}', ['\u{423}', '\0', '\0']), ('\u{444}', ['\u{424}', - '\0', '\0']), ('\u{445}', ['\u{425}', '\0', '\0']), ('\u{446}', ['\u{426}', '\0', '\0']), - ('\u{447}', ['\u{427}', '\0', '\0']), ('\u{448}', ['\u{428}', '\0', '\0']), ('\u{449}', - ['\u{429}', '\0', '\0']), ('\u{44a}', ['\u{42a}', '\0', '\0']), ('\u{44b}', ['\u{42b}', - '\0', '\0']), ('\u{44c}', ['\u{42c}', '\0', '\0']), ('\u{44d}', ['\u{42d}', '\0', '\0']), - ('\u{44e}', ['\u{42e}', '\0', '\0']), ('\u{44f}', ['\u{42f}', '\0', '\0']), ('\u{450}', - ['\u{400}', '\0', '\0']), ('\u{451}', ['\u{401}', '\0', '\0']), ('\u{452}', ['\u{402}', - '\0', '\0']), ('\u{453}', ['\u{403}', '\0', '\0']), ('\u{454}', ['\u{404}', '\0', '\0']), - ('\u{455}', ['\u{405}', '\0', '\0']), ('\u{456}', ['\u{406}', '\0', '\0']), ('\u{457}', - ['\u{407}', '\0', '\0']), ('\u{458}', ['\u{408}', '\0', '\0']), ('\u{459}', ['\u{409}', - '\0', '\0']), ('\u{45a}', ['\u{40a}', '\0', '\0']), ('\u{45b}', ['\u{40b}', '\0', '\0']), - ('\u{45c}', ['\u{40c}', '\0', '\0']), ('\u{45d}', ['\u{40d}', '\0', '\0']), ('\u{45e}', - ['\u{40e}', '\0', '\0']), ('\u{45f}', ['\u{40f}', '\0', '\0']), ('\u{461}', ['\u{460}', - '\0', '\0']), ('\u{463}', ['\u{462}', '\0', '\0']), ('\u{465}', ['\u{464}', '\0', '\0']), - ('\u{467}', ['\u{466}', '\0', '\0']), ('\u{469}', ['\u{468}', '\0', '\0']), ('\u{46b}', - ['\u{46a}', '\0', '\0']), ('\u{46d}', ['\u{46c}', '\0', '\0']), ('\u{46f}', ['\u{46e}', - '\0', '\0']), ('\u{471}', ['\u{470}', '\0', '\0']), ('\u{473}', ['\u{472}', '\0', '\0']), - ('\u{475}', ['\u{474}', '\0', '\0']), ('\u{477}', ['\u{476}', '\0', '\0']), ('\u{479}', - ['\u{478}', '\0', '\0']), ('\u{47b}', ['\u{47a}', '\0', '\0']), ('\u{47d}', ['\u{47c}', - '\0', '\0']), ('\u{47f}', ['\u{47e}', '\0', '\0']), ('\u{481}', ['\u{480}', '\0', '\0']), - ('\u{48b}', ['\u{48a}', '\0', '\0']), ('\u{48d}', ['\u{48c}', '\0', '\0']), ('\u{48f}', - ['\u{48e}', '\0', '\0']), ('\u{491}', ['\u{490}', '\0', '\0']), ('\u{493}', ['\u{492}', - '\0', '\0']), ('\u{495}', ['\u{494}', '\0', '\0']), ('\u{497}', ['\u{496}', '\0', '\0']), - ('\u{499}', ['\u{498}', '\0', '\0']), ('\u{49b}', ['\u{49a}', '\0', '\0']), ('\u{49d}', - ['\u{49c}', '\0', '\0']), ('\u{49f}', ['\u{49e}', '\0', '\0']), ('\u{4a1}', 
['\u{4a0}', - '\0', '\0']), ('\u{4a3}', ['\u{4a2}', '\0', '\0']), ('\u{4a5}', ['\u{4a4}', '\0', '\0']), - ('\u{4a7}', ['\u{4a6}', '\0', '\0']), ('\u{4a9}', ['\u{4a8}', '\0', '\0']), ('\u{4ab}', - ['\u{4aa}', '\0', '\0']), ('\u{4ad}', ['\u{4ac}', '\0', '\0']), ('\u{4af}', ['\u{4ae}', - '\0', '\0']), ('\u{4b1}', ['\u{4b0}', '\0', '\0']), ('\u{4b3}', ['\u{4b2}', '\0', '\0']), - ('\u{4b5}', ['\u{4b4}', '\0', '\0']), ('\u{4b7}', ['\u{4b6}', '\0', '\0']), ('\u{4b9}', - ['\u{4b8}', '\0', '\0']), ('\u{4bb}', ['\u{4ba}', '\0', '\0']), ('\u{4bd}', ['\u{4bc}', - '\0', '\0']), ('\u{4bf}', ['\u{4be}', '\0', '\0']), ('\u{4c2}', ['\u{4c1}', '\0', '\0']), - ('\u{4c4}', ['\u{4c3}', '\0', '\0']), ('\u{4c6}', ['\u{4c5}', '\0', '\0']), ('\u{4c8}', - ['\u{4c7}', '\0', '\0']), ('\u{4ca}', ['\u{4c9}', '\0', '\0']), ('\u{4cc}', ['\u{4cb}', - '\0', '\0']), ('\u{4ce}', ['\u{4cd}', '\0', '\0']), ('\u{4cf}', ['\u{4c0}', '\0', '\0']), - ('\u{4d1}', ['\u{4d0}', '\0', '\0']), ('\u{4d3}', ['\u{4d2}', '\0', '\0']), ('\u{4d5}', - ['\u{4d4}', '\0', '\0']), ('\u{4d7}', ['\u{4d6}', '\0', '\0']), ('\u{4d9}', ['\u{4d8}', - '\0', '\0']), ('\u{4db}', ['\u{4da}', '\0', '\0']), ('\u{4dd}', ['\u{4dc}', '\0', '\0']), - ('\u{4df}', ['\u{4de}', '\0', '\0']), ('\u{4e1}', ['\u{4e0}', '\0', '\0']), ('\u{4e3}', - ['\u{4e2}', '\0', '\0']), ('\u{4e5}', ['\u{4e4}', '\0', '\0']), ('\u{4e7}', ['\u{4e6}', - '\0', '\0']), ('\u{4e9}', ['\u{4e8}', '\0', '\0']), ('\u{4eb}', ['\u{4ea}', '\0', '\0']), - ('\u{4ed}', ['\u{4ec}', '\0', '\0']), ('\u{4ef}', ['\u{4ee}', '\0', '\0']), ('\u{4f1}', - ['\u{4f0}', '\0', '\0']), ('\u{4f3}', ['\u{4f2}', '\0', '\0']), ('\u{4f5}', ['\u{4f4}', - '\0', '\0']), ('\u{4f7}', ['\u{4f6}', '\0', '\0']), ('\u{4f9}', ['\u{4f8}', '\0', '\0']), - ('\u{4fb}', ['\u{4fa}', '\0', '\0']), ('\u{4fd}', ['\u{4fc}', '\0', '\0']), ('\u{4ff}', - ['\u{4fe}', '\0', '\0']), ('\u{501}', ['\u{500}', '\0', '\0']), ('\u{503}', ['\u{502}', - '\0', '\0']), ('\u{505}', ['\u{504}', '\0', '\0']), ('\u{507}', ['\u{506}', '\0', '\0']), - ('\u{509}', ['\u{508}', '\0', '\0']), ('\u{50b}', ['\u{50a}', '\0', '\0']), ('\u{50d}', - ['\u{50c}', '\0', '\0']), ('\u{50f}', ['\u{50e}', '\0', '\0']), ('\u{511}', ['\u{510}', - '\0', '\0']), ('\u{513}', ['\u{512}', '\0', '\0']), ('\u{515}', ['\u{514}', '\0', '\0']), - ('\u{517}', ['\u{516}', '\0', '\0']), ('\u{519}', ['\u{518}', '\0', '\0']), ('\u{51b}', - ['\u{51a}', '\0', '\0']), ('\u{51d}', ['\u{51c}', '\0', '\0']), ('\u{51f}', ['\u{51e}', - '\0', '\0']), ('\u{521}', ['\u{520}', '\0', '\0']), ('\u{523}', ['\u{522}', '\0', '\0']), - ('\u{525}', ['\u{524}', '\0', '\0']), ('\u{527}', ['\u{526}', '\0', '\0']), ('\u{529}', - ['\u{528}', '\0', '\0']), ('\u{52b}', ['\u{52a}', '\0', '\0']), ('\u{52d}', ['\u{52c}', - '\0', '\0']), ('\u{52f}', ['\u{52e}', '\0', '\0']), ('\u{561}', ['\u{531}', '\0', '\0']), - ('\u{562}', ['\u{532}', '\0', '\0']), ('\u{563}', ['\u{533}', '\0', '\0']), ('\u{564}', - ['\u{534}', '\0', '\0']), ('\u{565}', ['\u{535}', '\0', '\0']), ('\u{566}', ['\u{536}', - '\0', '\0']), ('\u{567}', ['\u{537}', '\0', '\0']), ('\u{568}', ['\u{538}', '\0', '\0']), - ('\u{569}', ['\u{539}', '\0', '\0']), ('\u{56a}', ['\u{53a}', '\0', '\0']), ('\u{56b}', - ['\u{53b}', '\0', '\0']), ('\u{56c}', ['\u{53c}', '\0', '\0']), ('\u{56d}', ['\u{53d}', - '\0', '\0']), ('\u{56e}', ['\u{53e}', '\0', '\0']), ('\u{56f}', ['\u{53f}', '\0', '\0']), - ('\u{570}', ['\u{540}', '\0', '\0']), ('\u{571}', ['\u{541}', '\0', '\0']), ('\u{572}', - ['\u{542}', '\0', '\0']), ('\u{573}', ['\u{543}', '\0', '\0']), ('\u{574}', ['\u{544}', - 
'\0', '\0']), ('\u{575}', ['\u{545}', '\0', '\0']), ('\u{576}', ['\u{546}', '\0', '\0']), - ('\u{577}', ['\u{547}', '\0', '\0']), ('\u{578}', ['\u{548}', '\0', '\0']), ('\u{579}', - ['\u{549}', '\0', '\0']), ('\u{57a}', ['\u{54a}', '\0', '\0']), ('\u{57b}', ['\u{54b}', - '\0', '\0']), ('\u{57c}', ['\u{54c}', '\0', '\0']), ('\u{57d}', ['\u{54d}', '\0', '\0']), - ('\u{57e}', ['\u{54e}', '\0', '\0']), ('\u{57f}', ['\u{54f}', '\0', '\0']), ('\u{580}', - ['\u{550}', '\0', '\0']), ('\u{581}', ['\u{551}', '\0', '\0']), ('\u{582}', ['\u{552}', - '\0', '\0']), ('\u{583}', ['\u{553}', '\0', '\0']), ('\u{584}', ['\u{554}', '\0', '\0']), - ('\u{585}', ['\u{555}', '\0', '\0']), ('\u{586}', ['\u{556}', '\0', '\0']), ('\u{587}', - ['\u{535}', '\u{552}', '\0']), ('\u{10d0}', ['\u{1c90}', '\0', '\0']), ('\u{10d1}', - ['\u{1c91}', '\0', '\0']), ('\u{10d2}', ['\u{1c92}', '\0', '\0']), ('\u{10d3}', ['\u{1c93}', - '\0', '\0']), ('\u{10d4}', ['\u{1c94}', '\0', '\0']), ('\u{10d5}', ['\u{1c95}', '\0', - '\0']), ('\u{10d6}', ['\u{1c96}', '\0', '\0']), ('\u{10d7}', ['\u{1c97}', '\0', '\0']), - ('\u{10d8}', ['\u{1c98}', '\0', '\0']), ('\u{10d9}', ['\u{1c99}', '\0', '\0']), ('\u{10da}', - ['\u{1c9a}', '\0', '\0']), ('\u{10db}', ['\u{1c9b}', '\0', '\0']), ('\u{10dc}', ['\u{1c9c}', - '\0', '\0']), ('\u{10dd}', ['\u{1c9d}', '\0', '\0']), ('\u{10de}', ['\u{1c9e}', '\0', - '\0']), ('\u{10df}', ['\u{1c9f}', '\0', '\0']), ('\u{10e0}', ['\u{1ca0}', '\0', '\0']), - ('\u{10e1}', ['\u{1ca1}', '\0', '\0']), ('\u{10e2}', ['\u{1ca2}', '\0', '\0']), ('\u{10e3}', - ['\u{1ca3}', '\0', '\0']), ('\u{10e4}', ['\u{1ca4}', '\0', '\0']), ('\u{10e5}', ['\u{1ca5}', - '\0', '\0']), ('\u{10e6}', ['\u{1ca6}', '\0', '\0']), ('\u{10e7}', ['\u{1ca7}', '\0', - '\0']), ('\u{10e8}', ['\u{1ca8}', '\0', '\0']), ('\u{10e9}', ['\u{1ca9}', '\0', '\0']), - ('\u{10ea}', ['\u{1caa}', '\0', '\0']), ('\u{10eb}', ['\u{1cab}', '\0', '\0']), ('\u{10ec}', - ['\u{1cac}', '\0', '\0']), ('\u{10ed}', ['\u{1cad}', '\0', '\0']), ('\u{10ee}', ['\u{1cae}', - '\0', '\0']), ('\u{10ef}', ['\u{1caf}', '\0', '\0']), ('\u{10f0}', ['\u{1cb0}', '\0', - '\0']), ('\u{10f1}', ['\u{1cb1}', '\0', '\0']), ('\u{10f2}', ['\u{1cb2}', '\0', '\0']), - ('\u{10f3}', ['\u{1cb3}', '\0', '\0']), ('\u{10f4}', ['\u{1cb4}', '\0', '\0']), ('\u{10f5}', - ['\u{1cb5}', '\0', '\0']), ('\u{10f6}', ['\u{1cb6}', '\0', '\0']), ('\u{10f7}', ['\u{1cb7}', - '\0', '\0']), ('\u{10f8}', ['\u{1cb8}', '\0', '\0']), ('\u{10f9}', ['\u{1cb9}', '\0', - '\0']), ('\u{10fa}', ['\u{1cba}', '\0', '\0']), ('\u{10fd}', ['\u{1cbd}', '\0', '\0']), - ('\u{10fe}', ['\u{1cbe}', '\0', '\0']), ('\u{10ff}', ['\u{1cbf}', '\0', '\0']), ('\u{13f8}', - ['\u{13f0}', '\0', '\0']), ('\u{13f9}', ['\u{13f1}', '\0', '\0']), ('\u{13fa}', ['\u{13f2}', - '\0', '\0']), ('\u{13fb}', ['\u{13f3}', '\0', '\0']), ('\u{13fc}', ['\u{13f4}', '\0', - '\0']), ('\u{13fd}', ['\u{13f5}', '\0', '\0']), ('\u{1c80}', ['\u{412}', '\0', '\0']), - ('\u{1c81}', ['\u{414}', '\0', '\0']), ('\u{1c82}', ['\u{41e}', '\0', '\0']), ('\u{1c83}', - ['\u{421}', '\0', '\0']), ('\u{1c84}', ['\u{422}', '\0', '\0']), ('\u{1c85}', ['\u{422}', - '\0', '\0']), ('\u{1c86}', ['\u{42a}', '\0', '\0']), ('\u{1c87}', ['\u{462}', '\0', '\0']), - ('\u{1c88}', ['\u{a64a}', '\0', '\0']), ('\u{1d79}', ['\u{a77d}', '\0', '\0']), ('\u{1d7d}', - ['\u{2c63}', '\0', '\0']), ('\u{1e01}', ['\u{1e00}', '\0', '\0']), ('\u{1e03}', ['\u{1e02}', - '\0', '\0']), ('\u{1e05}', ['\u{1e04}', '\0', '\0']), ('\u{1e07}', ['\u{1e06}', '\0', - '\0']), ('\u{1e09}', ['\u{1e08}', '\0', '\0']), ('\u{1e0b}', ['\u{1e0a}', 
'\0', '\0']), - ('\u{1e0d}', ['\u{1e0c}', '\0', '\0']), ('\u{1e0f}', ['\u{1e0e}', '\0', '\0']), ('\u{1e11}', - ['\u{1e10}', '\0', '\0']), ('\u{1e13}', ['\u{1e12}', '\0', '\0']), ('\u{1e15}', ['\u{1e14}', - '\0', '\0']), ('\u{1e17}', ['\u{1e16}', '\0', '\0']), ('\u{1e19}', ['\u{1e18}', '\0', - '\0']), ('\u{1e1b}', ['\u{1e1a}', '\0', '\0']), ('\u{1e1d}', ['\u{1e1c}', '\0', '\0']), - ('\u{1e1f}', ['\u{1e1e}', '\0', '\0']), ('\u{1e21}', ['\u{1e20}', '\0', '\0']), ('\u{1e23}', - ['\u{1e22}', '\0', '\0']), ('\u{1e25}', ['\u{1e24}', '\0', '\0']), ('\u{1e27}', ['\u{1e26}', - '\0', '\0']), ('\u{1e29}', ['\u{1e28}', '\0', '\0']), ('\u{1e2b}', ['\u{1e2a}', '\0', - '\0']), ('\u{1e2d}', ['\u{1e2c}', '\0', '\0']), ('\u{1e2f}', ['\u{1e2e}', '\0', '\0']), - ('\u{1e31}', ['\u{1e30}', '\0', '\0']), ('\u{1e33}', ['\u{1e32}', '\0', '\0']), ('\u{1e35}', - ['\u{1e34}', '\0', '\0']), ('\u{1e37}', ['\u{1e36}', '\0', '\0']), ('\u{1e39}', ['\u{1e38}', - '\0', '\0']), ('\u{1e3b}', ['\u{1e3a}', '\0', '\0']), ('\u{1e3d}', ['\u{1e3c}', '\0', - '\0']), ('\u{1e3f}', ['\u{1e3e}', '\0', '\0']), ('\u{1e41}', ['\u{1e40}', '\0', '\0']), - ('\u{1e43}', ['\u{1e42}', '\0', '\0']), ('\u{1e45}', ['\u{1e44}', '\0', '\0']), ('\u{1e47}', - ['\u{1e46}', '\0', '\0']), ('\u{1e49}', ['\u{1e48}', '\0', '\0']), ('\u{1e4b}', ['\u{1e4a}', - '\0', '\0']), ('\u{1e4d}', ['\u{1e4c}', '\0', '\0']), ('\u{1e4f}', ['\u{1e4e}', '\0', - '\0']), ('\u{1e51}', ['\u{1e50}', '\0', '\0']), ('\u{1e53}', ['\u{1e52}', '\0', '\0']), - ('\u{1e55}', ['\u{1e54}', '\0', '\0']), ('\u{1e57}', ['\u{1e56}', '\0', '\0']), ('\u{1e59}', - ['\u{1e58}', '\0', '\0']), ('\u{1e5b}', ['\u{1e5a}', '\0', '\0']), ('\u{1e5d}', ['\u{1e5c}', - '\0', '\0']), ('\u{1e5f}', ['\u{1e5e}', '\0', '\0']), ('\u{1e61}', ['\u{1e60}', '\0', - '\0']), ('\u{1e63}', ['\u{1e62}', '\0', '\0']), ('\u{1e65}', ['\u{1e64}', '\0', '\0']), - ('\u{1e67}', ['\u{1e66}', '\0', '\0']), ('\u{1e69}', ['\u{1e68}', '\0', '\0']), ('\u{1e6b}', - ['\u{1e6a}', '\0', '\0']), ('\u{1e6d}', ['\u{1e6c}', '\0', '\0']), ('\u{1e6f}', ['\u{1e6e}', - '\0', '\0']), ('\u{1e71}', ['\u{1e70}', '\0', '\0']), ('\u{1e73}', ['\u{1e72}', '\0', - '\0']), ('\u{1e75}', ['\u{1e74}', '\0', '\0']), ('\u{1e77}', ['\u{1e76}', '\0', '\0']), - ('\u{1e79}', ['\u{1e78}', '\0', '\0']), ('\u{1e7b}', ['\u{1e7a}', '\0', '\0']), ('\u{1e7d}', - ['\u{1e7c}', '\0', '\0']), ('\u{1e7f}', ['\u{1e7e}', '\0', '\0']), ('\u{1e81}', ['\u{1e80}', - '\0', '\0']), ('\u{1e83}', ['\u{1e82}', '\0', '\0']), ('\u{1e85}', ['\u{1e84}', '\0', - '\0']), ('\u{1e87}', ['\u{1e86}', '\0', '\0']), ('\u{1e89}', ['\u{1e88}', '\0', '\0']), - ('\u{1e8b}', ['\u{1e8a}', '\0', '\0']), ('\u{1e8d}', ['\u{1e8c}', '\0', '\0']), ('\u{1e8f}', - ['\u{1e8e}', '\0', '\0']), ('\u{1e91}', ['\u{1e90}', '\0', '\0']), ('\u{1e93}', ['\u{1e92}', - '\0', '\0']), ('\u{1e95}', ['\u{1e94}', '\0', '\0']), ('\u{1e96}', ['\u{48}', '\u{331}', - '\0']), ('\u{1e97}', ['\u{54}', '\u{308}', '\0']), ('\u{1e98}', ['\u{57}', '\u{30a}', - '\0']), ('\u{1e99}', ['\u{59}', '\u{30a}', '\0']), ('\u{1e9a}', ['\u{41}', '\u{2be}', - '\0']), ('\u{1e9b}', ['\u{1e60}', '\0', '\0']), ('\u{1ea1}', ['\u{1ea0}', '\0', '\0']), - ('\u{1ea3}', ['\u{1ea2}', '\0', '\0']), ('\u{1ea5}', ['\u{1ea4}', '\0', '\0']), ('\u{1ea7}', - ['\u{1ea6}', '\0', '\0']), ('\u{1ea9}', ['\u{1ea8}', '\0', '\0']), ('\u{1eab}', ['\u{1eaa}', - '\0', '\0']), ('\u{1ead}', ['\u{1eac}', '\0', '\0']), ('\u{1eaf}', ['\u{1eae}', '\0', - '\0']), ('\u{1eb1}', ['\u{1eb0}', '\0', '\0']), ('\u{1eb3}', ['\u{1eb2}', '\0', '\0']), - ('\u{1eb5}', ['\u{1eb4}', '\0', '\0']), 
('\u{1eb7}', ['\u{1eb6}', '\0', '\0']), ('\u{1eb9}', - ['\u{1eb8}', '\0', '\0']), ('\u{1ebb}', ['\u{1eba}', '\0', '\0']), ('\u{1ebd}', ['\u{1ebc}', - '\0', '\0']), ('\u{1ebf}', ['\u{1ebe}', '\0', '\0']), ('\u{1ec1}', ['\u{1ec0}', '\0', - '\0']), ('\u{1ec3}', ['\u{1ec2}', '\0', '\0']), ('\u{1ec5}', ['\u{1ec4}', '\0', '\0']), - ('\u{1ec7}', ['\u{1ec6}', '\0', '\0']), ('\u{1ec9}', ['\u{1ec8}', '\0', '\0']), ('\u{1ecb}', - ['\u{1eca}', '\0', '\0']), ('\u{1ecd}', ['\u{1ecc}', '\0', '\0']), ('\u{1ecf}', ['\u{1ece}', - '\0', '\0']), ('\u{1ed1}', ['\u{1ed0}', '\0', '\0']), ('\u{1ed3}', ['\u{1ed2}', '\0', - '\0']), ('\u{1ed5}', ['\u{1ed4}', '\0', '\0']), ('\u{1ed7}', ['\u{1ed6}', '\0', '\0']), - ('\u{1ed9}', ['\u{1ed8}', '\0', '\0']), ('\u{1edb}', ['\u{1eda}', '\0', '\0']), ('\u{1edd}', - ['\u{1edc}', '\0', '\0']), ('\u{1edf}', ['\u{1ede}', '\0', '\0']), ('\u{1ee1}', ['\u{1ee0}', - '\0', '\0']), ('\u{1ee3}', ['\u{1ee2}', '\0', '\0']), ('\u{1ee5}', ['\u{1ee4}', '\0', - '\0']), ('\u{1ee7}', ['\u{1ee6}', '\0', '\0']), ('\u{1ee9}', ['\u{1ee8}', '\0', '\0']), - ('\u{1eeb}', ['\u{1eea}', '\0', '\0']), ('\u{1eed}', ['\u{1eec}', '\0', '\0']), ('\u{1eef}', - ['\u{1eee}', '\0', '\0']), ('\u{1ef1}', ['\u{1ef0}', '\0', '\0']), ('\u{1ef3}', ['\u{1ef2}', - '\0', '\0']), ('\u{1ef5}', ['\u{1ef4}', '\0', '\0']), ('\u{1ef7}', ['\u{1ef6}', '\0', - '\0']), ('\u{1ef9}', ['\u{1ef8}', '\0', '\0']), ('\u{1efb}', ['\u{1efa}', '\0', '\0']), - ('\u{1efd}', ['\u{1efc}', '\0', '\0']), ('\u{1eff}', ['\u{1efe}', '\0', '\0']), ('\u{1f00}', - ['\u{1f08}', '\0', '\0']), ('\u{1f01}', ['\u{1f09}', '\0', '\0']), ('\u{1f02}', ['\u{1f0a}', - '\0', '\0']), ('\u{1f03}', ['\u{1f0b}', '\0', '\0']), ('\u{1f04}', ['\u{1f0c}', '\0', - '\0']), ('\u{1f05}', ['\u{1f0d}', '\0', '\0']), ('\u{1f06}', ['\u{1f0e}', '\0', '\0']), - ('\u{1f07}', ['\u{1f0f}', '\0', '\0']), ('\u{1f10}', ['\u{1f18}', '\0', '\0']), ('\u{1f11}', - ['\u{1f19}', '\0', '\0']), ('\u{1f12}', ['\u{1f1a}', '\0', '\0']), ('\u{1f13}', ['\u{1f1b}', - '\0', '\0']), ('\u{1f14}', ['\u{1f1c}', '\0', '\0']), ('\u{1f15}', ['\u{1f1d}', '\0', - '\0']), ('\u{1f20}', ['\u{1f28}', '\0', '\0']), ('\u{1f21}', ['\u{1f29}', '\0', '\0']), - ('\u{1f22}', ['\u{1f2a}', '\0', '\0']), ('\u{1f23}', ['\u{1f2b}', '\0', '\0']), ('\u{1f24}', - ['\u{1f2c}', '\0', '\0']), ('\u{1f25}', ['\u{1f2d}', '\0', '\0']), ('\u{1f26}', ['\u{1f2e}', - '\0', '\0']), ('\u{1f27}', ['\u{1f2f}', '\0', '\0']), ('\u{1f30}', ['\u{1f38}', '\0', - '\0']), ('\u{1f31}', ['\u{1f39}', '\0', '\0']), ('\u{1f32}', ['\u{1f3a}', '\0', '\0']), - ('\u{1f33}', ['\u{1f3b}', '\0', '\0']), ('\u{1f34}', ['\u{1f3c}', '\0', '\0']), ('\u{1f35}', - ['\u{1f3d}', '\0', '\0']), ('\u{1f36}', ['\u{1f3e}', '\0', '\0']), ('\u{1f37}', ['\u{1f3f}', - '\0', '\0']), ('\u{1f40}', ['\u{1f48}', '\0', '\0']), ('\u{1f41}', ['\u{1f49}', '\0', - '\0']), ('\u{1f42}', ['\u{1f4a}', '\0', '\0']), ('\u{1f43}', ['\u{1f4b}', '\0', '\0']), - ('\u{1f44}', ['\u{1f4c}', '\0', '\0']), ('\u{1f45}', ['\u{1f4d}', '\0', '\0']), ('\u{1f50}', - ['\u{3a5}', '\u{313}', '\0']), ('\u{1f51}', ['\u{1f59}', '\0', '\0']), ('\u{1f52}', - ['\u{3a5}', '\u{313}', '\u{300}']), ('\u{1f53}', ['\u{1f5b}', '\0', '\0']), ('\u{1f54}', - ['\u{3a5}', '\u{313}', '\u{301}']), ('\u{1f55}', ['\u{1f5d}', '\0', '\0']), ('\u{1f56}', - ['\u{3a5}', '\u{313}', '\u{342}']), ('\u{1f57}', ['\u{1f5f}', '\0', '\0']), ('\u{1f60}', - ['\u{1f68}', '\0', '\0']), ('\u{1f61}', ['\u{1f69}', '\0', '\0']), ('\u{1f62}', ['\u{1f6a}', - '\0', '\0']), ('\u{1f63}', ['\u{1f6b}', '\0', '\0']), ('\u{1f64}', ['\u{1f6c}', '\0', - '\0']), 
('\u{1f65}', ['\u{1f6d}', '\0', '\0']), ('\u{1f66}', ['\u{1f6e}', '\0', '\0']), - ('\u{1f67}', ['\u{1f6f}', '\0', '\0']), ('\u{1f70}', ['\u{1fba}', '\0', '\0']), ('\u{1f71}', - ['\u{1fbb}', '\0', '\0']), ('\u{1f72}', ['\u{1fc8}', '\0', '\0']), ('\u{1f73}', ['\u{1fc9}', - '\0', '\0']), ('\u{1f74}', ['\u{1fca}', '\0', '\0']), ('\u{1f75}', ['\u{1fcb}', '\0', - '\0']), ('\u{1f76}', ['\u{1fda}', '\0', '\0']), ('\u{1f77}', ['\u{1fdb}', '\0', '\0']), - ('\u{1f78}', ['\u{1ff8}', '\0', '\0']), ('\u{1f79}', ['\u{1ff9}', '\0', '\0']), ('\u{1f7a}', - ['\u{1fea}', '\0', '\0']), ('\u{1f7b}', ['\u{1feb}', '\0', '\0']), ('\u{1f7c}', ['\u{1ffa}', - '\0', '\0']), ('\u{1f7d}', ['\u{1ffb}', '\0', '\0']), ('\u{1f80}', ['\u{1f08}', '\u{399}', - '\0']), ('\u{1f81}', ['\u{1f09}', '\u{399}', '\0']), ('\u{1f82}', ['\u{1f0a}', '\u{399}', - '\0']), ('\u{1f83}', ['\u{1f0b}', '\u{399}', '\0']), ('\u{1f84}', ['\u{1f0c}', '\u{399}', - '\0']), ('\u{1f85}', ['\u{1f0d}', '\u{399}', '\0']), ('\u{1f86}', ['\u{1f0e}', '\u{399}', - '\0']), ('\u{1f87}', ['\u{1f0f}', '\u{399}', '\0']), ('\u{1f88}', ['\u{1f08}', '\u{399}', - '\0']), ('\u{1f89}', ['\u{1f09}', '\u{399}', '\0']), ('\u{1f8a}', ['\u{1f0a}', '\u{399}', - '\0']), ('\u{1f8b}', ['\u{1f0b}', '\u{399}', '\0']), ('\u{1f8c}', ['\u{1f0c}', '\u{399}', - '\0']), ('\u{1f8d}', ['\u{1f0d}', '\u{399}', '\0']), ('\u{1f8e}', ['\u{1f0e}', '\u{399}', - '\0']), ('\u{1f8f}', ['\u{1f0f}', '\u{399}', '\0']), ('\u{1f90}', ['\u{1f28}', '\u{399}', - '\0']), ('\u{1f91}', ['\u{1f29}', '\u{399}', '\0']), ('\u{1f92}', ['\u{1f2a}', '\u{399}', - '\0']), ('\u{1f93}', ['\u{1f2b}', '\u{399}', '\0']), ('\u{1f94}', ['\u{1f2c}', '\u{399}', - '\0']), ('\u{1f95}', ['\u{1f2d}', '\u{399}', '\0']), ('\u{1f96}', ['\u{1f2e}', '\u{399}', - '\0']), ('\u{1f97}', ['\u{1f2f}', '\u{399}', '\0']), ('\u{1f98}', ['\u{1f28}', '\u{399}', - '\0']), ('\u{1f99}', ['\u{1f29}', '\u{399}', '\0']), ('\u{1f9a}', ['\u{1f2a}', '\u{399}', - '\0']), ('\u{1f9b}', ['\u{1f2b}', '\u{399}', '\0']), ('\u{1f9c}', ['\u{1f2c}', '\u{399}', - '\0']), ('\u{1f9d}', ['\u{1f2d}', '\u{399}', '\0']), ('\u{1f9e}', ['\u{1f2e}', '\u{399}', - '\0']), ('\u{1f9f}', ['\u{1f2f}', '\u{399}', '\0']), ('\u{1fa0}', ['\u{1f68}', '\u{399}', - '\0']), ('\u{1fa1}', ['\u{1f69}', '\u{399}', '\0']), ('\u{1fa2}', ['\u{1f6a}', '\u{399}', - '\0']), ('\u{1fa3}', ['\u{1f6b}', '\u{399}', '\0']), ('\u{1fa4}', ['\u{1f6c}', '\u{399}', - '\0']), ('\u{1fa5}', ['\u{1f6d}', '\u{399}', '\0']), ('\u{1fa6}', ['\u{1f6e}', '\u{399}', - '\0']), ('\u{1fa7}', ['\u{1f6f}', '\u{399}', '\0']), ('\u{1fa8}', ['\u{1f68}', '\u{399}', - '\0']), ('\u{1fa9}', ['\u{1f69}', '\u{399}', '\0']), ('\u{1faa}', ['\u{1f6a}', '\u{399}', - '\0']), ('\u{1fab}', ['\u{1f6b}', '\u{399}', '\0']), ('\u{1fac}', ['\u{1f6c}', '\u{399}', - '\0']), ('\u{1fad}', ['\u{1f6d}', '\u{399}', '\0']), ('\u{1fae}', ['\u{1f6e}', '\u{399}', - '\0']), ('\u{1faf}', ['\u{1f6f}', '\u{399}', '\0']), ('\u{1fb0}', ['\u{1fb8}', '\0', '\0']), - ('\u{1fb1}', ['\u{1fb9}', '\0', '\0']), ('\u{1fb2}', ['\u{1fba}', '\u{399}', '\0']), - ('\u{1fb3}', ['\u{391}', '\u{399}', '\0']), ('\u{1fb4}', ['\u{386}', '\u{399}', '\0']), - ('\u{1fb6}', ['\u{391}', '\u{342}', '\0']), ('\u{1fb7}', ['\u{391}', '\u{342}', '\u{399}']), - ('\u{1fbc}', ['\u{391}', '\u{399}', '\0']), ('\u{1fbe}', ['\u{399}', '\0', '\0']), - ('\u{1fc2}', ['\u{1fca}', '\u{399}', '\0']), ('\u{1fc3}', ['\u{397}', '\u{399}', '\0']), - ('\u{1fc4}', ['\u{389}', '\u{399}', '\0']), ('\u{1fc6}', ['\u{397}', '\u{342}', '\0']), - ('\u{1fc7}', ['\u{397}', '\u{342}', '\u{399}']), ('\u{1fcc}', 
['\u{397}', '\u{399}', '\0']), - ('\u{1fd0}', ['\u{1fd8}', '\0', '\0']), ('\u{1fd1}', ['\u{1fd9}', '\0', '\0']), ('\u{1fd2}', - ['\u{399}', '\u{308}', '\u{300}']), ('\u{1fd3}', ['\u{399}', '\u{308}', '\u{301}']), - ('\u{1fd6}', ['\u{399}', '\u{342}', '\0']), ('\u{1fd7}', ['\u{399}', '\u{308}', '\u{342}']), - ('\u{1fe0}', ['\u{1fe8}', '\0', '\0']), ('\u{1fe1}', ['\u{1fe9}', '\0', '\0']), ('\u{1fe2}', - ['\u{3a5}', '\u{308}', '\u{300}']), ('\u{1fe3}', ['\u{3a5}', '\u{308}', '\u{301}']), - ('\u{1fe4}', ['\u{3a1}', '\u{313}', '\0']), ('\u{1fe5}', ['\u{1fec}', '\0', '\0']), - ('\u{1fe6}', ['\u{3a5}', '\u{342}', '\0']), ('\u{1fe7}', ['\u{3a5}', '\u{308}', '\u{342}']), - ('\u{1ff2}', ['\u{1ffa}', '\u{399}', '\0']), ('\u{1ff3}', ['\u{3a9}', '\u{399}', '\0']), - ('\u{1ff4}', ['\u{38f}', '\u{399}', '\0']), ('\u{1ff6}', ['\u{3a9}', '\u{342}', '\0']), - ('\u{1ff7}', ['\u{3a9}', '\u{342}', '\u{399}']), ('\u{1ffc}', ['\u{3a9}', '\u{399}', '\0']), - ('\u{214e}', ['\u{2132}', '\0', '\0']), ('\u{2170}', ['\u{2160}', '\0', '\0']), ('\u{2171}', - ['\u{2161}', '\0', '\0']), ('\u{2172}', ['\u{2162}', '\0', '\0']), ('\u{2173}', ['\u{2163}', - '\0', '\0']), ('\u{2174}', ['\u{2164}', '\0', '\0']), ('\u{2175}', ['\u{2165}', '\0', - '\0']), ('\u{2176}', ['\u{2166}', '\0', '\0']), ('\u{2177}', ['\u{2167}', '\0', '\0']), - ('\u{2178}', ['\u{2168}', '\0', '\0']), ('\u{2179}', ['\u{2169}', '\0', '\0']), ('\u{217a}', - ['\u{216a}', '\0', '\0']), ('\u{217b}', ['\u{216b}', '\0', '\0']), ('\u{217c}', ['\u{216c}', - '\0', '\0']), ('\u{217d}', ['\u{216d}', '\0', '\0']), ('\u{217e}', ['\u{216e}', '\0', - '\0']), ('\u{217f}', ['\u{216f}', '\0', '\0']), ('\u{2184}', ['\u{2183}', '\0', '\0']), - ('\u{24d0}', ['\u{24b6}', '\0', '\0']), ('\u{24d1}', ['\u{24b7}', '\0', '\0']), ('\u{24d2}', - ['\u{24b8}', '\0', '\0']), ('\u{24d3}', ['\u{24b9}', '\0', '\0']), ('\u{24d4}', ['\u{24ba}', - '\0', '\0']), ('\u{24d5}', ['\u{24bb}', '\0', '\0']), ('\u{24d6}', ['\u{24bc}', '\0', - '\0']), ('\u{24d7}', ['\u{24bd}', '\0', '\0']), ('\u{24d8}', ['\u{24be}', '\0', '\0']), - ('\u{24d9}', ['\u{24bf}', '\0', '\0']), ('\u{24da}', ['\u{24c0}', '\0', '\0']), ('\u{24db}', - ['\u{24c1}', '\0', '\0']), ('\u{24dc}', ['\u{24c2}', '\0', '\0']), ('\u{24dd}', ['\u{24c3}', - '\0', '\0']), ('\u{24de}', ['\u{24c4}', '\0', '\0']), ('\u{24df}', ['\u{24c5}', '\0', - '\0']), ('\u{24e0}', ['\u{24c6}', '\0', '\0']), ('\u{24e1}', ['\u{24c7}', '\0', '\0']), - ('\u{24e2}', ['\u{24c8}', '\0', '\0']), ('\u{24e3}', ['\u{24c9}', '\0', '\0']), ('\u{24e4}', - ['\u{24ca}', '\0', '\0']), ('\u{24e5}', ['\u{24cb}', '\0', '\0']), ('\u{24e6}', ['\u{24cc}', - '\0', '\0']), ('\u{24e7}', ['\u{24cd}', '\0', '\0']), ('\u{24e8}', ['\u{24ce}', '\0', - '\0']), ('\u{24e9}', ['\u{24cf}', '\0', '\0']), ('\u{2c30}', ['\u{2c00}', '\0', '\0']), - ('\u{2c31}', ['\u{2c01}', '\0', '\0']), ('\u{2c32}', ['\u{2c02}', '\0', '\0']), ('\u{2c33}', - ['\u{2c03}', '\0', '\0']), ('\u{2c34}', ['\u{2c04}', '\0', '\0']), ('\u{2c35}', ['\u{2c05}', - '\0', '\0']), ('\u{2c36}', ['\u{2c06}', '\0', '\0']), ('\u{2c37}', ['\u{2c07}', '\0', - '\0']), ('\u{2c38}', ['\u{2c08}', '\0', '\0']), ('\u{2c39}', ['\u{2c09}', '\0', '\0']), - ('\u{2c3a}', ['\u{2c0a}', '\0', '\0']), ('\u{2c3b}', ['\u{2c0b}', '\0', '\0']), ('\u{2c3c}', - ['\u{2c0c}', '\0', '\0']), ('\u{2c3d}', ['\u{2c0d}', '\0', '\0']), ('\u{2c3e}', ['\u{2c0e}', - '\0', '\0']), ('\u{2c3f}', ['\u{2c0f}', '\0', '\0']), ('\u{2c40}', ['\u{2c10}', '\0', - '\0']), ('\u{2c41}', ['\u{2c11}', '\0', '\0']), ('\u{2c42}', ['\u{2c12}', '\0', '\0']), - ('\u{2c43}', ['\u{2c13}', 
'\0', '\0']), ('\u{2c44}', ['\u{2c14}', '\0', '\0']), ('\u{2c45}', - ['\u{2c15}', '\0', '\0']), ('\u{2c46}', ['\u{2c16}', '\0', '\0']), ('\u{2c47}', ['\u{2c17}', - '\0', '\0']), ('\u{2c48}', ['\u{2c18}', '\0', '\0']), ('\u{2c49}', ['\u{2c19}', '\0', - '\0']), ('\u{2c4a}', ['\u{2c1a}', '\0', '\0']), ('\u{2c4b}', ['\u{2c1b}', '\0', '\0']), - ('\u{2c4c}', ['\u{2c1c}', '\0', '\0']), ('\u{2c4d}', ['\u{2c1d}', '\0', '\0']), ('\u{2c4e}', - ['\u{2c1e}', '\0', '\0']), ('\u{2c4f}', ['\u{2c1f}', '\0', '\0']), ('\u{2c50}', ['\u{2c20}', - '\0', '\0']), ('\u{2c51}', ['\u{2c21}', '\0', '\0']), ('\u{2c52}', ['\u{2c22}', '\0', - '\0']), ('\u{2c53}', ['\u{2c23}', '\0', '\0']), ('\u{2c54}', ['\u{2c24}', '\0', '\0']), - ('\u{2c55}', ['\u{2c25}', '\0', '\0']), ('\u{2c56}', ['\u{2c26}', '\0', '\0']), ('\u{2c57}', - ['\u{2c27}', '\0', '\0']), ('\u{2c58}', ['\u{2c28}', '\0', '\0']), ('\u{2c59}', ['\u{2c29}', - '\0', '\0']), ('\u{2c5a}', ['\u{2c2a}', '\0', '\0']), ('\u{2c5b}', ['\u{2c2b}', '\0', - '\0']), ('\u{2c5c}', ['\u{2c2c}', '\0', '\0']), ('\u{2c5d}', ['\u{2c2d}', '\0', '\0']), - ('\u{2c5e}', ['\u{2c2e}', '\0', '\0']), ('\u{2c61}', ['\u{2c60}', '\0', '\0']), ('\u{2c65}', - ['\u{23a}', '\0', '\0']), ('\u{2c66}', ['\u{23e}', '\0', '\0']), ('\u{2c68}', ['\u{2c67}', - '\0', '\0']), ('\u{2c6a}', ['\u{2c69}', '\0', '\0']), ('\u{2c6c}', ['\u{2c6b}', '\0', - '\0']), ('\u{2c73}', ['\u{2c72}', '\0', '\0']), ('\u{2c76}', ['\u{2c75}', '\0', '\0']), - ('\u{2c81}', ['\u{2c80}', '\0', '\0']), ('\u{2c83}', ['\u{2c82}', '\0', '\0']), ('\u{2c85}', - ['\u{2c84}', '\0', '\0']), ('\u{2c87}', ['\u{2c86}', '\0', '\0']), ('\u{2c89}', ['\u{2c88}', - '\0', '\0']), ('\u{2c8b}', ['\u{2c8a}', '\0', '\0']), ('\u{2c8d}', ['\u{2c8c}', '\0', - '\0']), ('\u{2c8f}', ['\u{2c8e}', '\0', '\0']), ('\u{2c91}', ['\u{2c90}', '\0', '\0']), - ('\u{2c93}', ['\u{2c92}', '\0', '\0']), ('\u{2c95}', ['\u{2c94}', '\0', '\0']), ('\u{2c97}', - ['\u{2c96}', '\0', '\0']), ('\u{2c99}', ['\u{2c98}', '\0', '\0']), ('\u{2c9b}', ['\u{2c9a}', - '\0', '\0']), ('\u{2c9d}', ['\u{2c9c}', '\0', '\0']), ('\u{2c9f}', ['\u{2c9e}', '\0', - '\0']), ('\u{2ca1}', ['\u{2ca0}', '\0', '\0']), ('\u{2ca3}', ['\u{2ca2}', '\0', '\0']), - ('\u{2ca5}', ['\u{2ca4}', '\0', '\0']), ('\u{2ca7}', ['\u{2ca6}', '\0', '\0']), ('\u{2ca9}', - ['\u{2ca8}', '\0', '\0']), ('\u{2cab}', ['\u{2caa}', '\0', '\0']), ('\u{2cad}', ['\u{2cac}', - '\0', '\0']), ('\u{2caf}', ['\u{2cae}', '\0', '\0']), ('\u{2cb1}', ['\u{2cb0}', '\0', - '\0']), ('\u{2cb3}', ['\u{2cb2}', '\0', '\0']), ('\u{2cb5}', ['\u{2cb4}', '\0', '\0']), - ('\u{2cb7}', ['\u{2cb6}', '\0', '\0']), ('\u{2cb9}', ['\u{2cb8}', '\0', '\0']), ('\u{2cbb}', - ['\u{2cba}', '\0', '\0']), ('\u{2cbd}', ['\u{2cbc}', '\0', '\0']), ('\u{2cbf}', ['\u{2cbe}', - '\0', '\0']), ('\u{2cc1}', ['\u{2cc0}', '\0', '\0']), ('\u{2cc3}', ['\u{2cc2}', '\0', - '\0']), ('\u{2cc5}', ['\u{2cc4}', '\0', '\0']), ('\u{2cc7}', ['\u{2cc6}', '\0', '\0']), - ('\u{2cc9}', ['\u{2cc8}', '\0', '\0']), ('\u{2ccb}', ['\u{2cca}', '\0', '\0']), ('\u{2ccd}', - ['\u{2ccc}', '\0', '\0']), ('\u{2ccf}', ['\u{2cce}', '\0', '\0']), ('\u{2cd1}', ['\u{2cd0}', - '\0', '\0']), ('\u{2cd3}', ['\u{2cd2}', '\0', '\0']), ('\u{2cd5}', ['\u{2cd4}', '\0', - '\0']), ('\u{2cd7}', ['\u{2cd6}', '\0', '\0']), ('\u{2cd9}', ['\u{2cd8}', '\0', '\0']), - ('\u{2cdb}', ['\u{2cda}', '\0', '\0']), ('\u{2cdd}', ['\u{2cdc}', '\0', '\0']), ('\u{2cdf}', - ['\u{2cde}', '\0', '\0']), ('\u{2ce1}', ['\u{2ce0}', '\0', '\0']), ('\u{2ce3}', ['\u{2ce2}', - '\0', '\0']), ('\u{2cec}', ['\u{2ceb}', '\0', '\0']), ('\u{2cee}', ['\u{2ced}', 
'\0', - '\0']), ('\u{2cf3}', ['\u{2cf2}', '\0', '\0']), ('\u{2d00}', ['\u{10a0}', '\0', '\0']), - ('\u{2d01}', ['\u{10a1}', '\0', '\0']), ('\u{2d02}', ['\u{10a2}', '\0', '\0']), ('\u{2d03}', - ['\u{10a3}', '\0', '\0']), ('\u{2d04}', ['\u{10a4}', '\0', '\0']), ('\u{2d05}', ['\u{10a5}', - '\0', '\0']), ('\u{2d06}', ['\u{10a6}', '\0', '\0']), ('\u{2d07}', ['\u{10a7}', '\0', - '\0']), ('\u{2d08}', ['\u{10a8}', '\0', '\0']), ('\u{2d09}', ['\u{10a9}', '\0', '\0']), - ('\u{2d0a}', ['\u{10aa}', '\0', '\0']), ('\u{2d0b}', ['\u{10ab}', '\0', '\0']), ('\u{2d0c}', - ['\u{10ac}', '\0', '\0']), ('\u{2d0d}', ['\u{10ad}', '\0', '\0']), ('\u{2d0e}', ['\u{10ae}', - '\0', '\0']), ('\u{2d0f}', ['\u{10af}', '\0', '\0']), ('\u{2d10}', ['\u{10b0}', '\0', - '\0']), ('\u{2d11}', ['\u{10b1}', '\0', '\0']), ('\u{2d12}', ['\u{10b2}', '\0', '\0']), - ('\u{2d13}', ['\u{10b3}', '\0', '\0']), ('\u{2d14}', ['\u{10b4}', '\0', '\0']), ('\u{2d15}', - ['\u{10b5}', '\0', '\0']), ('\u{2d16}', ['\u{10b6}', '\0', '\0']), ('\u{2d17}', ['\u{10b7}', - '\0', '\0']), ('\u{2d18}', ['\u{10b8}', '\0', '\0']), ('\u{2d19}', ['\u{10b9}', '\0', - '\0']), ('\u{2d1a}', ['\u{10ba}', '\0', '\0']), ('\u{2d1b}', ['\u{10bb}', '\0', '\0']), - ('\u{2d1c}', ['\u{10bc}', '\0', '\0']), ('\u{2d1d}', ['\u{10bd}', '\0', '\0']), ('\u{2d1e}', - ['\u{10be}', '\0', '\0']), ('\u{2d1f}', ['\u{10bf}', '\0', '\0']), ('\u{2d20}', ['\u{10c0}', - '\0', '\0']), ('\u{2d21}', ['\u{10c1}', '\0', '\0']), ('\u{2d22}', ['\u{10c2}', '\0', - '\0']), ('\u{2d23}', ['\u{10c3}', '\0', '\0']), ('\u{2d24}', ['\u{10c4}', '\0', '\0']), - ('\u{2d25}', ['\u{10c5}', '\0', '\0']), ('\u{2d27}', ['\u{10c7}', '\0', '\0']), ('\u{2d2d}', - ['\u{10cd}', '\0', '\0']), ('\u{a641}', ['\u{a640}', '\0', '\0']), ('\u{a643}', ['\u{a642}', - '\0', '\0']), ('\u{a645}', ['\u{a644}', '\0', '\0']), ('\u{a647}', ['\u{a646}', '\0', - '\0']), ('\u{a649}', ['\u{a648}', '\0', '\0']), ('\u{a64b}', ['\u{a64a}', '\0', '\0']), - ('\u{a64d}', ['\u{a64c}', '\0', '\0']), ('\u{a64f}', ['\u{a64e}', '\0', '\0']), ('\u{a651}', - ['\u{a650}', '\0', '\0']), ('\u{a653}', ['\u{a652}', '\0', '\0']), ('\u{a655}', ['\u{a654}', - '\0', '\0']), ('\u{a657}', ['\u{a656}', '\0', '\0']), ('\u{a659}', ['\u{a658}', '\0', - '\0']), ('\u{a65b}', ['\u{a65a}', '\0', '\0']), ('\u{a65d}', ['\u{a65c}', '\0', '\0']), - ('\u{a65f}', ['\u{a65e}', '\0', '\0']), ('\u{a661}', ['\u{a660}', '\0', '\0']), ('\u{a663}', - ['\u{a662}', '\0', '\0']), ('\u{a665}', ['\u{a664}', '\0', '\0']), ('\u{a667}', ['\u{a666}', - '\0', '\0']), ('\u{a669}', ['\u{a668}', '\0', '\0']), ('\u{a66b}', ['\u{a66a}', '\0', - '\0']), ('\u{a66d}', ['\u{a66c}', '\0', '\0']), ('\u{a681}', ['\u{a680}', '\0', '\0']), - ('\u{a683}', ['\u{a682}', '\0', '\0']), ('\u{a685}', ['\u{a684}', '\0', '\0']), ('\u{a687}', - ['\u{a686}', '\0', '\0']), ('\u{a689}', ['\u{a688}', '\0', '\0']), ('\u{a68b}', ['\u{a68a}', - '\0', '\0']), ('\u{a68d}', ['\u{a68c}', '\0', '\0']), ('\u{a68f}', ['\u{a68e}', '\0', - '\0']), ('\u{a691}', ['\u{a690}', '\0', '\0']), ('\u{a693}', ['\u{a692}', '\0', '\0']), - ('\u{a695}', ['\u{a694}', '\0', '\0']), ('\u{a697}', ['\u{a696}', '\0', '\0']), ('\u{a699}', - ['\u{a698}', '\0', '\0']), ('\u{a69b}', ['\u{a69a}', '\0', '\0']), ('\u{a723}', ['\u{a722}', - '\0', '\0']), ('\u{a725}', ['\u{a724}', '\0', '\0']), ('\u{a727}', ['\u{a726}', '\0', - '\0']), ('\u{a729}', ['\u{a728}', '\0', '\0']), ('\u{a72b}', ['\u{a72a}', '\0', '\0']), - ('\u{a72d}', ['\u{a72c}', '\0', '\0']), ('\u{a72f}', ['\u{a72e}', '\0', '\0']), ('\u{a733}', - ['\u{a732}', '\0', '\0']), ('\u{a735}', 
['\u{a734}', '\0', '\0']), ('\u{a737}', ['\u{a736}', - '\0', '\0']), ('\u{a739}', ['\u{a738}', '\0', '\0']), ('\u{a73b}', ['\u{a73a}', '\0', - '\0']), ('\u{a73d}', ['\u{a73c}', '\0', '\0']), ('\u{a73f}', ['\u{a73e}', '\0', '\0']), - ('\u{a741}', ['\u{a740}', '\0', '\0']), ('\u{a743}', ['\u{a742}', '\0', '\0']), ('\u{a745}', - ['\u{a744}', '\0', '\0']), ('\u{a747}', ['\u{a746}', '\0', '\0']), ('\u{a749}', ['\u{a748}', - '\0', '\0']), ('\u{a74b}', ['\u{a74a}', '\0', '\0']), ('\u{a74d}', ['\u{a74c}', '\0', - '\0']), ('\u{a74f}', ['\u{a74e}', '\0', '\0']), ('\u{a751}', ['\u{a750}', '\0', '\0']), - ('\u{a753}', ['\u{a752}', '\0', '\0']), ('\u{a755}', ['\u{a754}', '\0', '\0']), ('\u{a757}', - ['\u{a756}', '\0', '\0']), ('\u{a759}', ['\u{a758}', '\0', '\0']), ('\u{a75b}', ['\u{a75a}', - '\0', '\0']), ('\u{a75d}', ['\u{a75c}', '\0', '\0']), ('\u{a75f}', ['\u{a75e}', '\0', - '\0']), ('\u{a761}', ['\u{a760}', '\0', '\0']), ('\u{a763}', ['\u{a762}', '\0', '\0']), - ('\u{a765}', ['\u{a764}', '\0', '\0']), ('\u{a767}', ['\u{a766}', '\0', '\0']), ('\u{a769}', - ['\u{a768}', '\0', '\0']), ('\u{a76b}', ['\u{a76a}', '\0', '\0']), ('\u{a76d}', ['\u{a76c}', - '\0', '\0']), ('\u{a76f}', ['\u{a76e}', '\0', '\0']), ('\u{a77a}', ['\u{a779}', '\0', - '\0']), ('\u{a77c}', ['\u{a77b}', '\0', '\0']), ('\u{a77f}', ['\u{a77e}', '\0', '\0']), - ('\u{a781}', ['\u{a780}', '\0', '\0']), ('\u{a783}', ['\u{a782}', '\0', '\0']), ('\u{a785}', - ['\u{a784}', '\0', '\0']), ('\u{a787}', ['\u{a786}', '\0', '\0']), ('\u{a78c}', ['\u{a78b}', - '\0', '\0']), ('\u{a791}', ['\u{a790}', '\0', '\0']), ('\u{a793}', ['\u{a792}', '\0', - '\0']), ('\u{a797}', ['\u{a796}', '\0', '\0']), ('\u{a799}', ['\u{a798}', '\0', '\0']), - ('\u{a79b}', ['\u{a79a}', '\0', '\0']), ('\u{a79d}', ['\u{a79c}', '\0', '\0']), ('\u{a79f}', - ['\u{a79e}', '\0', '\0']), ('\u{a7a1}', ['\u{a7a0}', '\0', '\0']), ('\u{a7a3}', ['\u{a7a2}', - '\0', '\0']), ('\u{a7a5}', ['\u{a7a4}', '\0', '\0']), ('\u{a7a7}', ['\u{a7a6}', '\0', - '\0']), ('\u{a7a9}', ['\u{a7a8}', '\0', '\0']), ('\u{a7b5}', ['\u{a7b4}', '\0', '\0']), - ('\u{a7b7}', ['\u{a7b6}', '\0', '\0']), ('\u{a7b9}', ['\u{a7b8}', '\0', '\0']), ('\u{ab53}', - ['\u{a7b3}', '\0', '\0']), ('\u{ab70}', ['\u{13a0}', '\0', '\0']), ('\u{ab71}', ['\u{13a1}', - '\0', '\0']), ('\u{ab72}', ['\u{13a2}', '\0', '\0']), ('\u{ab73}', ['\u{13a3}', '\0', - '\0']), ('\u{ab74}', ['\u{13a4}', '\0', '\0']), ('\u{ab75}', ['\u{13a5}', '\0', '\0']), - ('\u{ab76}', ['\u{13a6}', '\0', '\0']), ('\u{ab77}', ['\u{13a7}', '\0', '\0']), ('\u{ab78}', - ['\u{13a8}', '\0', '\0']), ('\u{ab79}', ['\u{13a9}', '\0', '\0']), ('\u{ab7a}', ['\u{13aa}', - '\0', '\0']), ('\u{ab7b}', ['\u{13ab}', '\0', '\0']), ('\u{ab7c}', ['\u{13ac}', '\0', - '\0']), ('\u{ab7d}', ['\u{13ad}', '\0', '\0']), ('\u{ab7e}', ['\u{13ae}', '\0', '\0']), - ('\u{ab7f}', ['\u{13af}', '\0', '\0']), ('\u{ab80}', ['\u{13b0}', '\0', '\0']), ('\u{ab81}', - ['\u{13b1}', '\0', '\0']), ('\u{ab82}', ['\u{13b2}', '\0', '\0']), ('\u{ab83}', ['\u{13b3}', - '\0', '\0']), ('\u{ab84}', ['\u{13b4}', '\0', '\0']), ('\u{ab85}', ['\u{13b5}', '\0', - '\0']), ('\u{ab86}', ['\u{13b6}', '\0', '\0']), ('\u{ab87}', ['\u{13b7}', '\0', '\0']), - ('\u{ab88}', ['\u{13b8}', '\0', '\0']), ('\u{ab89}', ['\u{13b9}', '\0', '\0']), ('\u{ab8a}', - ['\u{13ba}', '\0', '\0']), ('\u{ab8b}', ['\u{13bb}', '\0', '\0']), ('\u{ab8c}', ['\u{13bc}', - '\0', '\0']), ('\u{ab8d}', ['\u{13bd}', '\0', '\0']), ('\u{ab8e}', ['\u{13be}', '\0', - '\0']), ('\u{ab8f}', ['\u{13bf}', '\0', '\0']), ('\u{ab90}', ['\u{13c0}', '\0', '\0']), - 
('\u{ab91}', ['\u{13c1}', '\0', '\0']), ('\u{ab92}', ['\u{13c2}', '\0', '\0']), ('\u{ab93}', - ['\u{13c3}', '\0', '\0']), ('\u{ab94}', ['\u{13c4}', '\0', '\0']), ('\u{ab95}', ['\u{13c5}', - '\0', '\0']), ('\u{ab96}', ['\u{13c6}', '\0', '\0']), ('\u{ab97}', ['\u{13c7}', '\0', - '\0']), ('\u{ab98}', ['\u{13c8}', '\0', '\0']), ('\u{ab99}', ['\u{13c9}', '\0', '\0']), - ('\u{ab9a}', ['\u{13ca}', '\0', '\0']), ('\u{ab9b}', ['\u{13cb}', '\0', '\0']), ('\u{ab9c}', - ['\u{13cc}', '\0', '\0']), ('\u{ab9d}', ['\u{13cd}', '\0', '\0']), ('\u{ab9e}', ['\u{13ce}', - '\0', '\0']), ('\u{ab9f}', ['\u{13cf}', '\0', '\0']), ('\u{aba0}', ['\u{13d0}', '\0', - '\0']), ('\u{aba1}', ['\u{13d1}', '\0', '\0']), ('\u{aba2}', ['\u{13d2}', '\0', '\0']), - ('\u{aba3}', ['\u{13d3}', '\0', '\0']), ('\u{aba4}', ['\u{13d4}', '\0', '\0']), ('\u{aba5}', - ['\u{13d5}', '\0', '\0']), ('\u{aba6}', ['\u{13d6}', '\0', '\0']), ('\u{aba7}', ['\u{13d7}', - '\0', '\0']), ('\u{aba8}', ['\u{13d8}', '\0', '\0']), ('\u{aba9}', ['\u{13d9}', '\0', - '\0']), ('\u{abaa}', ['\u{13da}', '\0', '\0']), ('\u{abab}', ['\u{13db}', '\0', '\0']), - ('\u{abac}', ['\u{13dc}', '\0', '\0']), ('\u{abad}', ['\u{13dd}', '\0', '\0']), ('\u{abae}', - ['\u{13de}', '\0', '\0']), ('\u{abaf}', ['\u{13df}', '\0', '\0']), ('\u{abb0}', ['\u{13e0}', - '\0', '\0']), ('\u{abb1}', ['\u{13e1}', '\0', '\0']), ('\u{abb2}', ['\u{13e2}', '\0', - '\0']), ('\u{abb3}', ['\u{13e3}', '\0', '\0']), ('\u{abb4}', ['\u{13e4}', '\0', '\0']), - ('\u{abb5}', ['\u{13e5}', '\0', '\0']), ('\u{abb6}', ['\u{13e6}', '\0', '\0']), ('\u{abb7}', - ['\u{13e7}', '\0', '\0']), ('\u{abb8}', ['\u{13e8}', '\0', '\0']), ('\u{abb9}', ['\u{13e9}', - '\0', '\0']), ('\u{abba}', ['\u{13ea}', '\0', '\0']), ('\u{abbb}', ['\u{13eb}', '\0', - '\0']), ('\u{abbc}', ['\u{13ec}', '\0', '\0']), ('\u{abbd}', ['\u{13ed}', '\0', '\0']), - ('\u{abbe}', ['\u{13ee}', '\0', '\0']), ('\u{abbf}', ['\u{13ef}', '\0', '\0']), ('\u{fb00}', - ['\u{46}', '\u{46}', '\0']), ('\u{fb01}', ['\u{46}', '\u{49}', '\0']), ('\u{fb02}', - ['\u{46}', '\u{4c}', '\0']), ('\u{fb03}', ['\u{46}', '\u{46}', '\u{49}']), ('\u{fb04}', - ['\u{46}', '\u{46}', '\u{4c}']), ('\u{fb05}', ['\u{53}', '\u{54}', '\0']), ('\u{fb06}', - ['\u{53}', '\u{54}', '\0']), ('\u{fb13}', ['\u{544}', '\u{546}', '\0']), ('\u{fb14}', - ['\u{544}', '\u{535}', '\0']), ('\u{fb15}', ['\u{544}', '\u{53b}', '\0']), ('\u{fb16}', - ['\u{54e}', '\u{546}', '\0']), ('\u{fb17}', ['\u{544}', '\u{53d}', '\0']), ('\u{ff41}', - ['\u{ff21}', '\0', '\0']), ('\u{ff42}', ['\u{ff22}', '\0', '\0']), ('\u{ff43}', ['\u{ff23}', - '\0', '\0']), ('\u{ff44}', ['\u{ff24}', '\0', '\0']), ('\u{ff45}', ['\u{ff25}', '\0', - '\0']), ('\u{ff46}', ['\u{ff26}', '\0', '\0']), ('\u{ff47}', ['\u{ff27}', '\0', '\0']), - ('\u{ff48}', ['\u{ff28}', '\0', '\0']), ('\u{ff49}', ['\u{ff29}', '\0', '\0']), ('\u{ff4a}', - ['\u{ff2a}', '\0', '\0']), ('\u{ff4b}', ['\u{ff2b}', '\0', '\0']), ('\u{ff4c}', ['\u{ff2c}', - '\0', '\0']), ('\u{ff4d}', ['\u{ff2d}', '\0', '\0']), ('\u{ff4e}', ['\u{ff2e}', '\0', - '\0']), ('\u{ff4f}', ['\u{ff2f}', '\0', '\0']), ('\u{ff50}', ['\u{ff30}', '\0', '\0']), - ('\u{ff51}', ['\u{ff31}', '\0', '\0']), ('\u{ff52}', ['\u{ff32}', '\0', '\0']), ('\u{ff53}', - ['\u{ff33}', '\0', '\0']), ('\u{ff54}', ['\u{ff34}', '\0', '\0']), ('\u{ff55}', ['\u{ff35}', - '\0', '\0']), ('\u{ff56}', ['\u{ff36}', '\0', '\0']), ('\u{ff57}', ['\u{ff37}', '\0', - '\0']), ('\u{ff58}', ['\u{ff38}', '\0', '\0']), ('\u{ff59}', ['\u{ff39}', '\0', '\0']), - ('\u{ff5a}', ['\u{ff3a}', '\0', '\0']), ('\u{10428}', ['\u{10400}', '\0', 
'\0']), - ('\u{10429}', ['\u{10401}', '\0', '\0']), ('\u{1042a}', ['\u{10402}', '\0', '\0']), - ('\u{1042b}', ['\u{10403}', '\0', '\0']), ('\u{1042c}', ['\u{10404}', '\0', '\0']), - ('\u{1042d}', ['\u{10405}', '\0', '\0']), ('\u{1042e}', ['\u{10406}', '\0', '\0']), - ('\u{1042f}', ['\u{10407}', '\0', '\0']), ('\u{10430}', ['\u{10408}', '\0', '\0']), - ('\u{10431}', ['\u{10409}', '\0', '\0']), ('\u{10432}', ['\u{1040a}', '\0', '\0']), - ('\u{10433}', ['\u{1040b}', '\0', '\0']), ('\u{10434}', ['\u{1040c}', '\0', '\0']), - ('\u{10435}', ['\u{1040d}', '\0', '\0']), ('\u{10436}', ['\u{1040e}', '\0', '\0']), - ('\u{10437}', ['\u{1040f}', '\0', '\0']), ('\u{10438}', ['\u{10410}', '\0', '\0']), - ('\u{10439}', ['\u{10411}', '\0', '\0']), ('\u{1043a}', ['\u{10412}', '\0', '\0']), - ('\u{1043b}', ['\u{10413}', '\0', '\0']), ('\u{1043c}', ['\u{10414}', '\0', '\0']), - ('\u{1043d}', ['\u{10415}', '\0', '\0']), ('\u{1043e}', ['\u{10416}', '\0', '\0']), - ('\u{1043f}', ['\u{10417}', '\0', '\0']), ('\u{10440}', ['\u{10418}', '\0', '\0']), - ('\u{10441}', ['\u{10419}', '\0', '\0']), ('\u{10442}', ['\u{1041a}', '\0', '\0']), - ('\u{10443}', ['\u{1041b}', '\0', '\0']), ('\u{10444}', ['\u{1041c}', '\0', '\0']), - ('\u{10445}', ['\u{1041d}', '\0', '\0']), ('\u{10446}', ['\u{1041e}', '\0', '\0']), - ('\u{10447}', ['\u{1041f}', '\0', '\0']), ('\u{10448}', ['\u{10420}', '\0', '\0']), - ('\u{10449}', ['\u{10421}', '\0', '\0']), ('\u{1044a}', ['\u{10422}', '\0', '\0']), - ('\u{1044b}', ['\u{10423}', '\0', '\0']), ('\u{1044c}', ['\u{10424}', '\0', '\0']), - ('\u{1044d}', ['\u{10425}', '\0', '\0']), ('\u{1044e}', ['\u{10426}', '\0', '\0']), - ('\u{1044f}', ['\u{10427}', '\0', '\0']), ('\u{104d8}', ['\u{104b0}', '\0', '\0']), - ('\u{104d9}', ['\u{104b1}', '\0', '\0']), ('\u{104da}', ['\u{104b2}', '\0', '\0']), - ('\u{104db}', ['\u{104b3}', '\0', '\0']), ('\u{104dc}', ['\u{104b4}', '\0', '\0']), - ('\u{104dd}', ['\u{104b5}', '\0', '\0']), ('\u{104de}', ['\u{104b6}', '\0', '\0']), - ('\u{104df}', ['\u{104b7}', '\0', '\0']), ('\u{104e0}', ['\u{104b8}', '\0', '\0']), - ('\u{104e1}', ['\u{104b9}', '\0', '\0']), ('\u{104e2}', ['\u{104ba}', '\0', '\0']), - ('\u{104e3}', ['\u{104bb}', '\0', '\0']), ('\u{104e4}', ['\u{104bc}', '\0', '\0']), - ('\u{104e5}', ['\u{104bd}', '\0', '\0']), ('\u{104e6}', ['\u{104be}', '\0', '\0']), - ('\u{104e7}', ['\u{104bf}', '\0', '\0']), ('\u{104e8}', ['\u{104c0}', '\0', '\0']), - ('\u{104e9}', ['\u{104c1}', '\0', '\0']), ('\u{104ea}', ['\u{104c2}', '\0', '\0']), - ('\u{104eb}', ['\u{104c3}', '\0', '\0']), ('\u{104ec}', ['\u{104c4}', '\0', '\0']), - ('\u{104ed}', ['\u{104c5}', '\0', '\0']), ('\u{104ee}', ['\u{104c6}', '\0', '\0']), - ('\u{104ef}', ['\u{104c7}', '\0', '\0']), ('\u{104f0}', ['\u{104c8}', '\0', '\0']), - ('\u{104f1}', ['\u{104c9}', '\0', '\0']), ('\u{104f2}', ['\u{104ca}', '\0', '\0']), - ('\u{104f3}', ['\u{104cb}', '\0', '\0']), ('\u{104f4}', ['\u{104cc}', '\0', '\0']), - ('\u{104f5}', ['\u{104cd}', '\0', '\0']), ('\u{104f6}', ['\u{104ce}', '\0', '\0']), - ('\u{104f7}', ['\u{104cf}', '\0', '\0']), ('\u{104f8}', ['\u{104d0}', '\0', '\0']), - ('\u{104f9}', ['\u{104d1}', '\0', '\0']), ('\u{104fa}', ['\u{104d2}', '\0', '\0']), - ('\u{104fb}', ['\u{104d3}', '\0', '\0']), ('\u{10cc0}', ['\u{10c80}', '\0', '\0']), - ('\u{10cc1}', ['\u{10c81}', '\0', '\0']), ('\u{10cc2}', ['\u{10c82}', '\0', '\0']), - ('\u{10cc3}', ['\u{10c83}', '\0', '\0']), ('\u{10cc4}', ['\u{10c84}', '\0', '\0']), - ('\u{10cc5}', ['\u{10c85}', '\0', '\0']), ('\u{10cc6}', ['\u{10c86}', '\0', '\0']), - ('\u{10cc7}', 
['\u{10c87}', '\0', '\0']), ('\u{10cc8}', ['\u{10c88}', '\0', '\0']), - ('\u{10cc9}', ['\u{10c89}', '\0', '\0']), ('\u{10cca}', ['\u{10c8a}', '\0', '\0']), - ('\u{10ccb}', ['\u{10c8b}', '\0', '\0']), ('\u{10ccc}', ['\u{10c8c}', '\0', '\0']), - ('\u{10ccd}', ['\u{10c8d}', '\0', '\0']), ('\u{10cce}', ['\u{10c8e}', '\0', '\0']), - ('\u{10ccf}', ['\u{10c8f}', '\0', '\0']), ('\u{10cd0}', ['\u{10c90}', '\0', '\0']), - ('\u{10cd1}', ['\u{10c91}', '\0', '\0']), ('\u{10cd2}', ['\u{10c92}', '\0', '\0']), - ('\u{10cd3}', ['\u{10c93}', '\0', '\0']), ('\u{10cd4}', ['\u{10c94}', '\0', '\0']), - ('\u{10cd5}', ['\u{10c95}', '\0', '\0']), ('\u{10cd6}', ['\u{10c96}', '\0', '\0']), - ('\u{10cd7}', ['\u{10c97}', '\0', '\0']), ('\u{10cd8}', ['\u{10c98}', '\0', '\0']), - ('\u{10cd9}', ['\u{10c99}', '\0', '\0']), ('\u{10cda}', ['\u{10c9a}', '\0', '\0']), - ('\u{10cdb}', ['\u{10c9b}', '\0', '\0']), ('\u{10cdc}', ['\u{10c9c}', '\0', '\0']), - ('\u{10cdd}', ['\u{10c9d}', '\0', '\0']), ('\u{10cde}', ['\u{10c9e}', '\0', '\0']), - ('\u{10cdf}', ['\u{10c9f}', '\0', '\0']), ('\u{10ce0}', ['\u{10ca0}', '\0', '\0']), - ('\u{10ce1}', ['\u{10ca1}', '\0', '\0']), ('\u{10ce2}', ['\u{10ca2}', '\0', '\0']), - ('\u{10ce3}', ['\u{10ca3}', '\0', '\0']), ('\u{10ce4}', ['\u{10ca4}', '\0', '\0']), - ('\u{10ce5}', ['\u{10ca5}', '\0', '\0']), ('\u{10ce6}', ['\u{10ca6}', '\0', '\0']), - ('\u{10ce7}', ['\u{10ca7}', '\0', '\0']), ('\u{10ce8}', ['\u{10ca8}', '\0', '\0']), - ('\u{10ce9}', ['\u{10ca9}', '\0', '\0']), ('\u{10cea}', ['\u{10caa}', '\0', '\0']), - ('\u{10ceb}', ['\u{10cab}', '\0', '\0']), ('\u{10cec}', ['\u{10cac}', '\0', '\0']), - ('\u{10ced}', ['\u{10cad}', '\0', '\0']), ('\u{10cee}', ['\u{10cae}', '\0', '\0']), - ('\u{10cef}', ['\u{10caf}', '\0', '\0']), ('\u{10cf0}', ['\u{10cb0}', '\0', '\0']), - ('\u{10cf1}', ['\u{10cb1}', '\0', '\0']), ('\u{10cf2}', ['\u{10cb2}', '\0', '\0']), - ('\u{118c0}', ['\u{118a0}', '\0', '\0']), ('\u{118c1}', ['\u{118a1}', '\0', '\0']), - ('\u{118c2}', ['\u{118a2}', '\0', '\0']), ('\u{118c3}', ['\u{118a3}', '\0', '\0']), - ('\u{118c4}', ['\u{118a4}', '\0', '\0']), ('\u{118c5}', ['\u{118a5}', '\0', '\0']), - ('\u{118c6}', ['\u{118a6}', '\0', '\0']), ('\u{118c7}', ['\u{118a7}', '\0', '\0']), - ('\u{118c8}', ['\u{118a8}', '\0', '\0']), ('\u{118c9}', ['\u{118a9}', '\0', '\0']), - ('\u{118ca}', ['\u{118aa}', '\0', '\0']), ('\u{118cb}', ['\u{118ab}', '\0', '\0']), - ('\u{118cc}', ['\u{118ac}', '\0', '\0']), ('\u{118cd}', ['\u{118ad}', '\0', '\0']), - ('\u{118ce}', ['\u{118ae}', '\0', '\0']), ('\u{118cf}', ['\u{118af}', '\0', '\0']), - ('\u{118d0}', ['\u{118b0}', '\0', '\0']), ('\u{118d1}', ['\u{118b1}', '\0', '\0']), - ('\u{118d2}', ['\u{118b2}', '\0', '\0']), ('\u{118d3}', ['\u{118b3}', '\0', '\0']), - ('\u{118d4}', ['\u{118b4}', '\0', '\0']), ('\u{118d5}', ['\u{118b5}', '\0', '\0']), - ('\u{118d6}', ['\u{118b6}', '\0', '\0']), ('\u{118d7}', ['\u{118b7}', '\0', '\0']), - ('\u{118d8}', ['\u{118b8}', '\0', '\0']), ('\u{118d9}', ['\u{118b9}', '\0', '\0']), - ('\u{118da}', ['\u{118ba}', '\0', '\0']), ('\u{118db}', ['\u{118bb}', '\0', '\0']), - ('\u{118dc}', ['\u{118bc}', '\0', '\0']), ('\u{118dd}', ['\u{118bd}', '\0', '\0']), - ('\u{118de}', ['\u{118be}', '\0', '\0']), ('\u{118df}', ['\u{118bf}', '\0', '\0']), - ('\u{16e60}', ['\u{16e40}', '\0', '\0']), ('\u{16e61}', ['\u{16e41}', '\0', '\0']), - ('\u{16e62}', ['\u{16e42}', '\0', '\0']), ('\u{16e63}', ['\u{16e43}', '\0', '\0']), - ('\u{16e64}', ['\u{16e44}', '\0', '\0']), ('\u{16e65}', ['\u{16e45}', '\0', '\0']), - ('\u{16e66}', ['\u{16e46}', '\0', '\0']), 
('\u{16e67}', ['\u{16e47}', '\0', '\0']), - ('\u{16e68}', ['\u{16e48}', '\0', '\0']), ('\u{16e69}', ['\u{16e49}', '\0', '\0']), - ('\u{16e6a}', ['\u{16e4a}', '\0', '\0']), ('\u{16e6b}', ['\u{16e4b}', '\0', '\0']), - ('\u{16e6c}', ['\u{16e4c}', '\0', '\0']), ('\u{16e6d}', ['\u{16e4d}', '\0', '\0']), - ('\u{16e6e}', ['\u{16e4e}', '\0', '\0']), ('\u{16e6f}', ['\u{16e4f}', '\0', '\0']), - ('\u{16e70}', ['\u{16e50}', '\0', '\0']), ('\u{16e71}', ['\u{16e51}', '\0', '\0']), - ('\u{16e72}', ['\u{16e52}', '\0', '\0']), ('\u{16e73}', ['\u{16e53}', '\0', '\0']), - ('\u{16e74}', ['\u{16e54}', '\0', '\0']), ('\u{16e75}', ['\u{16e55}', '\0', '\0']), - ('\u{16e76}', ['\u{16e56}', '\0', '\0']), ('\u{16e77}', ['\u{16e57}', '\0', '\0']), - ('\u{16e78}', ['\u{16e58}', '\0', '\0']), ('\u{16e79}', ['\u{16e59}', '\0', '\0']), - ('\u{16e7a}', ['\u{16e5a}', '\0', '\0']), ('\u{16e7b}', ['\u{16e5b}', '\0', '\0']), - ('\u{16e7c}', ['\u{16e5c}', '\0', '\0']), ('\u{16e7d}', ['\u{16e5d}', '\0', '\0']), - ('\u{16e7e}', ['\u{16e5e}', '\0', '\0']), ('\u{16e7f}', ['\u{16e5f}', '\0', '\0']), - ('\u{1e922}', ['\u{1e900}', '\0', '\0']), ('\u{1e923}', ['\u{1e901}', '\0', '\0']), - ('\u{1e924}', ['\u{1e902}', '\0', '\0']), ('\u{1e925}', ['\u{1e903}', '\0', '\0']), - ('\u{1e926}', ['\u{1e904}', '\0', '\0']), ('\u{1e927}', ['\u{1e905}', '\0', '\0']), - ('\u{1e928}', ['\u{1e906}', '\0', '\0']), ('\u{1e929}', ['\u{1e907}', '\0', '\0']), - ('\u{1e92a}', ['\u{1e908}', '\0', '\0']), ('\u{1e92b}', ['\u{1e909}', '\0', '\0']), - ('\u{1e92c}', ['\u{1e90a}', '\0', '\0']), ('\u{1e92d}', ['\u{1e90b}', '\0', '\0']), - ('\u{1e92e}', ['\u{1e90c}', '\0', '\0']), ('\u{1e92f}', ['\u{1e90d}', '\0', '\0']), - ('\u{1e930}', ['\u{1e90e}', '\0', '\0']), ('\u{1e931}', ['\u{1e90f}', '\0', '\0']), - ('\u{1e932}', ['\u{1e910}', '\0', '\0']), ('\u{1e933}', ['\u{1e911}', '\0', '\0']), - ('\u{1e934}', ['\u{1e912}', '\0', '\0']), ('\u{1e935}', ['\u{1e913}', '\0', '\0']), - ('\u{1e936}', ['\u{1e914}', '\0', '\0']), ('\u{1e937}', ['\u{1e915}', '\0', '\0']), - ('\u{1e938}', ['\u{1e916}', '\0', '\0']), ('\u{1e939}', ['\u{1e917}', '\0', '\0']), - ('\u{1e93a}', ['\u{1e918}', '\0', '\0']), ('\u{1e93b}', ['\u{1e919}', '\0', '\0']), - ('\u{1e93c}', ['\u{1e91a}', '\0', '\0']), ('\u{1e93d}', ['\u{1e91b}', '\0', '\0']), - ('\u{1e93e}', ['\u{1e91c}', '\0', '\0']), ('\u{1e93f}', ['\u{1e91d}', '\0', '\0']), - ('\u{1e940}', ['\u{1e91e}', '\0', '\0']), ('\u{1e941}', ['\u{1e91f}', '\0', '\0']), - ('\u{1e942}', ['\u{1e920}', '\0', '\0']), ('\u{1e943}', ['\u{1e921}', '\0', '\0']) + ('\u{61}', ['\u{41}', '\0', '\0']), + ('\u{62}', ['\u{42}', '\0', '\0']), + ('\u{63}', ['\u{43}', '\0', '\0']), + ('\u{64}', ['\u{44}', '\0', '\0']), + ('\u{65}', ['\u{45}', '\0', '\0']), + ('\u{66}', ['\u{46}', '\0', '\0']), + ('\u{67}', ['\u{47}', '\0', '\0']), + ('\u{68}', ['\u{48}', '\0', '\0']), + ('\u{69}', ['\u{49}', '\0', '\0']), + ('\u{6a}', ['\u{4a}', '\0', '\0']), + ('\u{6b}', ['\u{4b}', '\0', '\0']), + ('\u{6c}', ['\u{4c}', '\0', '\0']), + ('\u{6d}', ['\u{4d}', '\0', '\0']), + ('\u{6e}', ['\u{4e}', '\0', '\0']), + ('\u{6f}', ['\u{4f}', '\0', '\0']), + ('\u{70}', ['\u{50}', '\0', '\0']), + ('\u{71}', ['\u{51}', '\0', '\0']), + ('\u{72}', ['\u{52}', '\0', '\0']), + ('\u{73}', ['\u{53}', '\0', '\0']), + ('\u{74}', ['\u{54}', '\0', '\0']), + ('\u{75}', ['\u{55}', '\0', '\0']), + ('\u{76}', ['\u{56}', '\0', '\0']), + ('\u{77}', ['\u{57}', '\0', '\0']), + ('\u{78}', ['\u{58}', '\0', '\0']), + ('\u{79}', ['\u{59}', '\0', '\0']), + ('\u{7a}', ['\u{5a}', '\0', '\0']), + ('\u{b5}', ['\u{39c}', '\0', 
'\0']), + ('\u{df}', ['\u{53}', '\u{53}', '\0']), + ('\u{e0}', ['\u{c0}', '\0', '\0']), + ('\u{e1}', ['\u{c1}', '\0', '\0']), + ('\u{e2}', ['\u{c2}', '\0', '\0']), + ('\u{e3}', ['\u{c3}', '\0', '\0']), + ('\u{e4}', ['\u{c4}', '\0', '\0']), + ('\u{e5}', ['\u{c5}', '\0', '\0']), + ('\u{e6}', ['\u{c6}', '\0', '\0']), + ('\u{e7}', ['\u{c7}', '\0', '\0']), + ('\u{e8}', ['\u{c8}', '\0', '\0']), + ('\u{e9}', ['\u{c9}', '\0', '\0']), + ('\u{ea}', ['\u{ca}', '\0', '\0']), + ('\u{eb}', ['\u{cb}', '\0', '\0']), + ('\u{ec}', ['\u{cc}', '\0', '\0']), + ('\u{ed}', ['\u{cd}', '\0', '\0']), + ('\u{ee}', ['\u{ce}', '\0', '\0']), + ('\u{ef}', ['\u{cf}', '\0', '\0']), + ('\u{f0}', ['\u{d0}', '\0', '\0']), + ('\u{f1}', ['\u{d1}', '\0', '\0']), + ('\u{f2}', ['\u{d2}', '\0', '\0']), + ('\u{f3}', ['\u{d3}', '\0', '\0']), + ('\u{f4}', ['\u{d4}', '\0', '\0']), + ('\u{f5}', ['\u{d5}', '\0', '\0']), + ('\u{f6}', ['\u{d6}', '\0', '\0']), + ('\u{f8}', ['\u{d8}', '\0', '\0']), + ('\u{f9}', ['\u{d9}', '\0', '\0']), + ('\u{fa}', ['\u{da}', '\0', '\0']), + ('\u{fb}', ['\u{db}', '\0', '\0']), + ('\u{fc}', ['\u{dc}', '\0', '\0']), + ('\u{fd}', ['\u{dd}', '\0', '\0']), + ('\u{fe}', ['\u{de}', '\0', '\0']), + ('\u{ff}', ['\u{178}', '\0', '\0']), + ('\u{101}', ['\u{100}', '\0', '\0']), + ('\u{103}', ['\u{102}', '\0', '\0']), + ('\u{105}', ['\u{104}', '\0', '\0']), + ('\u{107}', ['\u{106}', '\0', '\0']), + ('\u{109}', ['\u{108}', '\0', '\0']), + ('\u{10b}', ['\u{10a}', '\0', '\0']), + ('\u{10d}', ['\u{10c}', '\0', '\0']), + ('\u{10f}', ['\u{10e}', '\0', '\0']), + ('\u{111}', ['\u{110}', '\0', '\0']), + ('\u{113}', ['\u{112}', '\0', '\0']), + ('\u{115}', ['\u{114}', '\0', '\0']), + ('\u{117}', ['\u{116}', '\0', '\0']), + ('\u{119}', ['\u{118}', '\0', '\0']), + ('\u{11b}', ['\u{11a}', '\0', '\0']), + ('\u{11d}', ['\u{11c}', '\0', '\0']), + ('\u{11f}', ['\u{11e}', '\0', '\0']), + ('\u{121}', ['\u{120}', '\0', '\0']), + ('\u{123}', ['\u{122}', '\0', '\0']), + ('\u{125}', ['\u{124}', '\0', '\0']), + ('\u{127}', ['\u{126}', '\0', '\0']), + ('\u{129}', ['\u{128}', '\0', '\0']), + ('\u{12b}', ['\u{12a}', '\0', '\0']), + ('\u{12d}', ['\u{12c}', '\0', '\0']), + ('\u{12f}', ['\u{12e}', '\0', '\0']), + ('\u{131}', ['\u{49}', '\0', '\0']), + ('\u{133}', ['\u{132}', '\0', '\0']), + ('\u{135}', ['\u{134}', '\0', '\0']), + ('\u{137}', ['\u{136}', '\0', '\0']), + ('\u{13a}', ['\u{139}', '\0', '\0']), + ('\u{13c}', ['\u{13b}', '\0', '\0']), + ('\u{13e}', ['\u{13d}', '\0', '\0']), + ('\u{140}', ['\u{13f}', '\0', '\0']), + ('\u{142}', ['\u{141}', '\0', '\0']), + ('\u{144}', ['\u{143}', '\0', '\0']), + ('\u{146}', ['\u{145}', '\0', '\0']), + ('\u{148}', ['\u{147}', '\0', '\0']), + ('\u{149}', ['\u{2bc}', '\u{4e}', '\0']), + ('\u{14b}', ['\u{14a}', '\0', '\0']), + ('\u{14d}', ['\u{14c}', '\0', '\0']), + ('\u{14f}', ['\u{14e}', '\0', '\0']), + ('\u{151}', ['\u{150}', '\0', '\0']), + ('\u{153}', ['\u{152}', '\0', '\0']), + ('\u{155}', ['\u{154}', '\0', '\0']), + ('\u{157}', ['\u{156}', '\0', '\0']), + ('\u{159}', ['\u{158}', '\0', '\0']), + ('\u{15b}', ['\u{15a}', '\0', '\0']), + ('\u{15d}', ['\u{15c}', '\0', '\0']), + ('\u{15f}', ['\u{15e}', '\0', '\0']), + ('\u{161}', ['\u{160}', '\0', '\0']), + ('\u{163}', ['\u{162}', '\0', '\0']), + ('\u{165}', ['\u{164}', '\0', '\0']), + ('\u{167}', ['\u{166}', '\0', '\0']), + ('\u{169}', ['\u{168}', '\0', '\0']), + ('\u{16b}', ['\u{16a}', '\0', '\0']), + ('\u{16d}', ['\u{16c}', '\0', '\0']), + ('\u{16f}', ['\u{16e}', '\0', '\0']), + ('\u{171}', ['\u{170}', '\0', '\0']), + ('\u{173}', ['\u{172}', '\0', '\0']), + 
('\u{175}', ['\u{174}', '\0', '\0']), + ('\u{177}', ['\u{176}', '\0', '\0']), + ('\u{17a}', ['\u{179}', '\0', '\0']), + ('\u{17c}', ['\u{17b}', '\0', '\0']), + ('\u{17e}', ['\u{17d}', '\0', '\0']), + ('\u{17f}', ['\u{53}', '\0', '\0']), + ('\u{180}', ['\u{243}', '\0', '\0']), + ('\u{183}', ['\u{182}', '\0', '\0']), + ('\u{185}', ['\u{184}', '\0', '\0']), + ('\u{188}', ['\u{187}', '\0', '\0']), + ('\u{18c}', ['\u{18b}', '\0', '\0']), + ('\u{192}', ['\u{191}', '\0', '\0']), + ('\u{195}', ['\u{1f6}', '\0', '\0']), + ('\u{199}', ['\u{198}', '\0', '\0']), + ('\u{19a}', ['\u{23d}', '\0', '\0']), + ('\u{19e}', ['\u{220}', '\0', '\0']), + ('\u{1a1}', ['\u{1a0}', '\0', '\0']), + ('\u{1a3}', ['\u{1a2}', '\0', '\0']), + ('\u{1a5}', ['\u{1a4}', '\0', '\0']), + ('\u{1a8}', ['\u{1a7}', '\0', '\0']), + ('\u{1ad}', ['\u{1ac}', '\0', '\0']), + ('\u{1b0}', ['\u{1af}', '\0', '\0']), + ('\u{1b4}', ['\u{1b3}', '\0', '\0']), + ('\u{1b6}', ['\u{1b5}', '\0', '\0']), + ('\u{1b9}', ['\u{1b8}', '\0', '\0']), + ('\u{1bd}', ['\u{1bc}', '\0', '\0']), + ('\u{1bf}', ['\u{1f7}', '\0', '\0']), + ('\u{1c5}', ['\u{1c4}', '\0', '\0']), + ('\u{1c6}', ['\u{1c4}', '\0', '\0']), + ('\u{1c8}', ['\u{1c7}', '\0', '\0']), + ('\u{1c9}', ['\u{1c7}', '\0', '\0']), + ('\u{1cb}', ['\u{1ca}', '\0', '\0']), + ('\u{1cc}', ['\u{1ca}', '\0', '\0']), + ('\u{1ce}', ['\u{1cd}', '\0', '\0']), + ('\u{1d0}', ['\u{1cf}', '\0', '\0']), + ('\u{1d2}', ['\u{1d1}', '\0', '\0']), + ('\u{1d4}', ['\u{1d3}', '\0', '\0']), + ('\u{1d6}', ['\u{1d5}', '\0', '\0']), + ('\u{1d8}', ['\u{1d7}', '\0', '\0']), + ('\u{1da}', ['\u{1d9}', '\0', '\0']), + ('\u{1dc}', ['\u{1db}', '\0', '\0']), + ('\u{1dd}', ['\u{18e}', '\0', '\0']), + ('\u{1df}', ['\u{1de}', '\0', '\0']), + ('\u{1e1}', ['\u{1e0}', '\0', '\0']), + ('\u{1e3}', ['\u{1e2}', '\0', '\0']), + ('\u{1e5}', ['\u{1e4}', '\0', '\0']), + ('\u{1e7}', ['\u{1e6}', '\0', '\0']), + ('\u{1e9}', ['\u{1e8}', '\0', '\0']), + ('\u{1eb}', ['\u{1ea}', '\0', '\0']), + ('\u{1ed}', ['\u{1ec}', '\0', '\0']), + ('\u{1ef}', ['\u{1ee}', '\0', '\0']), + ('\u{1f0}', ['\u{4a}', '\u{30c}', '\0']), + ('\u{1f2}', ['\u{1f1}', '\0', '\0']), + ('\u{1f3}', ['\u{1f1}', '\0', '\0']), + ('\u{1f5}', ['\u{1f4}', '\0', '\0']), + ('\u{1f9}', ['\u{1f8}', '\0', '\0']), + ('\u{1fb}', ['\u{1fa}', '\0', '\0']), + ('\u{1fd}', ['\u{1fc}', '\0', '\0']), + ('\u{1ff}', ['\u{1fe}', '\0', '\0']), + ('\u{201}', ['\u{200}', '\0', '\0']), + ('\u{203}', ['\u{202}', '\0', '\0']), + ('\u{205}', ['\u{204}', '\0', '\0']), + ('\u{207}', ['\u{206}', '\0', '\0']), + ('\u{209}', ['\u{208}', '\0', '\0']), + ('\u{20b}', ['\u{20a}', '\0', '\0']), + ('\u{20d}', ['\u{20c}', '\0', '\0']), + ('\u{20f}', ['\u{20e}', '\0', '\0']), + ('\u{211}', ['\u{210}', '\0', '\0']), + ('\u{213}', ['\u{212}', '\0', '\0']), + ('\u{215}', ['\u{214}', '\0', '\0']), + ('\u{217}', ['\u{216}', '\0', '\0']), + ('\u{219}', ['\u{218}', '\0', '\0']), + ('\u{21b}', ['\u{21a}', '\0', '\0']), + ('\u{21d}', ['\u{21c}', '\0', '\0']), + ('\u{21f}', ['\u{21e}', '\0', '\0']), + ('\u{223}', ['\u{222}', '\0', '\0']), + ('\u{225}', ['\u{224}', '\0', '\0']), + ('\u{227}', ['\u{226}', '\0', '\0']), + ('\u{229}', ['\u{228}', '\0', '\0']), + ('\u{22b}', ['\u{22a}', '\0', '\0']), + ('\u{22d}', ['\u{22c}', '\0', '\0']), + ('\u{22f}', ['\u{22e}', '\0', '\0']), + ('\u{231}', ['\u{230}', '\0', '\0']), + ('\u{233}', ['\u{232}', '\0', '\0']), + ('\u{23c}', ['\u{23b}', '\0', '\0']), + ('\u{23f}', ['\u{2c7e}', '\0', '\0']), + ('\u{240}', ['\u{2c7f}', '\0', '\0']), + ('\u{242}', ['\u{241}', '\0', '\0']), + ('\u{247}', ['\u{246}', '\0', 
'\0']), + ('\u{249}', ['\u{248}', '\0', '\0']), + ('\u{24b}', ['\u{24a}', '\0', '\0']), + ('\u{24d}', ['\u{24c}', '\0', '\0']), + ('\u{24f}', ['\u{24e}', '\0', '\0']), + ('\u{250}', ['\u{2c6f}', '\0', '\0']), + ('\u{251}', ['\u{2c6d}', '\0', '\0']), + ('\u{252}', ['\u{2c70}', '\0', '\0']), + ('\u{253}', ['\u{181}', '\0', '\0']), + ('\u{254}', ['\u{186}', '\0', '\0']), + ('\u{256}', ['\u{189}', '\0', '\0']), + ('\u{257}', ['\u{18a}', '\0', '\0']), + ('\u{259}', ['\u{18f}', '\0', '\0']), + ('\u{25b}', ['\u{190}', '\0', '\0']), + ('\u{25c}', ['\u{a7ab}', '\0', '\0']), + ('\u{260}', ['\u{193}', '\0', '\0']), + ('\u{261}', ['\u{a7ac}', '\0', '\0']), + ('\u{263}', ['\u{194}', '\0', '\0']), + ('\u{265}', ['\u{a78d}', '\0', '\0']), + ('\u{266}', ['\u{a7aa}', '\0', '\0']), + ('\u{268}', ['\u{197}', '\0', '\0']), + ('\u{269}', ['\u{196}', '\0', '\0']), + ('\u{26a}', ['\u{a7ae}', '\0', '\0']), + ('\u{26b}', ['\u{2c62}', '\0', '\0']), + ('\u{26c}', ['\u{a7ad}', '\0', '\0']), + ('\u{26f}', ['\u{19c}', '\0', '\0']), + ('\u{271}', ['\u{2c6e}', '\0', '\0']), + ('\u{272}', ['\u{19d}', '\0', '\0']), + ('\u{275}', ['\u{19f}', '\0', '\0']), + ('\u{27d}', ['\u{2c64}', '\0', '\0']), + ('\u{280}', ['\u{1a6}', '\0', '\0']), + ('\u{283}', ['\u{1a9}', '\0', '\0']), + ('\u{287}', ['\u{a7b1}', '\0', '\0']), + ('\u{288}', ['\u{1ae}', '\0', '\0']), + ('\u{289}', ['\u{244}', '\0', '\0']), + ('\u{28a}', ['\u{1b1}', '\0', '\0']), + ('\u{28b}', ['\u{1b2}', '\0', '\0']), + ('\u{28c}', ['\u{245}', '\0', '\0']), + ('\u{292}', ['\u{1b7}', '\0', '\0']), + ('\u{29d}', ['\u{a7b2}', '\0', '\0']), + ('\u{29e}', ['\u{a7b0}', '\0', '\0']), + ('\u{345}', ['\u{399}', '\0', '\0']), + ('\u{371}', ['\u{370}', '\0', '\0']), + ('\u{373}', ['\u{372}', '\0', '\0']), + ('\u{377}', ['\u{376}', '\0', '\0']), + ('\u{37b}', ['\u{3fd}', '\0', '\0']), + ('\u{37c}', ['\u{3fe}', '\0', '\0']), + ('\u{37d}', ['\u{3ff}', '\0', '\0']), + ('\u{390}', ['\u{399}', '\u{308}', '\u{301}']), + ('\u{3ac}', ['\u{386}', '\0', '\0']), + ('\u{3ad}', ['\u{388}', '\0', '\0']), + ('\u{3ae}', ['\u{389}', '\0', '\0']), + ('\u{3af}', ['\u{38a}', '\0', '\0']), + ('\u{3b0}', ['\u{3a5}', '\u{308}', '\u{301}']), + ('\u{3b1}', ['\u{391}', '\0', '\0']), + ('\u{3b2}', ['\u{392}', '\0', '\0']), + ('\u{3b3}', ['\u{393}', '\0', '\0']), + ('\u{3b4}', ['\u{394}', '\0', '\0']), + ('\u{3b5}', ['\u{395}', '\0', '\0']), + ('\u{3b6}', ['\u{396}', '\0', '\0']), + ('\u{3b7}', ['\u{397}', '\0', '\0']), + ('\u{3b8}', ['\u{398}', '\0', '\0']), + ('\u{3b9}', ['\u{399}', '\0', '\0']), + ('\u{3ba}', ['\u{39a}', '\0', '\0']), + ('\u{3bb}', ['\u{39b}', '\0', '\0']), + ('\u{3bc}', ['\u{39c}', '\0', '\0']), + ('\u{3bd}', ['\u{39d}', '\0', '\0']), + ('\u{3be}', ['\u{39e}', '\0', '\0']), + ('\u{3bf}', ['\u{39f}', '\0', '\0']), + ('\u{3c0}', ['\u{3a0}', '\0', '\0']), + ('\u{3c1}', ['\u{3a1}', '\0', '\0']), + ('\u{3c2}', ['\u{3a3}', '\0', '\0']), + ('\u{3c3}', ['\u{3a3}', '\0', '\0']), + ('\u{3c4}', ['\u{3a4}', '\0', '\0']), + ('\u{3c5}', ['\u{3a5}', '\0', '\0']), + ('\u{3c6}', ['\u{3a6}', '\0', '\0']), + ('\u{3c7}', ['\u{3a7}', '\0', '\0']), + ('\u{3c8}', ['\u{3a8}', '\0', '\0']), + ('\u{3c9}', ['\u{3a9}', '\0', '\0']), + ('\u{3ca}', ['\u{3aa}', '\0', '\0']), + ('\u{3cb}', ['\u{3ab}', '\0', '\0']), + ('\u{3cc}', ['\u{38c}', '\0', '\0']), + ('\u{3cd}', ['\u{38e}', '\0', '\0']), + ('\u{3ce}', ['\u{38f}', '\0', '\0']), + ('\u{3d0}', ['\u{392}', '\0', '\0']), + ('\u{3d1}', ['\u{398}', '\0', '\0']), + ('\u{3d5}', ['\u{3a6}', '\0', '\0']), + ('\u{3d6}', ['\u{3a0}', '\0', '\0']), + ('\u{3d7}', ['\u{3cf}', '\0', 
'\0']), + ('\u{3d9}', ['\u{3d8}', '\0', '\0']), + ('\u{3db}', ['\u{3da}', '\0', '\0']), + ('\u{3dd}', ['\u{3dc}', '\0', '\0']), + ('\u{3df}', ['\u{3de}', '\0', '\0']), + ('\u{3e1}', ['\u{3e0}', '\0', '\0']), + ('\u{3e3}', ['\u{3e2}', '\0', '\0']), + ('\u{3e5}', ['\u{3e4}', '\0', '\0']), + ('\u{3e7}', ['\u{3e6}', '\0', '\0']), + ('\u{3e9}', ['\u{3e8}', '\0', '\0']), + ('\u{3eb}', ['\u{3ea}', '\0', '\0']), + ('\u{3ed}', ['\u{3ec}', '\0', '\0']), + ('\u{3ef}', ['\u{3ee}', '\0', '\0']), + ('\u{3f0}', ['\u{39a}', '\0', '\0']), + ('\u{3f1}', ['\u{3a1}', '\0', '\0']), + ('\u{3f2}', ['\u{3f9}', '\0', '\0']), + ('\u{3f3}', ['\u{37f}', '\0', '\0']), + ('\u{3f5}', ['\u{395}', '\0', '\0']), + ('\u{3f8}', ['\u{3f7}', '\0', '\0']), + ('\u{3fb}', ['\u{3fa}', '\0', '\0']), + ('\u{430}', ['\u{410}', '\0', '\0']), + ('\u{431}', ['\u{411}', '\0', '\0']), + ('\u{432}', ['\u{412}', '\0', '\0']), + ('\u{433}', ['\u{413}', '\0', '\0']), + ('\u{434}', ['\u{414}', '\0', '\0']), + ('\u{435}', ['\u{415}', '\0', '\0']), + ('\u{436}', ['\u{416}', '\0', '\0']), + ('\u{437}', ['\u{417}', '\0', '\0']), + ('\u{438}', ['\u{418}', '\0', '\0']), + ('\u{439}', ['\u{419}', '\0', '\0']), + ('\u{43a}', ['\u{41a}', '\0', '\0']), + ('\u{43b}', ['\u{41b}', '\0', '\0']), + ('\u{43c}', ['\u{41c}', '\0', '\0']), + ('\u{43d}', ['\u{41d}', '\0', '\0']), + ('\u{43e}', ['\u{41e}', '\0', '\0']), + ('\u{43f}', ['\u{41f}', '\0', '\0']), + ('\u{440}', ['\u{420}', '\0', '\0']), + ('\u{441}', ['\u{421}', '\0', '\0']), + ('\u{442}', ['\u{422}', '\0', '\0']), + ('\u{443}', ['\u{423}', '\0', '\0']), + ('\u{444}', ['\u{424}', '\0', '\0']), + ('\u{445}', ['\u{425}', '\0', '\0']), + ('\u{446}', ['\u{426}', '\0', '\0']), + ('\u{447}', ['\u{427}', '\0', '\0']), + ('\u{448}', ['\u{428}', '\0', '\0']), + ('\u{449}', ['\u{429}', '\0', '\0']), + ('\u{44a}', ['\u{42a}', '\0', '\0']), + ('\u{44b}', ['\u{42b}', '\0', '\0']), + ('\u{44c}', ['\u{42c}', '\0', '\0']), + ('\u{44d}', ['\u{42d}', '\0', '\0']), + ('\u{44e}', ['\u{42e}', '\0', '\0']), + ('\u{44f}', ['\u{42f}', '\0', '\0']), + ('\u{450}', ['\u{400}', '\0', '\0']), + ('\u{451}', ['\u{401}', '\0', '\0']), + ('\u{452}', ['\u{402}', '\0', '\0']), + ('\u{453}', ['\u{403}', '\0', '\0']), + ('\u{454}', ['\u{404}', '\0', '\0']), + ('\u{455}', ['\u{405}', '\0', '\0']), + ('\u{456}', ['\u{406}', '\0', '\0']), + ('\u{457}', ['\u{407}', '\0', '\0']), + ('\u{458}', ['\u{408}', '\0', '\0']), + ('\u{459}', ['\u{409}', '\0', '\0']), + ('\u{45a}', ['\u{40a}', '\0', '\0']), + ('\u{45b}', ['\u{40b}', '\0', '\0']), + ('\u{45c}', ['\u{40c}', '\0', '\0']), + ('\u{45d}', ['\u{40d}', '\0', '\0']), + ('\u{45e}', ['\u{40e}', '\0', '\0']), + ('\u{45f}', ['\u{40f}', '\0', '\0']), + ('\u{461}', ['\u{460}', '\0', '\0']), + ('\u{463}', ['\u{462}', '\0', '\0']), + ('\u{465}', ['\u{464}', '\0', '\0']), + ('\u{467}', ['\u{466}', '\0', '\0']), + ('\u{469}', ['\u{468}', '\0', '\0']), + ('\u{46b}', ['\u{46a}', '\0', '\0']), + ('\u{46d}', ['\u{46c}', '\0', '\0']), + ('\u{46f}', ['\u{46e}', '\0', '\0']), + ('\u{471}', ['\u{470}', '\0', '\0']), + ('\u{473}', ['\u{472}', '\0', '\0']), + ('\u{475}', ['\u{474}', '\0', '\0']), + ('\u{477}', ['\u{476}', '\0', '\0']), + ('\u{479}', ['\u{478}', '\0', '\0']), + ('\u{47b}', ['\u{47a}', '\0', '\0']), + ('\u{47d}', ['\u{47c}', '\0', '\0']), + ('\u{47f}', ['\u{47e}', '\0', '\0']), + ('\u{481}', ['\u{480}', '\0', '\0']), + ('\u{48b}', ['\u{48a}', '\0', '\0']), + ('\u{48d}', ['\u{48c}', '\0', '\0']), + ('\u{48f}', ['\u{48e}', '\0', '\0']), + ('\u{491}', ['\u{490}', '\0', '\0']), + ('\u{493}', ['\u{492}', 
'\0', '\0']), + ('\u{495}', ['\u{494}', '\0', '\0']), + ('\u{497}', ['\u{496}', '\0', '\0']), + ('\u{499}', ['\u{498}', '\0', '\0']), + ('\u{49b}', ['\u{49a}', '\0', '\0']), + ('\u{49d}', ['\u{49c}', '\0', '\0']), + ('\u{49f}', ['\u{49e}', '\0', '\0']), + ('\u{4a1}', ['\u{4a0}', '\0', '\0']), + ('\u{4a3}', ['\u{4a2}', '\0', '\0']), + ('\u{4a5}', ['\u{4a4}', '\0', '\0']), + ('\u{4a7}', ['\u{4a6}', '\0', '\0']), + ('\u{4a9}', ['\u{4a8}', '\0', '\0']), + ('\u{4ab}', ['\u{4aa}', '\0', '\0']), + ('\u{4ad}', ['\u{4ac}', '\0', '\0']), + ('\u{4af}', ['\u{4ae}', '\0', '\0']), + ('\u{4b1}', ['\u{4b0}', '\0', '\0']), + ('\u{4b3}', ['\u{4b2}', '\0', '\0']), + ('\u{4b5}', ['\u{4b4}', '\0', '\0']), + ('\u{4b7}', ['\u{4b6}', '\0', '\0']), + ('\u{4b9}', ['\u{4b8}', '\0', '\0']), + ('\u{4bb}', ['\u{4ba}', '\0', '\0']), + ('\u{4bd}', ['\u{4bc}', '\0', '\0']), + ('\u{4bf}', ['\u{4be}', '\0', '\0']), + ('\u{4c2}', ['\u{4c1}', '\0', '\0']), + ('\u{4c4}', ['\u{4c3}', '\0', '\0']), + ('\u{4c6}', ['\u{4c5}', '\0', '\0']), + ('\u{4c8}', ['\u{4c7}', '\0', '\0']), + ('\u{4ca}', ['\u{4c9}', '\0', '\0']), + ('\u{4cc}', ['\u{4cb}', '\0', '\0']), + ('\u{4ce}', ['\u{4cd}', '\0', '\0']), + ('\u{4cf}', ['\u{4c0}', '\0', '\0']), + ('\u{4d1}', ['\u{4d0}', '\0', '\0']), + ('\u{4d3}', ['\u{4d2}', '\0', '\0']), + ('\u{4d5}', ['\u{4d4}', '\0', '\0']), + ('\u{4d7}', ['\u{4d6}', '\0', '\0']), + ('\u{4d9}', ['\u{4d8}', '\0', '\0']), + ('\u{4db}', ['\u{4da}', '\0', '\0']), + ('\u{4dd}', ['\u{4dc}', '\0', '\0']), + ('\u{4df}', ['\u{4de}', '\0', '\0']), + ('\u{4e1}', ['\u{4e0}', '\0', '\0']), + ('\u{4e3}', ['\u{4e2}', '\0', '\0']), + ('\u{4e5}', ['\u{4e4}', '\0', '\0']), + ('\u{4e7}', ['\u{4e6}', '\0', '\0']), + ('\u{4e9}', ['\u{4e8}', '\0', '\0']), + ('\u{4eb}', ['\u{4ea}', '\0', '\0']), + ('\u{4ed}', ['\u{4ec}', '\0', '\0']), + ('\u{4ef}', ['\u{4ee}', '\0', '\0']), + ('\u{4f1}', ['\u{4f0}', '\0', '\0']), + ('\u{4f3}', ['\u{4f2}', '\0', '\0']), + ('\u{4f5}', ['\u{4f4}', '\0', '\0']), + ('\u{4f7}', ['\u{4f6}', '\0', '\0']), + ('\u{4f9}', ['\u{4f8}', '\0', '\0']), + ('\u{4fb}', ['\u{4fa}', '\0', '\0']), + ('\u{4fd}', ['\u{4fc}', '\0', '\0']), + ('\u{4ff}', ['\u{4fe}', '\0', '\0']), + ('\u{501}', ['\u{500}', '\0', '\0']), + ('\u{503}', ['\u{502}', '\0', '\0']), + ('\u{505}', ['\u{504}', '\0', '\0']), + ('\u{507}', ['\u{506}', '\0', '\0']), + ('\u{509}', ['\u{508}', '\0', '\0']), + ('\u{50b}', ['\u{50a}', '\0', '\0']), + ('\u{50d}', ['\u{50c}', '\0', '\0']), + ('\u{50f}', ['\u{50e}', '\0', '\0']), + ('\u{511}', ['\u{510}', '\0', '\0']), + ('\u{513}', ['\u{512}', '\0', '\0']), + ('\u{515}', ['\u{514}', '\0', '\0']), + ('\u{517}', ['\u{516}', '\0', '\0']), + ('\u{519}', ['\u{518}', '\0', '\0']), + ('\u{51b}', ['\u{51a}', '\0', '\0']), + ('\u{51d}', ['\u{51c}', '\0', '\0']), + ('\u{51f}', ['\u{51e}', '\0', '\0']), + ('\u{521}', ['\u{520}', '\0', '\0']), + ('\u{523}', ['\u{522}', '\0', '\0']), + ('\u{525}', ['\u{524}', '\0', '\0']), + ('\u{527}', ['\u{526}', '\0', '\0']), + ('\u{529}', ['\u{528}', '\0', '\0']), + ('\u{52b}', ['\u{52a}', '\0', '\0']), + ('\u{52d}', ['\u{52c}', '\0', '\0']), + ('\u{52f}', ['\u{52e}', '\0', '\0']), + ('\u{561}', ['\u{531}', '\0', '\0']), + ('\u{562}', ['\u{532}', '\0', '\0']), + ('\u{563}', ['\u{533}', '\0', '\0']), + ('\u{564}', ['\u{534}', '\0', '\0']), + ('\u{565}', ['\u{535}', '\0', '\0']), + ('\u{566}', ['\u{536}', '\0', '\0']), + ('\u{567}', ['\u{537}', '\0', '\0']), + ('\u{568}', ['\u{538}', '\0', '\0']), + ('\u{569}', ['\u{539}', '\0', '\0']), + ('\u{56a}', ['\u{53a}', '\0', '\0']), + ('\u{56b}', 
['\u{53b}', '\0', '\0']), + ('\u{56c}', ['\u{53c}', '\0', '\0']), + ('\u{56d}', ['\u{53d}', '\0', '\0']), + ('\u{56e}', ['\u{53e}', '\0', '\0']), + ('\u{56f}', ['\u{53f}', '\0', '\0']), + ('\u{570}', ['\u{540}', '\0', '\0']), + ('\u{571}', ['\u{541}', '\0', '\0']), + ('\u{572}', ['\u{542}', '\0', '\0']), + ('\u{573}', ['\u{543}', '\0', '\0']), + ('\u{574}', ['\u{544}', '\0', '\0']), + ('\u{575}', ['\u{545}', '\0', '\0']), + ('\u{576}', ['\u{546}', '\0', '\0']), + ('\u{577}', ['\u{547}', '\0', '\0']), + ('\u{578}', ['\u{548}', '\0', '\0']), + ('\u{579}', ['\u{549}', '\0', '\0']), + ('\u{57a}', ['\u{54a}', '\0', '\0']), + ('\u{57b}', ['\u{54b}', '\0', '\0']), + ('\u{57c}', ['\u{54c}', '\0', '\0']), + ('\u{57d}', ['\u{54d}', '\0', '\0']), + ('\u{57e}', ['\u{54e}', '\0', '\0']), + ('\u{57f}', ['\u{54f}', '\0', '\0']), + ('\u{580}', ['\u{550}', '\0', '\0']), + ('\u{581}', ['\u{551}', '\0', '\0']), + ('\u{582}', ['\u{552}', '\0', '\0']), + ('\u{583}', ['\u{553}', '\0', '\0']), + ('\u{584}', ['\u{554}', '\0', '\0']), + ('\u{585}', ['\u{555}', '\0', '\0']), + ('\u{586}', ['\u{556}', '\0', '\0']), + ('\u{587}', ['\u{535}', '\u{552}', '\0']), + ('\u{10d0}', ['\u{1c90}', '\0', '\0']), + ('\u{10d1}', ['\u{1c91}', '\0', '\0']), + ('\u{10d2}', ['\u{1c92}', '\0', '\0']), + ('\u{10d3}', ['\u{1c93}', '\0', '\0']), + ('\u{10d4}', ['\u{1c94}', '\0', '\0']), + ('\u{10d5}', ['\u{1c95}', '\0', '\0']), + ('\u{10d6}', ['\u{1c96}', '\0', '\0']), + ('\u{10d7}', ['\u{1c97}', '\0', '\0']), + ('\u{10d8}', ['\u{1c98}', '\0', '\0']), + ('\u{10d9}', ['\u{1c99}', '\0', '\0']), + ('\u{10da}', ['\u{1c9a}', '\0', '\0']), + ('\u{10db}', ['\u{1c9b}', '\0', '\0']), + ('\u{10dc}', ['\u{1c9c}', '\0', '\0']), + ('\u{10dd}', ['\u{1c9d}', '\0', '\0']), + ('\u{10de}', ['\u{1c9e}', '\0', '\0']), + ('\u{10df}', ['\u{1c9f}', '\0', '\0']), + ('\u{10e0}', ['\u{1ca0}', '\0', '\0']), + ('\u{10e1}', ['\u{1ca1}', '\0', '\0']), + ('\u{10e2}', ['\u{1ca2}', '\0', '\0']), + ('\u{10e3}', ['\u{1ca3}', '\0', '\0']), + ('\u{10e4}', ['\u{1ca4}', '\0', '\0']), + ('\u{10e5}', ['\u{1ca5}', '\0', '\0']), + ('\u{10e6}', ['\u{1ca6}', '\0', '\0']), + ('\u{10e7}', ['\u{1ca7}', '\0', '\0']), + ('\u{10e8}', ['\u{1ca8}', '\0', '\0']), + ('\u{10e9}', ['\u{1ca9}', '\0', '\0']), + ('\u{10ea}', ['\u{1caa}', '\0', '\0']), + ('\u{10eb}', ['\u{1cab}', '\0', '\0']), + ('\u{10ec}', ['\u{1cac}', '\0', '\0']), + ('\u{10ed}', ['\u{1cad}', '\0', '\0']), + ('\u{10ee}', ['\u{1cae}', '\0', '\0']), + ('\u{10ef}', ['\u{1caf}', '\0', '\0']), + ('\u{10f0}', ['\u{1cb0}', '\0', '\0']), + ('\u{10f1}', ['\u{1cb1}', '\0', '\0']), + ('\u{10f2}', ['\u{1cb2}', '\0', '\0']), + ('\u{10f3}', ['\u{1cb3}', '\0', '\0']), + ('\u{10f4}', ['\u{1cb4}', '\0', '\0']), + ('\u{10f5}', ['\u{1cb5}', '\0', '\0']), + ('\u{10f6}', ['\u{1cb6}', '\0', '\0']), + ('\u{10f7}', ['\u{1cb7}', '\0', '\0']), + ('\u{10f8}', ['\u{1cb8}', '\0', '\0']), + ('\u{10f9}', ['\u{1cb9}', '\0', '\0']), + ('\u{10fa}', ['\u{1cba}', '\0', '\0']), + ('\u{10fd}', ['\u{1cbd}', '\0', '\0']), + ('\u{10fe}', ['\u{1cbe}', '\0', '\0']), + ('\u{10ff}', ['\u{1cbf}', '\0', '\0']), + ('\u{13f8}', ['\u{13f0}', '\0', '\0']), + ('\u{13f9}', ['\u{13f1}', '\0', '\0']), + ('\u{13fa}', ['\u{13f2}', '\0', '\0']), + ('\u{13fb}', ['\u{13f3}', '\0', '\0']), + ('\u{13fc}', ['\u{13f4}', '\0', '\0']), + ('\u{13fd}', ['\u{13f5}', '\0', '\0']), + ('\u{1c80}', ['\u{412}', '\0', '\0']), + ('\u{1c81}', ['\u{414}', '\0', '\0']), + ('\u{1c82}', ['\u{41e}', '\0', '\0']), + ('\u{1c83}', ['\u{421}', '\0', '\0']), + ('\u{1c84}', ['\u{422}', '\0', '\0']), + ('\u{1c85}', 
['\u{422}', '\0', '\0']), + ('\u{1c86}', ['\u{42a}', '\0', '\0']), + ('\u{1c87}', ['\u{462}', '\0', '\0']), + ('\u{1c88}', ['\u{a64a}', '\0', '\0']), + ('\u{1d79}', ['\u{a77d}', '\0', '\0']), + ('\u{1d7d}', ['\u{2c63}', '\0', '\0']), + ('\u{1e01}', ['\u{1e00}', '\0', '\0']), + ('\u{1e03}', ['\u{1e02}', '\0', '\0']), + ('\u{1e05}', ['\u{1e04}', '\0', '\0']), + ('\u{1e07}', ['\u{1e06}', '\0', '\0']), + ('\u{1e09}', ['\u{1e08}', '\0', '\0']), + ('\u{1e0b}', ['\u{1e0a}', '\0', '\0']), + ('\u{1e0d}', ['\u{1e0c}', '\0', '\0']), + ('\u{1e0f}', ['\u{1e0e}', '\0', '\0']), + ('\u{1e11}', ['\u{1e10}', '\0', '\0']), + ('\u{1e13}', ['\u{1e12}', '\0', '\0']), + ('\u{1e15}', ['\u{1e14}', '\0', '\0']), + ('\u{1e17}', ['\u{1e16}', '\0', '\0']), + ('\u{1e19}', ['\u{1e18}', '\0', '\0']), + ('\u{1e1b}', ['\u{1e1a}', '\0', '\0']), + ('\u{1e1d}', ['\u{1e1c}', '\0', '\0']), + ('\u{1e1f}', ['\u{1e1e}', '\0', '\0']), + ('\u{1e21}', ['\u{1e20}', '\0', '\0']), + ('\u{1e23}', ['\u{1e22}', '\0', '\0']), + ('\u{1e25}', ['\u{1e24}', '\0', '\0']), + ('\u{1e27}', ['\u{1e26}', '\0', '\0']), + ('\u{1e29}', ['\u{1e28}', '\0', '\0']), + ('\u{1e2b}', ['\u{1e2a}', '\0', '\0']), + ('\u{1e2d}', ['\u{1e2c}', '\0', '\0']), + ('\u{1e2f}', ['\u{1e2e}', '\0', '\0']), + ('\u{1e31}', ['\u{1e30}', '\0', '\0']), + ('\u{1e33}', ['\u{1e32}', '\0', '\0']), + ('\u{1e35}', ['\u{1e34}', '\0', '\0']), + ('\u{1e37}', ['\u{1e36}', '\0', '\0']), + ('\u{1e39}', ['\u{1e38}', '\0', '\0']), + ('\u{1e3b}', ['\u{1e3a}', '\0', '\0']), + ('\u{1e3d}', ['\u{1e3c}', '\0', '\0']), + ('\u{1e3f}', ['\u{1e3e}', '\0', '\0']), + ('\u{1e41}', ['\u{1e40}', '\0', '\0']), + ('\u{1e43}', ['\u{1e42}', '\0', '\0']), + ('\u{1e45}', ['\u{1e44}', '\0', '\0']), + ('\u{1e47}', ['\u{1e46}', '\0', '\0']), + ('\u{1e49}', ['\u{1e48}', '\0', '\0']), + ('\u{1e4b}', ['\u{1e4a}', '\0', '\0']), + ('\u{1e4d}', ['\u{1e4c}', '\0', '\0']), + ('\u{1e4f}', ['\u{1e4e}', '\0', '\0']), + ('\u{1e51}', ['\u{1e50}', '\0', '\0']), + ('\u{1e53}', ['\u{1e52}', '\0', '\0']), + ('\u{1e55}', ['\u{1e54}', '\0', '\0']), + ('\u{1e57}', ['\u{1e56}', '\0', '\0']), + ('\u{1e59}', ['\u{1e58}', '\0', '\0']), + ('\u{1e5b}', ['\u{1e5a}', '\0', '\0']), + ('\u{1e5d}', ['\u{1e5c}', '\0', '\0']), + ('\u{1e5f}', ['\u{1e5e}', '\0', '\0']), + ('\u{1e61}', ['\u{1e60}', '\0', '\0']), + ('\u{1e63}', ['\u{1e62}', '\0', '\0']), + ('\u{1e65}', ['\u{1e64}', '\0', '\0']), + ('\u{1e67}', ['\u{1e66}', '\0', '\0']), + ('\u{1e69}', ['\u{1e68}', '\0', '\0']), + ('\u{1e6b}', ['\u{1e6a}', '\0', '\0']), + ('\u{1e6d}', ['\u{1e6c}', '\0', '\0']), + ('\u{1e6f}', ['\u{1e6e}', '\0', '\0']), + ('\u{1e71}', ['\u{1e70}', '\0', '\0']), + ('\u{1e73}', ['\u{1e72}', '\0', '\0']), + ('\u{1e75}', ['\u{1e74}', '\0', '\0']), + ('\u{1e77}', ['\u{1e76}', '\0', '\0']), + ('\u{1e79}', ['\u{1e78}', '\0', '\0']), + ('\u{1e7b}', ['\u{1e7a}', '\0', '\0']), + ('\u{1e7d}', ['\u{1e7c}', '\0', '\0']), + ('\u{1e7f}', ['\u{1e7e}', '\0', '\0']), + ('\u{1e81}', ['\u{1e80}', '\0', '\0']), + ('\u{1e83}', ['\u{1e82}', '\0', '\0']), + ('\u{1e85}', ['\u{1e84}', '\0', '\0']), + ('\u{1e87}', ['\u{1e86}', '\0', '\0']), + ('\u{1e89}', ['\u{1e88}', '\0', '\0']), + ('\u{1e8b}', ['\u{1e8a}', '\0', '\0']), + ('\u{1e8d}', ['\u{1e8c}', '\0', '\0']), + ('\u{1e8f}', ['\u{1e8e}', '\0', '\0']), + ('\u{1e91}', ['\u{1e90}', '\0', '\0']), + ('\u{1e93}', ['\u{1e92}', '\0', '\0']), + ('\u{1e95}', ['\u{1e94}', '\0', '\0']), + ('\u{1e96}', ['\u{48}', '\u{331}', '\0']), + ('\u{1e97}', ['\u{54}', '\u{308}', '\0']), + ('\u{1e98}', ['\u{57}', '\u{30a}', '\0']), + ('\u{1e99}', ['\u{59}', 
'\u{30a}', '\0']), + ('\u{1e9a}', ['\u{41}', '\u{2be}', '\0']), + ('\u{1e9b}', ['\u{1e60}', '\0', '\0']), + ('\u{1ea1}', ['\u{1ea0}', '\0', '\0']), + ('\u{1ea3}', ['\u{1ea2}', '\0', '\0']), + ('\u{1ea5}', ['\u{1ea4}', '\0', '\0']), + ('\u{1ea7}', ['\u{1ea6}', '\0', '\0']), + ('\u{1ea9}', ['\u{1ea8}', '\0', '\0']), + ('\u{1eab}', ['\u{1eaa}', '\0', '\0']), + ('\u{1ead}', ['\u{1eac}', '\0', '\0']), + ('\u{1eaf}', ['\u{1eae}', '\0', '\0']), + ('\u{1eb1}', ['\u{1eb0}', '\0', '\0']), + ('\u{1eb3}', ['\u{1eb2}', '\0', '\0']), + ('\u{1eb5}', ['\u{1eb4}', '\0', '\0']), + ('\u{1eb7}', ['\u{1eb6}', '\0', '\0']), + ('\u{1eb9}', ['\u{1eb8}', '\0', '\0']), + ('\u{1ebb}', ['\u{1eba}', '\0', '\0']), + ('\u{1ebd}', ['\u{1ebc}', '\0', '\0']), + ('\u{1ebf}', ['\u{1ebe}', '\0', '\0']), + ('\u{1ec1}', ['\u{1ec0}', '\0', '\0']), + ('\u{1ec3}', ['\u{1ec2}', '\0', '\0']), + ('\u{1ec5}', ['\u{1ec4}', '\0', '\0']), + ('\u{1ec7}', ['\u{1ec6}', '\0', '\0']), + ('\u{1ec9}', ['\u{1ec8}', '\0', '\0']), + ('\u{1ecb}', ['\u{1eca}', '\0', '\0']), + ('\u{1ecd}', ['\u{1ecc}', '\0', '\0']), + ('\u{1ecf}', ['\u{1ece}', '\0', '\0']), + ('\u{1ed1}', ['\u{1ed0}', '\0', '\0']), + ('\u{1ed3}', ['\u{1ed2}', '\0', '\0']), + ('\u{1ed5}', ['\u{1ed4}', '\0', '\0']), + ('\u{1ed7}', ['\u{1ed6}', '\0', '\0']), + ('\u{1ed9}', ['\u{1ed8}', '\0', '\0']), + ('\u{1edb}', ['\u{1eda}', '\0', '\0']), + ('\u{1edd}', ['\u{1edc}', '\0', '\0']), + ('\u{1edf}', ['\u{1ede}', '\0', '\0']), + ('\u{1ee1}', ['\u{1ee0}', '\0', '\0']), + ('\u{1ee3}', ['\u{1ee2}', '\0', '\0']), + ('\u{1ee5}', ['\u{1ee4}', '\0', '\0']), + ('\u{1ee7}', ['\u{1ee6}', '\0', '\0']), + ('\u{1ee9}', ['\u{1ee8}', '\0', '\0']), + ('\u{1eeb}', ['\u{1eea}', '\0', '\0']), + ('\u{1eed}', ['\u{1eec}', '\0', '\0']), + ('\u{1eef}', ['\u{1eee}', '\0', '\0']), + ('\u{1ef1}', ['\u{1ef0}', '\0', '\0']), + ('\u{1ef3}', ['\u{1ef2}', '\0', '\0']), + ('\u{1ef5}', ['\u{1ef4}', '\0', '\0']), + ('\u{1ef7}', ['\u{1ef6}', '\0', '\0']), + ('\u{1ef9}', ['\u{1ef8}', '\0', '\0']), + ('\u{1efb}', ['\u{1efa}', '\0', '\0']), + ('\u{1efd}', ['\u{1efc}', '\0', '\0']), + ('\u{1eff}', ['\u{1efe}', '\0', '\0']), + ('\u{1f00}', ['\u{1f08}', '\0', '\0']), + ('\u{1f01}', ['\u{1f09}', '\0', '\0']), + ('\u{1f02}', ['\u{1f0a}', '\0', '\0']), + ('\u{1f03}', ['\u{1f0b}', '\0', '\0']), + ('\u{1f04}', ['\u{1f0c}', '\0', '\0']), + ('\u{1f05}', ['\u{1f0d}', '\0', '\0']), + ('\u{1f06}', ['\u{1f0e}', '\0', '\0']), + ('\u{1f07}', ['\u{1f0f}', '\0', '\0']), + ('\u{1f10}', ['\u{1f18}', '\0', '\0']), + ('\u{1f11}', ['\u{1f19}', '\0', '\0']), + ('\u{1f12}', ['\u{1f1a}', '\0', '\0']), + ('\u{1f13}', ['\u{1f1b}', '\0', '\0']), + ('\u{1f14}', ['\u{1f1c}', '\0', '\0']), + ('\u{1f15}', ['\u{1f1d}', '\0', '\0']), + ('\u{1f20}', ['\u{1f28}', '\0', '\0']), + ('\u{1f21}', ['\u{1f29}', '\0', '\0']), + ('\u{1f22}', ['\u{1f2a}', '\0', '\0']), + ('\u{1f23}', ['\u{1f2b}', '\0', '\0']), + ('\u{1f24}', ['\u{1f2c}', '\0', '\0']), + ('\u{1f25}', ['\u{1f2d}', '\0', '\0']), + ('\u{1f26}', ['\u{1f2e}', '\0', '\0']), + ('\u{1f27}', ['\u{1f2f}', '\0', '\0']), + ('\u{1f30}', ['\u{1f38}', '\0', '\0']), + ('\u{1f31}', ['\u{1f39}', '\0', '\0']), + ('\u{1f32}', ['\u{1f3a}', '\0', '\0']), + ('\u{1f33}', ['\u{1f3b}', '\0', '\0']), + ('\u{1f34}', ['\u{1f3c}', '\0', '\0']), + ('\u{1f35}', ['\u{1f3d}', '\0', '\0']), + ('\u{1f36}', ['\u{1f3e}', '\0', '\0']), + ('\u{1f37}', ['\u{1f3f}', '\0', '\0']), + ('\u{1f40}', ['\u{1f48}', '\0', '\0']), + ('\u{1f41}', ['\u{1f49}', '\0', '\0']), + ('\u{1f42}', ['\u{1f4a}', '\0', '\0']), + ('\u{1f43}', ['\u{1f4b}', '\0', '\0']), + 
('\u{1f44}', ['\u{1f4c}', '\0', '\0']), + ('\u{1f45}', ['\u{1f4d}', '\0', '\0']), + ('\u{1f50}', ['\u{3a5}', '\u{313}', '\0']), + ('\u{1f51}', ['\u{1f59}', '\0', '\0']), + ('\u{1f52}', ['\u{3a5}', '\u{313}', '\u{300}']), + ('\u{1f53}', ['\u{1f5b}', '\0', '\0']), + ('\u{1f54}', ['\u{3a5}', '\u{313}', '\u{301}']), + ('\u{1f55}', ['\u{1f5d}', '\0', '\0']), + ('\u{1f56}', ['\u{3a5}', '\u{313}', '\u{342}']), + ('\u{1f57}', ['\u{1f5f}', '\0', '\0']), + ('\u{1f60}', ['\u{1f68}', '\0', '\0']), + ('\u{1f61}', ['\u{1f69}', '\0', '\0']), + ('\u{1f62}', ['\u{1f6a}', '\0', '\0']), + ('\u{1f63}', ['\u{1f6b}', '\0', '\0']), + ('\u{1f64}', ['\u{1f6c}', '\0', '\0']), + ('\u{1f65}', ['\u{1f6d}', '\0', '\0']), + ('\u{1f66}', ['\u{1f6e}', '\0', '\0']), + ('\u{1f67}', ['\u{1f6f}', '\0', '\0']), + ('\u{1f70}', ['\u{1fba}', '\0', '\0']), + ('\u{1f71}', ['\u{1fbb}', '\0', '\0']), + ('\u{1f72}', ['\u{1fc8}', '\0', '\0']), + ('\u{1f73}', ['\u{1fc9}', '\0', '\0']), + ('\u{1f74}', ['\u{1fca}', '\0', '\0']), + ('\u{1f75}', ['\u{1fcb}', '\0', '\0']), + ('\u{1f76}', ['\u{1fda}', '\0', '\0']), + ('\u{1f77}', ['\u{1fdb}', '\0', '\0']), + ('\u{1f78}', ['\u{1ff8}', '\0', '\0']), + ('\u{1f79}', ['\u{1ff9}', '\0', '\0']), + ('\u{1f7a}', ['\u{1fea}', '\0', '\0']), + ('\u{1f7b}', ['\u{1feb}', '\0', '\0']), + ('\u{1f7c}', ['\u{1ffa}', '\0', '\0']), + ('\u{1f7d}', ['\u{1ffb}', '\0', '\0']), + ('\u{1f80}', ['\u{1f08}', '\u{399}', '\0']), + ('\u{1f81}', ['\u{1f09}', '\u{399}', '\0']), + ('\u{1f82}', ['\u{1f0a}', '\u{399}', '\0']), + ('\u{1f83}', ['\u{1f0b}', '\u{399}', '\0']), + ('\u{1f84}', ['\u{1f0c}', '\u{399}', '\0']), + ('\u{1f85}', ['\u{1f0d}', '\u{399}', '\0']), + ('\u{1f86}', ['\u{1f0e}', '\u{399}', '\0']), + ('\u{1f87}', ['\u{1f0f}', '\u{399}', '\0']), + ('\u{1f88}', ['\u{1f08}', '\u{399}', '\0']), + ('\u{1f89}', ['\u{1f09}', '\u{399}', '\0']), + ('\u{1f8a}', ['\u{1f0a}', '\u{399}', '\0']), + ('\u{1f8b}', ['\u{1f0b}', '\u{399}', '\0']), + ('\u{1f8c}', ['\u{1f0c}', '\u{399}', '\0']), + ('\u{1f8d}', ['\u{1f0d}', '\u{399}', '\0']), + ('\u{1f8e}', ['\u{1f0e}', '\u{399}', '\0']), + ('\u{1f8f}', ['\u{1f0f}', '\u{399}', '\0']), + ('\u{1f90}', ['\u{1f28}', '\u{399}', '\0']), + ('\u{1f91}', ['\u{1f29}', '\u{399}', '\0']), + ('\u{1f92}', ['\u{1f2a}', '\u{399}', '\0']), + ('\u{1f93}', ['\u{1f2b}', '\u{399}', '\0']), + ('\u{1f94}', ['\u{1f2c}', '\u{399}', '\0']), + ('\u{1f95}', ['\u{1f2d}', '\u{399}', '\0']), + ('\u{1f96}', ['\u{1f2e}', '\u{399}', '\0']), + ('\u{1f97}', ['\u{1f2f}', '\u{399}', '\0']), + ('\u{1f98}', ['\u{1f28}', '\u{399}', '\0']), + ('\u{1f99}', ['\u{1f29}', '\u{399}', '\0']), + ('\u{1f9a}', ['\u{1f2a}', '\u{399}', '\0']), + ('\u{1f9b}', ['\u{1f2b}', '\u{399}', '\0']), + ('\u{1f9c}', ['\u{1f2c}', '\u{399}', '\0']), + ('\u{1f9d}', ['\u{1f2d}', '\u{399}', '\0']), + ('\u{1f9e}', ['\u{1f2e}', '\u{399}', '\0']), + ('\u{1f9f}', ['\u{1f2f}', '\u{399}', '\0']), + ('\u{1fa0}', ['\u{1f68}', '\u{399}', '\0']), + ('\u{1fa1}', ['\u{1f69}', '\u{399}', '\0']), + ('\u{1fa2}', ['\u{1f6a}', '\u{399}', '\0']), + ('\u{1fa3}', ['\u{1f6b}', '\u{399}', '\0']), + ('\u{1fa4}', ['\u{1f6c}', '\u{399}', '\0']), + ('\u{1fa5}', ['\u{1f6d}', '\u{399}', '\0']), + ('\u{1fa6}', ['\u{1f6e}', '\u{399}', '\0']), + ('\u{1fa7}', ['\u{1f6f}', '\u{399}', '\0']), + ('\u{1fa8}', ['\u{1f68}', '\u{399}', '\0']), + ('\u{1fa9}', ['\u{1f69}', '\u{399}', '\0']), + ('\u{1faa}', ['\u{1f6a}', '\u{399}', '\0']), + ('\u{1fab}', ['\u{1f6b}', '\u{399}', '\0']), + ('\u{1fac}', ['\u{1f6c}', '\u{399}', '\0']), + ('\u{1fad}', ['\u{1f6d}', '\u{399}', '\0']), + ('\u{1fae}', 
['\u{1f6e}', '\u{399}', '\0']), + ('\u{1faf}', ['\u{1f6f}', '\u{399}', '\0']), + ('\u{1fb0}', ['\u{1fb8}', '\0', '\0']), + ('\u{1fb1}', ['\u{1fb9}', '\0', '\0']), + ('\u{1fb2}', ['\u{1fba}', '\u{399}', '\0']), + ('\u{1fb3}', ['\u{391}', '\u{399}', '\0']), + ('\u{1fb4}', ['\u{386}', '\u{399}', '\0']), + ('\u{1fb6}', ['\u{391}', '\u{342}', '\0']), + ('\u{1fb7}', ['\u{391}', '\u{342}', '\u{399}']), + ('\u{1fbc}', ['\u{391}', '\u{399}', '\0']), + ('\u{1fbe}', ['\u{399}', '\0', '\0']), + ('\u{1fc2}', ['\u{1fca}', '\u{399}', '\0']), + ('\u{1fc3}', ['\u{397}', '\u{399}', '\0']), + ('\u{1fc4}', ['\u{389}', '\u{399}', '\0']), + ('\u{1fc6}', ['\u{397}', '\u{342}', '\0']), + ('\u{1fc7}', ['\u{397}', '\u{342}', '\u{399}']), + ('\u{1fcc}', ['\u{397}', '\u{399}', '\0']), + ('\u{1fd0}', ['\u{1fd8}', '\0', '\0']), + ('\u{1fd1}', ['\u{1fd9}', '\0', '\0']), + ('\u{1fd2}', ['\u{399}', '\u{308}', '\u{300}']), + ('\u{1fd3}', ['\u{399}', '\u{308}', '\u{301}']), + ('\u{1fd6}', ['\u{399}', '\u{342}', '\0']), + ('\u{1fd7}', ['\u{399}', '\u{308}', '\u{342}']), + ('\u{1fe0}', ['\u{1fe8}', '\0', '\0']), + ('\u{1fe1}', ['\u{1fe9}', '\0', '\0']), + ('\u{1fe2}', ['\u{3a5}', '\u{308}', '\u{300}']), + ('\u{1fe3}', ['\u{3a5}', '\u{308}', '\u{301}']), + ('\u{1fe4}', ['\u{3a1}', '\u{313}', '\0']), + ('\u{1fe5}', ['\u{1fec}', '\0', '\0']), + ('\u{1fe6}', ['\u{3a5}', '\u{342}', '\0']), + ('\u{1fe7}', ['\u{3a5}', '\u{308}', '\u{342}']), + ('\u{1ff2}', ['\u{1ffa}', '\u{399}', '\0']), + ('\u{1ff3}', ['\u{3a9}', '\u{399}', '\0']), + ('\u{1ff4}', ['\u{38f}', '\u{399}', '\0']), + ('\u{1ff6}', ['\u{3a9}', '\u{342}', '\0']), + ('\u{1ff7}', ['\u{3a9}', '\u{342}', '\u{399}']), + ('\u{1ffc}', ['\u{3a9}', '\u{399}', '\0']), + ('\u{214e}', ['\u{2132}', '\0', '\0']), + ('\u{2170}', ['\u{2160}', '\0', '\0']), + ('\u{2171}', ['\u{2161}', '\0', '\0']), + ('\u{2172}', ['\u{2162}', '\0', '\0']), + ('\u{2173}', ['\u{2163}', '\0', '\0']), + ('\u{2174}', ['\u{2164}', '\0', '\0']), + ('\u{2175}', ['\u{2165}', '\0', '\0']), + ('\u{2176}', ['\u{2166}', '\0', '\0']), + ('\u{2177}', ['\u{2167}', '\0', '\0']), + ('\u{2178}', ['\u{2168}', '\0', '\0']), + ('\u{2179}', ['\u{2169}', '\0', '\0']), + ('\u{217a}', ['\u{216a}', '\0', '\0']), + ('\u{217b}', ['\u{216b}', '\0', '\0']), + ('\u{217c}', ['\u{216c}', '\0', '\0']), + ('\u{217d}', ['\u{216d}', '\0', '\0']), + ('\u{217e}', ['\u{216e}', '\0', '\0']), + ('\u{217f}', ['\u{216f}', '\0', '\0']), + ('\u{2184}', ['\u{2183}', '\0', '\0']), + ('\u{24d0}', ['\u{24b6}', '\0', '\0']), + ('\u{24d1}', ['\u{24b7}', '\0', '\0']), + ('\u{24d2}', ['\u{24b8}', '\0', '\0']), + ('\u{24d3}', ['\u{24b9}', '\0', '\0']), + ('\u{24d4}', ['\u{24ba}', '\0', '\0']), + ('\u{24d5}', ['\u{24bb}', '\0', '\0']), + ('\u{24d6}', ['\u{24bc}', '\0', '\0']), + ('\u{24d7}', ['\u{24bd}', '\0', '\0']), + ('\u{24d8}', ['\u{24be}', '\0', '\0']), + ('\u{24d9}', ['\u{24bf}', '\0', '\0']), + ('\u{24da}', ['\u{24c0}', '\0', '\0']), + ('\u{24db}', ['\u{24c1}', '\0', '\0']), + ('\u{24dc}', ['\u{24c2}', '\0', '\0']), + ('\u{24dd}', ['\u{24c3}', '\0', '\0']), + ('\u{24de}', ['\u{24c4}', '\0', '\0']), + ('\u{24df}', ['\u{24c5}', '\0', '\0']), + ('\u{24e0}', ['\u{24c6}', '\0', '\0']), + ('\u{24e1}', ['\u{24c7}', '\0', '\0']), + ('\u{24e2}', ['\u{24c8}', '\0', '\0']), + ('\u{24e3}', ['\u{24c9}', '\0', '\0']), + ('\u{24e4}', ['\u{24ca}', '\0', '\0']), + ('\u{24e5}', ['\u{24cb}', '\0', '\0']), + ('\u{24e6}', ['\u{24cc}', '\0', '\0']), + ('\u{24e7}', ['\u{24cd}', '\0', '\0']), + ('\u{24e8}', ['\u{24ce}', '\0', '\0']), + ('\u{24e9}', ['\u{24cf}', '\0', '\0']), + 
('\u{2c30}', ['\u{2c00}', '\0', '\0']), + ('\u{2c31}', ['\u{2c01}', '\0', '\0']), + ('\u{2c32}', ['\u{2c02}', '\0', '\0']), + ('\u{2c33}', ['\u{2c03}', '\0', '\0']), + ('\u{2c34}', ['\u{2c04}', '\0', '\0']), + ('\u{2c35}', ['\u{2c05}', '\0', '\0']), + ('\u{2c36}', ['\u{2c06}', '\0', '\0']), + ('\u{2c37}', ['\u{2c07}', '\0', '\0']), + ('\u{2c38}', ['\u{2c08}', '\0', '\0']), + ('\u{2c39}', ['\u{2c09}', '\0', '\0']), + ('\u{2c3a}', ['\u{2c0a}', '\0', '\0']), + ('\u{2c3b}', ['\u{2c0b}', '\0', '\0']), + ('\u{2c3c}', ['\u{2c0c}', '\0', '\0']), + ('\u{2c3d}', ['\u{2c0d}', '\0', '\0']), + ('\u{2c3e}', ['\u{2c0e}', '\0', '\0']), + ('\u{2c3f}', ['\u{2c0f}', '\0', '\0']), + ('\u{2c40}', ['\u{2c10}', '\0', '\0']), + ('\u{2c41}', ['\u{2c11}', '\0', '\0']), + ('\u{2c42}', ['\u{2c12}', '\0', '\0']), + ('\u{2c43}', ['\u{2c13}', '\0', '\0']), + ('\u{2c44}', ['\u{2c14}', '\0', '\0']), + ('\u{2c45}', ['\u{2c15}', '\0', '\0']), + ('\u{2c46}', ['\u{2c16}', '\0', '\0']), + ('\u{2c47}', ['\u{2c17}', '\0', '\0']), + ('\u{2c48}', ['\u{2c18}', '\0', '\0']), + ('\u{2c49}', ['\u{2c19}', '\0', '\0']), + ('\u{2c4a}', ['\u{2c1a}', '\0', '\0']), + ('\u{2c4b}', ['\u{2c1b}', '\0', '\0']), + ('\u{2c4c}', ['\u{2c1c}', '\0', '\0']), + ('\u{2c4d}', ['\u{2c1d}', '\0', '\0']), + ('\u{2c4e}', ['\u{2c1e}', '\0', '\0']), + ('\u{2c4f}', ['\u{2c1f}', '\0', '\0']), + ('\u{2c50}', ['\u{2c20}', '\0', '\0']), + ('\u{2c51}', ['\u{2c21}', '\0', '\0']), + ('\u{2c52}', ['\u{2c22}', '\0', '\0']), + ('\u{2c53}', ['\u{2c23}', '\0', '\0']), + ('\u{2c54}', ['\u{2c24}', '\0', '\0']), + ('\u{2c55}', ['\u{2c25}', '\0', '\0']), + ('\u{2c56}', ['\u{2c26}', '\0', '\0']), + ('\u{2c57}', ['\u{2c27}', '\0', '\0']), + ('\u{2c58}', ['\u{2c28}', '\0', '\0']), + ('\u{2c59}', ['\u{2c29}', '\0', '\0']), + ('\u{2c5a}', ['\u{2c2a}', '\0', '\0']), + ('\u{2c5b}', ['\u{2c2b}', '\0', '\0']), + ('\u{2c5c}', ['\u{2c2c}', '\0', '\0']), + ('\u{2c5d}', ['\u{2c2d}', '\0', '\0']), + ('\u{2c5e}', ['\u{2c2e}', '\0', '\0']), + ('\u{2c61}', ['\u{2c60}', '\0', '\0']), + ('\u{2c65}', ['\u{23a}', '\0', '\0']), + ('\u{2c66}', ['\u{23e}', '\0', '\0']), + ('\u{2c68}', ['\u{2c67}', '\0', '\0']), + ('\u{2c6a}', ['\u{2c69}', '\0', '\0']), + ('\u{2c6c}', ['\u{2c6b}', '\0', '\0']), + ('\u{2c73}', ['\u{2c72}', '\0', '\0']), + ('\u{2c76}', ['\u{2c75}', '\0', '\0']), + ('\u{2c81}', ['\u{2c80}', '\0', '\0']), + ('\u{2c83}', ['\u{2c82}', '\0', '\0']), + ('\u{2c85}', ['\u{2c84}', '\0', '\0']), + ('\u{2c87}', ['\u{2c86}', '\0', '\0']), + ('\u{2c89}', ['\u{2c88}', '\0', '\0']), + ('\u{2c8b}', ['\u{2c8a}', '\0', '\0']), + ('\u{2c8d}', ['\u{2c8c}', '\0', '\0']), + ('\u{2c8f}', ['\u{2c8e}', '\0', '\0']), + ('\u{2c91}', ['\u{2c90}', '\0', '\0']), + ('\u{2c93}', ['\u{2c92}', '\0', '\0']), + ('\u{2c95}', ['\u{2c94}', '\0', '\0']), + ('\u{2c97}', ['\u{2c96}', '\0', '\0']), + ('\u{2c99}', ['\u{2c98}', '\0', '\0']), + ('\u{2c9b}', ['\u{2c9a}', '\0', '\0']), + ('\u{2c9d}', ['\u{2c9c}', '\0', '\0']), + ('\u{2c9f}', ['\u{2c9e}', '\0', '\0']), + ('\u{2ca1}', ['\u{2ca0}', '\0', '\0']), + ('\u{2ca3}', ['\u{2ca2}', '\0', '\0']), + ('\u{2ca5}', ['\u{2ca4}', '\0', '\0']), + ('\u{2ca7}', ['\u{2ca6}', '\0', '\0']), + ('\u{2ca9}', ['\u{2ca8}', '\0', '\0']), + ('\u{2cab}', ['\u{2caa}', '\0', '\0']), + ('\u{2cad}', ['\u{2cac}', '\0', '\0']), + ('\u{2caf}', ['\u{2cae}', '\0', '\0']), + ('\u{2cb1}', ['\u{2cb0}', '\0', '\0']), + ('\u{2cb3}', ['\u{2cb2}', '\0', '\0']), + ('\u{2cb5}', ['\u{2cb4}', '\0', '\0']), + ('\u{2cb7}', ['\u{2cb6}', '\0', '\0']), + ('\u{2cb9}', ['\u{2cb8}', '\0', '\0']), + ('\u{2cbb}', ['\u{2cba}', 
'\0', '\0']), + ('\u{2cbd}', ['\u{2cbc}', '\0', '\0']), + ('\u{2cbf}', ['\u{2cbe}', '\0', '\0']), + ('\u{2cc1}', ['\u{2cc0}', '\0', '\0']), + ('\u{2cc3}', ['\u{2cc2}', '\0', '\0']), + ('\u{2cc5}', ['\u{2cc4}', '\0', '\0']), + ('\u{2cc7}', ['\u{2cc6}', '\0', '\0']), + ('\u{2cc9}', ['\u{2cc8}', '\0', '\0']), + ('\u{2ccb}', ['\u{2cca}', '\0', '\0']), + ('\u{2ccd}', ['\u{2ccc}', '\0', '\0']), + ('\u{2ccf}', ['\u{2cce}', '\0', '\0']), + ('\u{2cd1}', ['\u{2cd0}', '\0', '\0']), + ('\u{2cd3}', ['\u{2cd2}', '\0', '\0']), + ('\u{2cd5}', ['\u{2cd4}', '\0', '\0']), + ('\u{2cd7}', ['\u{2cd6}', '\0', '\0']), + ('\u{2cd9}', ['\u{2cd8}', '\0', '\0']), + ('\u{2cdb}', ['\u{2cda}', '\0', '\0']), + ('\u{2cdd}', ['\u{2cdc}', '\0', '\0']), + ('\u{2cdf}', ['\u{2cde}', '\0', '\0']), + ('\u{2ce1}', ['\u{2ce0}', '\0', '\0']), + ('\u{2ce3}', ['\u{2ce2}', '\0', '\0']), + ('\u{2cec}', ['\u{2ceb}', '\0', '\0']), + ('\u{2cee}', ['\u{2ced}', '\0', '\0']), + ('\u{2cf3}', ['\u{2cf2}', '\0', '\0']), + ('\u{2d00}', ['\u{10a0}', '\0', '\0']), + ('\u{2d01}', ['\u{10a1}', '\0', '\0']), + ('\u{2d02}', ['\u{10a2}', '\0', '\0']), + ('\u{2d03}', ['\u{10a3}', '\0', '\0']), + ('\u{2d04}', ['\u{10a4}', '\0', '\0']), + ('\u{2d05}', ['\u{10a5}', '\0', '\0']), + ('\u{2d06}', ['\u{10a6}', '\0', '\0']), + ('\u{2d07}', ['\u{10a7}', '\0', '\0']), + ('\u{2d08}', ['\u{10a8}', '\0', '\0']), + ('\u{2d09}', ['\u{10a9}', '\0', '\0']), + ('\u{2d0a}', ['\u{10aa}', '\0', '\0']), + ('\u{2d0b}', ['\u{10ab}', '\0', '\0']), + ('\u{2d0c}', ['\u{10ac}', '\0', '\0']), + ('\u{2d0d}', ['\u{10ad}', '\0', '\0']), + ('\u{2d0e}', ['\u{10ae}', '\0', '\0']), + ('\u{2d0f}', ['\u{10af}', '\0', '\0']), + ('\u{2d10}', ['\u{10b0}', '\0', '\0']), + ('\u{2d11}', ['\u{10b1}', '\0', '\0']), + ('\u{2d12}', ['\u{10b2}', '\0', '\0']), + ('\u{2d13}', ['\u{10b3}', '\0', '\0']), + ('\u{2d14}', ['\u{10b4}', '\0', '\0']), + ('\u{2d15}', ['\u{10b5}', '\0', '\0']), + ('\u{2d16}', ['\u{10b6}', '\0', '\0']), + ('\u{2d17}', ['\u{10b7}', '\0', '\0']), + ('\u{2d18}', ['\u{10b8}', '\0', '\0']), + ('\u{2d19}', ['\u{10b9}', '\0', '\0']), + ('\u{2d1a}', ['\u{10ba}', '\0', '\0']), + ('\u{2d1b}', ['\u{10bb}', '\0', '\0']), + ('\u{2d1c}', ['\u{10bc}', '\0', '\0']), + ('\u{2d1d}', ['\u{10bd}', '\0', '\0']), + ('\u{2d1e}', ['\u{10be}', '\0', '\0']), + ('\u{2d1f}', ['\u{10bf}', '\0', '\0']), + ('\u{2d20}', ['\u{10c0}', '\0', '\0']), + ('\u{2d21}', ['\u{10c1}', '\0', '\0']), + ('\u{2d22}', ['\u{10c2}', '\0', '\0']), + ('\u{2d23}', ['\u{10c3}', '\0', '\0']), + ('\u{2d24}', ['\u{10c4}', '\0', '\0']), + ('\u{2d25}', ['\u{10c5}', '\0', '\0']), + ('\u{2d27}', ['\u{10c7}', '\0', '\0']), + ('\u{2d2d}', ['\u{10cd}', '\0', '\0']), + ('\u{a641}', ['\u{a640}', '\0', '\0']), + ('\u{a643}', ['\u{a642}', '\0', '\0']), + ('\u{a645}', ['\u{a644}', '\0', '\0']), + ('\u{a647}', ['\u{a646}', '\0', '\0']), + ('\u{a649}', ['\u{a648}', '\0', '\0']), + ('\u{a64b}', ['\u{a64a}', '\0', '\0']), + ('\u{a64d}', ['\u{a64c}', '\0', '\0']), + ('\u{a64f}', ['\u{a64e}', '\0', '\0']), + ('\u{a651}', ['\u{a650}', '\0', '\0']), + ('\u{a653}', ['\u{a652}', '\0', '\0']), + ('\u{a655}', ['\u{a654}', '\0', '\0']), + ('\u{a657}', ['\u{a656}', '\0', '\0']), + ('\u{a659}', ['\u{a658}', '\0', '\0']), + ('\u{a65b}', ['\u{a65a}', '\0', '\0']), + ('\u{a65d}', ['\u{a65c}', '\0', '\0']), + ('\u{a65f}', ['\u{a65e}', '\0', '\0']), + ('\u{a661}', ['\u{a660}', '\0', '\0']), + ('\u{a663}', ['\u{a662}', '\0', '\0']), + ('\u{a665}', ['\u{a664}', '\0', '\0']), + ('\u{a667}', ['\u{a666}', '\0', '\0']), + ('\u{a669}', ['\u{a668}', '\0', '\0']), + 
('\u{a66b}', ['\u{a66a}', '\0', '\0']), + ('\u{a66d}', ['\u{a66c}', '\0', '\0']), + ('\u{a681}', ['\u{a680}', '\0', '\0']), + ('\u{a683}', ['\u{a682}', '\0', '\0']), + ('\u{a685}', ['\u{a684}', '\0', '\0']), + ('\u{a687}', ['\u{a686}', '\0', '\0']), + ('\u{a689}', ['\u{a688}', '\0', '\0']), + ('\u{a68b}', ['\u{a68a}', '\0', '\0']), + ('\u{a68d}', ['\u{a68c}', '\0', '\0']), + ('\u{a68f}', ['\u{a68e}', '\0', '\0']), + ('\u{a691}', ['\u{a690}', '\0', '\0']), + ('\u{a693}', ['\u{a692}', '\0', '\0']), + ('\u{a695}', ['\u{a694}', '\0', '\0']), + ('\u{a697}', ['\u{a696}', '\0', '\0']), + ('\u{a699}', ['\u{a698}', '\0', '\0']), + ('\u{a69b}', ['\u{a69a}', '\0', '\0']), + ('\u{a723}', ['\u{a722}', '\0', '\0']), + ('\u{a725}', ['\u{a724}', '\0', '\0']), + ('\u{a727}', ['\u{a726}', '\0', '\0']), + ('\u{a729}', ['\u{a728}', '\0', '\0']), + ('\u{a72b}', ['\u{a72a}', '\0', '\0']), + ('\u{a72d}', ['\u{a72c}', '\0', '\0']), + ('\u{a72f}', ['\u{a72e}', '\0', '\0']), + ('\u{a733}', ['\u{a732}', '\0', '\0']), + ('\u{a735}', ['\u{a734}', '\0', '\0']), + ('\u{a737}', ['\u{a736}', '\0', '\0']), + ('\u{a739}', ['\u{a738}', '\0', '\0']), + ('\u{a73b}', ['\u{a73a}', '\0', '\0']), + ('\u{a73d}', ['\u{a73c}', '\0', '\0']), + ('\u{a73f}', ['\u{a73e}', '\0', '\0']), + ('\u{a741}', ['\u{a740}', '\0', '\0']), + ('\u{a743}', ['\u{a742}', '\0', '\0']), + ('\u{a745}', ['\u{a744}', '\0', '\0']), + ('\u{a747}', ['\u{a746}', '\0', '\0']), + ('\u{a749}', ['\u{a748}', '\0', '\0']), + ('\u{a74b}', ['\u{a74a}', '\0', '\0']), + ('\u{a74d}', ['\u{a74c}', '\0', '\0']), + ('\u{a74f}', ['\u{a74e}', '\0', '\0']), + ('\u{a751}', ['\u{a750}', '\0', '\0']), + ('\u{a753}', ['\u{a752}', '\0', '\0']), + ('\u{a755}', ['\u{a754}', '\0', '\0']), + ('\u{a757}', ['\u{a756}', '\0', '\0']), + ('\u{a759}', ['\u{a758}', '\0', '\0']), + ('\u{a75b}', ['\u{a75a}', '\0', '\0']), + ('\u{a75d}', ['\u{a75c}', '\0', '\0']), + ('\u{a75f}', ['\u{a75e}', '\0', '\0']), + ('\u{a761}', ['\u{a760}', '\0', '\0']), + ('\u{a763}', ['\u{a762}', '\0', '\0']), + ('\u{a765}', ['\u{a764}', '\0', '\0']), + ('\u{a767}', ['\u{a766}', '\0', '\0']), + ('\u{a769}', ['\u{a768}', '\0', '\0']), + ('\u{a76b}', ['\u{a76a}', '\0', '\0']), + ('\u{a76d}', ['\u{a76c}', '\0', '\0']), + ('\u{a76f}', ['\u{a76e}', '\0', '\0']), + ('\u{a77a}', ['\u{a779}', '\0', '\0']), + ('\u{a77c}', ['\u{a77b}', '\0', '\0']), + ('\u{a77f}', ['\u{a77e}', '\0', '\0']), + ('\u{a781}', ['\u{a780}', '\0', '\0']), + ('\u{a783}', ['\u{a782}', '\0', '\0']), + ('\u{a785}', ['\u{a784}', '\0', '\0']), + ('\u{a787}', ['\u{a786}', '\0', '\0']), + ('\u{a78c}', ['\u{a78b}', '\0', '\0']), + ('\u{a791}', ['\u{a790}', '\0', '\0']), + ('\u{a793}', ['\u{a792}', '\0', '\0']), + ('\u{a797}', ['\u{a796}', '\0', '\0']), + ('\u{a799}', ['\u{a798}', '\0', '\0']), + ('\u{a79b}', ['\u{a79a}', '\0', '\0']), + ('\u{a79d}', ['\u{a79c}', '\0', '\0']), + ('\u{a79f}', ['\u{a79e}', '\0', '\0']), + ('\u{a7a1}', ['\u{a7a0}', '\0', '\0']), + ('\u{a7a3}', ['\u{a7a2}', '\0', '\0']), + ('\u{a7a5}', ['\u{a7a4}', '\0', '\0']), + ('\u{a7a7}', ['\u{a7a6}', '\0', '\0']), + ('\u{a7a9}', ['\u{a7a8}', '\0', '\0']), + ('\u{a7b5}', ['\u{a7b4}', '\0', '\0']), + ('\u{a7b7}', ['\u{a7b6}', '\0', '\0']), + ('\u{a7b9}', ['\u{a7b8}', '\0', '\0']), + ('\u{ab53}', ['\u{a7b3}', '\0', '\0']), + ('\u{ab70}', ['\u{13a0}', '\0', '\0']), + ('\u{ab71}', ['\u{13a1}', '\0', '\0']), + ('\u{ab72}', ['\u{13a2}', '\0', '\0']), + ('\u{ab73}', ['\u{13a3}', '\0', '\0']), + ('\u{ab74}', ['\u{13a4}', '\0', '\0']), + ('\u{ab75}', ['\u{13a5}', '\0', '\0']), + ('\u{ab76}', ['\u{13a6}', 
'\0', '\0']), + ('\u{ab77}', ['\u{13a7}', '\0', '\0']), + ('\u{ab78}', ['\u{13a8}', '\0', '\0']), + ('\u{ab79}', ['\u{13a9}', '\0', '\0']), + ('\u{ab7a}', ['\u{13aa}', '\0', '\0']), + ('\u{ab7b}', ['\u{13ab}', '\0', '\0']), + ('\u{ab7c}', ['\u{13ac}', '\0', '\0']), + ('\u{ab7d}', ['\u{13ad}', '\0', '\0']), + ('\u{ab7e}', ['\u{13ae}', '\0', '\0']), + ('\u{ab7f}', ['\u{13af}', '\0', '\0']), + ('\u{ab80}', ['\u{13b0}', '\0', '\0']), + ('\u{ab81}', ['\u{13b1}', '\0', '\0']), + ('\u{ab82}', ['\u{13b2}', '\0', '\0']), + ('\u{ab83}', ['\u{13b3}', '\0', '\0']), + ('\u{ab84}', ['\u{13b4}', '\0', '\0']), + ('\u{ab85}', ['\u{13b5}', '\0', '\0']), + ('\u{ab86}', ['\u{13b6}', '\0', '\0']), + ('\u{ab87}', ['\u{13b7}', '\0', '\0']), + ('\u{ab88}', ['\u{13b8}', '\0', '\0']), + ('\u{ab89}', ['\u{13b9}', '\0', '\0']), + ('\u{ab8a}', ['\u{13ba}', '\0', '\0']), + ('\u{ab8b}', ['\u{13bb}', '\0', '\0']), + ('\u{ab8c}', ['\u{13bc}', '\0', '\0']), + ('\u{ab8d}', ['\u{13bd}', '\0', '\0']), + ('\u{ab8e}', ['\u{13be}', '\0', '\0']), + ('\u{ab8f}', ['\u{13bf}', '\0', '\0']), + ('\u{ab90}', ['\u{13c0}', '\0', '\0']), + ('\u{ab91}', ['\u{13c1}', '\0', '\0']), + ('\u{ab92}', ['\u{13c2}', '\0', '\0']), + ('\u{ab93}', ['\u{13c3}', '\0', '\0']), + ('\u{ab94}', ['\u{13c4}', '\0', '\0']), + ('\u{ab95}', ['\u{13c5}', '\0', '\0']), + ('\u{ab96}', ['\u{13c6}', '\0', '\0']), + ('\u{ab97}', ['\u{13c7}', '\0', '\0']), + ('\u{ab98}', ['\u{13c8}', '\0', '\0']), + ('\u{ab99}', ['\u{13c9}', '\0', '\0']), + ('\u{ab9a}', ['\u{13ca}', '\0', '\0']), + ('\u{ab9b}', ['\u{13cb}', '\0', '\0']), + ('\u{ab9c}', ['\u{13cc}', '\0', '\0']), + ('\u{ab9d}', ['\u{13cd}', '\0', '\0']), + ('\u{ab9e}', ['\u{13ce}', '\0', '\0']), + ('\u{ab9f}', ['\u{13cf}', '\0', '\0']), + ('\u{aba0}', ['\u{13d0}', '\0', '\0']), + ('\u{aba1}', ['\u{13d1}', '\0', '\0']), + ('\u{aba2}', ['\u{13d2}', '\0', '\0']), + ('\u{aba3}', ['\u{13d3}', '\0', '\0']), + ('\u{aba4}', ['\u{13d4}', '\0', '\0']), + ('\u{aba5}', ['\u{13d5}', '\0', '\0']), + ('\u{aba6}', ['\u{13d6}', '\0', '\0']), + ('\u{aba7}', ['\u{13d7}', '\0', '\0']), + ('\u{aba8}', ['\u{13d8}', '\0', '\0']), + ('\u{aba9}', ['\u{13d9}', '\0', '\0']), + ('\u{abaa}', ['\u{13da}', '\0', '\0']), + ('\u{abab}', ['\u{13db}', '\0', '\0']), + ('\u{abac}', ['\u{13dc}', '\0', '\0']), + ('\u{abad}', ['\u{13dd}', '\0', '\0']), + ('\u{abae}', ['\u{13de}', '\0', '\0']), + ('\u{abaf}', ['\u{13df}', '\0', '\0']), + ('\u{abb0}', ['\u{13e0}', '\0', '\0']), + ('\u{abb1}', ['\u{13e1}', '\0', '\0']), + ('\u{abb2}', ['\u{13e2}', '\0', '\0']), + ('\u{abb3}', ['\u{13e3}', '\0', '\0']), + ('\u{abb4}', ['\u{13e4}', '\0', '\0']), + ('\u{abb5}', ['\u{13e5}', '\0', '\0']), + ('\u{abb6}', ['\u{13e6}', '\0', '\0']), + ('\u{abb7}', ['\u{13e7}', '\0', '\0']), + ('\u{abb8}', ['\u{13e8}', '\0', '\0']), + ('\u{abb9}', ['\u{13e9}', '\0', '\0']), + ('\u{abba}', ['\u{13ea}', '\0', '\0']), + ('\u{abbb}', ['\u{13eb}', '\0', '\0']), + ('\u{abbc}', ['\u{13ec}', '\0', '\0']), + ('\u{abbd}', ['\u{13ed}', '\0', '\0']), + ('\u{abbe}', ['\u{13ee}', '\0', '\0']), + ('\u{abbf}', ['\u{13ef}', '\0', '\0']), + ('\u{fb00}', ['\u{46}', '\u{46}', '\0']), + ('\u{fb01}', ['\u{46}', '\u{49}', '\0']), + ('\u{fb02}', ['\u{46}', '\u{4c}', '\0']), + ('\u{fb03}', ['\u{46}', '\u{46}', '\u{49}']), + ('\u{fb04}', ['\u{46}', '\u{46}', '\u{4c}']), + ('\u{fb05}', ['\u{53}', '\u{54}', '\0']), + ('\u{fb06}', ['\u{53}', '\u{54}', '\0']), + ('\u{fb13}', ['\u{544}', '\u{546}', '\0']), + ('\u{fb14}', ['\u{544}', '\u{535}', '\0']), + ('\u{fb15}', ['\u{544}', '\u{53b}', '\0']), + ('\u{fb16}', 
['\u{54e}', '\u{546}', '\0']), + ('\u{fb17}', ['\u{544}', '\u{53d}', '\0']), + ('\u{ff41}', ['\u{ff21}', '\0', '\0']), + ('\u{ff42}', ['\u{ff22}', '\0', '\0']), + ('\u{ff43}', ['\u{ff23}', '\0', '\0']), + ('\u{ff44}', ['\u{ff24}', '\0', '\0']), + ('\u{ff45}', ['\u{ff25}', '\0', '\0']), + ('\u{ff46}', ['\u{ff26}', '\0', '\0']), + ('\u{ff47}', ['\u{ff27}', '\0', '\0']), + ('\u{ff48}', ['\u{ff28}', '\0', '\0']), + ('\u{ff49}', ['\u{ff29}', '\0', '\0']), + ('\u{ff4a}', ['\u{ff2a}', '\0', '\0']), + ('\u{ff4b}', ['\u{ff2b}', '\0', '\0']), + ('\u{ff4c}', ['\u{ff2c}', '\0', '\0']), + ('\u{ff4d}', ['\u{ff2d}', '\0', '\0']), + ('\u{ff4e}', ['\u{ff2e}', '\0', '\0']), + ('\u{ff4f}', ['\u{ff2f}', '\0', '\0']), + ('\u{ff50}', ['\u{ff30}', '\0', '\0']), + ('\u{ff51}', ['\u{ff31}', '\0', '\0']), + ('\u{ff52}', ['\u{ff32}', '\0', '\0']), + ('\u{ff53}', ['\u{ff33}', '\0', '\0']), + ('\u{ff54}', ['\u{ff34}', '\0', '\0']), + ('\u{ff55}', ['\u{ff35}', '\0', '\0']), + ('\u{ff56}', ['\u{ff36}', '\0', '\0']), + ('\u{ff57}', ['\u{ff37}', '\0', '\0']), + ('\u{ff58}', ['\u{ff38}', '\0', '\0']), + ('\u{ff59}', ['\u{ff39}', '\0', '\0']), + ('\u{ff5a}', ['\u{ff3a}', '\0', '\0']), + ('\u{10428}', ['\u{10400}', '\0', '\0']), + ('\u{10429}', ['\u{10401}', '\0', '\0']), + ('\u{1042a}', ['\u{10402}', '\0', '\0']), + ('\u{1042b}', ['\u{10403}', '\0', '\0']), + ('\u{1042c}', ['\u{10404}', '\0', '\0']), + ('\u{1042d}', ['\u{10405}', '\0', '\0']), + ('\u{1042e}', ['\u{10406}', '\0', '\0']), + ('\u{1042f}', ['\u{10407}', '\0', '\0']), + ('\u{10430}', ['\u{10408}', '\0', '\0']), + ('\u{10431}', ['\u{10409}', '\0', '\0']), + ('\u{10432}', ['\u{1040a}', '\0', '\0']), + ('\u{10433}', ['\u{1040b}', '\0', '\0']), + ('\u{10434}', ['\u{1040c}', '\0', '\0']), + ('\u{10435}', ['\u{1040d}', '\0', '\0']), + ('\u{10436}', ['\u{1040e}', '\0', '\0']), + ('\u{10437}', ['\u{1040f}', '\0', '\0']), + ('\u{10438}', ['\u{10410}', '\0', '\0']), + ('\u{10439}', ['\u{10411}', '\0', '\0']), + ('\u{1043a}', ['\u{10412}', '\0', '\0']), + ('\u{1043b}', ['\u{10413}', '\0', '\0']), + ('\u{1043c}', ['\u{10414}', '\0', '\0']), + ('\u{1043d}', ['\u{10415}', '\0', '\0']), + ('\u{1043e}', ['\u{10416}', '\0', '\0']), + ('\u{1043f}', ['\u{10417}', '\0', '\0']), + ('\u{10440}', ['\u{10418}', '\0', '\0']), + ('\u{10441}', ['\u{10419}', '\0', '\0']), + ('\u{10442}', ['\u{1041a}', '\0', '\0']), + ('\u{10443}', ['\u{1041b}', '\0', '\0']), + ('\u{10444}', ['\u{1041c}', '\0', '\0']), + ('\u{10445}', ['\u{1041d}', '\0', '\0']), + ('\u{10446}', ['\u{1041e}', '\0', '\0']), + ('\u{10447}', ['\u{1041f}', '\0', '\0']), + ('\u{10448}', ['\u{10420}', '\0', '\0']), + ('\u{10449}', ['\u{10421}', '\0', '\0']), + ('\u{1044a}', ['\u{10422}', '\0', '\0']), + ('\u{1044b}', ['\u{10423}', '\0', '\0']), + ('\u{1044c}', ['\u{10424}', '\0', '\0']), + ('\u{1044d}', ['\u{10425}', '\0', '\0']), + ('\u{1044e}', ['\u{10426}', '\0', '\0']), + ('\u{1044f}', ['\u{10427}', '\0', '\0']), + ('\u{104d8}', ['\u{104b0}', '\0', '\0']), + ('\u{104d9}', ['\u{104b1}', '\0', '\0']), + ('\u{104da}', ['\u{104b2}', '\0', '\0']), + ('\u{104db}', ['\u{104b3}', '\0', '\0']), + ('\u{104dc}', ['\u{104b4}', '\0', '\0']), + ('\u{104dd}', ['\u{104b5}', '\0', '\0']), + ('\u{104de}', ['\u{104b6}', '\0', '\0']), + ('\u{104df}', ['\u{104b7}', '\0', '\0']), + ('\u{104e0}', ['\u{104b8}', '\0', '\0']), + ('\u{104e1}', ['\u{104b9}', '\0', '\0']), + ('\u{104e2}', ['\u{104ba}', '\0', '\0']), + ('\u{104e3}', ['\u{104bb}', '\0', '\0']), + ('\u{104e4}', ['\u{104bc}', '\0', '\0']), + ('\u{104e5}', ['\u{104bd}', '\0', '\0']), + 
('\u{104e6}', ['\u{104be}', '\0', '\0']), + ('\u{104e7}', ['\u{104bf}', '\0', '\0']), + ('\u{104e8}', ['\u{104c0}', '\0', '\0']), + ('\u{104e9}', ['\u{104c1}', '\0', '\0']), + ('\u{104ea}', ['\u{104c2}', '\0', '\0']), + ('\u{104eb}', ['\u{104c3}', '\0', '\0']), + ('\u{104ec}', ['\u{104c4}', '\0', '\0']), + ('\u{104ed}', ['\u{104c5}', '\0', '\0']), + ('\u{104ee}', ['\u{104c6}', '\0', '\0']), + ('\u{104ef}', ['\u{104c7}', '\0', '\0']), + ('\u{104f0}', ['\u{104c8}', '\0', '\0']), + ('\u{104f1}', ['\u{104c9}', '\0', '\0']), + ('\u{104f2}', ['\u{104ca}', '\0', '\0']), + ('\u{104f3}', ['\u{104cb}', '\0', '\0']), + ('\u{104f4}', ['\u{104cc}', '\0', '\0']), + ('\u{104f5}', ['\u{104cd}', '\0', '\0']), + ('\u{104f6}', ['\u{104ce}', '\0', '\0']), + ('\u{104f7}', ['\u{104cf}', '\0', '\0']), + ('\u{104f8}', ['\u{104d0}', '\0', '\0']), + ('\u{104f9}', ['\u{104d1}', '\0', '\0']), + ('\u{104fa}', ['\u{104d2}', '\0', '\0']), + ('\u{104fb}', ['\u{104d3}', '\0', '\0']), + ('\u{10cc0}', ['\u{10c80}', '\0', '\0']), + ('\u{10cc1}', ['\u{10c81}', '\0', '\0']), + ('\u{10cc2}', ['\u{10c82}', '\0', '\0']), + ('\u{10cc3}', ['\u{10c83}', '\0', '\0']), + ('\u{10cc4}', ['\u{10c84}', '\0', '\0']), + ('\u{10cc5}', ['\u{10c85}', '\0', '\0']), + ('\u{10cc6}', ['\u{10c86}', '\0', '\0']), + ('\u{10cc7}', ['\u{10c87}', '\0', '\0']), + ('\u{10cc8}', ['\u{10c88}', '\0', '\0']), + ('\u{10cc9}', ['\u{10c89}', '\0', '\0']), + ('\u{10cca}', ['\u{10c8a}', '\0', '\0']), + ('\u{10ccb}', ['\u{10c8b}', '\0', '\0']), + ('\u{10ccc}', ['\u{10c8c}', '\0', '\0']), + ('\u{10ccd}', ['\u{10c8d}', '\0', '\0']), + ('\u{10cce}', ['\u{10c8e}', '\0', '\0']), + ('\u{10ccf}', ['\u{10c8f}', '\0', '\0']), + ('\u{10cd0}', ['\u{10c90}', '\0', '\0']), + ('\u{10cd1}', ['\u{10c91}', '\0', '\0']), + ('\u{10cd2}', ['\u{10c92}', '\0', '\0']), + ('\u{10cd3}', ['\u{10c93}', '\0', '\0']), + ('\u{10cd4}', ['\u{10c94}', '\0', '\0']), + ('\u{10cd5}', ['\u{10c95}', '\0', '\0']), + ('\u{10cd6}', ['\u{10c96}', '\0', '\0']), + ('\u{10cd7}', ['\u{10c97}', '\0', '\0']), + ('\u{10cd8}', ['\u{10c98}', '\0', '\0']), + ('\u{10cd9}', ['\u{10c99}', '\0', '\0']), + ('\u{10cda}', ['\u{10c9a}', '\0', '\0']), + ('\u{10cdb}', ['\u{10c9b}', '\0', '\0']), + ('\u{10cdc}', ['\u{10c9c}', '\0', '\0']), + ('\u{10cdd}', ['\u{10c9d}', '\0', '\0']), + ('\u{10cde}', ['\u{10c9e}', '\0', '\0']), + ('\u{10cdf}', ['\u{10c9f}', '\0', '\0']), + ('\u{10ce0}', ['\u{10ca0}', '\0', '\0']), + ('\u{10ce1}', ['\u{10ca1}', '\0', '\0']), + ('\u{10ce2}', ['\u{10ca2}', '\0', '\0']), + ('\u{10ce3}', ['\u{10ca3}', '\0', '\0']), + ('\u{10ce4}', ['\u{10ca4}', '\0', '\0']), + ('\u{10ce5}', ['\u{10ca5}', '\0', '\0']), + ('\u{10ce6}', ['\u{10ca6}', '\0', '\0']), + ('\u{10ce7}', ['\u{10ca7}', '\0', '\0']), + ('\u{10ce8}', ['\u{10ca8}', '\0', '\0']), + ('\u{10ce9}', ['\u{10ca9}', '\0', '\0']), + ('\u{10cea}', ['\u{10caa}', '\0', '\0']), + ('\u{10ceb}', ['\u{10cab}', '\0', '\0']), + ('\u{10cec}', ['\u{10cac}', '\0', '\0']), + ('\u{10ced}', ['\u{10cad}', '\0', '\0']), + ('\u{10cee}', ['\u{10cae}', '\0', '\0']), + ('\u{10cef}', ['\u{10caf}', '\0', '\0']), + ('\u{10cf0}', ['\u{10cb0}', '\0', '\0']), + ('\u{10cf1}', ['\u{10cb1}', '\0', '\0']), + ('\u{10cf2}', ['\u{10cb2}', '\0', '\0']), + ('\u{118c0}', ['\u{118a0}', '\0', '\0']), + ('\u{118c1}', ['\u{118a1}', '\0', '\0']), + ('\u{118c2}', ['\u{118a2}', '\0', '\0']), + ('\u{118c3}', ['\u{118a3}', '\0', '\0']), + ('\u{118c4}', ['\u{118a4}', '\0', '\0']), + ('\u{118c5}', ['\u{118a5}', '\0', '\0']), + ('\u{118c6}', ['\u{118a6}', '\0', '\0']), + ('\u{118c7}', ['\u{118a7}', '\0', 
'\0']), + ('\u{118c8}', ['\u{118a8}', '\0', '\0']), + ('\u{118c9}', ['\u{118a9}', '\0', '\0']), + ('\u{118ca}', ['\u{118aa}', '\0', '\0']), + ('\u{118cb}', ['\u{118ab}', '\0', '\0']), + ('\u{118cc}', ['\u{118ac}', '\0', '\0']), + ('\u{118cd}', ['\u{118ad}', '\0', '\0']), + ('\u{118ce}', ['\u{118ae}', '\0', '\0']), + ('\u{118cf}', ['\u{118af}', '\0', '\0']), + ('\u{118d0}', ['\u{118b0}', '\0', '\0']), + ('\u{118d1}', ['\u{118b1}', '\0', '\0']), + ('\u{118d2}', ['\u{118b2}', '\0', '\0']), + ('\u{118d3}', ['\u{118b3}', '\0', '\0']), + ('\u{118d4}', ['\u{118b4}', '\0', '\0']), + ('\u{118d5}', ['\u{118b5}', '\0', '\0']), + ('\u{118d6}', ['\u{118b6}', '\0', '\0']), + ('\u{118d7}', ['\u{118b7}', '\0', '\0']), + ('\u{118d8}', ['\u{118b8}', '\0', '\0']), + ('\u{118d9}', ['\u{118b9}', '\0', '\0']), + ('\u{118da}', ['\u{118ba}', '\0', '\0']), + ('\u{118db}', ['\u{118bb}', '\0', '\0']), + ('\u{118dc}', ['\u{118bc}', '\0', '\0']), + ('\u{118dd}', ['\u{118bd}', '\0', '\0']), + ('\u{118de}', ['\u{118be}', '\0', '\0']), + ('\u{118df}', ['\u{118bf}', '\0', '\0']), + ('\u{16e60}', ['\u{16e40}', '\0', '\0']), + ('\u{16e61}', ['\u{16e41}', '\0', '\0']), + ('\u{16e62}', ['\u{16e42}', '\0', '\0']), + ('\u{16e63}', ['\u{16e43}', '\0', '\0']), + ('\u{16e64}', ['\u{16e44}', '\0', '\0']), + ('\u{16e65}', ['\u{16e45}', '\0', '\0']), + ('\u{16e66}', ['\u{16e46}', '\0', '\0']), + ('\u{16e67}', ['\u{16e47}', '\0', '\0']), + ('\u{16e68}', ['\u{16e48}', '\0', '\0']), + ('\u{16e69}', ['\u{16e49}', '\0', '\0']), + ('\u{16e6a}', ['\u{16e4a}', '\0', '\0']), + ('\u{16e6b}', ['\u{16e4b}', '\0', '\0']), + ('\u{16e6c}', ['\u{16e4c}', '\0', '\0']), + ('\u{16e6d}', ['\u{16e4d}', '\0', '\0']), + ('\u{16e6e}', ['\u{16e4e}', '\0', '\0']), + ('\u{16e6f}', ['\u{16e4f}', '\0', '\0']), + ('\u{16e70}', ['\u{16e50}', '\0', '\0']), + ('\u{16e71}', ['\u{16e51}', '\0', '\0']), + ('\u{16e72}', ['\u{16e52}', '\0', '\0']), + ('\u{16e73}', ['\u{16e53}', '\0', '\0']), + ('\u{16e74}', ['\u{16e54}', '\0', '\0']), + ('\u{16e75}', ['\u{16e55}', '\0', '\0']), + ('\u{16e76}', ['\u{16e56}', '\0', '\0']), + ('\u{16e77}', ['\u{16e57}', '\0', '\0']), + ('\u{16e78}', ['\u{16e58}', '\0', '\0']), + ('\u{16e79}', ['\u{16e59}', '\0', '\0']), + ('\u{16e7a}', ['\u{16e5a}', '\0', '\0']), + ('\u{16e7b}', ['\u{16e5b}', '\0', '\0']), + ('\u{16e7c}', ['\u{16e5c}', '\0', '\0']), + ('\u{16e7d}', ['\u{16e5d}', '\0', '\0']), + ('\u{16e7e}', ['\u{16e5e}', '\0', '\0']), + ('\u{16e7f}', ['\u{16e5f}', '\0', '\0']), + ('\u{1e922}', ['\u{1e900}', '\0', '\0']), + ('\u{1e923}', ['\u{1e901}', '\0', '\0']), + ('\u{1e924}', ['\u{1e902}', '\0', '\0']), + ('\u{1e925}', ['\u{1e903}', '\0', '\0']), + ('\u{1e926}', ['\u{1e904}', '\0', '\0']), + ('\u{1e927}', ['\u{1e905}', '\0', '\0']), + ('\u{1e928}', ['\u{1e906}', '\0', '\0']), + ('\u{1e929}', ['\u{1e907}', '\0', '\0']), + ('\u{1e92a}', ['\u{1e908}', '\0', '\0']), + ('\u{1e92b}', ['\u{1e909}', '\0', '\0']), + ('\u{1e92c}', ['\u{1e90a}', '\0', '\0']), + ('\u{1e92d}', ['\u{1e90b}', '\0', '\0']), + ('\u{1e92e}', ['\u{1e90c}', '\0', '\0']), + ('\u{1e92f}', ['\u{1e90d}', '\0', '\0']), + ('\u{1e930}', ['\u{1e90e}', '\0', '\0']), + ('\u{1e931}', ['\u{1e90f}', '\0', '\0']), + ('\u{1e932}', ['\u{1e910}', '\0', '\0']), + ('\u{1e933}', ['\u{1e911}', '\0', '\0']), + ('\u{1e934}', ['\u{1e912}', '\0', '\0']), + ('\u{1e935}', ['\u{1e913}', '\0', '\0']), + ('\u{1e936}', ['\u{1e914}', '\0', '\0']), + ('\u{1e937}', ['\u{1e915}', '\0', '\0']), + ('\u{1e938}', ['\u{1e916}', '\0', '\0']), + ('\u{1e939}', ['\u{1e917}', '\0', '\0']), + ('\u{1e93a}', 
['\u{1e918}', '\0', '\0']), + ('\u{1e93b}', ['\u{1e919}', '\0', '\0']), + ('\u{1e93c}', ['\u{1e91a}', '\0', '\0']), + ('\u{1e93d}', ['\u{1e91b}', '\0', '\0']), + ('\u{1e93e}', ['\u{1e91c}', '\0', '\0']), + ('\u{1e93f}', ['\u{1e91d}', '\0', '\0']), + ('\u{1e940}', ['\u{1e91e}', '\0', '\0']), + ('\u{1e941}', ['\u{1e91f}', '\0', '\0']), + ('\u{1e942}', ['\u{1e920}', '\0', '\0']), + ('\u{1e943}', ['\u{1e921}', '\0', '\0']), ]; } diff --git a/src/libcore/unit.rs b/src/libcore/unit.rs index 540025d77bb4c..d9f16c620c4c5 100644 --- a/src/libcore/unit.rs +++ b/src/libcore/unit.rs @@ -15,7 +15,7 @@ use iter::FromIterator; /// ``` #[stable(feature = "unit_from_iter", since = "1.23.0")] impl FromIterator<()> for () { - fn from_iter>(iter: I) -> Self { + fn from_iter>(iter: I) -> Self { iter.into_iter().for_each(|()| {}) } } diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs index 32ae878909f30..dd41b43e16a47 100644 --- a/src/libfmt_macros/lib.rs +++ b/src/libfmt_macros/lib.rs @@ -4,23 +4,24 @@ //! Parsing does not happen at runtime: structures of `std::fmt::rt` are //! generated instead. -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", - html_playground_url = "https://play.rust-lang.org/", - test(attr(deny(warnings))))] - +#![doc( + html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/", + html_playground_url = "https://play.rust-lang.org/", + test(attr(deny(warnings))) +)] #![feature(nll)] -pub use self::Piece::*; -pub use self::Position::*; pub use self::Alignment::*; -pub use self::Flag::*; pub use self::Count::*; +pub use self::Flag::*; +pub use self::Piece::*; +pub use self::Position::*; +use std::iter; use std::str; use std::string; -use std::iter; /// A piece is a portion of the format string which represents the next part /// to emit. These are emitted as a stream by the `Parser` class. @@ -178,9 +179,10 @@ impl<'a> Iterator for Parser<'a> { Some(String(self.string(pos + 1))) } else { let arg = self.argument(); - if let Some(arg_pos) = self.must_consume('}').map(|end| { - (self.to_span_index(pos), self.to_span_index(end + 1)) - }) { + if let Some(arg_pos) = self + .must_consume('}') + .map(|end| (self.to_span_index(pos), self.to_span_index(end + 1))) + { self.arg_places.push(arg_pos); } Some(NextArgument(arg)) @@ -202,9 +204,7 @@ impl<'a> Iterator for Parser<'a> { None } } - '\n' => { - Some(String(self.string(pos))) - } + '\n' => Some(String(self.string(pos))), _ => Some(String(self.string(pos))), } } else { @@ -257,7 +257,11 @@ impl<'a> Parser<'a> { /// Notifies of an error. The message doesn't actually need to be of type /// String, but I think it does when this eventually uses conditions so it /// might as well start using it now. 
- fn err_with_note, S2: Into, S3: Into>( + fn err_with_note< + S1: Into, + S2: Into, + S3: Into, + >( &mut self, description: S1, label: S2, @@ -323,10 +327,13 @@ impl<'a> Parser<'a> { let description = format!("expected `'}}'`, found `{:?}`", maybe); let label = "expected `}`".to_owned(); let (note, secondary_label) = if c == '}' { - (Some("if you intended to print `{`, you can escape it using `{{`".to_owned()), - self.last_opening_brace_pos.map(|pos| { - ("because of this opening brace".to_owned(), pos, pos) - })) + ( + Some( + "if you intended to print `{`, you can escape it using `{{`".to_owned(), + ), + self.last_opening_brace_pos + .map(|pos| ("because of this opening brace".to_owned(), pos, pos)), + ) } else { (None, None) }; @@ -348,10 +355,13 @@ impl<'a> Parser<'a> { if c == '}' { let label = format!("expected `{:?}`", c); let (note, secondary_label) = if c == '}' { - (Some("if you intended to print `{`, you can escape it using `{{`".to_owned()), - self.last_opening_brace_pos.map(|pos| { - ("because of this opening brace".to_owned(), pos, pos) - })) + ( + Some( + "if you intended to print `{`, you can escape it using `{{`".to_owned(), + ), + self.last_opening_brace_pos + .map(|pos| ("because of this opening brace".to_owned(), pos, pos)), + ) } else { (None, None) }; @@ -431,13 +441,15 @@ impl<'a> Parser<'a> { Some(&(_, c)) if c.is_alphabetic() => Some(ArgumentNamed(self.word())), Some(&(pos, c)) if c == '_' => { let invalid_name = self.string(pos); - self.err_with_note(format!("invalid argument name `{}`", invalid_name), - "invalid argument name", - "argument names cannot start with an underscore", - self.to_span_index(pos), - self.to_span_index(pos + invalid_name.len())); + self.err_with_note( + format!("invalid argument name `{}`", invalid_name), + "invalid argument name", + "argument names cannot start with an underscore", + self.to_span_index(pos), + self.to_span_index(pos + invalid_name.len()), + ); Some(ArgumentNamed(invalid_name)) - }, + } // This is an `ArgumentNext`. 
// Record the fact and do the resolution after parsing the @@ -670,187 +682,219 @@ mod tests { #[test] fn format_nothing() { - same("{}", - &[NextArgument(Argument { - position: ArgumentImplicitlyIs(0), - format: fmtdflt(), - })]); + same( + "{}", + &[NextArgument(Argument { + position: ArgumentImplicitlyIs(0), + format: fmtdflt(), + })], + ); } #[test] fn format_position() { - same("{3}", - &[NextArgument(Argument { - position: ArgumentIs(3), - format: fmtdflt(), - })]); + same( + "{3}", + &[NextArgument(Argument { + position: ArgumentIs(3), + format: fmtdflt(), + })], + ); } #[test] fn format_position_nothing_else() { - same("{3:}", - &[NextArgument(Argument { - position: ArgumentIs(3), - format: fmtdflt(), - })]); + same( + "{3:}", + &[NextArgument(Argument { + position: ArgumentIs(3), + format: fmtdflt(), + })], + ); } #[test] fn format_type() { - same("{3:a}", - &[NextArgument(Argument { - position: ArgumentIs(3), - format: FormatSpec { - fill: None, - align: AlignUnknown, - flags: 0, - precision: CountImplied, - width: CountImplied, - ty: "a", - }, - })]); + same( + "{3:a}", + &[NextArgument(Argument { + position: ArgumentIs(3), + format: FormatSpec { + fill: None, + align: AlignUnknown, + flags: 0, + precision: CountImplied, + width: CountImplied, + ty: "a", + }, + })], + ); } #[test] fn format_align_fill() { - same("{3:>}", - &[NextArgument(Argument { - position: ArgumentIs(3), - format: FormatSpec { - fill: None, - align: AlignRight, - flags: 0, - precision: CountImplied, - width: CountImplied, - ty: "", - }, - })]); - same("{3:0<}", - &[NextArgument(Argument { - position: ArgumentIs(3), - format: FormatSpec { - fill: Some('0'), - align: AlignLeft, - flags: 0, - precision: CountImplied, - width: CountImplied, - ty: "", - }, - })]); - same("{3:*}", + &[NextArgument(Argument { + position: ArgumentIs(3), + format: FormatSpec { + fill: None, + align: AlignRight, + flags: 0, + precision: CountImplied, + width: CountImplied, + ty: "", + }, + })], + ); + same( + "{3:0<}", + &[NextArgument(Argument { + position: ArgumentIs(3), + format: FormatSpec { + fill: Some('0'), + align: AlignLeft, + flags: 0, + precision: CountImplied, + width: CountImplied, + ty: "", + }, + })], + ); + same( + "{3:* { @@ -405,7 +406,7 @@ impl<'a> Id<'a> { Some(c) if c.is_ascii_alphabetic() || c == '_' => {} _ => return Err(()), } - if !name.chars().all(|c| c.is_ascii_alphanumeric() || c == '_' ) { + if !name.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') { return Err(()); } @@ -479,9 +480,9 @@ pub trait Labeller<'a> { /// Graphviz HTML label. pub fn escape_html(s: &str) -> String { s.replace("&", "&") - .replace("\"", """) - .replace("<", "<") - .replace(">", ">") + .replace("\"", """) + .replace("<", "<") + .replace(">", ">") } impl<'a> LabelText<'a> { @@ -498,7 +499,8 @@ impl<'a> LabelText<'a> { } fn escape_char(c: char, mut f: F) - where F: FnMut(char) + where + F: FnMut(char), { match c { // not escaping \\, since Graphviz escString needs to @@ -562,8 +564,8 @@ impl<'a> LabelText<'a> { } } -pub type Nodes<'a,N> = Cow<'a,[N]>; -pub type Edges<'a,E> = Cow<'a,[E]>; +pub type Nodes<'a, N> = Cow<'a, [N]>; +pub type Edges<'a, E> = Cow<'a, [E]>; // (The type parameters in GraphWalk should be associated items, // when/if Rust supports such.) @@ -610,25 +612,24 @@ pub fn default_options() -> Vec { /// Renders directed graph `g` into the writer `w` in DOT syntax. /// (Simple wrapper around `render_opts` that passes a default set of options.) 
-pub fn render<'a,N,E,G,W>(g: &'a G, w: &mut W) -> io::Result<()> - where N: Clone + 'a, - E: Clone + 'a, - G: Labeller<'a, Node=N, Edge=E> + GraphWalk<'a, Node=N, Edge=E>, - W: Write +pub fn render<'a, N, E, G, W>(g: &'a G, w: &mut W) -> io::Result<()> +where + N: Clone + 'a, + E: Clone + 'a, + G: Labeller<'a, Node = N, Edge = E> + GraphWalk<'a, Node = N, Edge = E>, + W: Write, { render_opts(g, w, &[]) } /// Renders directed graph `g` into the writer `w` in DOT syntax. /// (Main entry point for the library.) -pub fn render_opts<'a, N, E, G, W>(g: &'a G, - w: &mut W, - options: &[RenderOption]) - -> io::Result<()> - where N: Clone + 'a, - E: Clone + 'a, - G: Labeller<'a, Node=N, Edge=E> + GraphWalk<'a, Node=N, Edge=E>, - W: Write +pub fn render_opts<'a, N, E, G, W>(g: &'a G, w: &mut W, options: &[RenderOption]) -> io::Result<()> +where + N: Clone + 'a, + E: Clone + 'a, + G: Labeller<'a, Node = N, Edge = E> + GraphWalk<'a, Node = N, Edge = E>, + W: Write, { writeln!(w, "digraph {} {{", g.graph_id().as_slice())?; for n in g.nodes().iter() { @@ -687,8 +688,8 @@ pub fn render_opts<'a, N, E, G, W>(g: &'a G, #[cfg(test)] mod tests { use self::NodeLabels::*; - use super::{Id, Labeller, Nodes, Edges, GraphWalk, render, Style}; - use super::LabelText::{self, LabelStr, EscStr, HtmlStr}; + use super::LabelText::{self, EscStr, HtmlStr, LabelStr}; + use super::{render, Edges, GraphWalk, Id, Labeller, Nodes, Style}; use std::io; use std::io::prelude::*; @@ -763,11 +764,12 @@ mod tests { } impl LabelledGraph { - fn new(name: &'static str, - node_labels: Trivial, - edges: Vec, - node_styles: Option>) - -> LabelledGraph { + fn new( + name: &'static str, + node_labels: Trivial, + edges: Vec, + node_styles: Option>, + ) -> LabelledGraph { let count = node_labels.len(); LabelledGraph { name, @@ -782,11 +784,14 @@ mod tests { } impl LabelledGraphWithEscStrs { - fn new(name: &'static str, - node_labels: Trivial, - edges: Vec) - -> LabelledGraphWithEscStrs { - LabelledGraphWithEscStrs { graph: LabelledGraph::new(name, node_labels, edges, None) } + fn new( + name: &'static str, + node_labels: Trivial, + edges: Vec, + ) -> LabelledGraphWithEscStrs { + LabelledGraphWithEscStrs { + graph: LabelledGraph::new(name, node_labels, edges, None), + } } } @@ -891,21 +896,25 @@ mod tests { fn empty_graph() { let labels: Trivial = UnlabelledNodes(0); let r = test_input(LabelledGraph::new("empty_graph", labels, vec![], None)); - assert_eq!(r.unwrap(), -r#"digraph empty_graph { + assert_eq!( + r.unwrap(), + r#"digraph empty_graph { } -"#); +"# + ); } #[test] fn single_node() { let labels: Trivial = UnlabelledNodes(1); let r = test_input(LabelledGraph::new("single_node", labels, vec![], None)); - assert_eq!(r.unwrap(), -r#"digraph single_node { + assert_eq!( + r.unwrap(), + r#"digraph single_node { N0[label="N0"]; } -"#); +"# + ); } #[test] @@ -913,89 +922,112 @@ r#"digraph single_node { let labels: Trivial = UnlabelledNodes(1); let styles = Some(vec![Style::Dashed]); let r = test_input(LabelledGraph::new("single_node", labels, vec![], styles)); - assert_eq!(r.unwrap(), -r#"digraph single_node { + assert_eq!( + r.unwrap(), + r#"digraph single_node { N0[label="N0"][style="dashed"]; } -"#); +"# + ); } #[test] fn single_edge() { let labels: Trivial = UnlabelledNodes(2); - let result = test_input(LabelledGraph::new("single_edge", - labels, - vec![edge(0, 1, "E", Style::None)], - None)); - assert_eq!(result.unwrap(), -r#"digraph single_edge { + let result = test_input(LabelledGraph::new( + "single_edge", + labels, + vec![edge(0, 1, 
"E", Style::None)], + None, + )); + assert_eq!( + result.unwrap(), + r#"digraph single_edge { N0[label="N0"]; N1[label="N1"]; N0 -> N1[label="E"]; } -"#); +"# + ); } #[test] fn single_edge_with_style() { let labels: Trivial = UnlabelledNodes(2); - let result = test_input(LabelledGraph::new("single_edge", - labels, - vec![edge(0, 1, "E", Style::Bold)], - None)); - assert_eq!(result.unwrap(), -r#"digraph single_edge { + let result = test_input(LabelledGraph::new( + "single_edge", + labels, + vec![edge(0, 1, "E", Style::Bold)], + None, + )); + assert_eq!( + result.unwrap(), + r#"digraph single_edge { N0[label="N0"]; N1[label="N1"]; N0 -> N1[label="E"][style="bold"]; } -"#); +"# + ); } #[test] fn test_some_labelled() { let labels: Trivial = SomeNodesLabelled(vec![Some("A"), None]); let styles = Some(vec![Style::None, Style::Dotted]); - let result = test_input(LabelledGraph::new("test_some_labelled", - labels, - vec![edge(0, 1, "A-1", Style::None)], - styles)); - assert_eq!(result.unwrap(), -r#"digraph test_some_labelled { + let result = test_input(LabelledGraph::new( + "test_some_labelled", + labels, + vec![edge(0, 1, "A-1", Style::None)], + styles, + )); + assert_eq!( + result.unwrap(), + r#"digraph test_some_labelled { N0[label="A"]; N1[label="N1"][style="dotted"]; N0 -> N1[label="A-1"]; } -"#); +"# + ); } #[test] fn single_cyclic_node() { let labels: Trivial = UnlabelledNodes(1); - let r = test_input(LabelledGraph::new("single_cyclic_node", - labels, - vec![edge(0, 0, "E", Style::None)], - None)); - assert_eq!(r.unwrap(), -r#"digraph single_cyclic_node { + let r = test_input(LabelledGraph::new( + "single_cyclic_node", + labels, + vec![edge(0, 0, "E", Style::None)], + None, + )); + assert_eq!( + r.unwrap(), + r#"digraph single_cyclic_node { N0[label="N0"]; N0 -> N0[label="E"]; } -"#); +"# + ); } #[test] fn hasse_diagram() { let labels = AllNodesLabelled(vec!["{x,y}", "{x}", "{y}", "{}"]); - let r = test_input(LabelledGraph::new("hasse_diagram", - labels, - vec![edge(0, 1, "", Style::None), - edge(0, 2, "", Style::None), - edge(1, 3, "", Style::None), - edge(2, 3, "", Style::None)], - None)); - assert_eq!(r.unwrap(), -r#"digraph hasse_diagram { + let r = test_input(LabelledGraph::new( + "hasse_diagram", + labels, + vec![ + edge(0, 1, "", Style::None), + edge(0, 2, "", Style::None), + edge(1, 3, "", Style::None), + edge(2, 3, "", Style::None), + ], + None, + )); + assert_eq!( + r.unwrap(), + r#"digraph hasse_diagram { N0[label="{x,y}"]; N1[label="{x}"]; N2[label="{y}"]; @@ -1005,38 +1037,45 @@ r#"digraph hasse_diagram { N1 -> N3[label=""]; N2 -> N3[label=""]; } -"#); +"# + ); } #[test] fn left_aligned_text() { let labels = AllNodesLabelled(vec![ "if test {\ - \\l branch1\ - \\l} else {\ - \\l branch2\ - \\l}\ - \\lafterward\ - \\l", + \\l branch1\ + \\l} else {\ + \\l branch2\ + \\l}\ + \\lafterward\ + \\l", "branch1", "branch2", - "afterward"]); + "afterward", + ]); let mut writer = Vec::new(); - let g = LabelledGraphWithEscStrs::new("syntax_tree", - labels, - vec![edge(0, 1, "then", Style::None), - edge(0, 2, "else", Style::None), - edge(1, 3, ";", Style::None), - edge(2, 3, ";", Style::None)]); + let g = LabelledGraphWithEscStrs::new( + "syntax_tree", + labels, + vec![ + edge(0, 1, "then", Style::None), + edge(0, 2, "else", Style::None), + edge(1, 3, ";", Style::None), + edge(2, 3, ";", Style::None), + ], + ); render(&g, &mut writer).unwrap(); let mut r = String::new(); Read::read_to_string(&mut &*writer, &mut r).unwrap(); - assert_eq!(r, -r#"digraph syntax_tree { + assert_eq!( + r, + 
r#"digraph syntax_tree { N0[label="if test {\l branch1\l} else {\l branch2\l}\lafterward\l"]; N1[label="branch1"]; N2[label="branch2"]; @@ -1046,7 +1085,8 @@ r#"digraph syntax_tree { N1 -> N3[label=";"]; N2 -> N3[label=";"]; } -"#); +"# + ); } #[test] diff --git a/src/libpanic_abort/lib.rs b/src/libpanic_abort/lib.rs index d8dd2e0a11337..8a3467941e8a6 100644 --- a/src/libpanic_abort/lib.rs +++ b/src/libpanic_abort/lib.rs @@ -5,13 +5,14 @@ #![no_std] #![unstable(feature = "panic_abort", issue = "32837")] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", - issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/")] +#![doc( + html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/", + issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/" +)] #![panic_runtime] #![allow(unused_features)] - #![feature(cfg_target_vendor)] #![feature(core_intrinsics)] #![feature(libc)] @@ -23,10 +24,12 @@ // Rust's "try" function, but if we're aborting on panics we just call the // function as there's nothing else we need to do here. #[rustc_std_internal_symbol] -pub unsafe extern fn __rust_maybe_catch_panic(f: fn(*mut u8), - data: *mut u8, - _data_ptr: *mut usize, - _vtable_ptr: *mut usize) -> u32 { +pub unsafe extern "C" fn __rust_maybe_catch_panic( + f: fn(*mut u8), + data: *mut u8, + _data_ptr: *mut usize, + _vtable_ptr: *mut usize, +) -> u32 { f(data); 0 } @@ -42,7 +45,7 @@ pub unsafe extern fn __rust_maybe_catch_panic(f: fn(*mut u8), // will kill us with an illegal instruction, which will do a good enough job for // now hopefully. #[rustc_std_internal_symbol] -pub unsafe extern fn __rust_start_panic(_payload: usize) -> u32 { +pub unsafe extern "C" fn __rust_start_panic(_payload: usize) -> u32 { abort(); #[cfg(any(unix, target_os = "cloudabi"))] @@ -51,16 +54,20 @@ pub unsafe extern fn __rust_start_panic(_payload: usize) -> u32 { libc::abort(); } - #[cfg(any(target_os = "redox", - windows, - all(target_arch = "wasm32", not(target_os = "emscripten"))))] + #[cfg(any( + target_os = "redox", + windows, + all(target_arch = "wasm32", not(target_os = "emscripten")) + ))] unsafe fn abort() -> ! { core::intrinsics::abort(); } - #[cfg(all(target_vendor="fortanix", target_env="sgx"))] + #[cfg(all(target_vendor = "fortanix", target_env = "sgx"))] unsafe fn abort() -> ! { - extern "C" { pub fn panic_exit() -> !; } + extern "C" { + pub fn panic_exit() -> !; + } panic_exit(); } } @@ -94,28 +101,21 @@ pub unsafe extern fn __rust_start_panic(_payload: usize) -> u32 { pub mod personalities { #[no_mangle] #[cfg(not(any( - all( - target_arch = "wasm32", - not(target_os = "emscripten"), - ), - all( - target_os = "windows", - target_env = "gnu", - target_arch = "x86_64", - ), + all(target_arch = "wasm32", not(target_os = "emscripten"),), + all(target_os = "windows", target_env = "gnu", target_arch = "x86_64",), )))] - pub extern fn rust_eh_personality() {} + pub extern "C" fn rust_eh_personality() {} // On x86_64-pc-windows-gnu we use our own personality function that needs // to return `ExceptionContinueSearch` as we're passing on all our frames. 
#[no_mangle] - #[cfg(all(target_os = "windows", - target_env = "gnu", - target_arch = "x86_64"))] - pub extern fn rust_eh_personality(_record: usize, - _frame: usize, - _context: usize, - _dispatcher: usize) -> u32 { + #[cfg(all(target_os = "windows", target_env = "gnu", target_arch = "x86_64"))] + pub extern "C" fn rust_eh_personality( + _record: usize, + _frame: usize, + _context: usize, + _dispatcher: usize, + ) -> u32 { 1 // `ExceptionContinueSearch` } @@ -126,14 +126,14 @@ pub mod personalities { // body is empty. #[no_mangle] #[cfg(all(target_os = "windows", target_env = "gnu"))] - pub extern fn rust_eh_unwind_resume() {} + pub extern "C" fn rust_eh_unwind_resume() {} // These two are called by our startup objects on i686-pc-windows-gnu, but // they don't need to do anything so the bodies are nops. #[no_mangle] #[cfg(all(target_os = "windows", target_env = "gnu", target_arch = "x86"))] - pub extern fn rust_eh_register_frames() {} + pub extern "C" fn rust_eh_register_frames() {} #[no_mangle] #[cfg(all(target_os = "windows", target_env = "gnu", target_arch = "x86"))] - pub extern fn rust_eh_unregister_frames() {} + pub extern "C" fn rust_eh_unregister_frames() {} } diff --git a/src/libpanic_unwind/dwarf/eh.rs b/src/libpanic_unwind/dwarf/eh.rs index ce7fab8584a28..a6a97a1363e02 100644 --- a/src/libpanic_unwind/dwarf/eh.rs +++ b/src/libpanic_unwind/dwarf/eh.rs @@ -11,8 +11,8 @@ #![allow(non_upper_case_globals)] #![allow(unused)] -use dwarf::DwarfReader; use core::mem; +use dwarf::DwarfReader; pub const DW_EH_PE_omit: u8 = 0xFF; pub const DW_EH_PE_absptr: u8 = 0x00; @@ -36,8 +36,8 @@ pub const DW_EH_PE_indirect: u8 = 0x80; #[derive(Copy, Clone)] pub struct EHContext<'a> { - pub ip: usize, // Current instruction pointer - pub func_start: usize, // Address of the current function + pub ip: usize, // Current instruction pointer + pub func_start: usize, // Address of the current function pub get_text_start: &'a dyn Fn() -> usize, // Get address of the code section pub get_data_start: &'a dyn Fn() -> usize, // Get address of the data section } @@ -51,11 +51,9 @@ pub enum EHAction { pub const USING_SJLJ_EXCEPTIONS: bool = cfg!(all(target_os = "ios", target_arch = "arm")); -pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext) - -> Result -{ +pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext) -> Result { if lsda.is_null() { - return Ok(EHAction::None) + return Ok(EHAction::None); } let func_start = context.func_start; @@ -93,10 +91,10 @@ pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext) } if ip < func_start + cs_start + cs_len { if cs_lpad == 0 { - return Ok(EHAction::None) + return Ok(EHAction::None); } else { let lpad = lpad_base + cs_lpad; - return Ok(interpret_cs_action(cs_action, lpad)) + return Ok(interpret_cs_action(cs_action, lpad)); } } } @@ -121,7 +119,7 @@ pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext) // Can never have null landing pad for sjlj -- that would have // been indicated by a -1 call site index. 
let lpad = (cs_lpad + 1) as usize; - return Ok(interpret_cs_action(cs_action, lpad)) + return Ok(interpret_cs_action(cs_action, lpad)); } } } @@ -144,18 +142,19 @@ fn round_up(unrounded: usize, align: usize) -> Result { } } -unsafe fn read_encoded_pointer(reader: &mut DwarfReader, - context: &EHContext, - encoding: u8) - -> Result { +unsafe fn read_encoded_pointer( + reader: &mut DwarfReader, + context: &EHContext, + encoding: u8, +) -> Result { if encoding == DW_EH_PE_omit { - return Err(()) + return Err(()); } // DW_EH_PE_aligned implies it's an absolute pointer value if encoding == DW_EH_PE_aligned { reader.ptr = round_up(reader.ptr as usize, mem::size_of::())? as *const u8; - return Ok(reader.read::()) + return Ok(reader.read::()); } let mut result = match encoding & 0x0F { @@ -177,7 +176,7 @@ unsafe fn read_encoded_pointer(reader: &mut DwarfReader, DW_EH_PE_pcrel => reader.ptr as usize, DW_EH_PE_funcrel => { if context.func_start == 0 { - return Err(()) + return Err(()); } context.func_start } diff --git a/src/libpanic_unwind/dwarf/mod.rs b/src/libpanic_unwind/dwarf/mod.rs index eb5fb81f61b83..e8903be874611 100644 --- a/src/libpanic_unwind/dwarf/mod.rs +++ b/src/libpanic_unwind/dwarf/mod.rs @@ -14,7 +14,7 @@ pub struct DwarfReader { pub ptr: *const u8, } -#[repr(C,packed)] +#[repr(C, packed)] struct Unaligned(T); impl DwarfReader { @@ -71,7 +71,9 @@ impl DwarfReader { #[test] fn dwarf_reader() { - let encoded: &[u8] = &[1, 2, 3, 4, 5, 6, 7, 0xE5, 0x8E, 0x26, 0x9B, 0xF1, 0x59, 0xFF, 0xFF]; + let encoded: &[u8] = &[ + 1, 2, 3, 4, 5, 6, 7, 0xE5, 0x8E, 0x26, 0x9B, 0xF1, 0x59, 0xFF, 0xFF, + ]; let mut reader = DwarfReader::new(encoded.as_ptr()); diff --git a/src/libpanic_unwind/lib.rs b/src/libpanic_unwind/lib.rs index 98f174710d243..cd1f119189ae7 100644 --- a/src/libpanic_unwind/lib.rs +++ b/src/libpanic_unwind/lib.rs @@ -14,11 +14,12 @@ #![no_std] #![unstable(feature = "panic_unwind", issue = "32837")] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", - issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/")] - +#![doc( + html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/", + issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/" +)] #![feature(allocator_api)] #![feature(alloc)] #![feature(core_intrinsics)] @@ -30,20 +31,22 @@ #![feature(staged_api)] #![feature(std_internals)] #![feature(unwind_attributes)] - #![panic_runtime] #![feature(panic_runtime)] extern crate alloc; extern crate libc; -#[cfg(not(any(target_env = "msvc", all(windows, target_arch = "x86_64", target_env = "gnu"))))] +#[cfg(not(any( + target_env = "msvc", + all(windows, target_arch = "x86_64", target_env = "gnu") +)))] extern crate unwind; use alloc::boxed::Box; use core::intrinsics; use core::mem; -use core::raw; use core::panic::BoxMeUp; +use core::raw; #[macro_use] mod macros; @@ -83,11 +86,12 @@ mod windows; // hairy and tightly coupled, for more information see the compiler's // implementation of this. 
#[no_mangle] -pub unsafe extern "C" fn __rust_maybe_catch_panic(f: fn(*mut u8), - data: *mut u8, - data_ptr: *mut usize, - vtable_ptr: *mut usize) - -> u32 { +pub unsafe extern "C" fn __rust_maybe_catch_panic( + f: fn(*mut u8), + data: *mut u8, + data_ptr: *mut usize, + vtable_ptr: *mut usize, +) -> u32 { let mut payload = imp::payload(); if intrinsics::try(f, data, &mut payload as *mut _ as *mut _) == 0 { 0 diff --git a/src/libpanic_unwind/windows.rs b/src/libpanic_unwind/windows.rs index 3257a9d25a51a..b8b99b97a9b50 100644 --- a/src/libpanic_unwind/windows.rs +++ b/src/libpanic_unwind/windows.rs @@ -10,14 +10,15 @@ pub type ULONG_PTR = usize; pub type LPVOID = *mut c_void; pub const EXCEPTION_MAXIMUM_PARAMETERS: usize = 15; -pub const EXCEPTION_NONCONTINUABLE: DWORD = 0x1; // Noncontinuable exception -pub const EXCEPTION_UNWINDING: DWORD = 0x2; // Unwind is in progress -pub const EXCEPTION_EXIT_UNWIND: DWORD = 0x4; // Exit unwind is in progress -pub const EXCEPTION_TARGET_UNWIND: DWORD = 0x20; // Target unwind in progress +pub const EXCEPTION_NONCONTINUABLE: DWORD = 0x1; // Noncontinuable exception +pub const EXCEPTION_UNWINDING: DWORD = 0x2; // Unwind is in progress +pub const EXCEPTION_EXIT_UNWIND: DWORD = 0x4; // Exit unwind is in progress +pub const EXCEPTION_TARGET_UNWIND: DWORD = 0x20; // Target unwind in progress pub const EXCEPTION_COLLIDED_UNWIND: DWORD = 0x40; // Collided exception handler call -pub const EXCEPTION_UNWIND: DWORD = EXCEPTION_UNWINDING | EXCEPTION_EXIT_UNWIND | - EXCEPTION_TARGET_UNWIND | - EXCEPTION_COLLIDED_UNWIND; +pub const EXCEPTION_UNWIND: DWORD = EXCEPTION_UNWINDING + | EXCEPTION_EXIT_UNWIND + | EXCEPTION_TARGET_UNWIND + | EXCEPTION_COLLIDED_UNWIND; #[repr(C)] pub struct EXCEPTION_RECORD { @@ -70,17 +71,21 @@ pub use self::EXCEPTION_DISPOSITION::*; extern "system" { #[unwind(allowed)] - pub fn RaiseException(dwExceptionCode: DWORD, - dwExceptionFlags: DWORD, - nNumberOfArguments: DWORD, - lpArguments: *const ULONG_PTR); + pub fn RaiseException( + dwExceptionCode: DWORD, + dwExceptionFlags: DWORD, + nNumberOfArguments: DWORD, + lpArguments: *const ULONG_PTR, + ); #[unwind(allowed)] - pub fn RtlUnwindEx(TargetFrame: LPVOID, - TargetIp: LPVOID, - ExceptionRecord: *const EXCEPTION_RECORD, - ReturnValue: LPVOID, - OriginalContext: *const CONTEXT, - HistoryTable: *const UNWIND_HISTORY_TABLE); + pub fn RtlUnwindEx( + TargetFrame: LPVOID, + TargetIp: LPVOID, + ExceptionRecord: *const EXCEPTION_RECORD, + ReturnValue: LPVOID, + OriginalContext: *const CONTEXT, + HistoryTable: *const UNWIND_HISTORY_TABLE, + ); #[unwind(allowed)] pub fn _CxxThrowException(pExceptionObject: *mut c_void, pThrowInfo: *mut u8); } diff --git a/src/libproc_macro/diagnostic.rs b/src/libproc_macro/diagnostic.rs index 64d0c3893c730..1165213c65855 100644 --- a/src/libproc_macro/diagnostic.rs +++ b/src/libproc_macro/diagnostic.rs @@ -51,7 +51,7 @@ pub struct Diagnostic { level: Level, message: String, spans: Vec, - children: Vec + children: Vec, } macro_rules! diagnostic_child_methods { @@ -100,7 +100,7 @@ impl Diagnostic { level: level, message: message.into(), spans: vec![], - children: vec![] + children: vec![], } } @@ -108,13 +108,15 @@ impl Diagnostic { /// the given set of `spans`. 
#[unstable(feature = "proc_macro_diagnostic", issue = "54140")] pub fn spanned(spans: S, level: Level, message: T) -> Diagnostic - where S: MultiSpan, T: Into + where + S: MultiSpan, + T: Into, { Diagnostic { level: level, message: message.into(), spans: spans.into_spans(), - children: vec![] + children: vec![], } } diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index 868190d01057d..f9b1cab1243fc 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -9,14 +9,15 @@ #![stable(feature = "proc_macro_lib", since = "1.15.0")] #![deny(missing_docs)] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", - html_playground_url = "https://play.rust-lang.org/", - issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/", - test(no_crate_inject, attr(deny(warnings))), - test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))] - +#![doc( + html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/", + html_playground_url = "https://play.rust-lang.org/", + issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/", + test(no_crate_inject, attr(deny(warnings))), + test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))) +)] #![feature(nll)] #![feature(staged_api)] #![feature(const_fn)] @@ -25,8 +26,7 @@ #![feature(optin_builtin_traits)] #![feature(non_exhaustive)] #![feature(specialization)] - -#![recursion_limit="256"] +#![recursion_limit = "256"] #[unstable(feature = "proc_macro_internals", issue = "27812")] #[doc(hidden)] @@ -37,10 +37,10 @@ mod diagnostic; #[unstable(feature = "proc_macro_diagnostic", issue = "54140")] pub use diagnostic::{Diagnostic, Level, MultiSpan}; -use std::{fmt, iter, mem}; use std::ops::{Bound, RangeBounds}; use std::path::PathBuf; use std::str::FromStr; +use std::{fmt, iter, mem}; /// The main type provided by this crate, representing an abstract stream of /// tokens, or, more specifically, a sequence of token trees. @@ -132,20 +132,20 @@ impl fmt::Debug for TokenStream { pub use quote::{quote, quote_span}; /// Creates a token stream containing a single token tree. - #[stable(feature = "proc_macro_lib2", since = "1.29.0")] +#[stable(feature = "proc_macro_lib2", since = "1.29.0")] impl From for TokenStream { fn from(tree: TokenTree) -> TokenStream { TokenStream(bridge::client::TokenStream::from_token_tree(match tree { TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0), TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0), TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0), - TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0) + TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0), })) } } /// Collects a number of token trees into a single stream. - #[stable(feature = "proc_macro_lib2", since = "1.29.0")] +#[stable(feature = "proc_macro_lib2", since = "1.29.0")] impl iter::FromIterator for TokenStream { fn from_iter>(trees: I) -> Self { trees.into_iter().map(TokenStream::from).collect() @@ -176,14 +176,16 @@ impl Extend for TokenStream { impl Extend for TokenStream { fn extend>(&mut self, streams: I) { // FIXME(eddyb) Use an optimized implementation if/when possible. 
- *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect(); + *self = iter::once(mem::replace(self, Self::new())) + .chain(streams) + .collect(); } } /// Public implementation details for the `TokenStream` type, such as iterators. #[stable(feature = "proc_macro_lib2", since = "1.29.0")] pub mod token_stream { - use {bridge, Group, Ident, Literal, Punct, TokenTree, TokenStream}; + use {bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree}; /// An iterator over `TokenStream`'s `TokenTree`s. /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, @@ -227,7 +229,9 @@ pub mod token_stream { /// This is a dummy macro, the actual implementation is in `quote::quote`.` #[unstable(feature = "proc_macro_quote", issue = "54722")] #[macro_export] -macro_rules! quote { () => {} } +macro_rules! quote { + () => {}; +} #[unstable(feature = "proc_macro_internals", issue = "27812")] #[doc(hidden)] @@ -355,7 +359,7 @@ pub struct LineColumn { /// The 0-indexed column (in UTF-8 characters) in the source file on which /// the span starts or ends (inclusive). #[unstable(feature = "proc_macro_span", issue = "54725")] - pub column: usize + pub column: usize, } #[unstable(feature = "proc_macro_span", issue = "54725")] @@ -395,7 +399,6 @@ impl SourceFile { } } - #[unstable(feature = "proc_macro_span", issue = "54725")] impl fmt::Debug for SourceFile { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -422,28 +425,16 @@ impl Eq for SourceFile {} pub enum TokenTree { /// A token stream surrounded by bracket delimiters. #[stable(feature = "proc_macro_lib2", since = "1.29.0")] - Group( - #[stable(feature = "proc_macro_lib2", since = "1.29.0")] - Group - ), + Group(#[stable(feature = "proc_macro_lib2", since = "1.29.0")] Group), /// An identifier. #[stable(feature = "proc_macro_lib2", since = "1.29.0")] - Ident( - #[stable(feature = "proc_macro_lib2", since = "1.29.0")] - Ident - ), + Ident(#[stable(feature = "proc_macro_lib2", since = "1.29.0")] Ident), /// A single punctuation character (`+`, `,`, `$`, etc.). #[stable(feature = "proc_macro_lib2", since = "1.29.0")] - Punct( - #[stable(feature = "proc_macro_lib2", since = "1.29.0")] - Punct - ), + Punct(#[stable(feature = "proc_macro_lib2", since = "1.29.0")] Punct), /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc. 
#[stable(feature = "proc_macro_lib2", since = "1.29.0")] - Literal( - #[stable(feature = "proc_macro_lib2", since = "1.29.0")] - Literal - ), + Literal(#[stable(feature = "proc_macro_lib2", since = "1.29.0")] Literal), } #[stable(feature = "proc_macro_lib2", since = "1.29.0")] @@ -1072,10 +1063,12 @@ impl Literal { } } - self.0.subspan( - cloned_bound(range.start_bound()), - cloned_bound(range.end_bound()), - ).map(Span) + self.0 + .subspan( + cloned_bound(range.start_bound()), + cloned_bound(range.end_bound()), + ) + .map(Span) } } diff --git a/src/libprofiler_builtins/build.rs b/src/libprofiler_builtins/build.rs index b66cd66448748..0e7ccc4ddb76b 100644 --- a/src/libprofiler_builtins/build.rs +++ b/src/libprofiler_builtins/build.rs @@ -11,20 +11,22 @@ fn main() { let target = env::var("TARGET").expect("TARGET was not set"); let cfg = &mut cc::Build::new(); - let mut profile_sources = vec!["GCDAProfiling.c", - "InstrProfiling.c", - "InstrProfilingBuffer.c", - "InstrProfilingFile.c", - "InstrProfilingMerge.c", - "InstrProfilingMergeFile.c", - "InstrProfilingNameVar.c", - "InstrProfilingPlatformDarwin.c", - "InstrProfilingPlatformLinux.c", - "InstrProfilingPlatformOther.c", - "InstrProfilingRuntime.cc", - "InstrProfilingUtil.c", - "InstrProfilingValue.c", - "InstrProfilingWriter.c"]; + let mut profile_sources = vec![ + "GCDAProfiling.c", + "InstrProfiling.c", + "InstrProfilingBuffer.c", + "InstrProfilingFile.c", + "InstrProfilingMerge.c", + "InstrProfilingMergeFile.c", + "InstrProfilingNameVar.c", + "InstrProfilingPlatformDarwin.c", + "InstrProfilingPlatformLinux.c", + "InstrProfilingPlatformOther.c", + "InstrProfilingRuntime.cc", + "InstrProfilingUtil.c", + "InstrProfilingValue.c", + "InstrProfilingWriter.c", + ]; if target.contains("msvc") { // Don't pull in extra libraries on MSVC diff --git a/src/libprofiler_builtins/lib.rs b/src/libprofiler_builtins/lib.rs index 0d12ba01c87a2..bde4926539f29 100644 --- a/src/libprofiler_builtins/lib.rs +++ b/src/libprofiler_builtins/lib.rs @@ -1,9 +1,11 @@ #![no_std] #![feature(profiler_runtime)] #![profiler_runtime] -#![unstable(feature = "profiler_runtime_lib", - reason = "internal implementation detail of rustc right now", - issue = "0")] +#![unstable( + feature = "profiler_runtime_lib", + reason = "internal implementation detail of rustc right now", + issue = "0" +)] #![allow(unused_features)] #![feature(nll)] #![feature(staged_api)] diff --git a/src/librustc/benches/dispatch.rs b/src/librustc/benches/dispatch.rs index e3b36be5696b3..ad6c6a3a0e58a 100644 --- a/src/librustc/benches/dispatch.rs +++ b/src/librustc/benches/dispatch.rs @@ -3,7 +3,7 @@ use test::Bencher; // Static/dynamic method dispatch struct Struct { - field: isize + field: isize, } trait Trait { @@ -20,15 +20,11 @@ impl Trait for Struct { fn trait_vtable_method_call(b: &mut Bencher) { let s = Struct { field: 10 }; let t = &s as &Trait; - b.iter(|| { - t.method() - }); + b.iter(|| t.method()); } #[bench] fn trait_static_method_call(b: &mut Bencher) { let s = Struct { field: 10 }; - b.iter(|| { - s.method() - }); + b.iter(|| s.method()); } diff --git a/src/librustc/benches/pattern.rs b/src/librustc/benches/pattern.rs index fd8cc5b83fd5a..38d97a3af997a 100644 --- a/src/librustc/benches/pattern.rs +++ b/src/librustc/benches/pattern.rs @@ -5,21 +5,17 @@ use test::Bencher; #[bench] fn option_some(b: &mut Bencher) { let x = Some(10); - b.iter(|| { - match x { - Some(y) => y, - None => 11 - } + b.iter(|| match x { + Some(y) => y, + None => 11, }); } #[bench] fn vec_pattern(b: &mut Bencher) { 
- let x = [1,2,3,4,5,6]; - b.iter(|| { - match x { - [1,2,3,..] => 10, - _ => 11, - } + let x = [1, 2, 3, 4, 5, 6]; + b.iter(|| match x { + [1, 2, 3, ..] => 10, + _ => 11, }); } diff --git a/src/librustc/cfg/construct.rs b/src/librustc/cfg/construct.rs index 978d20ea94789..6cc452ad22a09 100644 --- a/src/librustc/cfg/construct.rs +++ b/src/librustc/cfg/construct.rs @@ -4,8 +4,8 @@ use rustc_data_structures::graph::implementation as graph; use syntax::ptr::P; use ty::{self, TyCtxt}; -use hir::{self, PatKind}; use hir::def_id::DefId; +use hir::{self, PatKind}; struct CFGBuilder<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, @@ -20,18 +20,17 @@ struct CFGBuilder<'a, 'tcx: 'a> { #[derive(Copy, Clone)] struct BlockScope { block_expr_id: hir::ItemLocalId, // id of breakable block expr node - break_index: CFGIndex, // where to go on `break` + break_index: CFGIndex, // where to go on `break` } #[derive(Copy, Clone)] struct LoopScope { - loop_id: hir::ItemLocalId, // id of loop/while node - continue_index: CFGIndex, // where to go on a `loop` - break_index: CFGIndex, // where to go on a `break` + loop_id: hir::ItemLocalId, // id of loop/while node + continue_index: CFGIndex, // where to go on a `loop` + break_index: CFGIndex, // where to go on a `break` } -pub fn construct<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - body: &hir::Body) -> CFG { +pub fn construct<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, body: &hir::Body) -> CFG { let mut graph = graph::Graph::new(); let entry = graph.add_node(CFGNodeData::Entry); @@ -106,8 +105,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.add_ast_node(hir_id.local_id, &[exit]) } - hir::StmtKind::Expr(ref expr, _) | - hir::StmtKind::Semi(ref expr, _) => { + hir::StmtKind::Expr(ref expr, _) | hir::StmtKind::Semi(ref expr, _) => { let exit = self.expr(&expr, pred); self.add_ast_node(hir_id.local_id, &[exit]) } @@ -127,21 +125,20 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { fn pat(&mut self, pat: &hir::Pat, pred: CFGIndex) -> CFGIndex { match pat.node { - PatKind::Binding(.., None) | - PatKind::Path(_) | - PatKind::Lit(..) | - PatKind::Range(..) | - PatKind::Wild => self.add_ast_node(pat.hir_id.local_id, &[pred]), - - PatKind::Box(ref subpat) | - PatKind::Ref(ref subpat, _) | - PatKind::Binding(.., Some(ref subpat)) => { + PatKind::Binding(.., None) + | PatKind::Path(_) + | PatKind::Lit(..) + | PatKind::Range(..) + | PatKind::Wild => self.add_ast_node(pat.hir_id.local_id, &[pred]), + + PatKind::Box(ref subpat) + | PatKind::Ref(ref subpat, _) + | PatKind::Binding(.., Some(ref subpat)) => { let subpat_exit = self.pat(&subpat, pred); self.add_ast_node(pat.hir_id.local_id, &[subpat_exit]) } - PatKind::TupleStruct(_, ref subpats, _) | - PatKind::Tuple(ref subpats, _) => { + PatKind::TupleStruct(_, ref subpats, _) | PatKind::Tuple(ref subpats, _) => { let pats_exit = self.pats_all(subpats.iter(), pred); self.add_ast_node(pat.hir_id.local_id, &[pats_exit]) } @@ -160,9 +157,11 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } } - fn pats_all<'b, I: Iterator>>(&mut self, - pats: I, - pred: CFGIndex) -> CFGIndex { + fn pats_all<'b, I: Iterator>>( + &mut self, + pats: I, + pred: CFGIndex, + ) -> CFGIndex { //! Handles case where all of the patterns must match. pats.fold(pred, |pred, pat| self.pat(&pat, pred)) } @@ -189,9 +188,9 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // v 3 v 4 // [..expr..] 
// - let cond_exit = self.expr(&cond, pred); // 1 - let then_exit = self.expr(&then, cond_exit); // 2 - self.add_ast_node(expr.hir_id.local_id, &[cond_exit, then_exit]) // 3,4 + let cond_exit = self.expr(&cond, pred); // 1 + let then_exit = self.expr(&then, cond_exit); // 2 + self.add_ast_node(expr.hir_id.local_id, &[cond_exit, then_exit]) // 3,4 } hir::ExprKind::If(ref cond, ref then, Some(ref otherwise)) => { @@ -209,10 +208,10 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // v 4 v 5 // [..expr..] // - let cond_exit = self.expr(&cond, pred); // 1 - let then_exit = self.expr(&then, cond_exit); // 2 - let else_exit = self.expr(&otherwise, cond_exit); // 3 - self.add_ast_node(expr.hir_id.local_id, &[then_exit, else_exit]) // 4, 5 + let cond_exit = self.expr(&cond, pred); // 1 + let then_exit = self.expr(&then, cond_exit); // 2 + let else_exit = self.expr(&otherwise, cond_exit); // 3 + self.add_ast_node(expr.hir_id.local_id, &[then_exit, else_exit]) // 4, 5 } hir::ExprKind::While(ref cond, ref body, _) => { @@ -233,26 +232,26 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // Note that `break` and `continue` statements // may cause additional edges. - let loopback = self.add_dummy_node(&[pred]); // 1 + let loopback = self.add_dummy_node(&[pred]); // 1 // Create expr_exit without pred (cond_exit) - let expr_exit = self.add_ast_node(expr.hir_id.local_id, &[]); // 3 + let expr_exit = self.add_ast_node(expr.hir_id.local_id, &[]); // 3 // The LoopScope needs to be on the loop_scopes stack while evaluating the // condition and the body of the loop (both can break out of the loop) self.loop_scopes.push(LoopScope { loop_id: expr.hir_id.local_id, continue_index: loopback, - break_index: expr_exit + break_index: expr_exit, }); - let cond_exit = self.expr(&cond, loopback); // 2 + let cond_exit = self.expr(&cond, loopback); // 2 // Add pred (cond_exit) to expr_exit self.add_contained_edge(cond_exit, expr_exit); - let body_exit = self.block(&body, cond_exit); // 4 - self.add_contained_edge(body_exit, loopback); // 5 + let body_exit = self.block(&body, cond_exit); // 4 + self.add_contained_edge(body_exit, loopback); // 5 self.loop_scopes.pop(); expr_exit } @@ -272,15 +271,15 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // Note that `break` and `loop` statements // may cause additional edges. - let loopback = self.add_dummy_node(&[pred]); // 1 - let expr_exit = self.add_ast_node(expr.hir_id.local_id, &[]); // 2 + let loopback = self.add_dummy_node(&[pred]); // 1 + let expr_exit = self.add_ast_node(expr.hir_id.local_id, &[]); // 2 self.loop_scopes.push(LoopScope { loop_id: expr.hir_id.local_id, continue_index: loopback, break_index: expr_exit, }); - let body_exit = self.block(&body, loopback); // 3 - self.add_contained_edge(body_exit, loopback); // 4 + let body_exit = self.block(&body, loopback); // 3 + self.add_contained_edge(body_exit, loopback); // 4 self.loop_scopes.pop(); expr_exit } @@ -304,9 +303,9 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // v 3 v 4 // [..exit..] 
// - let l_exit = self.expr(&l, pred); // 1 - let r_exit = self.expr(&r, l_exit); // 2 - self.add_ast_node(expr.hir_id.local_id, &[l_exit, r_exit]) // 3,4 + let l_exit = self.expr(&l, pred); // 1 + let r_exit = self.expr(&r, l_exit); // 2 + self.add_ast_node(expr.hir_id.local_id, &[l_exit, r_exit]) // 3,4 } hir::ExprKind::Ret(ref v) => { @@ -345,8 +344,9 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.call(expr, pred, &args[0], args[1..].iter().map(|e| &*e)) } - hir::ExprKind::Index(ref l, ref r) | - hir::ExprKind::Binary(_, ref l, ref r) if self.tables.is_method_call(expr) => { + hir::ExprKind::Index(ref l, ref r) | hir::ExprKind::Binary(_, ref l, ref r) + if self.tables.is_method_call(expr) => + { self.call(expr, pred, &l, Some(&**r).into_iter()) } @@ -363,24 +363,23 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.opt_expr(base, field_cfg) } - hir::ExprKind::Assign(ref l, ref r) | - hir::ExprKind::AssignOp(_, ref l, ref r) => { + hir::ExprKind::Assign(ref l, ref r) | hir::ExprKind::AssignOp(_, ref l, ref r) => { self.straightline(expr, pred, [r, l].iter().map(|&e| &**e)) } - hir::ExprKind::Index(ref l, ref r) | - hir::ExprKind::Binary(_, ref l, ref r) => { // N.B., && and || handled earlier + hir::ExprKind::Index(ref l, ref r) | hir::ExprKind::Binary(_, ref l, ref r) => { + // N.B., && and || handled earlier self.straightline(expr, pred, [l, r].iter().map(|&e| &**e)) } - hir::ExprKind::Box(ref e) | - hir::ExprKind::AddrOf(_, ref e) | - hir::ExprKind::Cast(ref e, _) | - hir::ExprKind::Type(ref e, _) | - hir::ExprKind::Unary(_, ref e) | - hir::ExprKind::Field(ref e, _) | - hir::ExprKind::Yield(ref e) | - hir::ExprKind::Repeat(ref e, _) => { + hir::ExprKind::Box(ref e) + | hir::ExprKind::AddrOf(_, ref e) + | hir::ExprKind::Cast(ref e, _) + | hir::ExprKind::Type(ref e, _) + | hir::ExprKind::Unary(_, ref e) + | hir::ExprKind::Field(ref e, _) + | hir::ExprKind::Yield(ref e) + | hir::ExprKind::Repeat(ref e, _) => { self.straightline(expr, pred, Some(&**e).into_iter()) } @@ -390,56 +389,66 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.add_ast_node(expr.hir_id.local_id, &[post_inputs]) } - hir::ExprKind::Closure(..) | - hir::ExprKind::Lit(..) | - hir::ExprKind::Path(_) | - hir::ExprKind::Err => { - self.straightline(expr, pred, None::.iter()) - } + hir::ExprKind::Closure(..) + | hir::ExprKind::Lit(..) + | hir::ExprKind::Path(_) + | hir::ExprKind::Err => self.straightline(expr, pred, None::.iter()), } } - fn call<'b, I: Iterator>(&mut self, - call_expr: &hir::Expr, - pred: CFGIndex, - func_or_rcvr: &hir::Expr, - args: I) -> CFGIndex { + fn call<'b, I: Iterator>( + &mut self, + call_expr: &hir::Expr, + pred: CFGIndex, + func_or_rcvr: &hir::Expr, + args: I, + ) -> CFGIndex { let func_or_rcvr_exit = self.expr(func_or_rcvr, pred); let ret = self.straightline(call_expr, func_or_rcvr_exit, args); let m = self.tcx.hir().get_module_parent(call_expr.id); - if self.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(call_expr)) { + if self + .tcx + .is_ty_uninhabited_from(m, self.tables.expr_ty(call_expr)) + { self.add_unreachable_node() } else { ret } } - fn exprs<'b, I: Iterator>(&mut self, - exprs: I, - pred: CFGIndex) -> CFGIndex { + fn exprs<'b, I: Iterator>( + &mut self, + exprs: I, + pred: CFGIndex, + ) -> CFGIndex { //! Constructs graph for `exprs` evaluated in order exprs.fold(pred, |p, e| self.expr(e, p)) } - fn opt_expr(&mut self, - opt_expr: &Option>, - pred: CFGIndex) -> CFGIndex { + fn opt_expr(&mut self, opt_expr: &Option>, pred: CFGIndex) -> CFGIndex { //! 
Constructs graph for `opt_expr` evaluated, if Some opt_expr.iter().fold(pred, |p, e| self.expr(&e, p)) } - fn straightline<'b, I: Iterator>(&mut self, - expr: &hir::Expr, - pred: CFGIndex, - subexprs: I) -> CFGIndex { + fn straightline<'b, I: Iterator>( + &mut self, + expr: &hir::Expr, + pred: CFGIndex, + subexprs: I, + ) -> CFGIndex { //! Handles case of an expression that evaluates `subexprs` in order let subexprs_exit = self.exprs(subexprs, pred); self.add_ast_node(expr.hir_id.local_id, &[subexprs_exit]) } - fn match_(&mut self, id: hir::ItemLocalId, discr: &hir::Expr, - arms: &[hir::Arm], pred: CFGIndex) -> CFGIndex { + fn match_( + &mut self, + id: hir::ItemLocalId, + discr: &hir::Expr, + arms: &[hir::Arm], + pred: CFGIndex, + ) -> CFGIndex { // The CFG for match expression is quite complex, so no ASCII // art for it (yet). // @@ -534,22 +543,26 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { node } - fn add_contained_edge(&mut self, - source: CFGIndex, - target: CFGIndex) { - let data = CFGEdgeData {exiting_scopes: vec![] }; + fn add_contained_edge(&mut self, source: CFGIndex, target: CFGIndex) { + let data = CFGEdgeData { + exiting_scopes: vec![], + }; self.graph.add_edge(source, target, data); } - fn add_exiting_edge(&mut self, - from_expr: &hir::Expr, - from_index: CFGIndex, - target_scope: region::Scope, - to_index: CFGIndex) { - let mut data = CFGEdgeData { exiting_scopes: vec![] }; + fn add_exiting_edge( + &mut self, + from_expr: &hir::Expr, + from_index: CFGIndex, + target_scope: region::Scope, + to_index: CFGIndex, + ) { + let mut data = CFGEdgeData { + exiting_scopes: vec![], + }; let mut scope = region::Scope { id: from_expr.hir_id.local_id, - data: region::ScopeData::Node + data: region::ScopeData::Node, }; let region_scope_tree = self.tcx.region_scope_tree(self.owner_def_id); while scope != target_scope { @@ -559,52 +572,59 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.graph.add_edge(from_index, to_index, data); } - fn add_returning_edge(&mut self, - _from_expr: &hir::Expr, - from_index: CFGIndex) { + fn add_returning_edge(&mut self, _from_expr: &hir::Expr, from_index: CFGIndex) { let data = CFGEdgeData { - exiting_scopes: self.loop_scopes.iter() - .rev() - .map(|&LoopScope { loop_id: id, .. }| id) - .collect() + exiting_scopes: self + .loop_scopes + .iter() + .rev() + .map(|&LoopScope { loop_id: id, .. 
}| id) + .collect(), }; self.graph.add_edge(from_index, self.fn_exit, data); } - fn find_scope_edge(&self, - expr: &hir::Expr, - destination: hir::Destination, - scope_cf_kind: ScopeCfKind) -> (region::Scope, CFGIndex) { - + fn find_scope_edge( + &self, + expr: &hir::Expr, + destination: hir::Destination, + scope_cf_kind: ScopeCfKind, + ) -> (region::Scope, CFGIndex) { match destination.target_id { Ok(loop_id) => { for b in &self.breakable_block_scopes { if b.block_expr_id == self.tcx.hir().node_to_hir_id(loop_id).local_id { let scope = region::Scope { id: self.tcx.hir().node_to_hir_id(loop_id).local_id, - data: region::ScopeData::Node + data: region::ScopeData::Node, }; - return (scope, match scope_cf_kind { - ScopeCfKind::Break => b.break_index, - ScopeCfKind::Continue => bug!("can't continue to block"), - }); + return ( + scope, + match scope_cf_kind { + ScopeCfKind::Break => b.break_index, + ScopeCfKind::Continue => bug!("can't continue to block"), + }, + ); } } for l in &self.loop_scopes { if l.loop_id == self.tcx.hir().node_to_hir_id(loop_id).local_id { let scope = region::Scope { id: self.tcx.hir().node_to_hir_id(loop_id).local_id, - data: region::ScopeData::Node + data: region::ScopeData::Node, }; - return (scope, match scope_cf_kind { - ScopeCfKind::Break => l.break_index, - ScopeCfKind::Continue => l.continue_index, - }); + return ( + scope, + match scope_cf_kind { + ScopeCfKind::Break => l.break_index, + ScopeCfKind::Continue => l.continue_index, + }, + ); } } span_bug!(expr.span, "no scope for id {}", loop_id); } - Err(err) => span_bug!(expr.span, "scope error: {}", err), + Err(err) => span_bug!(expr.span, "scope error: {}", err), } } } diff --git a/src/librustc/cfg/graphviz.rs b/src/librustc/cfg/graphviz.rs index 6dec421760899..94031f4ae3cbc 100644 --- a/src/librustc/cfg/graphviz.rs +++ b/src/librustc/cfg/graphviz.rs @@ -1,6 +1,5 @@ /// This module provides linkage between rustc::middle::graph and /// libgraphviz traits. - // For clarity, rename the graphviz crate locally to dot. use graphviz as dot; @@ -23,8 +22,12 @@ impl<'a, 'tcx> LabelledCFG<'a, 'tcx> { fn local_id_to_string(&self, local_id: hir::ItemLocalId) -> String { assert!(self.cfg.owner_def_id.is_local()); let node_id = self.tcx.hir().hir_to_node_id(hir::HirId { - owner: self.tcx.hir().def_index_to_hir_id(self.cfg.owner_def_id.index).owner, - local_id + owner: self + .tcx + .hir() + .def_index_to_hir_id(self.cfg.owner_def_id.index) + .owner, + local_id, }); let s = self.tcx.hir().node_to_string(node_id); @@ -36,8 +39,7 @@ impl<'a, 'tcx> LabelledCFG<'a, 'tcx> { // \l, not the line that follows; so, add \l at end of string // if not already present, ensuring last line gets left-aligned // as well. 
- let mut last_two: Vec<_> = - s.chars().rev().take(2).collect(); + let mut last_two: Vec<_> = s.chars().rev().take(2).collect(); last_two.reverse(); if last_two != ['\\', 'l'] { s.push_str("\\l"); @@ -52,9 +54,11 @@ impl<'a, 'tcx> LabelledCFG<'a, 'tcx> { impl<'a, 'hir> dot::Labeller<'a> for LabelledCFG<'a, 'hir> { type Node = Node<'a>; type Edge = Edge<'a>; - fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(&self.name[..]).unwrap() } + fn graph_id(&'a self) -> dot::Id<'a> { + dot::Id::new(&self.name[..]).unwrap() + } - fn node_id(&'a self, &(i,_): &Node<'a>) -> dot::Id<'a> { + fn node_id(&'a self, &(i, _): &Node<'a>) -> dot::Id<'a> { dot::Id::new(format!("N{}", i.node_id())).unwrap() } @@ -84,9 +88,7 @@ impl<'a, 'hir> dot::Labeller<'a> for LabelledCFG<'a, 'hir> { put_one = true; } let s = self.local_id_to_string(id); - label.push_str(&format!("exiting scope_{} {}", - i, - &s[..])); + label.push_str(&format!("exiting scope_{} {}", i, &s[..])); } dot::LabelText::EscStr(label.into()) } @@ -112,12 +114,19 @@ impl<'a> dot::GraphWalk<'a> for &'a cfg::CFG { } } -impl<'a, 'hir> dot::GraphWalk<'a> for LabelledCFG<'a, 'hir> -{ +impl<'a, 'hir> dot::GraphWalk<'a> for LabelledCFG<'a, 'hir> { type Node = Node<'a>; type Edge = Edge<'a>; - fn nodes(&'a self) -> dot::Nodes<'a, Node<'a>> { self.cfg.nodes() } - fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> { self.cfg.edges() } - fn source(&'a self, edge: &Edge<'a>) -> Node<'a> { self.cfg.source(edge) } - fn target(&'a self, edge: &Edge<'a>) -> Node<'a> { self.cfg.target(edge) } + fn nodes(&'a self) -> dot::Nodes<'a, Node<'a>> { + self.cfg.nodes() + } + fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> { + self.cfg.edges() + } + fn source(&'a self, edge: &Edge<'a>) -> Node<'a> { + self.cfg.source(edge) + } + fn target(&'a self, edge: &Edge<'a>) -> Node<'a> { + self.cfg.target(edge) + } } diff --git a/src/librustc/cfg/mod.rs b/src/librustc/cfg/mod.rs index e58557839f9b9..0fc6a5196169b 100644 --- a/src/librustc/cfg/mod.rs +++ b/src/librustc/cfg/mod.rs @@ -1,10 +1,10 @@ //! Module that constructs a control-flow graph representing an item. //! Uses `Graph` as the underlying representation. -use rustc_data_structures::graph::implementation as graph; -use ty::TyCtxt; use hir; use hir::def_id::DefId; +use rustc_data_structures::graph::implementation as graph; +use ty::TyCtxt; mod construct; pub mod graphviz; @@ -37,7 +37,7 @@ impl CFGNodeData { #[derive(Debug)] pub struct CFGEdgeData { - pub exiting_scopes: Vec + pub exiting_scopes: Vec, } pub type CFGIndex = graph::NodeIndex; @@ -49,13 +49,13 @@ pub type CFGNode = graph::Node; pub type CFGEdge = graph::Edge; impl CFG { - pub fn new<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - body: &hir::Body) -> CFG { + pub fn new<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, body: &hir::Body) -> CFG { construct::construct(tcx, body) } pub fn node_is_reachable(&self, id: hir::ItemLocalId) -> bool { - self.graph.depth_traverse(self.entry, graph::OUTGOING) - .any(|idx| self.graph.node_data(idx).id() == id) + self.graph + .depth_traverse(self.entry, graph::OUTGOING) + .any(|idx| self.graph.node_data(idx).id() == id) } } diff --git a/src/librustc/dep_graph/cgu_reuse_tracker.rs b/src/librustc/dep_graph/cgu_reuse_tracker.rs index e8d1b71048705..5848ef727b756 100644 --- a/src/librustc/dep_graph/cgu_reuse_tracker.rs +++ b/src/librustc/dep_graph/cgu_reuse_tracker.rs @@ -2,8 +2,8 @@ //! compilation. This is used for incremental compilation tests and debug //! output. 
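As an illustrative aside (not part of the patch itself): the tracker whose hunk continues below records, for each codegen unit, the reuse that actually happened and the reuse a test expects, then flags every expectation that fails its exact or at-least comparison. A rough standalone sketch of that bookkeeping, using simplified hypothetical types rather than the real `CguReuseTracker`/`Session` API:

```rust
use std::collections::HashMap;

// Simplified stand-ins; the real tracker also carries spans and reports
// through the compiler session rather than returning strings.
#[derive(Clone, Copy, PartialEq, PartialOrd, Debug)]
enum CguReuse {
    No,
    PreLto,
    PostLto,
}

#[derive(Clone, Copy)]
enum ComparisonKind {
    Exact,
    AtLeast,
}

#[derive(Default)]
struct ReuseTracker {
    actual: HashMap<String, CguReuse>,
    expected: HashMap<String, (CguReuse, ComparisonKind)>,
}

impl ReuseTracker {
    fn set_actual_reuse(&mut self, cgu: &str, kind: CguReuse) {
        self.actual.insert(cgu.to_string(), kind);
    }

    fn set_expectation(&mut self, cgu: &str, expected: CguReuse, cmp: ComparisonKind) {
        self.expected.insert(cgu.to_string(), (expected, cmp));
    }

    // One error message per unmet or unrecorded expectation.
    fn check_expected_reuse(&self) -> Vec<String> {
        let mut errors = Vec::new();
        for (cgu, &(expected, cmp)) in &self.expected {
            match self.actual.get(cgu) {
                Some(&actual) => {
                    let failed = match cmp {
                        ComparisonKind::Exact => actual != expected,
                        ComparisonKind::AtLeast => actual < expected,
                    };
                    if failed {
                        errors.push(format!(
                            "CGU-reuse for `{}` is `{:?}` but should be `{:?}`",
                            cgu, actual, expected
                        ));
                    }
                }
                None => errors.push(format!("CGU-reuse for `{}` was not recorded", cgu)),
            }
        }
        errors
    }
}

fn main() {
    let mut t = ReuseTracker::default();
    t.set_expectation("cgu_a", CguReuse::PostLto, ComparisonKind::Exact);
    t.set_actual_reuse("cgu_a", CguReuse::No);
    t.set_expectation("cgu_b", CguReuse::PreLto, ComparisonKind::AtLeast);
    t.set_actual_reuse("cgu_b", CguReuse::PostLto); // at least PreLto, so fine
    assert_eq!(t.check_expected_reuse().len(), 1); // only `cgu_a` fails
}
```

The real tracker reports these failures through `sess.span_err` and `sess.span_fatal` at the span captured by `set_expectation`.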
-use session::Session; use rustc_data_structures::fx::FxHashMap; +use session::Session; use std::sync::{Arc, Mutex}; use syntax_pos::Span; @@ -51,19 +51,18 @@ impl CguReuseTracker { } pub fn new_disabled() -> CguReuseTracker { - CguReuseTracker { - data: None, - } + CguReuseTracker { data: None } } pub fn set_actual_reuse(&self, cgu_name: &str, kind: CguReuse) { if let Some(ref data) = self.data { debug!("set_actual_reuse({:?}, {:?})", cgu_name, kind); - let prev_reuse = data.lock() - .unwrap() - .actual_reuse - .insert(cgu_name.to_string(), kind); + let prev_reuse = data + .lock() + .unwrap() + .actual_reuse + .insert(cgu_name.to_string(), kind); if let Some(prev_reuse) = prev_reuse { // The only time it is legal to overwrite reuse state is when @@ -74,23 +73,30 @@ impl CguReuseTracker { } } - pub fn set_expectation(&self, - cgu_name: &str, - cgu_user_name: &str, - error_span: Span, - expected_reuse: CguReuse, - comparison_kind: ComparisonKind) { + pub fn set_expectation( + &self, + cgu_name: &str, + cgu_user_name: &str, + error_span: Span, + expected_reuse: CguReuse, + comparison_kind: ComparisonKind, + ) { if let Some(ref data) = self.data { - debug!("set_expectation({:?}, {:?}, {:?})", cgu_name, - expected_reuse, - comparison_kind); + debug!( + "set_expectation({:?}, {:?}, {:?})", + cgu_name, expected_reuse, comparison_kind + ); let mut data = data.lock().unwrap(); - data.expected_reuse.insert(cgu_name.to_string(), - (cgu_user_name.to_string(), - SendSpan(error_span), - expected_reuse, - comparison_kind)); + data.expected_reuse.insert( + cgu_name.to_string(), + ( + cgu_user_name.to_string(), + SendSpan(error_span), + expected_reuse, + comparison_kind, + ), + ); } } @@ -98,35 +104,30 @@ impl CguReuseTracker { if let Some(ref data) = self.data { let data = data.lock().unwrap(); - for (cgu_name, &(ref cgu_user_name, - ref error_span, - expected_reuse, - comparison_kind)) in &data.expected_reuse { + for (cgu_name, &(ref cgu_user_name, ref error_span, expected_reuse, comparison_kind)) in + &data.expected_reuse + { if let Some(&actual_reuse) = data.actual_reuse.get(cgu_name) { let (error, at_least) = match comparison_kind { - ComparisonKind::Exact => { - (expected_reuse != actual_reuse, false) - } - ComparisonKind::AtLeast => { - (actual_reuse < expected_reuse, true) - } + ComparisonKind::Exact => (expected_reuse != actual_reuse, false), + ComparisonKind::AtLeast => (actual_reuse < expected_reuse, true), }; if error { let at_least = if at_least { "at least " } else { "" }; - let msg = format!("CGU-reuse for `{}` is `{:?}` but \ - should be {}`{:?}`", - cgu_user_name, - actual_reuse, - at_least, - expected_reuse); + let msg = format!( + "CGU-reuse for `{}` is `{:?}` but \ + should be {}`{:?}`", + cgu_user_name, actual_reuse, at_least, expected_reuse + ); sess.span_err(error_span.0, &msg); } } else { - let msg = format!("CGU-reuse for `{}` (mangled: `{}`) was \ - not recorded", - cgu_user_name, - cgu_name); + let msg = format!( + "CGU-reuse for `{}` (mangled: `{}`) was \ + not recorded", + cgu_user_name, cgu_name + ); sess.span_fatal(error_span.0, &msg); } } diff --git a/src/librustc/dep_graph/debug.rs b/src/librustc/dep_graph/debug.rs index a9ad22c5e913e..663db0aa52d85 100644 --- a/src/librustc/dep_graph/debug.rs +++ b/src/librustc/dep_graph/debug.rs @@ -12,13 +12,13 @@ use std::error::Error; /// `z`. 
#[derive(Debug)] pub struct DepNodeFilter { - text: String + text: String, } impl DepNodeFilter { pub fn new(text: &str) -> Self { DepNodeFilter { - text: text.trim().to_string() + text: text.trim().to_string(), } } @@ -30,9 +30,10 @@ impl DepNodeFilter { /// Tests whether `node` meets the filter, returning true if so. pub fn test(&self, node: &DepNode) -> bool { let debug_str = format!("{:?}", node); - self.text.split('&') - .map(|s| s.trim()) - .all(|f| debug_str.contains(f)) + self.text + .split('&') + .map(|s| s.trim()) + .all(|f| debug_str.contains(f)) } } @@ -56,10 +57,7 @@ impl EdgeFilter { } } - pub fn test(&self, - source: &DepNode, - target: &DepNode) - -> bool { + pub fn test(&self, source: &DepNode, target: &DepNode) -> bool { self.source.test(source) && self.target.test(target) } } diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index 070551c0b7b0a..fc8eada8b06be 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -49,31 +49,33 @@ //! user of the `DepNode` API of having to know how to compute the expected //! fingerprint for a given set of node parameters. -use mir::interpret::GlobalId; use hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX}; use hir::map::DefPathHash; use hir::HirId; +use mir::interpret::GlobalId; use ich::{Fingerprint, StableHashingContext}; -use rustc_data_structures::stable_hasher::{StableHasher, HashStable}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use std::fmt; use std::hash::Hash; use syntax_pos::symbol::InternedString; use traits; use traits::query::{ - CanonicalProjectionGoal, CanonicalTyGoal, CanonicalTypeOpAscribeUserTypeGoal, - CanonicalTypeOpEqGoal, CanonicalTypeOpSubtypeGoal, CanonicalPredicateGoal, - CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpNormalizeGoal, + CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal, + CanonicalTypeOpAscribeUserTypeGoal, CanonicalTypeOpEqGoal, CanonicalTypeOpNormalizeGoal, + CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpSubtypeGoal, }; -use ty::{TyCtxt, FnSig, Instance, InstanceDef, - ParamEnv, ParamEnvAnd, Predicate, PolyFnSig, PolyTraitRef, Ty}; use ty::subst::Substs; +use ty::{ + FnSig, Instance, InstanceDef, ParamEnv, ParamEnvAnd, PolyFnSig, PolyTraitRef, Predicate, Ty, + TyCtxt, +}; // erase!() just makes tokens go away. It's used to specify which macro argument // is repeated (i.e., which sub-expression of the macro we are in) but don't need // to actually use any of the arguments. macro_rules! erase { - ($x:tt) => ({}) + ($x:tt) => {{}}; } macro_rules! replace { @@ -81,18 +83,30 @@ macro_rules! replace { } macro_rules! is_anon_attr { - (anon) => (true); - ($attr:ident) => (false); + (anon) => { + true + }; + ($attr:ident) => { + false + }; } macro_rules! is_input_attr { - (input) => (true); - ($attr:ident) => (false); + (input) => { + true + }; + ($attr:ident) => { + false + }; } macro_rules! is_eval_always_attr { - (eval_always) => (true); - ($attr:ident) => (false); + (eval_always) => { + true + }; + ($attr:ident) => { + false + }; } macro_rules! 
contains_anon_attr { @@ -408,7 +422,6 @@ impl fmt::Debug for DepNode { } } - impl DefPathHash { #[inline(always)] pub fn to_dep_node(self, kind: DepKind) -> DepNode { @@ -665,7 +678,7 @@ define_dep_nodes!( <'tcx> [] UpstreamMonomorphizationsFor(DefId), ); -trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug { +trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a>: fmt::Debug { const CAN_RECONSTRUCT_QUERY_KEY: bool; /// This method turns the parameters of a DepNodeConstructor into an opaque @@ -682,7 +695,8 @@ trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug { } impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a, T> DepNodeParams<'a, 'gcx, 'tcx> for T - where T: HashStable> + fmt::Debug +where + T: HashStable> + fmt::Debug, { default const CAN_RECONSTRUCT_QUERY_KEY: bool = false; @@ -758,9 +772,11 @@ impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for (DefId, De fn to_debug_str(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> String { let (def_id_0, def_id_1) = *self; - format!("({}, {})", - tcx.def_path_debug_str(def_id_0), - tcx.def_path_debug_str(def_id_1)) + format!( + "({}, {})", + tcx.def_path_debug_str(def_id_0), + tcx.def_path_debug_str(def_id_1) + ) } } @@ -771,10 +787,7 @@ impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for HirId { // method but it's faster to combine the hashes than to instantiate a full // hashing context and stable-hashing state. fn to_fingerprint(&self, tcx: TyCtxt<'_, '_, '_>) -> Fingerprint { - let HirId { - owner, - local_id, - } = *self; + let HirId { owner, local_id } = *self; let def_path_hash = tcx.def_path_hash(DefId::local(owner)); let local_id = Fingerprint::from_smaller_hash(local_id.as_u32().into()); @@ -788,10 +801,11 @@ impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for HirId { /// some independent path or string that persists between runs without /// the need to be mapped or unmapped. (This ensures we can serialize /// them even in the absence of a tcx.) -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, - RustcEncodable, RustcDecodable)] +#[derive( + Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, +)] pub struct WorkProductId { - hash: Fingerprint + hash: Fingerprint, } impl WorkProductId { @@ -800,14 +814,12 @@ impl WorkProductId { cgu_name.len().hash(&mut hasher); cgu_name.hash(&mut hasher); WorkProductId { - hash: hasher.finish() + hash: hasher.finish(), } } pub fn from_fingerprint(fingerprint: Fingerprint) -> WorkProductId { - WorkProductId { - hash: fingerprint - } + WorkProductId { hash: fingerprint } } } diff --git a/src/librustc/dep_graph/dep_tracking_map.rs b/src/librustc/dep_graph/dep_tracking_map.rs index 331a9c6109c4c..dc1f6d0255ab2 100644 --- a/src/librustc/dep_graph/dep_tracking_map.rs +++ b/src/librustc/dep_graph/dep_tracking_map.rs @@ -4,7 +4,7 @@ use std::hash::Hash; use std::marker::PhantomData; use util::common::MemoizationMap; -use super::{DepKind, DepNodeIndex, DepGraph}; +use super::{DepGraph, DepKind, DepNodeIndex}; /// A DepTrackingMap offers a subset of the `Map` API and ensures that /// we make calls to `read` and `write` as appropriate. We key the @@ -67,7 +67,8 @@ impl MemoizationMap for RefCell> { /// accesses the body of the item `item`, so we register a read /// from `Hir(item_def_id)`. 
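As an illustrative aside (not part of the patch itself): the `memoize` contract documented just above is, in short, look the key up; on a hit, replay a read of the dep node stored with the cached value; on a miss, run the closure inside an anonymous task and store the result together with the dep node that task produced. A minimal sketch of that shape, with a hypothetical `ToyGraph` keeping a plain list of recorded reads in place of the real `DepGraph`:

```rust
use std::collections::HashMap;

// Hypothetical stand-ins for DepGraph / DepNodeIndex.
type DepNodeIndex = u32;

#[derive(Default)]
struct ToyGraph {
    reads: Vec<DepNodeIndex>,
    next_index: DepNodeIndex,
}

impl ToyGraph {
    // Run `op` as an "anonymous task" and hand back its value plus a fresh node.
    fn with_anon_task<R>(&mut self, op: impl FnOnce() -> R) -> (R, DepNodeIndex) {
        let index = self.next_index;
        self.next_index += 1;
        (op(), index)
    }

    fn read_index(&mut self, index: DepNodeIndex) {
        self.reads.push(index);
    }
}

#[derive(Default)]
struct MemoMap<K, V> {
    map: HashMap<K, (V, DepNodeIndex)>,
}

impl<K: std::hash::Hash + Eq, V: Clone> MemoMap<K, V> {
    fn memoize(&mut self, graph: &mut ToyGraph, key: K, op: impl FnOnce() -> V) -> V {
        if let Some((value, dep_node)) = self.map.get(&key) {
            // Cache hit: skip the work, but still register the dependency.
            let (value, dep_node) = (value.clone(), *dep_node);
            graph.read_index(dep_node);
            return value;
        }
        // Cache miss: compute inside a task, remember which node it produced.
        let (value, dep_node) = graph.with_anon_task(op);
        self.map.insert(key, (value.clone(), dep_node));
        graph.read_index(dep_node);
        value
    }
}

fn main() {
    let mut graph = ToyGraph::default();
    let mut map = MemoMap::default();
    let first = map.memoize(&mut graph, "len", || 3usize);
    let second = map.memoize(&mut graph, "len", || unreachable!());
    assert_eq!((first, second), (3, 3));
    assert_eq!(graph.reads, vec![0, 0]); // both calls read node 0
}
```

Keeping the `DepNodeIndex` next to the cached value is what makes the hit path work: the dependency edge is replayed even though the computation is skipped.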
fn memoize(&self, key: M::Key, op: OP) -> M::Value - where OP: FnOnce() -> M::Value + where + OP: FnOnce() -> M::Value, { let graph; { @@ -80,7 +81,9 @@ impl MemoizationMap for RefCell> { } let (result, dep_node) = graph.with_anon_task(M::to_dep_kind(), op); - self.borrow_mut().map.insert(key, (result.clone(), dep_node)); + self.borrow_mut() + .map + .insert(key, (result.clone(), dep_node)); graph.read_index(dep_node); result } diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs index 961638151a2a8..29439e9e430ce 100644 --- a/src/librustc/dep_graph/graph.rs +++ b/src/librustc/dep_graph/graph.rs @@ -1,23 +1,23 @@ use errors::DiagnosticBuilder; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use rustc_data_structures::sync::{Lock, Lrc}; use smallvec::SmallVec; -use rustc_data_structures::sync::{Lrc, Lock}; +use std::collections::hash_map::Entry; use std::env; use std::hash::Hash; -use std::collections::hash_map::Entry; use ty::{self, TyCtxt}; -use util::common::{ProfileQueriesMsg, profq_msg}; +use util::common::{profq_msg, ProfileQueriesMsg}; -use ich::{StableHashingContext, StableHashingContextProvider, Fingerprint}; +use ich::{Fingerprint, StableHashingContext, StableHashingContextProvider}; use super::debug::EdgeFilter; -use super::dep_node::{DepNode, DepKind, WorkProductId}; +use super::dep_node::{DepKind, DepNode, WorkProductId}; +use super::prev::PreviousDepGraph; use super::query::DepGraphQuery; use super::safe::DepGraphSafe; use super::serialized::{SerializedDepGraph, SerializedDepNodeIndex}; -use super::prev::PreviousDepGraph; #[derive(Clone)] pub struct DepGraph { @@ -35,7 +35,7 @@ impl DepNodeIndex { #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum DepNodeColor { Red, - Green(DepNodeIndex) + Green(DepNodeIndex), } impl DepNodeColor { @@ -73,9 +73,10 @@ struct DepGraphData { } impl DepGraph { - - pub fn new(prev_graph: PreviousDepGraph, - prev_work_products: FxHashMap) -> DepGraph { + pub fn new( + prev_graph: PreviousDepGraph, + prev_work_products: FxHashMap, + ) -> DepGraph { let prev_graph_node_count = prev_graph.node_count(); DepGraph { @@ -91,9 +92,7 @@ impl DepGraph { } pub fn new_disabled() -> DepGraph { - DepGraph { - data: None, - } + DepGraph { data: None } } /// True if we are actually building the full dep-graph. @@ -106,8 +105,7 @@ impl DepGraph { let current_dep_graph = self.data.as_ref().unwrap().current.borrow(); let nodes: Vec<_> = current_dep_graph.data.iter().map(|n| n.node).collect(); let mut edges = Vec::new(); - for (from, edge_targets) in current_dep_graph.data.iter() - .map(|d| (d.node, &d.edges)) { + for (from, edge_targets) in current_dep_graph.data.iter().map(|d| (d.node, &d.edges)) { for &edge_target in edge_targets.iter() { let to = current_dep_graph.data[edge_target].node; edges.push((from, to)); @@ -117,8 +115,7 @@ impl DepGraph { DepGraphQuery::new(&nodes[..], &edges[..]) } - pub fn assert_ignored(&self) - { + pub fn assert_ignored(&self) { if let Some(..) 
= self.data { ty::tls::with_context_opt(|icx| { let icx = if let Some(icx) = icx { icx } else { return }; @@ -126,14 +123,15 @@ impl DepGraph { OpenTask::Ignore => { // ignored } - _ => panic!("expected an ignore context") + _ => panic!("expected an ignore context"), } }) } } - pub fn with_ignore(&self, op: OP) -> R - where OP: FnOnce() -> R + pub fn with_ignore(&self, op: OP) -> R + where + OP: FnOnce() -> R, { ty::tls::with_context(|icx| { let icx = ty::tls::ImplicitCtxt { @@ -141,9 +139,7 @@ impl DepGraph { ..icx.clone() }; - ty::tls::enter_context(&icx, |_| { - op() - }) + ty::tls::enter_context(&icx, |_| op()) }) } @@ -174,42 +170,56 @@ impl DepGraph { /// `arg` parameter. /// /// [rustc guide]: https://rust-lang.github.io/rustc-guide/incremental-compilation.html - pub fn with_task<'gcx, C, A, R>(&self, - key: DepNode, - cx: C, - arg: A, - task: fn(C, A) -> R) - -> (R, DepNodeIndex) - where C: DepGraphSafe + StableHashingContextProvider<'gcx>, - R: HashStable>, + pub fn with_task<'gcx, C, A, R>( + &self, + key: DepNode, + cx: C, + arg: A, + task: fn(C, A) -> R, + ) -> (R, DepNodeIndex) + where + C: DepGraphSafe + StableHashingContextProvider<'gcx>, + R: HashStable>, { - self.with_task_impl(key, cx, arg, false, task, - |key| OpenTask::Regular(Lock::new(RegularOpenTask { - node: key, - reads: SmallVec::new(), - read_set: Default::default(), - })), - |data, key, fingerprint, task| data.borrow_mut().complete_task(key, task, fingerprint)) + self.with_task_impl( + key, + cx, + arg, + false, + task, + |key| { + OpenTask::Regular(Lock::new(RegularOpenTask { + node: key, + reads: SmallVec::new(), + read_set: Default::default(), + })) + }, + |data, key, fingerprint, task| data.borrow_mut().complete_task(key, task, fingerprint), + ) } /// Creates a new dep-graph input with value `input` - pub fn input_task<'gcx, C, R>(&self, - key: DepNode, - cx: C, - input: R) - -> (R, DepNodeIndex) - where C: DepGraphSafe + StableHashingContextProvider<'gcx>, - R: HashStable>, + pub fn input_task<'gcx, C, R>(&self, key: DepNode, cx: C, input: R) -> (R, DepNodeIndex) + where + C: DepGraphSafe + StableHashingContextProvider<'gcx>, + R: HashStable>, { fn identity_fn(_: C, arg: A) -> A { arg } - self.with_task_impl(key, cx, input, true, identity_fn, + self.with_task_impl( + key, + cx, + input, + true, + identity_fn, |_| OpenTask::Ignore, |data, key, fingerprint, _| { - data.borrow_mut().alloc_node(key, SmallVec::new(), fingerprint) - }) + data.borrow_mut() + .alloc_node(key, SmallVec::new(), fingerprint) + }, + ) } fn with_task_impl<'gcx, C, A, R>( @@ -220,10 +230,12 @@ impl DepGraph { no_tcx: bool, task: fn(C, A) -> R, create_task: fn(DepNode) -> OpenTask, - finish_task_and_alloc_depnode: fn(&Lock, - DepNode, - Fingerprint, - OpenTask) -> DepNodeIndex + finish_task_and_alloc_depnode: fn( + &Lock, + DepNode, + Fingerprint, + OpenTask, + ) -> DepNodeIndex, ) -> (R, DepNodeIndex) where C: DepGraphSafe + StableHashingContextProvider<'gcx>, @@ -252,9 +264,7 @@ impl DepGraph { ..icx.clone() }; - ty::tls::enter_context(&icx, |_| { - task(cx, arg) - }) + ty::tls::enter_context(&icx, |_| task(cx, arg)) }) }; @@ -267,12 +277,8 @@ impl DepGraph { let current_fingerprint = stable_hasher.finish(); - let dep_node_index = finish_task_and_alloc_depnode( - &data.current, - key, - current_fingerprint, - open_task - ); + let dep_node_index = + finish_task_and_alloc_depnode(&data.current, key, current_fingerprint, open_task); // Determine the color of the new DepNode. 
if let Some(prev_index) = data.previous.node_to_index_opt(&key) { @@ -285,9 +291,12 @@ impl DepGraph { }; let mut colors = data.colors.borrow_mut(); - debug_assert!(colors.get(prev_index).is_none(), - "DepGraph::with_task() - Duplicate DepNodeColor \ - insertion for {:?}", key); + debug_assert!( + colors.get(prev_index).is_none(), + "DepGraph::with_task() - Duplicate DepNodeColor \ + insertion for {:?}", + key + ); colors.insert(prev_index, color); } @@ -300,8 +309,9 @@ impl DepGraph { /// Execute something within an "anonymous" task, that is, a task the /// DepNode of which is determined by the list of inputs it read from. - pub fn with_anon_task(&self, dep_kind: DepKind, op: OP) -> (R, DepNodeIndex) - where OP: FnOnce() -> R + pub fn with_anon_task(&self, dep_kind: DepKind, op: OP) -> (R, DepNodeIndex) + where + OP: FnOnce() -> R, { if let Some(ref data) = self.data { let (result, open_task) = ty::tls::with_context(|icx| { @@ -316,16 +326,12 @@ impl DepGraph { ..icx.clone() }; - ty::tls::enter_context(&icx, |_| { - op() - }) + ty::tls::enter_context(&icx, |_| op()) }; (r, task) }); - let dep_node_index = data.current - .borrow_mut() - .pop_anon_task(dep_kind, open_task); + let dep_node_index = data.current.borrow_mut().pop_anon_task(dep_kind, open_task); (result, dep_node_index) } else { (op(), DepNodeIndex::INVALID) @@ -334,20 +340,29 @@ impl DepGraph { /// Execute something within an "eval-always" task which is a task // that runs whenever anything changes. - pub fn with_eval_always_task<'gcx, C, A, R>(&self, - key: DepNode, - cx: C, - arg: A, - task: fn(C, A) -> R) - -> (R, DepNodeIndex) - where C: DepGraphSafe + StableHashingContextProvider<'gcx>, - R: HashStable>, + pub fn with_eval_always_task<'gcx, C, A, R>( + &self, + key: DepNode, + cx: C, + arg: A, + task: fn(C, A) -> R, + ) -> (R, DepNodeIndex) + where + C: DepGraphSafe + StableHashingContextProvider<'gcx>, + R: HashStable>, { - self.with_task_impl(key, cx, arg, false, task, + self.with_task_impl( + key, + cx, + arg, + false, + task, |key| OpenTask::EvalAlways { node: key }, |data, key, fingerprint, task| { - data.borrow_mut().complete_eval_always_task(key, task, fingerprint) - }) + data.borrow_mut() + .complete_eval_always_task(key, task, fingerprint) + }, + ) } #[inline] @@ -385,7 +400,10 @@ impl DepGraph { #[inline] pub fn dep_node_exists(&self, dep_node: &DepNode) -> bool { if let Some(ref data) = self.data { - data.current.borrow_mut().node_to_node_index.contains_key(dep_node) + data.current + .borrow_mut() + .node_to_node_index + .contains_key(dep_node) } else { false } @@ -393,12 +411,21 @@ impl DepGraph { #[inline] pub fn fingerprint_of(&self, dep_node_index: DepNodeIndex) -> Fingerprint { - let current = self.data.as_ref().expect("dep graph enabled").current.borrow_mut(); + let current = self + .data + .as_ref() + .expect("dep graph enabled") + .current + .borrow_mut(); current.data[dep_node_index].fingerprint } pub fn prev_fingerprint_of(&self, dep_node: &DepNode) -> Option { - self.data.as_ref().unwrap().previous.fingerprint_of(dep_node) + self.data + .as_ref() + .unwrap() + .previous + .fingerprint_of(dep_node) } #[inline] @@ -411,9 +438,7 @@ impl DepGraph { pub fn previous_work_product(&self, v: &WorkProductId) -> Option { self.data .as_ref() - .and_then(|data| { - data.previous_work_products.get(v).cloned() - }) + .and_then(|data| data.previous_work_products.get(v).cloned()) } /// Access the map of work-products created during the cached run. 
Only @@ -423,15 +448,14 @@ impl DepGraph { } #[inline(always)] - pub fn register_dep_node_debug_str(&self, - dep_node: DepNode, - debug_str_gen: F) - where F: FnOnce() -> String + pub fn register_dep_node_debug_str(&self, dep_node: DepNode, debug_str_gen: F) + where + F: FnOnce() -> String, { let dep_node_debug = &self.data.as_ref().unwrap().dep_node_debug; if dep_node_debug.borrow().contains_key(&dep_node) { - return + return; } let debug_str = debug_str_gen(); dep_node_debug.borrow_mut().insert(dep_node, debug_str); @@ -449,26 +473,33 @@ impl DepGraph { pub fn edge_deduplication_data(&self) -> (u64, u64) { let current_dep_graph = self.data.as_ref().unwrap().current.borrow(); - (current_dep_graph.total_read_count, current_dep_graph.total_duplicate_read_count) + ( + current_dep_graph.total_read_count, + current_dep_graph.total_duplicate_read_count, + ) } pub fn serialize(&self) -> SerializedDepGraph { let current_dep_graph = self.data.as_ref().unwrap().current.borrow(); - let fingerprints: IndexVec = - current_dep_graph.data.iter().map(|d| d.fingerprint).collect(); + let fingerprints: IndexVec = current_dep_graph + .data + .iter() + .map(|d| d.fingerprint) + .collect(); let nodes: IndexVec = current_dep_graph.data.iter().map(|d| d.node).collect(); - let total_edge_count: usize = current_dep_graph.data.iter() - .map(|d| d.edges.len()) - .sum(); + let total_edge_count: usize = current_dep_graph.data.iter().map(|d| d.edges.len()).sum(); let mut edge_list_indices = IndexVec::with_capacity(nodes.len()); let mut edge_list_data = Vec::with_capacity(total_edge_count); - for (current_dep_node_index, edges) in current_dep_graph.data.iter_enumerated() - .map(|(i, d)| (i, &d.edges)) { + for (current_dep_node_index, edges) in current_dep_graph + .data + .iter_enumerated() + .map(|(i, d)| (i, &d.edges)) + { let start = edge_list_data.len() as u32; // This should really just be a memcpy :/ edge_list_data.extend(edges.iter().map(|i| SerializedDepNodeIndex::new(i.index()))); @@ -492,26 +523,31 @@ impl DepGraph { pub fn node_color(&self, dep_node: &DepNode) -> Option { if let Some(ref data) = self.data { if let Some(prev_index) = data.previous.node_to_index_opt(dep_node) { - return data.colors.borrow().get(prev_index) + return data.colors.borrow().get(prev_index); } else { // This is a node that did not exist in the previous compilation // session, so we consider it to be red. - return Some(DepNodeColor::Red) + return Some(DepNodeColor::Red); } } None } - pub fn try_mark_green<'tcx>(&self, - tcx: TyCtxt<'_, 'tcx, 'tcx>, - dep_node: &DepNode) - -> Option { + pub fn try_mark_green<'tcx>( + &self, + tcx: TyCtxt<'_, 'tcx, 'tcx>, + dep_node: &DepNode, + ) -> Option { debug!("try_mark_green({:?}) - BEGIN", dep_node); let data = self.data.as_ref().unwrap(); #[cfg(not(parallel_queries))] - debug_assert!(!data.current.borrow().node_to_node_index.contains_key(dep_node)); + debug_assert!(!data + .current + .borrow() + .node_to_node_index + .contains_key(dep_node)); if dep_node.kind.is_input() { // We should only hit try_mark_green() for inputs that do not exist @@ -519,7 +555,10 @@ impl DepGraph { // eagerly marked as either red/green before any queries are // executed. 
debug_assert!(dep_node.extract_def_id(tcx).is_none()); - debug!("try_mark_green({:?}) - END - DepNode is deleted input", dep_node); + debug!( + "try_mark_green({:?}) - END - DepNode is deleted input", + dep_node + ); return None; } @@ -534,9 +573,12 @@ impl DepGraph { None => { // This DepNode did not exist in the previous compilation session, // so we cannot mark it as green. - debug!("try_mark_green({:?}) - END - DepNode does not exist in \ - current compilation session anymore", dep_node); - return None + debug!( + "try_mark_green({:?}) - END - DepNode does not exist in \ + current compilation session anymore", + dep_node + ); + return None; } }; @@ -552,10 +594,12 @@ impl DepGraph { // This dependency has been marked as green before, we are // still fine and can continue with checking the other // dependencies. - debug!("try_mark_green({:?}) --- found dependency {:?} to \ - be immediately green", - dep_node, - data.previous.index_to_node(dep_dep_node_index)); + debug!( + "try_mark_green({:?}) --- found dependency {:?} to \ + be immediately green", + dep_node, + data.previous.index_to_node(dep_dep_node_index) + ); current_deps.push(node_index); } Some(DepNodeColor::Red) => { @@ -563,11 +607,13 @@ impl DepGraph { // compared to the previous compilation session. We cannot // mark the DepNode as green and also don't need to bother // with checking any of the other dependencies. - debug!("try_mark_green({:?}) - END - dependency {:?} was \ - immediately red", - dep_node, - data.previous.index_to_node(dep_dep_node_index)); - return None + debug!( + "try_mark_green({:?}) - END - dependency {:?} was \ + immediately red", + dep_node, + data.previous.index_to_node(dep_dep_node_index) + ); + return None; } None => { let dep_dep_node = &data.previous.index_to_node(dep_dep_node_index); @@ -575,31 +621,36 @@ impl DepGraph { // We don't know the state of this dependency. If it isn't // an input node, let's try to mark it green recursively. if !dep_dep_node.kind.is_input() { - debug!("try_mark_green({:?}) --- state of dependency {:?} \ - is unknown, trying to mark it green", dep_node, - dep_dep_node); + debug!( + "try_mark_green({:?}) --- state of dependency {:?} \ + is unknown, trying to mark it green", + dep_node, dep_dep_node + ); if let Some(node_index) = self.try_mark_green(tcx, dep_dep_node) { - debug!("try_mark_green({:?}) --- managed to MARK \ - dependency {:?} as green", dep_node, dep_dep_node); + debug!( + "try_mark_green({:?}) --- managed to MARK \ + dependency {:?} as green", + dep_node, dep_dep_node + ); current_deps.push(node_index); continue; } } else { match dep_dep_node.kind { - DepKind::Hir | - DepKind::HirBody | - DepKind::CrateMetadata => { + DepKind::Hir | DepKind::HirBody | DepKind::CrateMetadata => { if dep_node.extract_def_id(tcx).is_none() { // If the node does not exist anymore, we // just fail to mark green. - return None + return None; } else { // If the node does exist, it should have // been pre-allocated. - bug!("DepNode {:?} should have been \ - pre-allocated but wasn't.", - dep_dep_node) + bug!( + "DepNode {:?} should have been \ + pre-allocated but wasn't.", + dep_dep_node + ) } } _ => { @@ -610,29 +661,37 @@ impl DepGraph { } // We failed to mark it green, so we try to force the query. 
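As an illustrative aside (not part of the patch itself): stripped of the forcing and diagnostics machinery in this hunk, the rule the comments describe is that a node may be marked green only if every dependency it had in the previous session is, transitively, green, while a single red dependency makes it red. A toy version of just that propagation step, with hypothetical names and no forcing fallback:

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq)]
enum Color {
    Red,
    Green,
}

// Toy model: node name -> dependencies recorded in the previous session.
struct ToyDepGraph {
    deps: HashMap<&'static str, Vec<&'static str>>,
    colors: HashMap<&'static str, Color>,
}

impl ToyDepGraph {
    // Green only if every dependency is (recursively) green; any red dependency
    // makes the node red. The real algorithm would try to force unknown
    // dependencies before giving up; this sketch simply recurses.
    fn try_mark_green(&mut self, node: &'static str) -> bool {
        if let Some(&c) = self.colors.get(node) {
            return c == Color::Green;
        }
        let deps = self.deps.get(node).cloned().unwrap_or_default();
        for dep in deps {
            if !self.try_mark_green(dep) {
                self.colors.insert(node, Color::Red);
                return false;
            }
        }
        self.colors.insert(node, Color::Green);
        true
    }
}

fn main() {
    let mut g = ToyDepGraph {
        deps: HashMap::from([("typeck", vec!["hir", "sig"])]),
        colors: HashMap::from([("hir", Color::Green), ("sig", Color::Red)]),
    };
    assert!(!g.try_mark_green("typeck")); // the red `sig` dependency wins
    assert!(g.colors["typeck"] == Color::Red);
}
```

The real `try_mark_green` additionally special-cases input nodes, forces unknown queries, and, once a node is proven green, promotes its cached diagnostics, as the rest of this hunk shows.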
- debug!("try_mark_green({:?}) --- trying to force \ - dependency {:?}", dep_node, dep_dep_node); + debug!( + "try_mark_green({:?}) --- trying to force \ + dependency {:?}", + dep_node, dep_dep_node + ); if ::ty::query::force_from_dep_node(tcx, dep_dep_node) { let dep_dep_node_color = data.colors.borrow().get(dep_dep_node_index); match dep_dep_node_color { Some(DepNodeColor::Green(node_index)) => { - debug!("try_mark_green({:?}) --- managed to \ - FORCE dependency {:?} to green", - dep_node, dep_dep_node); + debug!( + "try_mark_green({:?}) --- managed to \ + FORCE dependency {:?} to green", + dep_node, dep_dep_node + ); current_deps.push(node_index); } Some(DepNodeColor::Red) => { - debug!("try_mark_green({:?}) - END - \ - dependency {:?} was red after forcing", - dep_node, - dep_dep_node); - return None + debug!( + "try_mark_green({:?}) - END - \ + dependency {:?} was red after forcing", + dep_node, dep_dep_node + ); + return None; } None => { if !tcx.sess.has_errors() { - bug!("try_mark_green() - Forcing the DepNode \ - should have set its color") + bug!( + "try_mark_green() - Forcing the DepNode \ + should have set its color" + ) } else { // If the query we just forced has resulted // in some kind of compilation error, we @@ -643,9 +702,12 @@ impl DepGraph { } } else { // The DepNode could not be forced. - debug!("try_mark_green({:?}) - END - dependency {:?} \ - could not be forced", dep_node, dep_dep_node); - return None + debug!( + "try_mark_green({:?}) - END - dependency {:?} \ + could not be forced", + dep_node, dep_dep_node + ); + return None; } } } @@ -678,15 +740,18 @@ impl DepGraph { // and emit other diagnostics before these diagnostics are emitted. // Such diagnostics should be emitted after these. // See https://github.com/rust-lang/rust/issues/48685 - let diagnostics = tcx.queries.on_disk_cache - .load_diagnostics(tcx, prev_dep_node_index); + let diagnostics = tcx + .queries + .on_disk_cache + .load_diagnostics(tcx, prev_dep_node_index); if diagnostics.len() > 0 { let handle = tcx.sess.diagnostic(); // Promote the previous diagnostics to the current session. - tcx.queries.on_disk_cache - .store_diagnostics(dep_node_index, diagnostics.clone()); + tcx.queries + .on_disk_cache + .store_diagnostics(dep_node_index, diagnostics.clone()); for diagnostic in diagnostics { DiagnosticBuilder::new_diagnostic(handle, diagnostic).emit(); @@ -698,20 +763,28 @@ impl DepGraph { let mut colors = data.colors.borrow_mut(); // Multiple threads can all write the same color here #[cfg(not(parallel_queries))] - debug_assert!(colors.get(prev_dep_node_index).is_none(), - "DepGraph::try_mark_green() - Duplicate DepNodeColor \ - insertion for {:?}", dep_node); + debug_assert!( + colors.get(prev_dep_node_index).is_none(), + "DepGraph::try_mark_green() - Duplicate DepNodeColor \ + insertion for {:?}", + dep_node + ); colors.insert(prev_dep_node_index, DepNodeColor::Green(dep_node_index)); - debug!("try_mark_green({:?}) - END - successfully marked as green", dep_node); + debug!( + "try_mark_green({:?}) - END - successfully marked as green", + dep_node + ); Some(dep_node_index) } // Returns true if the given node has been marked as green during the // current compilation session. 
Used in various assertions pub fn is_green(&self, dep_node: &DepNode) -> bool { - self.node_color(dep_node).map(|c| c.is_green()).unwrap_or(false) + self.node_color(dep_node) + .map(|c| c.is_green()) + .unwrap_or(false) } // This method loads all on-disk cacheable query results into memory, so @@ -726,25 +799,28 @@ impl DepGraph { let green_nodes: Vec = { let data = self.data.as_ref().unwrap(); let colors = data.colors.borrow(); - colors.values.indices().filter_map(|prev_index| { - match colors.get(prev_index) { - Some(DepNodeColor::Green(_)) => { - let dep_node = data.previous.index_to_node(prev_index); - if dep_node.cache_on_disk(tcx) { - Some(dep_node) - } else { + colors + .values + .indices() + .filter_map(|prev_index| { + match colors.get(prev_index) { + Some(DepNodeColor::Green(_)) => { + let dep_node = data.previous.index_to_node(prev_index); + if dep_node.cache_on_disk(tcx) { + Some(dep_node) + } else { + None + } + } + None | Some(DepNodeColor::Red) => { + // We can skip red nodes because a node can only be marked + // as red if the query result was recomputed and thus is + // already in memory. None } } - None | - Some(DepNodeColor::Red) => { - // We can skip red nodes because a node can only be marked - // as red if the query result was recomputed and thus is - // already in memory. - None - } - } - }).collect() + }) + .collect() }; for dep_node in green_nodes { @@ -753,9 +829,11 @@ impl DepGraph { } pub fn mark_loaded_from_cache(&self, dep_node_index: DepNodeIndex, state: bool) { - debug!("mark_loaded_from_cache({:?}, {})", - self.data.as_ref().unwrap().current.borrow().data[dep_node_index].node, - state); + debug!( + "mark_loaded_from_cache({:?}, {})", + self.data.as_ref().unwrap().current.borrow().data[dep_node_index].node, + state + ); self.data .as_ref() @@ -768,7 +846,10 @@ impl DepGraph { pub fn was_loaded_from_cache(&self, dep_node: &DepNode) -> Option { let data = self.data.as_ref().unwrap(); let dep_node_index = data.current.borrow().node_to_node_index[dep_node]; - data.loaded_from_cache.borrow().get(&dep_node_index).cloned() + data.loaded_from_cache + .borrow() + .get(&dep_node_index) + .cloned() } } @@ -851,19 +932,16 @@ impl CurrentDepGraph { use std::time::{SystemTime, UNIX_EPOCH}; let duration = SystemTime::now().duration_since(UNIX_EPOCH).unwrap(); - let nanos = duration.as_secs() * 1_000_000_000 + - duration.subsec_nanos() as u64; + let nanos = duration.as_secs() * 1_000_000_000 + duration.subsec_nanos() as u64; let mut stable_hasher = StableHasher::new(); nanos.hash(&mut stable_hasher); let forbidden_edge = if cfg!(debug_assertions) { match env::var("RUST_FORBID_DEP_GRAPH_EDGE") { - Ok(s) => { - match EdgeFilter::new(&s) { - Ok(f) => Some(f), - Err(err) => bug!("RUST_FORBID_DEP_GRAPH_EDGE invalid: {}", err), - } - } + Ok(s) => match EdgeFilter::new(&s) { + Ok(f) => Some(f), + Err(err) => bug!("RUST_FORBID_DEP_GRAPH_EDGE invalid: {}", err), + }, Err(_) => None, } } else { @@ -892,13 +970,13 @@ impl CurrentDepGraph { &mut self, key: DepNode, task: OpenTask, - fingerprint: Fingerprint + fingerprint: Fingerprint, ) -> DepNodeIndex { if let OpenTask::Regular(task) = task { let RegularOpenTask { node, read_set: _, - reads + reads, } = task.into_inner(); assert_eq!(node, key); @@ -918,9 +996,11 @@ impl CurrentDepGraph { self.data[i].node.kind == DepKind::Krate) }) { - bug!("Input node {:?} with unexpected reads: {:?}", + bug!( + "Input node {:?} with unexpected reads: {:?}", node, - reads.iter().map(|&i| self.data[i].node).collect::>()) + reads.iter().map(|&i| 
self.data[i].node).collect::>() + ) } } @@ -932,10 +1012,7 @@ impl CurrentDepGraph { fn pop_anon_task(&mut self, kind: DepKind, task: OpenTask) -> DepNodeIndex { if let OpenTask::Anon(task) = task { - let AnonOpenTask { - read_set: _, - reads - } = task.into_inner(); + let AnonOpenTask { read_set: _, reads } = task.into_inner(); debug_assert!(!kind.is_input()); let mut fingerprint = self.anon_id_seed; @@ -959,7 +1036,8 @@ impl CurrentDepGraph { hash: fingerprint, }; - self.intern_node(target_dep_node, reads, Fingerprint::ZERO).0 + self.intern_node(target_dep_node, reads, Fingerprint::ZERO) + .0 } else { bug!("pop_anon_task() - Expected anonymous task to be popped") } @@ -969,11 +1047,9 @@ impl CurrentDepGraph { &mut self, key: DepNode, task: OpenTask, - fingerprint: Fingerprint + fingerprint: Fingerprint, ) -> DepNodeIndex { - if let OpenTask::EvalAlways { - node, - } = task { + if let OpenTask::EvalAlways { node } = task { debug_assert_eq!(node, key); let krate_idx = self.node_to_node_index[&DepNode::new_no_params(DepKind::Krate)]; self.alloc_node(node, smallvec![krate_idx], fingerprint) @@ -997,9 +1073,7 @@ impl CurrentDepGraph { let target = &task.node; let source = self.data[source].node; if forbidden_edge.test(&source, &target) { - bug!("forbidden edge {:?} -> {:?} created", - source, - target) + bug!("forbidden edge {:?} -> {:?} created", source, target) } } } @@ -1024,7 +1098,7 @@ impl CurrentDepGraph { &mut self, dep_node: DepNode, edges: SmallVec<[DepNodeIndex; 8]>, - fingerprint: Fingerprint + fingerprint: Fingerprint, ) -> DepNodeIndex { debug_assert!(!self.node_to_node_index.contains_key(&dep_node)); self.intern_node(dep_node, edges, fingerprint).0 @@ -1034,7 +1108,7 @@ impl CurrentDepGraph { &mut self, dep_node: DepNode, edges: SmallVec<[DepNodeIndex; 8]>, - fingerprint: Fingerprint + fingerprint: Fingerprint, ) -> (DepNodeIndex, bool) { debug_assert_eq!(self.node_to_node_index.len(), self.data.len()); @@ -1045,7 +1119,7 @@ impl CurrentDepGraph { self.data.push(DepNodeData { node: dep_node, edges, - fingerprint + fingerprint, }); entry.insert(dep_node_index); (dep_node_index, true) @@ -1069,9 +1143,7 @@ pub enum OpenTask { Regular(Lock), Anon(Lock), Ignore, - EvalAlways { - node: DepNode, - }, + EvalAlways { node: DepNode }, } // A data structure that stores Option values as a contiguous @@ -1087,7 +1159,7 @@ const COMPRESSED_FIRST_GREEN: u32 = 2; impl DepNodeColorMap { fn new(size: usize) -> DepNodeColorMap { DepNodeColorMap { - values: IndexVec::from_elem_n(COMPRESSED_NONE, size) + values: IndexVec::from_elem_n(COMPRESSED_NONE, size), } } @@ -1096,8 +1168,8 @@ impl DepNodeColorMap { COMPRESSED_NONE => None, COMPRESSED_RED => Some(DepNodeColor::Red), value => Some(DepNodeColor::Green(DepNodeIndex::from_u32( - value - COMPRESSED_FIRST_GREEN - ))) + value - COMPRESSED_FIRST_GREEN, + ))), } } diff --git a/src/librustc/dep_graph/mod.rs b/src/librustc/dep_graph/mod.rs index d153b7435c9b8..c8d9aaa8a1974 100644 --- a/src/librustc/dep_graph/mod.rs +++ b/src/librustc/dep_graph/mod.rs @@ -1,3 +1,4 @@ +pub mod cgu_reuse_tracker; pub mod debug; mod dep_node; mod dep_tracking_map; @@ -6,12 +7,11 @@ mod prev; mod query; mod safe; mod serialized; -pub mod cgu_reuse_tracker; +pub use self::dep_node::{label_strs, DepConstructor, DepKind, DepNode, WorkProductId}; pub use self::dep_tracking_map::{DepTrackingMap, DepTrackingMapConfig}; -pub use self::dep_node::{DepNode, DepKind, DepConstructor, WorkProductId, label_strs}; -pub use self::graph::{DepGraph, WorkProduct, DepNodeIndex, DepNodeColor, 
OpenTask}; pub use self::graph::WorkProductFileKind; +pub use self::graph::{DepGraph, DepNodeColor, DepNodeIndex, OpenTask, WorkProduct}; pub use self::prev::PreviousDepGraph; pub use self::query::DepGraphQuery; pub use self::safe::AssertDepGraphSafe; diff --git a/src/librustc/dep_graph/prev.rs b/src/librustc/dep_graph/prev.rs index 76d2954b4e35c..19117191df63b 100644 --- a/src/librustc/dep_graph/prev.rs +++ b/src/librustc/dep_graph/prev.rs @@ -1,7 +1,7 @@ -use ich::Fingerprint; -use rustc_data_structures::fx::FxHashMap; use super::dep_node::DepNode; use super::serialized::{SerializedDepGraph, SerializedDepNodeIndex}; +use ich::Fingerprint; +use rustc_data_structures::fx::FxHashMap; #[derive(Debug, RustcEncodable, RustcDecodable, Default)] pub struct PreviousDepGraph { @@ -11,7 +11,8 @@ pub struct PreviousDepGraph { impl PreviousDepGraph { pub fn new(data: SerializedDepGraph) -> PreviousDepGraph { - let index: FxHashMap<_, _> = data.nodes + let index: FxHashMap<_, _> = data + .nodes .iter_enumerated() .map(|(idx, &dep_node)| (dep_node, idx)) .collect(); @@ -19,14 +20,13 @@ impl PreviousDepGraph { } #[inline] - pub fn edges_from(&self, - dep_node: &DepNode) - -> Option<(&[SerializedDepNodeIndex], SerializedDepNodeIndex)> { + pub fn edges_from( + &self, + dep_node: &DepNode, + ) -> Option<(&[SerializedDepNodeIndex], SerializedDepNodeIndex)> { self.index .get(dep_node) - .map(|&node_index| { - (self.data.edge_targets_from(node_index), node_index) - }) + .map(|&node_index| (self.data.edge_targets_from(node_index), node_index)) } #[inline] @@ -52,9 +52,7 @@ impl PreviousDepGraph { } #[inline] - pub fn fingerprint_by_index(&self, - dep_node_index: SerializedDepNodeIndex) - -> Fingerprint { + pub fn fingerprint_by_index(&self, dep_node_index: SerializedDepNodeIndex) -> Fingerprint { self.data.fingerprints[dep_node_index] } diff --git a/src/librustc/dep_graph/query.rs b/src/librustc/dep_graph/query.rs index cd4ced238d360..0bd9f0a25bd34 100644 --- a/src/librustc/dep_graph/query.rs +++ b/src/librustc/dep_graph/query.rs @@ -1,6 +1,6 @@ use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::graph::implementation::{ - Direction, INCOMING, Graph, NodeIndex, OUTGOING + Direction, Graph, NodeIndex, INCOMING, OUTGOING, }; use super::DepNode; @@ -11,9 +11,7 @@ pub struct DepGraphQuery { } impl DepGraphQuery { - pub fn new(nodes: &[DepNode], - edges: &[(DepNode, DepNode)]) - -> DepGraphQuery { + pub fn new(nodes: &[DepNode], edges: &[(DepNode, DepNode)]) -> DepGraphQuery { let mut graph = Graph::with_capacity(nodes.len(), edges.len()); let mut indices = FxHashMap::default(); for node in nodes { @@ -26,10 +24,7 @@ impl DepGraphQuery { graph.add_edge(source, target, ()); } - DepGraphQuery { - graph, - indices, - } + DepGraphQuery { graph, indices } } pub fn contains_node(&self, node: &DepNode) -> bool { @@ -37,26 +32,24 @@ impl DepGraphQuery { } pub fn nodes(&self) -> Vec<&DepNode> { - self.graph.all_nodes() - .iter() - .map(|n| &n.data) - .collect() + self.graph.all_nodes().iter().map(|n| &n.data).collect() } - pub fn edges(&self) -> Vec<(&DepNode,&DepNode)> { - self.graph.all_edges() - .iter() - .map(|edge| (edge.source(), edge.target())) - .map(|(s, t)| (self.graph.node_data(s), - self.graph.node_data(t))) - .collect() + pub fn edges(&self) -> Vec<(&DepNode, &DepNode)> { + self.graph + .all_edges() + .iter() + .map(|edge| (edge.source(), edge.target())) + .map(|(s, t)| (self.graph.node_data(s), self.graph.node_data(t))) + .collect() } fn reachable_nodes(&self, node: &DepNode, direction: 
Direction) -> Vec<&DepNode> { if let Some(&index) = self.indices.get(node) { - self.graph.depth_traverse(index, direction) - .map(|s| self.graph.node_data(s)) - .collect() + self.graph + .depth_traverse(index, direction) + .map(|s| self.graph.node_data(s)) + .collect() } else { vec![] } @@ -76,9 +69,10 @@ impl DepGraphQuery { /// Just the outgoing edges from `node`. pub fn immediate_successors(&self, node: &DepNode) -> Vec<&DepNode> { if let Some(&index) = self.indices.get(&node) { - self.graph.successor_nodes(index) - .map(|s| self.graph.node_data(s)) - .collect() + self.graph + .successor_nodes(index) + .map(|s| self.graph.node_data(s)) + .collect() } else { vec![] } diff --git a/src/librustc/dep_graph/safe.rs b/src/librustc/dep_graph/safe.rs index f1e8224a70d14..8bd26334ed00e 100644 --- a/src/librustc/dep_graph/safe.rs +++ b/src/librustc/dep_graph/safe.rs @@ -1,7 +1,7 @@ //! The `DepGraphSafe` trait -use hir::BodyId; use hir::def_id::DefId; +use hir::BodyId; use syntax::ast::NodeId; use ty::TyCtxt; @@ -10,59 +10,47 @@ use ty::TyCtxt; /// only be implemented for things like the tcx as well as various id /// types, which will create reads in the dep-graph whenever the trait /// loads anything that might depend on the input program. -pub trait DepGraphSafe { -} +pub trait DepGraphSafe {} /// A `BodyId` on its own doesn't give access to any particular state. /// You must fetch the state from the various maps or generate /// on-demand queries, all of which create reads. -impl DepGraphSafe for BodyId { -} +impl DepGraphSafe for BodyId {} /// A `NodeId` on its own doesn't give access to any particular state. /// You must fetch the state from the various maps or generate /// on-demand queries, all of which create reads. -impl DepGraphSafe for NodeId { -} +impl DepGraphSafe for NodeId {} /// A `DefId` on its own doesn't give access to any particular state. /// You must fetch the state from the various maps or generate /// on-demand queries, all of which create reads. -impl DepGraphSafe for DefId { -} +impl DepGraphSafe for DefId {} /// The type context itself can be used to access all kinds of tracked /// state, but those accesses should always generate read events. -impl<'a, 'gcx, 'tcx> DepGraphSafe for TyCtxt<'a, 'gcx, 'tcx> { -} +impl<'a, 'gcx, 'tcx> DepGraphSafe for TyCtxt<'a, 'gcx, 'tcx> {} /// Tuples make it easy to build up state. impl DepGraphSafe for (A, B) - where A: DepGraphSafe, B: DepGraphSafe +where + A: DepGraphSafe, + B: DepGraphSafe, { } /// Shared ref to dep-graph-safe stuff should still be dep-graph-safe. -impl<'a, A> DepGraphSafe for &'a A - where A: DepGraphSafe, -{ -} +impl<'a, A> DepGraphSafe for &'a A where A: DepGraphSafe {} /// Mut ref to dep-graph-safe stuff should still be dep-graph-safe. -impl<'a, A> DepGraphSafe for &'a mut A - where A: DepGraphSafe, -{ -} - +impl<'a, A> DepGraphSafe for &'a mut A where A: DepGraphSafe {} /// No data here! :) -impl DepGraphSafe for () { -} +impl DepGraphSafe for () {} /// A convenient override that lets you pass arbitrary state into a /// task. Every use should be accompanied by a comment explaining why /// it makes sense (or how it could be refactored away in the future). 
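As an illustrative aside (not part of the patch itself): `DepGraphSafe` above is the usual marker-trait pattern, an empty trait whose impls for ids, tuples, and references simply declare that passing the value into a task cannot smuggle in untracked state, plus an explicit escape hatch for everything else. The pattern in isolation, with hypothetical names rather than the real trait:

```rust
// An empty marker trait: implementing it is a promise, not behaviour.
trait TaskSafe {}

// Plain identifiers are safe on their own...
impl TaskSafe for u32 {}
impl TaskSafe for String {}

// ...and safety composes structurally, like the tuple and reference impls above.
impl<A: TaskSafe, B: TaskSafe> TaskSafe for (A, B) {}
impl<'a, T: TaskSafe> TaskSafe for &'a T {}

// Escape hatch mirroring `AssertDepGraphSafe`: the caller vouches for `T`.
#[allow(dead_code)] // the wrapped value is only carried through, never read here
struct AssertTaskSafe<T>(pub T);
impl<T> TaskSafe for AssertTaskSafe<T> {}

// A task runner that only accepts state proven (or asserted) safe.
fn run_task<S: TaskSafe>(state: S) {
    let _ = state;
}

fn main() {
    run_task((42u32, String::from("crate-name")));
    run_task(AssertTaskSafe(vec![1, 2, 3])); // Vec isn't marked, so assert it
}
```

Because the trait has no methods, the guarantee is purely a compile-time contract; it pairs with `with_task` taking a plain `fn(C, A) -> R` rather than a closure, so a task only sees state that was passed in explicitly and declared safe.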
pub struct AssertDepGraphSafe(pub T); -impl DepGraphSafe for AssertDepGraphSafe { -} +impl DepGraphSafe for AssertDepGraphSafe {} diff --git a/src/librustc/dep_graph/serialized.rs b/src/librustc/dep_graph/serialized.rs index 3c04f01a5e1eb..f94f479e3c000 100644 --- a/src/librustc/dep_graph/serialized.rs +++ b/src/librustc/dep_graph/serialized.rs @@ -2,7 +2,7 @@ use dep_graph::DepNode; use ich::Fingerprint; -use rustc_data_structures::indexed_vec::{IndexVec, Idx}; +use rustc_data_structures::indexed_vec::{Idx, IndexVec}; newtype_index! { pub struct SerializedDepNodeIndex { .. } @@ -27,9 +27,7 @@ pub struct SerializedDepGraph { impl SerializedDepGraph { #[inline] - pub fn edge_targets_from(&self, - source: SerializedDepNodeIndex) - -> &[SerializedDepNodeIndex] { + pub fn edge_targets_from(&self, source: SerializedDepNodeIndex) -> &[SerializedDepNodeIndex] { let targets = self.edge_list_indices[source]; &self.edge_list_data[targets.0 as usize..targets.1 as usize] } diff --git a/src/librustc/diagnostics.rs b/src/librustc/diagnostics.rs index 4bc52e82f9be1..4d541a534e6f0 100644 --- a/src/librustc/diagnostics.rs +++ b/src/librustc/diagnostics.rs @@ -2123,7 +2123,6 @@ static X: u32 = 42; } - register_diagnostics! { // E0006, // merged with E0005 // E0101, // replaced with E0282 diff --git a/src/librustc/hir/check_attr.rs b/src/librustc/hir/check_attr.rs index 2e0e9672758fd..f4d379fb026c4 100644 --- a/src/librustc/hir/check_attr.rs +++ b/src/librustc/hir/check_attr.rs @@ -5,10 +5,10 @@ //! item. use hir; -use hir::intravisit::{self, Visitor, NestedVisitorMap}; -use ty::TyCtxt; +use hir::intravisit::{self, NestedVisitorMap, Visitor}; use std::fmt::{self, Display}; use syntax_pos::Span; +use ty::TyCtxt; #[derive(Copy, Clone, PartialEq)] pub(crate) enum Target { @@ -35,27 +35,31 @@ pub(crate) enum Target { impl Display for Target { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", match *self { - Target::ExternCrate => "extern crate", - Target::Use => "use", - Target::Static => "static item", - Target::Const => "constant item", - Target::Fn => "function", - Target::Closure => "closure", - Target::Mod => "module", - Target::ForeignMod => "foreign module", - Target::GlobalAsm => "global asm", - Target::Ty => "type alias", - Target::Existential => "existential type", - Target::Enum => "enum", - Target::Struct => "struct", - Target::Union => "union", - Target::Trait => "trait", - Target::TraitAlias => "trait alias", - Target::Impl => "item", - Target::Expression => "expression", - Target::Statement => "statement", - }) + write!( + f, + "{}", + match *self { + Target::ExternCrate => "extern crate", + Target::Use => "use", + Target::Static => "static item", + Target::Const => "constant item", + Target::Fn => "function", + Target::Closure => "closure", + Target::Mod => "module", + Target::ForeignMod => "foreign module", + Target::GlobalAsm => "global asm", + Target::Ty => "type alias", + Target::Existential => "existential type", + Target::Enum => "enum", + Target::Struct => "struct", + Target::Union => "union", + Target::Trait => "trait", + Target::TraitAlias => "trait alias", + Target::Impl => "item", + Target::Expression => "expression", + Target::Statement => "statement", + } + ) } } @@ -90,9 +94,12 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { /// Check any attribute. 
fn check_attributes(&self, item: &hir::Item, target: Target) { if target == Target::Fn || target == Target::Const { - self.tcx.codegen_fn_attrs(self.tcx.hir().local_def_id(item.id)); + self.tcx + .codegen_fn_attrs(self.tcx.hir().local_def_id(item.id)); } else if let Some(a) = item.attrs.iter().find(|a| a.check_name("target_feature")) { - self.tcx.sess.struct_span_err(a.span, "attribute should be applied to a function") + self.tcx + .sess + .struct_span_err(a.span, "attribute should be applied to a function") .span_label(item.span, "not a function") .emit(); } @@ -114,35 +121,36 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { /// Check if an `#[inline]` is applied to a function or a closure. fn check_inline(&self, attr: &hir::Attribute, span: &Span, target: Target) { if target != Target::Fn && target != Target::Closure { - struct_span_err!(self.tcx.sess, - attr.span, - E0518, - "attribute should be applied to function or closure") - .span_label(*span, "not a function or closure") - .emit(); + struct_span_err!( + self.tcx.sess, + attr.span, + E0518, + "attribute should be applied to function or closure" + ) + .span_label(*span, "not a function or closure") + .emit(); } } /// Check if the `#[non_exhaustive]` attribute on an `item` is valid. fn check_non_exhaustive(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) { match target { - Target::Struct | Target::Enum => { /* Valid */ }, + Target::Struct | Target::Enum => { /* Valid */ } _ => { - struct_span_err!(self.tcx.sess, - attr.span, - E0701, - "attribute can only be applied to a struct or enum") - .span_label(item.span, "not a struct or enum") - .emit(); + struct_span_err!( + self.tcx.sess, + attr.span, + E0701, + "attribute can only be applied to a struct or enum" + ) + .span_label(item.span, "not a struct or enum") + .emit(); return; } } if attr.meta_item_list().is_some() || attr.value_str().is_some() { - struct_span_err!(self.tcx.sess, - attr.span, - E0702, - "attribute should be empty") + struct_span_err!(self.tcx.sess, attr.span, E0702, "attribute should be empty") .span_label(item.span, "not empty") .emit(); } @@ -151,9 +159,10 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { /// Check if the `#[marker]` attribute on an `item` is valid. 
fn check_marker(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) { match target { - Target::Trait => { /* Valid */ }, + Target::Trait => { /* Valid */ } _ => { - self.tcx.sess + self.tcx + .sess .struct_span_err(attr.span, "attribute can only be applied to a trait") .span_label(item.span, "not a trait") .emit(); @@ -162,7 +171,8 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { } if !attr.is_word() { - self.tcx.sess + self.tcx + .sess .struct_span_err(attr.span, "attribute should be empty") .emit(); } @@ -175,7 +185,8 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { // #[repr(foo)] // #[repr(bar, align(8))] // ``` - let hints: Vec<_> = item.attrs + let hints: Vec<_> = item + .attrs .iter() .filter(|attr| attr.name() == "repr") .filter_map(|attr| attr.meta_item_list()) @@ -199,20 +210,18 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { let (article, allowed_targets) = match &*name.as_str() { "C" => { is_c = true; - if target != Target::Struct && - target != Target::Union && - target != Target::Enum { - ("a", "struct, enum or union") + if target != Target::Struct && target != Target::Union && target != Target::Enum + { + ("a", "struct, enum or union") } else { - continue + continue; } } "packed" => { - if target != Target::Struct && - target != Target::Union { - ("a", "struct or union") + if target != Target::Struct && target != Target::Union { + ("a", "struct or union") } else { - continue + continue; } } "simd" => { @@ -220,15 +229,14 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { if target != Target::Struct { ("a", "struct") } else { - continue + continue; } } "align" => { - if target != Target::Struct && - target != Target::Union { + if target != Target::Struct && target != Target::Union { ("a", "struct or union") } else { - continue + continue; } } "transparent" => { @@ -236,17 +244,15 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { if target != Target::Struct { ("a", "struct") } else { - continue + continue; } } - "i8" | "u8" | "i16" | "u16" | - "i32" | "u32" | "i64" | "u64" | - "isize" | "usize" => { + "i8" | "u8" | "i16" | "u16" | "i32" | "u32" | "i64" | "u64" | "isize" | "usize" => { int_reprs += 1; if target != Target::Enum { ("an", "enum") } else { - continue + continue; } } _ => continue, @@ -266,16 +272,23 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { // Error on repr(transparent, ). 
if is_transparent && hints.len() > 1 { let hint_spans: Vec<_> = hint_spans.clone().collect(); - span_err!(self.tcx.sess, hint_spans, E0692, - "transparent struct cannot have other repr hints"); + span_err!( + self.tcx.sess, + hint_spans, + E0692, + "transparent struct cannot have other repr hints" + ); } // Warn on repr(u8, u16), repr(C, simd), and c-like-enum-repr(C, u8) - if (int_reprs > 1) - || (is_simd && is_c) - || (int_reprs == 1 && is_c && is_c_like_enum(item)) { + if (int_reprs > 1) || (is_simd && is_c) || (int_reprs == 1 && is_c && is_c_like_enum(item)) + { let hint_spans: Vec<_> = hint_spans.collect(); - span_warn!(self.tcx.sess, hint_spans, E0566, - "conflicting representation hints"); + span_warn!( + self.tcx.sess, + hint_spans, + E0566, + "conflicting representation hints" + ); } } @@ -333,8 +346,10 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { fn check_used(&self, item: &hir::Item, target: Target) { for attr in &item.attrs { if attr.name() == "used" && target != Target::Static { - self.tcx.sess - .span_err(attr.span, "attribute must be applied to a `static` variable"); + self.tcx.sess.span_err( + attr.span, + "attribute must be applied to a `static` variable", + ); } } } @@ -351,7 +366,6 @@ impl<'a, 'tcx> Visitor<'tcx> for CheckAttrVisitor<'a, 'tcx> { intravisit::walk_item(self, item) } - fn visit_stmt(&mut self, stmt: &'tcx hir::Stmt) { self.check_stmt_attributes(stmt); intravisit::walk_stmt(self, stmt) @@ -365,7 +379,9 @@ impl<'a, 'tcx> Visitor<'tcx> for CheckAttrVisitor<'a, 'tcx> { pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut checker = CheckAttrVisitor { tcx }; - tcx.hir().krate().visit_all_item_likes(&mut checker.as_deep_visitor()); + tcx.hir() + .krate() + .visit_all_item_likes(&mut checker.as_deep_visitor()); } fn is_c_like_enum(item: &hir::Item) -> bool { @@ -373,7 +389,9 @@ fn is_c_like_enum(item: &hir::Item) -> bool { for variant in &def.variants { match variant.node.data { hir::VariantData::Unit(_) => { /* continue */ } - _ => { return false; } + _ => { + return false; + } } } true diff --git a/src/librustc/hir/def.rs b/src/librustc/hir/def.rs index 20ec620a281fd..507e61dbf35ab 100644 --- a/src/librustc/hir/def.rs +++ b/src/librustc/hir/def.rs @@ -1,10 +1,10 @@ +use hir; use hir::def_id::DefId; -use util::nodemap::{NodeMap, DefIdMap}; use syntax::ast; use syntax::ext::base::MacroKind; use syntax_pos::Span; -use hir; use ty; +use util::nodemap::{DefIdMap, NodeMap}; use self::Namespace::*; @@ -52,7 +52,10 @@ pub enum Def { AssociatedExistential(DefId), PrimTy(hir::PrimTy), TyParam(DefId), - SelfTy(Option /* trait */, Option /* impl */), + SelfTy( + Option, /* trait */ + Option, /* impl */ + ), ToolMod, // e.g., `rustfmt` in `#[rustfmt::skip]` // Value namespace @@ -66,9 +69,11 @@ pub enum Def { AssociatedConst(DefId), Local(ast::NodeId), - Upvar(ast::NodeId, // `NodeId` of closed over local - usize, // index in the `freevars` list of the closure - ast::NodeId), // expr node that creates the closure + Upvar( + ast::NodeId, // `NodeId` of closed over local + usize, // index in the `freevars` list of the closure + ast::NodeId, + ), // expr node that creates the closure Label(ast::NodeId), // Macro namespace @@ -101,12 +106,20 @@ pub struct PathResolution { impl PathResolution { pub fn new(def: Def) -> Self { - PathResolution { base_def: def, unresolved_segments: 0 } + PathResolution { + base_def: def, + unresolved_segments: 0, + } } pub fn with_unresolved_segments(def: Def, mut unresolved_segments: usize) -> Self { - if def == Def::Err { 
unresolved_segments = 0 } - PathResolution { base_def: def, unresolved_segments: unresolved_segments } + if def == Def::Err { + unresolved_segments = 0 + } + PathResolution { + base_def: def, + unresolved_segments: unresolved_segments, + } } #[inline] @@ -187,7 +200,7 @@ impl PerNS> { } /// Returns an iterator over the items which are `Some`. - pub fn present_items(self) -> impl Iterator { + pub fn present_items(self) -> impl Iterator { use std::iter::once; once(self.type_ns) @@ -253,34 +266,43 @@ impl NonMacroAttrKind { impl Def { pub fn def_id(&self) -> DefId { - self.opt_def_id().unwrap_or_else(|| { - bug!("attempted .def_id() on invalid def: {:?}", self) - }) + self.opt_def_id() + .unwrap_or_else(|| bug!("attempted .def_id() on invalid def: {:?}", self)) } pub fn opt_def_id(&self) -> Option { match *self { - Def::Fn(id) | Def::Mod(id) | Def::Static(id, _) | - Def::Variant(id) | Def::VariantCtor(id, ..) | Def::Enum(id) | - Def::TyAlias(id) | Def::TraitAlias(id) | - Def::AssociatedTy(id) | Def::TyParam(id) | Def::Struct(id) | Def::StructCtor(id, ..) | - Def::Union(id) | Def::Trait(id) | Def::Method(id) | Def::Const(id) | - Def::AssociatedConst(id) | Def::Macro(id, ..) | - Def::Existential(id) | Def::AssociatedExistential(id) | Def::ForeignTy(id) => { - Some(id) - } - - Def::Local(..) | - Def::Upvar(..) | - Def::Label(..) | - Def::PrimTy(..) | - Def::SelfTy(..) | - Def::SelfCtor(..) | - Def::ToolMod | - Def::NonMacroAttr(..) | - Def::Err => { - None - } + Def::Fn(id) + | Def::Mod(id) + | Def::Static(id, _) + | Def::Variant(id) + | Def::VariantCtor(id, ..) + | Def::Enum(id) + | Def::TyAlias(id) + | Def::TraitAlias(id) + | Def::AssociatedTy(id) + | Def::TyParam(id) + | Def::Struct(id) + | Def::StructCtor(id, ..) + | Def::Union(id) + | Def::Trait(id) + | Def::Method(id) + | Def::Const(id) + | Def::AssociatedConst(id) + | Def::Macro(id, ..) + | Def::Existential(id) + | Def::AssociatedExistential(id) + | Def::ForeignTy(id) => Some(id), + + Def::Local(..) + | Def::Upvar(..) + | Def::Label(..) + | Def::PrimTy(..) + | Def::SelfTy(..) + | Def::SelfCtor(..) + | Def::ToolMod + | Def::NonMacroAttr(..) + | Def::Err => None, } } @@ -326,8 +348,12 @@ impl Def { pub fn article(&self) -> &'static str { match *self { - Def::AssociatedTy(..) | Def::AssociatedConst(..) | Def::AssociatedExistential(..) | - Def::Enum(..) | Def::Existential(..) | Def::Err => "an", + Def::AssociatedTy(..) + | Def::AssociatedConst(..) + | Def::AssociatedExistential(..) + | Def::Enum(..) + | Def::Existential(..) + | Def::Err => "an", Def::Macro(.., macro_kind) => macro_kind.article(), _ => "a", } diff --git a/src/librustc/hir/def_id.rs b/src/librustc/hir/def_id.rs index fb2c873d740ca..ae914cd556c82 100644 --- a/src/librustc/hir/def_id.rs +++ b/src/librustc/hir/def_id.rs @@ -1,9 +1,9 @@ -use ty; use hir::map::definitions::FIRST_FREE_HIGH_DEF_INDEX; use rustc_data_structures::indexed_vec::Idx; use serialize; use std::fmt; use std::u32; +use ty; newtype_index! { pub struct CrateId { @@ -40,7 +40,6 @@ impl ::std::fmt::Debug for CrateNum { /// LOCAL_CRATE in their DefId. 
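Aside: the reformatted Def::opt_def_id above shows the leading-`|` style rustfmt applies to long alternations — one arm groups every id-carrying variant, another groups everything without a DefId. A standalone sketch of that pattern follows; the Def variants and DefId newtype here are a toy subset for illustration, not rustc's full enum.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct DefId(u32);

#[derive(Debug)]
enum Def {
    Fn(DefId),
    Struct(DefId),
    Trait(DefId),
    Local(u32),
    Err,
}

impl Def {
    // One arm per group, with the `|` leading each alternative.
    fn opt_def_id(&self) -> Option<DefId> {
        match *self {
            Def::Fn(id)
            | Def::Struct(id)
            | Def::Trait(id) => Some(id),

            Def::Local(..)
            | Def::Err => None,
        }
    }
}

fn main() {
    assert_eq!(Def::Fn(DefId(7)).opt_def_id(), Some(DefId(7)));
    assert_eq!(Def::Struct(DefId(2)).opt_def_id(), Some(DefId(2)));
    assert_eq!(Def::Local(3).opt_def_id(), None);
    assert_eq!(Def::Err.opt_def_id(), None);
}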
pub const LOCAL_CRATE: CrateNum = CrateNum::Index(CrateId::from_u32_const(0)); - impl Idx for CrateNum { #[inline] fn new(value: usize) -> Self { @@ -83,7 +82,12 @@ impl CrateNum { } } - pub fn as_def_id(&self) -> DefId { DefId { krate: *self, index: CRATE_DEF_INDEX } } + pub fn as_def_id(&self) -> DefId { + DefId { + krate: *self, + index: CRATE_DEF_INDEX, + } + } } impl fmt::Display for CrateNum { @@ -123,10 +127,12 @@ pub const CRATE_DEF_INDEX: DefIndex = DefIndex(0); impl fmt::Debug for DefIndex { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, - "DefIndex({}:{})", - self.address_space().index(), - self.as_array_index()) + write!( + f, + "DefIndex({}:{})", + self.address_space().index(), + self.as_array_index() + ) } } @@ -136,7 +142,7 @@ impl DefIndex { match self.0 & 1 { 0 => DefIndexAddressSpace::Low, 1 => DefIndexAddressSpace::High, - _ => unreachable!() + _ => unreachable!(), } } @@ -161,9 +167,11 @@ impl DefIndex { // because the first FIRST_FREE_HIGH_DEF_INDEX indexes are reserved // for internal use. let def_index = DefIndex::from_array_index( - proc_macro_index.checked_add(FIRST_FREE_HIGH_DEF_INDEX) + proc_macro_index + .checked_add(FIRST_FREE_HIGH_DEF_INDEX) .expect("integer overflow adding `proc_macro_index`"), - DefIndexAddressSpace::High); + DefIndexAddressSpace::High, + ); assert!(def_index != CRATE_DEF_INDEX); def_index } @@ -172,10 +180,9 @@ impl DefIndex { pub fn to_proc_macro_index(self: DefIndex) -> usize { assert_eq!(self.address_space(), DefIndexAddressSpace::High); - self.as_array_index().checked_sub(FIRST_FREE_HIGH_DEF_INDEX) - .unwrap_or_else(|| { - bug!("using local index {:?} as proc-macro index", self) - }) + self.as_array_index() + .checked_sub(FIRST_FREE_HIGH_DEF_INDEX) + .unwrap_or_else(|| bug!("using local index {:?} as proc-macro index", self)) } // Don't use this if you don't know about the DefIndex encoding. @@ -215,10 +222,13 @@ pub struct DefId { impl fmt::Debug for DefId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "DefId({}/{}:{}", - self.krate, - self.index.address_space().index(), - self.index.as_array_index())?; + write!( + f, + "DefId({}/{}:{}", + self.krate, + self.index.address_space().index(), + self.index.as_array_index() + )?; ty::tls::with_opt(|opt_tcx| { if let Some(tcx) = opt_tcx { @@ -235,7 +245,10 @@ impl DefId { /// Make a local `DefId` with the given index. #[inline] pub fn local(index: DefIndex) -> DefId { - DefId { krate: LOCAL_CRATE, index: index } + DefId { + krate: LOCAL_CRATE, + index: index, + } } #[inline] @@ -272,7 +285,7 @@ impl LocalDefId { pub fn to_def_id(self) -> DefId { DefId { krate: LOCAL_CRATE, - index: self.0 + index: self.0, } } } diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index d4f891c874a40..e0abdc71e8ecf 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -31,22 +31,33 @@ //! This order consistency is required in a few places in rustc, for //! example generator inference, and possibly also HIR borrowck. 
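Aside: the DefIndex code above decodes an address space from the low bit (`self.0 & 1`) and an array index from the remaining bits. The sketch below reconstructs that bit-packing in a self-contained form; the constructor and accessor names are illustrative reconstructions consistent with the low-bit check shown, not rustc's exact definitions.

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum AddressSpace {
    Low = 0,
    High = 1,
}

// The address space lives in the low bit, the array index in the rest.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Index(u32);

impl Index {
    fn from_array_index(i: u32, space: AddressSpace) -> Index {
        Index((i << 1) | space as u32)
    }

    fn address_space(self) -> AddressSpace {
        match self.0 & 1 {
            0 => AddressSpace::Low,
            _ => AddressSpace::High,
        }
    }

    fn as_array_index(self) -> u32 {
        self.0 >> 1
    }
}

fn main() {
    let idx = Index::from_array_index(42, AddressSpace::High);
    assert_eq!(idx.address_space(), AddressSpace::High);
    assert_eq!(idx.as_array_index(), 42);
}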
-use syntax::ast::{NodeId, CRATE_NODE_ID, Ident, Name, Attribute}; -use syntax_pos::Span; -use hir::*; +use super::itemlikevisit::DeepVisitor; use hir::def::Def; use hir::map::{self, Map}; -use super::itemlikevisit::DeepVisitor; +use hir::*; +use syntax::ast::{Attribute, Ident, Name, NodeId, CRATE_NODE_ID}; +use syntax_pos::Span; use std::cmp; #[derive(Copy, Clone)] pub enum FnKind<'a> { /// #[xxx] pub async/const/extern "Abi" fn foo() - ItemFn(Name, &'a Generics, FnHeader, &'a Visibility, &'a [Attribute]), + ItemFn( + Name, + &'a Generics, + FnHeader, + &'a Visibility, + &'a [Attribute], + ), /// fn foo(&self) - Method(Ident, &'a MethodSig, Option<&'a Visibility>, &'a [Attribute]), + Method( + Ident, + &'a MethodSig, + Option<&'a Visibility>, + &'a [Attribute], + ), /// |x, y| {} Closure(&'a [Attribute]), @@ -134,7 +145,7 @@ impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> { /// explicitly, you need to override each method. (And you also need /// to monitor future changes to `Visitor` in case a new method with a /// new default implementation gets introduced.) -pub trait Visitor<'v> : Sized { +pub trait Visitor<'v>: Sized { /////////////////////////////////////////////////////////////////////////// // Nested items. @@ -166,7 +177,10 @@ pub trait Visitor<'v> : Sized { /// but cannot supply a `Map`; see `nested_visit_map` for advice. #[allow(unused_variables)] fn visit_nested_item(&mut self, id: ItemId) { - let opt_item = self.nested_visit_map().inter().map(|map| map.expect_item(id.id)); + let opt_item = self + .nested_visit_map() + .inter() + .map(|map| map.expect_item(id.id)); if let Some(item) = opt_item { self.visit_item(item); } @@ -177,7 +191,10 @@ pub trait Visitor<'v> : Sized { /// method. #[allow(unused_variables)] fn visit_nested_trait_item(&mut self, id: TraitItemId) { - let opt_item = self.nested_visit_map().inter().map(|map| map.trait_item(id)); + let opt_item = self + .nested_visit_map() + .inter() + .map(|map| map.trait_item(id)); if let Some(item) = opt_item { self.visit_trait_item(item); } @@ -311,22 +328,26 @@ pub trait Visitor<'v> : Sized { fn visit_poly_trait_ref(&mut self, t: &'v PolyTraitRef, m: TraitBoundModifier) { walk_poly_trait_ref(self, t, m) } - fn visit_variant_data(&mut self, - s: &'v VariantData, - _: Name, - _: &'v Generics, - _parent_id: NodeId, - _: Span) { + fn visit_variant_data( + &mut self, + s: &'v VariantData, + _: Name, + _: &'v Generics, + _parent_id: NodeId, + _: Span, + ) { walk_struct_def(self, s) } fn visit_struct_field(&mut self, s: &'v StructField) { walk_struct_field(self, s) } - fn visit_enum_def(&mut self, - enum_definition: &'v EnumDef, - generics: &'v Generics, - item_id: NodeId, - _: Span) { + fn visit_enum_def( + &mut self, + enum_definition: &'v EnumDef, + generics: &'v Generics, + item_id: NodeId, + _: Span, + ) { walk_enum_def(self, enum_definition, generics, item_id) } fn visit_variant(&mut self, v: &'v Variant, g: &'v Generics, item_id: NodeId) { @@ -359,8 +380,7 @@ pub trait Visitor<'v> : Sized { fn visit_assoc_type_binding(&mut self, type_binding: &'v TypeBinding) { walk_assoc_type_binding(self, type_binding) } - fn visit_attribute(&mut self, _attr: &'v Attribute) { - } + fn visit_attribute(&mut self, _attr: &'v Attribute) {} fn visit_macro_def(&mut self, macro_def: &'v MacroDef) { walk_macro_def(self, macro_def) } @@ -427,26 +447,33 @@ pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime LifetimeName::Param(ParamName::Plain(ident)) => { visitor.visit_ident(ident); } - 
LifetimeName::Param(ParamName::Fresh(_)) | - LifetimeName::Param(ParamName::Error) | - LifetimeName::Static | - LifetimeName::Error | - LifetimeName::Implicit | - LifetimeName::Underscore => {} + LifetimeName::Param(ParamName::Fresh(_)) + | LifetimeName::Param(ParamName::Error) + | LifetimeName::Static + | LifetimeName::Error + | LifetimeName::Implicit + | LifetimeName::Underscore => {} } } -pub fn walk_poly_trait_ref<'v, V>(visitor: &mut V, - trait_ref: &'v PolyTraitRef, - _modifier: TraitBoundModifier) - where V: Visitor<'v> +pub fn walk_poly_trait_ref<'v, V>( + visitor: &mut V, + trait_ref: &'v PolyTraitRef, + _modifier: TraitBoundModifier, +) where + V: Visitor<'v>, { - walk_list!(visitor, visit_generic_param, &trait_ref.bound_generic_params); + walk_list!( + visitor, + visit_generic_param, + &trait_ref.bound_generic_params + ); visitor.visit_trait_ref(&trait_ref.trait_ref); } pub fn walk_trait_ref<'v, V>(visitor: &mut V, trait_ref: &'v TraitRef) - where V: Visitor<'v> +where + V: Visitor<'v>, { visitor.visit_id(trait_ref.ref_id); visitor.visit_path(&trait_ref.path, trait_ref.hir_ref_id) @@ -465,23 +492,18 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { ItemKind::Use(ref path, _) => { visitor.visit_use(path, item.id, item.hir_id); } - ItemKind::Static(ref typ, _, body) | - ItemKind::Const(ref typ, body) => { + ItemKind::Static(ref typ, _, body) | ItemKind::Const(ref typ, body) => { visitor.visit_id(item.id); visitor.visit_ty(typ); visitor.visit_nested_body(body); } - ItemKind::Fn(ref declaration, header, ref generics, body_id) => { - visitor.visit_fn(FnKind::ItemFn(item.ident.name, - generics, - header, - &item.vis, - &item.attrs), - declaration, - body_id, - item.span, - item.id) - } + ItemKind::Fn(ref declaration, header, ref generics, body_id) => visitor.visit_fn( + FnKind::ItemFn(item.ident.name, generics, header, &item.vis, &item.attrs), + declaration, + body_id, + item.span, + item.id, + ), ItemKind::Mod(ref module) => { // `visit_mod()` takes care of visiting the `Item`'s `NodeId`. 
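Aside: intravisit pairs a Visitor trait, whose default methods call free walk_* functions, with the convention that an overriding method re-calls the matching walk_* to keep recursing. A self-contained toy version of that shape is below; the Expr type and LitCounter visitor are made up for illustration and have nothing to do with the HIR.

#[derive(Debug)]
enum Expr {
    Lit(i64),
    Add(Box<Expr>, Box<Expr>),
    Neg(Box<Expr>),
}

// Default methods delegate to walk_*, like Visitor::visit_ty -> walk_ty above.
trait Visitor: Sized {
    fn visit_expr(&mut self, e: &Expr) {
        walk_expr(self, e)
    }
}

fn walk_expr<V: Visitor>(visitor: &mut V, e: &Expr) {
    match e {
        Expr::Lit(_) => {}
        Expr::Add(a, b) => {
            visitor.visit_expr(a);
            visitor.visit_expr(b);
        }
        Expr::Neg(inner) => visitor.visit_expr(inner),
    }
}

struct LitCounter {
    count: usize,
}

impl Visitor for LitCounter {
    fn visit_expr(&mut self, e: &Expr) {
        if let Expr::Lit(_) = e {
            self.count += 1;
        }
        // Keep recursing into children, as the walk_* convention expects.
        walk_expr(self, e);
    }
}

fn main() {
    let e = Expr::Add(
        Box::new(Expr::Lit(1)),
        Box::new(Expr::Neg(Box::new(Expr::Lit(2)))),
    );
    let mut counter = LitCounter { count: 0 };
    counter.visit_expr(&e);
    assert_eq!(counter.count, 2);
}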
visitor.visit_mod(module, item.span, item.id) @@ -498,7 +520,11 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { visitor.visit_ty(typ); visitor.visit_generics(type_parameters) } - ItemKind::Existential(ExistTy {ref generics, ref bounds, impl_trait_fn}) => { + ItemKind::Existential(ExistTy { + ref generics, + ref bounds, + impl_trait_fn, + }) => { visitor.visit_id(item.id); walk_generics(visitor, generics); walk_list!(visitor, visit_param_bound, bounds); @@ -524,12 +550,17 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { visitor.visit_ty(typ); walk_list!(visitor, visit_impl_item_ref, impl_item_refs); } - ItemKind::Struct(ref struct_definition, ref generics) | - ItemKind::Union(ref struct_definition, ref generics) => { + ItemKind::Struct(ref struct_definition, ref generics) + | ItemKind::Union(ref struct_definition, ref generics) => { visitor.visit_generics(generics); visitor.visit_id(item.id); - visitor.visit_variant_data(struct_definition, item.ident.name, generics, item.id, - item.span); + visitor.visit_variant_data( + struct_definition, + item.ident.name, + generics, + item.id, + item.span, + ); } ItemKind::Trait(.., ref generics, ref bounds, ref trait_item_refs) => { visitor.visit_id(item.id); @@ -546,36 +577,46 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { walk_list!(visitor, visit_attribute, &item.attrs); } -pub fn walk_use<'v, V: Visitor<'v>>(visitor: &mut V, - path: &'v Path, - item_id: NodeId, - hir_id: HirId) { +pub fn walk_use<'v, V: Visitor<'v>>( + visitor: &mut V, + path: &'v Path, + item_id: NodeId, + hir_id: HirId, +) { visitor.visit_id(item_id); visitor.visit_path(path, hir_id); } -pub fn walk_enum_def<'v, V: Visitor<'v>>(visitor: &mut V, - enum_definition: &'v EnumDef, - generics: &'v Generics, - item_id: NodeId) { +pub fn walk_enum_def<'v, V: Visitor<'v>>( + visitor: &mut V, + enum_definition: &'v EnumDef, + generics: &'v Generics, + item_id: NodeId, +) { visitor.visit_id(item_id); - walk_list!(visitor, - visit_variant, - &enum_definition.variants, - generics, - item_id); + walk_list!( + visitor, + visit_variant, + &enum_definition.variants, + generics, + item_id + ); } -pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V, - variant: &'v Variant, - generics: &'v Generics, - parent_item_id: NodeId) { +pub fn walk_variant<'v, V: Visitor<'v>>( + visitor: &mut V, + variant: &'v Variant, + generics: &'v Generics, + parent_item_id: NodeId, +) { visitor.visit_ident(variant.node.ident); - visitor.visit_variant_data(&variant.node.data, - variant.node.ident.name, - generics, - parent_item_id, - variant.span); + visitor.visit_variant_data( + &variant.node.data, + variant.node.ident.name, + generics, + parent_item_id, + variant.span, + ); walk_list!(visitor, visit_anon_const, &variant.node.disr_expr); walk_list!(visitor, visit_attribute, &variant.node.attrs); } @@ -584,22 +625,22 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { visitor.visit_id(typ.id); match typ.node { - TyKind::Slice(ref ty) => { - visitor.visit_ty(ty) - } - TyKind::Ptr(ref mutable_type) => { - visitor.visit_ty(&mutable_type.ty) - } + TyKind::Slice(ref ty) => visitor.visit_ty(ty), + TyKind::Ptr(ref mutable_type) => visitor.visit_ty(&mutable_type.ty), TyKind::Rptr(ref lifetime, ref mutable_type) => { visitor.visit_lifetime(lifetime); visitor.visit_ty(&mutable_type.ty) } - TyKind::Never => {}, + TyKind::Never => {} TyKind::Tup(ref tuple_element_types) => { walk_list!(visitor, visit_ty, tuple_element_types); } 
TyKind::BareFn(ref function_declaration) => { - walk_list!(visitor, visit_generic_param, &function_declaration.generic_params); + walk_list!( + visitor, + visit_generic_param, + &function_declaration.generic_params + ); visitor.visit_fn_decl(&function_declaration.decl); } TyKind::Path(ref qpath) => { @@ -619,9 +660,7 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { } visitor.visit_lifetime(lifetime); } - TyKind::Typeof(ref expression) => { - visitor.visit_anon_const(expression) - } + TyKind::Typeof(ref expression) => visitor.visit_anon_const(expression), TyKind::Infer | TyKind::Err => {} } } @@ -648,9 +687,11 @@ pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) { } } -pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V, - path_span: Span, - segment: &'v PathSegment) { +pub fn walk_path_segment<'v, V: Visitor<'v>>( + visitor: &mut V, + path_span: Span, + segment: &'v PathSegment, +) { visitor.visit_ident(segment.ident); if let Some(id) = segment.id { visitor.visit_id(id); @@ -660,15 +701,16 @@ pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V, } } -pub fn walk_generic_args<'v, V: Visitor<'v>>(visitor: &mut V, - _path_span: Span, - generic_args: &'v GenericArgs) { +pub fn walk_generic_args<'v, V: Visitor<'v>>( + visitor: &mut V, + _path_span: Span, + generic_args: &'v GenericArgs, +) { walk_list!(visitor, visit_generic_arg, &generic_args.args); walk_list!(visitor, visit_assoc_type_binding, &generic_args.bindings); } -pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(visitor: &mut V, - type_binding: &'v TypeBinding) { +pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(visitor: &mut V, type_binding: &'v TypeBinding) { visitor.visit_id(type_binding.id); visitor.visit_ident(type_binding.ident); visitor.visit_ty(&type_binding.ty); @@ -695,8 +737,7 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { PatKind::Tuple(ref tuple_elements, _) => { walk_list!(visitor, visit_pat, tuple_elements); } - PatKind::Box(ref subpattern) | - PatKind::Ref(ref subpattern, _) => { + PatKind::Box(ref subpattern) | PatKind::Ref(ref subpattern, _) => { visitor.visit_pat(subpattern) } PatKind::Binding(_, canonical_id, ident, ref optional_subpattern) => { @@ -764,32 +805,39 @@ pub fn walk_generic_param<'v, V: Visitor<'v>>(visitor: &mut V, param: &'v Generi pub fn walk_generics<'v, V: Visitor<'v>>(visitor: &mut V, generics: &'v Generics) { walk_list!(visitor, visit_generic_param, &generics.params); visitor.visit_id(generics.where_clause.id); - walk_list!(visitor, visit_where_predicate, &generics.where_clause.predicates); + walk_list!( + visitor, + visit_where_predicate, + &generics.where_clause.predicates + ); } -pub fn walk_where_predicate<'v, V: Visitor<'v>>( - visitor: &mut V, - predicate: &'v WherePredicate) -{ +pub fn walk_where_predicate<'v, V: Visitor<'v>>(visitor: &mut V, predicate: &'v WherePredicate) { match predicate { - &WherePredicate::BoundPredicate(WhereBoundPredicate{ref bounded_ty, - ref bounds, - ref bound_generic_params, - ..}) => { + &WherePredicate::BoundPredicate(WhereBoundPredicate { + ref bounded_ty, + ref bounds, + ref bound_generic_params, + .. + }) => { visitor.visit_ty(bounded_ty); walk_list!(visitor, visit_param_bound, bounds); walk_list!(visitor, visit_generic_param, bound_generic_params); } - &WherePredicate::RegionPredicate(WhereRegionPredicate{ref lifetime, - ref bounds, - ..}) => { + &WherePredicate::RegionPredicate(WhereRegionPredicate { + ref lifetime, + ref bounds, + .. 
+ }) => { visitor.visit_lifetime(lifetime); walk_list!(visitor, visit_param_bound, bounds); } - &WherePredicate::EqPredicate(WhereEqPredicate{id, - ref lhs_ty, - ref rhs_ty, - ..}) => { + &WherePredicate::EqPredicate(WhereEqPredicate { + id, + ref lhs_ty, + ref rhs_ty, + .. + }) => { visitor.visit_id(id); visitor.visit_ty(lhs_ty); visitor.visit_ty(rhs_ty); @@ -815,17 +863,18 @@ pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V, function_kind: FnKind<' FnKind::ItemFn(_, generics, ..) => { visitor.visit_generics(generics); } - FnKind::Method(..) | - FnKind::Closure(_) => {} + FnKind::Method(..) | FnKind::Closure(_) => {} } } -pub fn walk_fn<'v, V: Visitor<'v>>(visitor: &mut V, - function_kind: FnKind<'v>, - function_declaration: &'v FnDecl, - body_id: BodyId, - _span: Span, - id: NodeId) { +pub fn walk_fn<'v, V: Visitor<'v>>( + visitor: &mut V, + function_kind: FnKind<'v>, + function_declaration: &'v FnDecl, + body_id: BodyId, + _span: Span, + id: NodeId, +) { visitor.visit_id(id); visitor.visit_fn_decl(function_declaration); walk_fn_kind(visitor, function_kind); @@ -850,14 +899,13 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v Trai } } TraitItemKind::Method(ref sig, TraitMethod::Provided(body_id)) => { - visitor.visit_fn(FnKind::Method(trait_item.ident, - sig, - None, - &trait_item.attrs), - &sig.decl, - body_id, - trait_item.span, - trait_item.id); + visitor.visit_fn( + FnKind::Method(trait_item.ident, sig, None, &trait_item.attrs), + &sig.decl, + body_id, + trait_item.span, + trait_item.id, + ); } TraitItemKind::Type(ref bounds, ref default) => { visitor.visit_id(trait_item.id); @@ -869,7 +917,13 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v Trai pub fn walk_trait_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_item_ref: &'v TraitItemRef) { // N.B., deliberately force a compilation error if/when new fields are added. - let TraitItemRef { id, ident, ref kind, span: _, ref defaultness } = *trait_item_ref; + let TraitItemRef { + id, + ident, + ref kind, + span: _, + ref defaultness, + } = *trait_item_ref; visitor.visit_nested_trait_item(id); visitor.visit_ident(ident); visitor.visit_associated_item_kind(kind); @@ -902,14 +956,13 @@ pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplIt visitor.visit_nested_body(body); } ImplItemKind::Method(ref sig, body_id) => { - visitor.visit_fn(FnKind::Method(impl_item.ident, - sig, - Some(&impl_item.vis), - &impl_item.attrs), - &sig.decl, - body_id, - impl_item.span, - impl_item.id); + visitor.visit_fn( + FnKind::Method(impl_item.ident, sig, Some(&impl_item.vis), &impl_item.attrs), + &sig.decl, + body_id, + impl_item.span, + impl_item.id, + ); } ImplItemKind::Type(ref ty) => { visitor.visit_id(impl_item.id); @@ -924,7 +977,14 @@ pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplIt pub fn walk_impl_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, impl_item_ref: &'v ImplItemRef) { // N.B., deliberately force a compilation error if/when new fields are added. 
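Aside: the TraitItemRef destructuring above (and the ImplItemRef one that follows) uses an exhaustive struct pattern with no `..` rest, so adding a field deliberately breaks the build at that spot, as the N.B. comments say. A small standalone sketch of the same trick; Config is a hypothetical struct used only for illustration.

struct Config {
    name: String,
    retries: u32,
    // Adding a field here makes the `let Config { .. }` below fail to
    // compile until the new field is handled -- which is the point.
}

fn describe(cfg: &Config) -> String {
    // No `..` rest pattern: every field must be named explicitly.
    let Config { ref name, retries } = *cfg;
    format!("{} (retries: {})", name, retries)
}

fn main() {
    let cfg = Config {
        name: "demo".to_string(),
        retries: 3,
    };
    println!("{}", describe(&cfg));
}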
- let ImplItemRef { id, ident, ref kind, span: _, ref vis, ref defaultness } = *impl_item_ref; + let ImplItemRef { + id, + ident, + ref kind, + span: _, + ref vis, + ref defaultness, + } = *impl_item_ref; visitor.visit_nested_impl_item(id); visitor.visit_ident(ident); visitor.visit_associated_item_kind(kind); @@ -932,7 +992,6 @@ pub fn walk_impl_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, impl_item_ref: &' visitor.visit_defaultness(defaultness); } - pub fn walk_struct_def<'v, V: Visitor<'v>>(visitor: &mut V, struct_definition: &'v VariantData) { visitor.visit_id(struct_definition.id()); walk_list!(visitor, visit_struct_field, struct_definition.fields()); @@ -958,8 +1017,7 @@ pub fn walk_stmt<'v, V: Visitor<'v>>(visitor: &mut V, statement: &'v Stmt) { visitor.visit_id(id); visitor.visit_decl(declaration) } - StmtKind::Expr(ref expression, id) | - StmtKind::Semi(ref expression, id) => { + StmtKind::Expr(ref expression, id) | StmtKind::Semi(ref expression, id) => { visitor.visit_id(id); visitor.visit_expr(expression) } @@ -982,9 +1040,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { visitor.visit_id(expression.id); walk_list!(visitor, visit_attribute, expression.attrs.iter()); match expression.node { - ExprKind::Box(ref subexpression) => { - visitor.visit_expr(subexpression) - } + ExprKind::Box(ref subexpression) => visitor.visit_expr(subexpression), ExprKind::Array(ref subexpressions) => { walk_list!(visitor, visit_expr, subexpressions); } @@ -1042,13 +1098,14 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { visitor.visit_expr(subexpression); walk_list!(visitor, visit_arm, arms); } - ExprKind::Closure(_, ref function_declaration, body, _fn_decl_span, _gen) => { - visitor.visit_fn(FnKind::Closure(&expression.attrs), - function_declaration, - body, - expression.span, - expression.id) - } + ExprKind::Closure(_, ref function_declaration, body, _fn_decl_span, _gen) => visitor + .visit_fn( + FnKind::Closure(&expression.attrs), + function_declaration, + body, + expression.span, + expression.id, + ), ExprKind::Block(ref block, ref opt_label) => { walk_list!(visitor, visit_label, opt_label); visitor.visit_block(block); @@ -1116,7 +1173,12 @@ pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm) { } pub fn walk_vis<'v, V: Visitor<'v>>(visitor: &mut V, vis: &'v Visibility) { - if let VisibilityKind::Restricted { ref path, id, hir_id } = vis.node { + if let VisibilityKind::Restricted { + ref path, + id, + hir_id, + } = vis.node + { visitor.visit_id(id); visitor.visit_path(path, hir_id) } @@ -1162,7 +1224,6 @@ impl IdRange { } } - pub struct IdRangeComputingVisitor<'a, 'hir: 'a> { result: IdRange, map: &'a map::Map<'hir>, @@ -1170,7 +1231,10 @@ pub struct IdRangeComputingVisitor<'a, 'hir: 'a> { impl<'a, 'hir> IdRangeComputingVisitor<'a, 'hir> { pub fn new(map: &'a map::Map<'hir>) -> IdRangeComputingVisitor<'a, 'hir> { - IdRangeComputingVisitor { result: IdRange::max(), map: map } + IdRangeComputingVisitor { + result: IdRange::max(), + map: map, + } } pub fn result(&self) -> IdRange { diff --git a/src/librustc/hir/itemlikevisit.rs b/src/librustc/hir/itemlikevisit.rs index bfc9e8f06e235..f20b1b4a38a21 100644 --- a/src/librustc/hir/itemlikevisit.rs +++ b/src/librustc/hir/itemlikevisit.rs @@ -1,5 +1,5 @@ -use super::{Item, ImplItem, TraitItem}; use super::intravisit::Visitor; +use super::{ImplItem, Item, TraitItem}; /// The "item-like visitor" visitor defines only the top-level methods /// that can be invoked by 
`Crate::visit_all_item_likes()`. Whether @@ -56,7 +56,8 @@ pub struct DeepVisitor<'v, V: 'v> { } impl<'v, 'hir, V> DeepVisitor<'v, V> - where V: Visitor<'hir> + 'v +where + V: Visitor<'hir> + 'v, { pub fn new(base: &'v mut V) -> Self { DeepVisitor { visitor: base } @@ -64,7 +65,8 @@ impl<'v, 'hir, V> DeepVisitor<'v, V> } impl<'v, 'hir, V> ItemLikeVisitor<'hir> for DeepVisitor<'v, V> - where V: Visitor<'hir> +where + V: Visitor<'hir>, { fn visit_item(&mut self, item: &'hir Item) { self.visitor.visit_item(item); @@ -94,7 +96,8 @@ pub trait IntoVisitor<'hir> { pub struct ParDeepVisitor(pub V); impl<'hir, V> ParItemLikeVisitor<'hir> for ParDeepVisitor - where V: IntoVisitor<'hir> +where + V: IntoVisitor<'hir>, { fn visit_item(&self, item: &'hir Item) { self.0.into_visitor().visit_item(item); diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 359bd37488241..8ffcad8717533 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -31,41 +31,40 @@ //! in the HIR, especially for multiple identifiers. use dep_graph::DepGraph; -use hir::{self, ParamName}; -use hir::HirVec; -use hir::map::{DefKey, DefPathData, Definitions}; -use hir::def_id::{DefId, DefIndex, DefIndexAddressSpace, CRATE_DEF_INDEX}; use hir::def::{Def, PathResolution, PerNS}; +use hir::def_id::{DefId, DefIndex, DefIndexAddressSpace, CRATE_DEF_INDEX}; +use hir::map::{DefKey, DefPathData, Definitions}; use hir::GenericArg; -use lint::builtin::{self, PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES, - ELIDED_LIFETIMES_IN_PATHS}; +use hir::HirVec; +use hir::{self, ParamName}; +use lint::builtin::{self, ELIDED_LIFETIMES_IN_PATHS, PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES}; use middle::cstore::CrateStore; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::indexed_vec::IndexVec; use rustc_data_structures::thin_vec::ThinVec; -use session::Session; use session::config::nightly_options; +use session::Session; use util::common::FN_OUTPUT_NAME; use util::nodemap::{DefIdMap, NodeMap}; +use smallvec::SmallVec; use std::collections::BTreeMap; use std::fmt::Debug; use std::mem; -use smallvec::SmallVec; -use syntax::attr; use syntax::ast; use syntax::ast::*; +use syntax::attr; use syntax::errors; use syntax::ext::hygiene::{Mark, SyntaxContext}; +use syntax::parse::token::Token; use syntax::print::pprust; use syntax::ptr::P; use syntax::source_map::{self, respan, CompilerDesugaringKind, Spanned}; use syntax::std_inject; use syntax::symbol::{keywords, Symbol}; use syntax::tokenstream::{TokenStream, TokenTree}; -use syntax::parse::token::Token; use syntax::visit::{self, Visitor}; -use syntax_pos::{Span, MultiSpan}; +use syntax_pos::{MultiSpan, Span}; const HIR_ID_COUNTER_LOCKED: u32 = 0xFFFFFFFF; @@ -133,11 +132,7 @@ pub struct LoweringContext<'a> { pub trait Resolver { /// Resolve a path generated by the lowerer when expanding `for`, `if let`, etc. - fn resolve_hir_path( - &mut self, - path: &ast::Path, - is_value: bool, - ) -> hir::Path; + fn resolve_hir_path(&mut self, path: &ast::Path, is_value: bool) -> hir::Path; /// Obtain the resolution for a node-id. 
fn get_resolution(&mut self, id: NodeId) -> Option; @@ -242,7 +237,8 @@ pub fn lower_crate( lifetimes_to_define: Vec::new(), is_collecting_in_band_lifetimes: false, in_scope_lifetimes: Vec::new(), - }.lower_crate(krate) + } + .lower_crate(krate) } #[derive(Copy, Clone, PartialEq)] @@ -315,26 +311,22 @@ enum AnonymousLifetimeMode { PassThrough, } -struct ImplTraitTypeIdVisitor<'a> { ids: &'a mut SmallVec<[hir::ItemId; 1]> } +struct ImplTraitTypeIdVisitor<'a> { + ids: &'a mut SmallVec<[hir::ItemId; 1]>, +} impl<'a, 'b> Visitor<'a> for ImplTraitTypeIdVisitor<'b> { fn visit_ty(&mut self, ty: &'a Ty) { match ty.node { - | TyKind::Typeof(_) - | TyKind::BareFn(_) - => return, + TyKind::Typeof(_) | TyKind::BareFn(_) => return, TyKind::ImplTrait(id, _) => self.ids.push(hir::ItemId { id }), - _ => {}, + _ => {} } visit::walk_ty(self, ty); } - fn visit_path_segment( - &mut self, - path_span: Span, - path_segment: &'v PathSegment, - ) { + fn visit_path_segment(&mut self, path_span: Span, path_segment: &'v PathSegment) { if let Some(ref p) = path_segment.args { if let GenericArgs::Parenthesized(_) = **p { return; @@ -427,22 +419,21 @@ impl<'a> LoweringContext<'a> { if item_lowered { let item_generics = match self.lctx.items.get(&item.id).unwrap().node { hir::ItemKind::Impl(_, _, _, ref generics, ..) - | hir::ItemKind::Trait(_, _, ref generics, ..) => { - generics.params.clone() - } + | hir::ItemKind::Trait(_, _, ref generics, ..) => generics.params.clone(), _ => HirVec::new(), }; - self.lctx.with_parent_impl_lifetime_defs(&item_generics, |this| { - let this = &mut ItemLowerer { lctx: this }; - if let ItemKind::Impl(.., ref opt_trait_ref, _, _) = item.node { - this.with_trait_impl_ref(opt_trait_ref, |this| { - visit::walk_item(this, item) - }); - } else { - visit::walk_item(this, item); - } - }); + self.lctx + .with_parent_impl_lifetime_defs(&item_generics, |this| { + let this = &mut ItemLowerer { lctx: this }; + if let ItemKind::Impl(.., ref opt_trait_ref, _, _) = item.node { + this.with_trait_impl_ref(opt_trait_ref, |this| { + visit::walk_item(this, item) + }); + } else { + visit::walk_item(this, item); + } + }); } } @@ -545,7 +536,8 @@ impl<'a> LoweringContext<'a> { where F: FnOnce(&mut Self) -> T, { - let counter = self.item_local_id_counters + let counter = self + .item_local_id_counters .insert(owner, HIR_ID_COUNTER_LOCKED) .unwrap_or_else(|| panic!("No item_local_id_counters entry for {:?}", owner)); let def_index = self.resolver.definitions().opt_def_index(owner).unwrap(); @@ -556,7 +548,8 @@ impl<'a> LoweringContext<'a> { debug_assert!(def_index == new_def_index); debug_assert!(new_counter >= counter); - let prev = self.item_local_id_counters + let prev = self + .item_local_id_counters .insert(owner, new_counter) .unwrap(); debug_assert!(prev == HIR_ID_COUNTER_LOCKED); @@ -596,12 +589,10 @@ impl<'a> LoweringContext<'a> { debug_assert!(local_id != HIR_ID_COUNTER_LOCKED); *local_id_counter += 1; - let def_index = this - .resolver - .definitions() - .opt_def_index(owner) - .expect("You forgot to call `create_def_with_parent` or are lowering node ids \ - that do not belong to the current owner"); + let def_index = this.resolver.definitions().opt_def_index(owner).expect( + "You forgot to call `create_def_with_parent` or are lowering node ids \ + that do not belong to the current owner", + ); hir::HirId { owner: def_index, @@ -719,10 +710,9 @@ impl<'a> LoweringContext<'a> { // that collisions are ok here and this shouldn't // really show up for end-user. 
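Aside: the item_local_id_counters code above checks a counter out of the map and parks a sentinel (HIR_ID_COUNTER_LOCKED) in its place, so re-entering the same owner trips a debug assertion before the counter is checked back in. The sketch below shows that check-out/check-in discipline in a self-contained form; Counters, with_owner, and COUNTER_LOCKED are made-up names standing in for the lowering context's fields.

use std::collections::HashMap;

const COUNTER_LOCKED: u32 = u32::MAX;

struct Counters {
    per_owner: HashMap<&'static str, u32>,
}

impl Counters {
    fn with_owner<T>(&mut self, owner: &'static str, f: impl FnOnce(&mut u32) -> T) -> T {
        // Check out the counter, leaving a sentinel behind so a nested
        // call for the same owner is caught by the assertion below.
        let mut counter = self
            .per_owner
            .insert(owner, COUNTER_LOCKED)
            .unwrap_or_else(|| panic!("no counter entry for {:?}", owner));
        debug_assert!(counter != COUNTER_LOCKED, "re-entered owner {:?}", owner);

        let result = f(&mut counter);

        // Check the (possibly advanced) counter back in.
        let prev = self.per_owner.insert(owner, counter).unwrap();
        debug_assert!(prev == COUNTER_LOCKED);
        result
    }
}

fn main() {
    let mut counters = Counters {
        per_owner: [("crate_root", 0)].into_iter().collect(),
    };
    let id = counters.with_owner("crate_root", |c| {
        *c += 1;
        *c
    });
    assert_eq!(id, 1);
    assert_eq!(counters.per_owner["crate_root"], 1);
}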
let (str_name, kind) = match hir_name { - ParamName::Plain(ident) => ( - ident.as_interned_str(), - hir::LifetimeParamKind::InBand, - ), + ParamName::Plain(ident) => { + (ident.as_interned_str(), hir::LifetimeParamKind::InBand) + } ParamName::Fresh(_) => ( keywords::UnderscoreLifetime.name().as_interned_str(), hir::LifetimeParamKind::Elided, @@ -750,7 +740,7 @@ impl<'a> LoweringContext<'a> { bounds: hir_vec![], span, pure_wrt_drop: false, - kind: hir::GenericParamKind::Lifetime { kind } + kind: hir::GenericParamKind::Lifetime { kind }, } }) .chain(in_band_ty_params.into_iter()) @@ -778,8 +768,11 @@ impl<'a> LoweringContext<'a> { let hir_name = ParamName::Plain(ident); - if self.lifetimes_to_define.iter() - .any(|(_, lt_name)| lt_name.modern() == hir_name.modern()) { + if self + .lifetimes_to_define + .iter() + .any(|(_, lt_name)| lt_name.modern() == hir_name.modern()) + { return; } @@ -822,10 +815,12 @@ impl<'a> LoweringContext<'a> { // This should only be used with generics that have already had their // in-band lifetimes added. In practice, this means that this function is // only used when lowering a child item of a trait or impl. - fn with_parent_impl_lifetime_defs(&mut self, + fn with_parent_impl_lifetime_defs( + &mut self, params: &HirVec, - f: F - ) -> T where + f: F, + ) -> T + where F: FnOnce(&mut LoweringContext<'_>) -> T, { let old_len = self.in_scope_lifetimes.len(); @@ -857,20 +852,16 @@ impl<'a> LoweringContext<'a> { where F: FnOnce(&mut LoweringContext<'_>, &mut Vec) -> T, { - let (in_band_defs, (mut lowered_generics, res)) = self.with_in_scope_lifetime_defs( - &generics.params, - |this| { + let (in_band_defs, (mut lowered_generics, res)) = + self.with_in_scope_lifetime_defs(&generics.params, |this| { this.collect_in_band_defs(parent_id, anonymous_lifetime_mode, |this| { let mut params = Vec::new(); - let generics = this.lower_generics( - generics, - ImplTraitContext::Universal(&mut params), - ); + let generics = + this.lower_generics(generics, ImplTraitContext::Universal(&mut params)); let res = f(this, &mut params); (params, (generics, res)) }) - }, - ); + }); lowered_generics.params = lowered_generics .params @@ -918,7 +909,7 @@ impl<'a> LoweringContext<'a> { let decl = FnDecl { inputs: vec![], output, - variadic: false + variadic: false, }; let body_id = self.record_body(body_expr, Some(&decl)); self.is_generator = prev_is_generator; @@ -929,15 +920,24 @@ impl<'a> LoweringContext<'a> { let generator = hir::Expr { id: closure_node_id, hir_id: closure_hir_id, - node: hir::ExprKind::Closure(capture_clause, decl, body_id, span, - Some(hir::GeneratorMovability::Static)), + node: hir::ExprKind::Closure( + capture_clause, + decl, + body_id, + span, + Some(hir::GeneratorMovability::Static), + ), span, attrs: ThinVec::new(), }; let unstable_span = self.allow_internal_unstable(CompilerDesugaringKind::Async, span); let gen_future = self.expr_std_path( - unstable_span, &["future", "from_generator"], None, ThinVec::new()); + unstable_span, + &["future", "from_generator"], + None, + ThinVec::new(), + ); hir::ExprKind::Call(P(gen_future), hir_vec![generator]) } @@ -1018,9 +1018,7 @@ impl<'a> LoweringContext<'a> { } fn lower_label(&mut self, label: Option Trait for Foo { .. 
}` - Impl(Unsafety, - ImplPolarity, - Defaultness, - Generics, - Option, // (optional) trait this impl implements - P, // self - HirVec), + Impl( + Unsafety, + ImplPolarity, + Defaultness, + Generics, + Option, // (optional) trait this impl implements + P, // self + HirVec, + ), } impl ItemKind { @@ -2286,15 +2306,19 @@ impl ItemKind { pub fn generics(&self) -> Option<&Generics> { Some(match *self { - ItemKind::Fn(_, _, ref generics, _) | - ItemKind::Ty(_, ref generics) | - ItemKind::Existential(ExistTy { ref generics, impl_trait_fn: None, .. }) | - ItemKind::Enum(_, ref generics) | - ItemKind::Struct(_, ref generics) | - ItemKind::Union(_, ref generics) | - ItemKind::Trait(_, _, ref generics, _, _) | - ItemKind::Impl(_, _, _, ref generics, _, _, _)=> generics, - _ => return None + ItemKind::Fn(_, _, ref generics, _) + | ItemKind::Ty(_, ref generics) + | ItemKind::Existential(ExistTy { + ref generics, + impl_trait_fn: None, + .. + }) + | ItemKind::Enum(_, ref generics) + | ItemKind::Struct(_, ref generics) + | ItemKind::Union(_, ref generics) + | ItemKind::Trait(_, _, ref generics, _, _) + | ItemKind::Impl(_, _, _, ref generics, _, _, _) => generics, + _ => return None, }) } } @@ -2377,14 +2401,14 @@ pub struct Freevar { pub def: Def, // First span where it is accessed (there can be multiple). - pub span: Span + pub span: Span, } impl Freevar { pub fn var_id(&self) -> NodeId { match self.def { Def::Local(id) | Def::Upvar(id, ..) => id, - _ => bug!("Freevar::var_id: bad def ({:?})", self.def) + _ => bug!("Freevar::var_id: bad def ({:?})", self.def), } } } @@ -2406,7 +2430,6 @@ pub type TraitMap = NodeMap>; // imported. pub type GlobMap = NodeMap>; - pub fn provide(providers: &mut Providers<'_>) { providers.describe_def = map::describe_def; } @@ -2498,14 +2521,12 @@ impl CodegenFnAttrs { /// * `#[export_name(...)]` is present /// * `#[linkage]` is present pub fn contains_extern_indicator(&self) -> bool { - self.flags.contains(CodegenFnAttrFlags::NO_MANGLE) || - self.export_name.is_some() || - match self.linkage { + self.flags.contains(CodegenFnAttrFlags::NO_MANGLE) + || self.export_name.is_some() + || match self.linkage { // these are private, make sure we don't try to consider // them external - None | - Some(Linkage::Internal) | - Some(Linkage::Private) => false, + None | Some(Linkage::Internal) | Some(Linkage::Private) => false, Some(_) => true, } } diff --git a/src/librustc/hir/pat_util.rs b/src/librustc/hir/pat_util.rs index 4df71a8768ab2..ed5b72c7e2c10 100644 --- a/src/librustc/hir/pat_util.rs +++ b/src/librustc/hir/pat_util.rs @@ -12,12 +12,22 @@ pub struct EnumerateAndAdjust { gap_len: usize, } -impl Iterator for EnumerateAndAdjust where I: Iterator { +impl Iterator for EnumerateAndAdjust +where + I: Iterator, +{ type Item = (usize, ::Item); fn next(&mut self) -> Option<(usize, ::Item)> { self.enumerate.next().map(|(i, elem)| { - (if i < self.gap_pos { i } else { i + self.gap_len }, elem) + ( + if i < self.gap_pos { + i + } else { + i + self.gap_len + }, + elem, + ) }) } @@ -27,13 +37,24 @@ impl Iterator for EnumerateAndAdjust where I: Iterator { } pub trait EnumerateAndAdjustIterator { - fn enumerate_and_adjust(self, expected_len: usize, gap_pos: Option) - -> EnumerateAndAdjust where Self: Sized; + fn enumerate_and_adjust( + self, + expected_len: usize, + gap_pos: Option, + ) -> EnumerateAndAdjust + where + Self: Sized; } impl EnumerateAndAdjustIterator for T { - fn enumerate_and_adjust(self, expected_len: usize, gap_pos: Option) - -> EnumerateAndAdjust where Self: Sized { + fn 
enumerate_and_adjust( + self, + expected_len: usize, + gap_pos: Option, + ) -> EnumerateAndAdjust + where + Self: Sized, + { let actual_len = self.len(); EnumerateAndAdjust { enumerate: self.enumerate(), @@ -46,41 +67,38 @@ impl EnumerateAndAdjustIterator for T { impl hir::Pat { pub fn is_refutable(&self) -> bool { match self.node { - PatKind::Lit(_) | - PatKind::Range(..) | - PatKind::Path(hir::QPath::Resolved(Some(..), _)) | - PatKind::Path(hir::QPath::TypeRelative(..)) => true, - - PatKind::Path(hir::QPath::Resolved(_, ref path)) | - PatKind::TupleStruct(hir::QPath::Resolved(_, ref path), ..) | - PatKind::Struct(hir::QPath::Resolved(_, ref path), ..) => { - match path.def { - Def::Variant(..) | Def::VariantCtor(..) => true, - _ => false - } - } + PatKind::Lit(_) + | PatKind::Range(..) + | PatKind::Path(hir::QPath::Resolved(Some(..), _)) + | PatKind::Path(hir::QPath::TypeRelative(..)) => true, + + PatKind::Path(hir::QPath::Resolved(_, ref path)) + | PatKind::TupleStruct(hir::QPath::Resolved(_, ref path), ..) + | PatKind::Struct(hir::QPath::Resolved(_, ref path), ..) => match path.def { + Def::Variant(..) | Def::VariantCtor(..) => true, + _ => false, + }, PatKind::Slice(..) => true, - _ => false + _ => false, } } pub fn is_const(&self) -> bool { match self.node { PatKind::Path(hir::QPath::TypeRelative(..)) => true, - PatKind::Path(hir::QPath::Resolved(_, ref path)) => { - match path.def { - Def::Const(..) | Def::AssociatedConst(..) => true, - _ => false - } - } - _ => false + PatKind::Path(hir::QPath::Resolved(_, ref path)) => match path.def { + Def::Const(..) | Def::AssociatedConst(..) => true, + _ => false, + }, + _ => false, } } /// Call `f` on every "binding" in a pattern, e.g., on `a` in /// `match foo() { Some(a) => (), None => () }` pub fn each_binding(&self, mut f: F) - where F: FnMut(hir::BindingAnnotation, HirId, Span, ast::Ident), + where + F: FnMut(hir::BindingAnnotation, HirId, Span, ast::Ident), { self.walk(|p| { if let PatKind::Binding(binding_mode, _, ident, _) = p.node { @@ -115,7 +133,7 @@ impl hir::Pat { contains_bindings = true; false // there's at least one binding/wildcard, can short circuit now. } - _ => true + _ => true, } }); contains_bindings @@ -123,8 +141,8 @@ impl hir::Pat { pub fn simple_ident(&self) -> Option { match self.node { - PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ident, None) | - PatKind::Binding(hir::BindingAnnotation::Mutable, _, ident, None) => Some(ident), + PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ident, None) + | PatKind::Binding(hir::BindingAnnotation::Mutable, _, ident, None) => Some(ident), _ => None, } } @@ -134,16 +152,13 @@ impl hir::Pat { let mut variants = vec![]; self.walk(|p| { match p.node { - PatKind::Path(hir::QPath::Resolved(_, ref path)) | - PatKind::TupleStruct(hir::QPath::Resolved(_, ref path), ..) | - PatKind::Struct(hir::QPath::Resolved(_, ref path), ..) => { - match path.def { - Def::Variant(id) | - Def::VariantCtor(id, ..) => variants.push(id), - _ => () - } - } - _ => () + PatKind::Path(hir::QPath::Resolved(_, ref path)) + | PatKind::TupleStruct(hir::QPath::Resolved(_, ref path), ..) + | PatKind::Struct(hir::QPath::Resolved(_, ref path), ..) => match path.def { + Def::Variant(id) | Def::VariantCtor(id, ..) => variants.push(id), + _ => (), + }, + _ => (), } true }); @@ -161,17 +176,13 @@ impl hir::Pat { /// See #44848. 
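Aside: EnumerateAndAdjust, reformatted above, enumerates a pattern's bound elements while shifting indices past a `..` gap by the gap's length. A self-contained version with the same arithmetic is below; the field names are kept, the rustc-specific trait plumbing is replaced with a plain constructor function.

struct EnumerateAndAdjust<I> {
    enumerate: std::iter::Enumerate<I>,
    gap_pos: usize,
    gap_len: usize,
}

impl<I: Iterator> Iterator for EnumerateAndAdjust<I> {
    type Item = (usize, I::Item);

    fn next(&mut self) -> Option<(usize, I::Item)> {
        self.enumerate.next().map(|(i, elem)| {
            // Indices at or past the gap are shifted right by the gap length.
            (if i < self.gap_pos { i } else { i + self.gap_len }, elem)
        })
    }
}

fn enumerate_and_adjust<I: ExactSizeIterator>(
    iter: I,
    expected_len: usize,
    gap_pos: Option<usize>,
) -> EnumerateAndAdjust<I> {
    let actual_len = iter.len();
    EnumerateAndAdjust {
        enumerate: iter.enumerate(),
        gap_pos: gap_pos.unwrap_or(expected_len),
        gap_len: expected_len - actual_len,
    }
}

fn main() {
    // A tuple pattern like (a, .., b) against a 5-tuple: two bound elements,
    // gap after position 1, so `b` ends up at index 4.
    let indices: Vec<_> = enumerate_and_adjust(["a", "b"].into_iter(), 5, Some(1))
        .map(|(i, _)| i)
        .collect();
    assert_eq!(indices, vec![0, 4]);
}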
pub fn contains_explicit_ref_binding(&self) -> Option { let mut result = None; - self.each_binding(|annotation, _, _, _| { - match annotation { - hir::BindingAnnotation::Ref => { - match result { - None | Some(hir::MutImmutable) => result = Some(hir::MutImmutable), - _ => (), - } - } - hir::BindingAnnotation::RefMut => result = Some(hir::MutMutable), + self.each_binding(|annotation, _, _, _| match annotation { + hir::BindingAnnotation::Ref => match result { + None | Some(hir::MutImmutable) => result = Some(hir::MutImmutable), _ => (), - } + }, + hir::BindingAnnotation::RefMut => result = Some(hir::MutMutable), + _ => (), }); result } @@ -185,11 +196,12 @@ impl hir::Arm { // for #42640 (default match binding modes). // // See #44848. - self.pats.iter() - .filter_map(|pat| pat.contains_explicit_ref_binding()) - .max_by_key(|m| match *m { - hir::MutMutable => 1, - hir::MutImmutable => 0, - }) + self.pats + .iter() + .filter_map(|pat| pat.contains_explicit_ref_binding()) + .max_by_key(|m| match *m { + hir::MutMutable => 1, + hir::MutImmutable => 0, + }) } } diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index d7acdefcc7d71..47233e6c0d36e 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -1,23 +1,23 @@ use rustc_target::spec::abi::Abi; use syntax::ast; -use syntax::source_map::{SourceMap, Spanned}; -use syntax::parse::ParseSess; use syntax::parse::lexer::comments; -use syntax::print::pp::{self, Breaks}; +use syntax::parse::ParseSess; use syntax::print::pp::Breaks::{Consistent, Inconsistent}; +use syntax::print::pp::{self, Breaks}; use syntax::print::pprust::PrintState; use syntax::ptr::P; +use syntax::source_map::{SourceMap, Spanned}; use syntax::symbol::keywords; use syntax::util::parser::{self, AssocOp, Fixity}; use syntax_pos::{self, BytePos, FileName}; use hir; -use hir::{PatKind, GenericBound, TraitBoundModifier, RangeEnd}; -use hir::{GenericParam, GenericParamKind, GenericArg}; +use hir::{GenericArg, GenericParam, GenericParamKind}; +use hir::{GenericBound, PatKind, RangeEnd, TraitBoundModifier}; use std::borrow::Cow; use std::cell::Cell; -use std::io::{self, Write, Read}; +use std::io::{self, Read, Write}; use std::iter::Peekable; use std::vec; @@ -35,7 +35,7 @@ pub enum Nested { TraitItem(hir::TraitItemId), ImplItem(hir::ImplItemId), Body(hir::BodyId), - BodyArgPat(hir::BodyId, usize) + BodyArgPat(hir::BodyId, usize), } pub trait PpAnn { @@ -67,7 +67,7 @@ impl PpAnn for hir::Crate { Nested::TraitItem(id) => state.print_trait_item(self.trait_item(id)), Nested::ImplItem(id) => state.print_impl_item(self.impl_item(id)), Nested::Body(id) => state.print_expr(&self.body(id).value), - Nested::BodyArgPat(id, i) => state.print_pat(&self.body(id).arguments[i].pat) + Nested::BodyArgPat(id, i) => state.print_pat(&self.body(id).arguments[i].pat), } } } @@ -114,19 +114,19 @@ pub const indent_unit: usize = 4; #[allow(non_upper_case_globals)] pub const default_columns: usize = 78; - /// Requires you to pass an input filename and reader so that /// it can scan the input text for comments and literals to /// copy forward. 
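Aside: Arm::contains_explicit_ref_binding, reformatted just above, folds the per-pattern results into the strongest mutability by ranking them with max_by_key. The same idea in a self-contained form follows; the Mutability enum and inputs here are toy stand-ins for the hir types.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Mutability {
    Immutable,
    Mutable,
}

// Pick the strongest mutability reported by any pattern, or None if no
// pattern reported one -- mirroring the max_by_key ranking above.
fn strongest_binding(per_pattern: &[Option<Mutability>]) -> Option<Mutability> {
    per_pattern
        .iter()
        .filter_map(|m| *m)
        .max_by_key(|m| match m {
            Mutability::Mutable => 1,
            Mutability::Immutable => 0,
        })
}

fn main() {
    let arms = [None, Some(Mutability::Immutable), Some(Mutability::Mutable)];
    assert_eq!(strongest_binding(&arms), Some(Mutability::Mutable));
    assert_eq!(strongest_binding(&[None, None]), None);
}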
-pub fn print_crate<'a>(cm: &'a SourceMap, - sess: &ParseSess, - krate: &hir::Crate, - filename: FileName, - input: &mut dyn Read, - out: Box, - ann: &'a dyn PpAnn, - is_expanded: bool) - -> io::Result<()> { +pub fn print_crate<'a>( + cm: &'a SourceMap, + sess: &ParseSess, + krate: &hir::Crate, + filename: FileName, + input: &mut dyn Read, + out: Box, + ann: &'a dyn PpAnn, + is_expanded: bool, +) -> io::Result<()> { let mut s = State::new_from_input(cm, sess, filename, input, out, ann, is_expanded); // When printing the AST, we sometimes need to inject `#[no_std]` here. @@ -138,36 +138,36 @@ pub fn print_crate<'a>(cm: &'a SourceMap, } impl<'a> State<'a> { - pub fn new_from_input(cm: &'a SourceMap, - sess: &ParseSess, - filename: FileName, - input: &mut dyn Read, - out: Box, - ann: &'a dyn PpAnn, - is_expanded: bool) - -> State<'a> { + pub fn new_from_input( + cm: &'a SourceMap, + sess: &ParseSess, + filename: FileName, + input: &mut dyn Read, + out: Box, + ann: &'a dyn PpAnn, + is_expanded: bool, + ) -> State<'a> { let (cmnts, lits) = comments::gather_comments_and_literals(sess, filename, input); - State::new(cm, - out, - ann, - Some(cmnts), - // If the code is post expansion, don't use the table of - // literals, since it doesn't correspond with the literals - // in the AST anymore. - if is_expanded { - None - } else { - Some(lits) - }) - } - - pub fn new(cm: &'a SourceMap, - out: Box, - ann: &'a dyn PpAnn, - comments: Option>, - literals: Option>) - -> State<'a> { + State::new( + cm, + out, + ann, + Some(cmnts), + // If the code is post expansion, don't use the table of + // literals, since it doesn't correspond with the literals + // in the AST anymore. + if is_expanded { None } else { Some(lits) }, + ) + } + + pub fn new( + cm: &'a SourceMap, + out: Box, + ann: &'a dyn PpAnn, + comments: Option>, + literals: Option>, + ) -> State<'a> { State { s: pp::mk_printer(out, default_columns), cm: Some(cm), @@ -181,7 +181,8 @@ impl<'a> State<'a> { } pub fn to_string(ann: &dyn PpAnn, f: F) -> String - where F: FnOnce(&mut State<'_>) -> io::Result<()> +where + F: FnOnce(&mut State<'_>) -> io::Result<()>, { let mut wr = Vec::new(); { @@ -244,11 +245,12 @@ impl<'a> State<'a> { self.bclose_maybe_open(span, indented, true) } - pub fn bclose_maybe_open(&mut self, - span: syntax_pos::Span, - indented: usize, - close_box: bool) - -> io::Result<()> { + pub fn bclose_maybe_open( + &mut self, + span: syntax_pos::Span, + indented: usize, + close_box: bool, + ) -> io::Result<()> { self.maybe_print_comment(span.hi())?; self.break_offset_if_not_bol(1, -(indented as isize))?; self.s.word("}")?; @@ -284,7 +286,8 @@ impl<'a> State<'a> { // We do something pretty sketchy here: tuck the nonzero // offset-adjustment we were going to deposit along with the // break into the previous hardbreak. 
- self.s.replace_last_token(pp::Printer::hardbreak_tok_offset(off)); + self.s + .replace_last_token(pp::Printer::hardbreak_tok_offset(off)); } Ok(()) } @@ -300,14 +303,16 @@ impl<'a> State<'a> { self.s.word("*/") } - pub fn commasep_cmnt(&mut self, - b: Breaks, - elts: &[T], - mut op: F, - mut get_span: G) - -> io::Result<()> - where F: FnMut(&mut State<'_>, &T) -> io::Result<()>, - G: FnMut(&T) -> syntax_pos::Span + pub fn commasep_cmnt( + &mut self, + b: Breaks, + elts: &[T], + mut op: F, + mut get_span: G, + ) -> io::Result<()> + where + F: FnMut(&mut State<'_>, &T) -> io::Result<()>, + G: FnMut(&T) -> syntax_pos::Span, { self.rbox(0, b)?; let len = elts.len(); @@ -337,10 +342,11 @@ impl<'a> State<'a> { Ok(()) } - pub fn print_foreign_mod(&mut self, - nmod: &hir::ForeignMod, - attrs: &[ast::Attribute]) - -> io::Result<()> { + pub fn print_foreign_mod( + &mut self, + nmod: &hir::ForeignMod, + attrs: &[ast::Attribute], + ) -> io::Result<()> { self.print_inner_attributes(attrs)?; for item in &nmod.items { self.print_foreign_item(item)?; @@ -380,7 +386,7 @@ impl<'a> State<'a> { } hir::TyKind::Never => { self.s.word("!")?; - }, + } hir::TyKind::Tup(ref elts) => { self.popen()?; self.commasep(Inconsistent, &elts[..], |s, ty| s.print_type(&ty))?; @@ -390,13 +396,17 @@ impl<'a> State<'a> { self.pclose()?; } hir::TyKind::BareFn(ref f) => { - self.print_ty_fn(f.abi, f.unsafety, &f.decl, None, &f.generic_params, - &f.arg_names[..])?; - } - hir::TyKind::Def(..) => {}, - hir::TyKind::Path(ref qpath) => { - self.print_qpath(qpath, false)? - } + self.print_ty_fn( + f.abi, + f.unsafety, + &f.decl, + None, + &f.generic_params, + &f.arg_names[..], + )?; + } + hir::TyKind::Def(..) => {} + hir::TyKind::Path(ref qpath) => self.print_qpath(qpath, false)?, hir::TyKind::TraitObject(ref bounds, ref lifetime) => { let mut first = true; for bound in bounds { @@ -445,18 +455,20 @@ impl<'a> State<'a> { match item.node { hir::ForeignItemKind::Fn(ref decl, ref arg_names, ref generics) => { self.head("")?; - self.print_fn(decl, - hir::FnHeader { - unsafety: hir::Unsafety::Normal, - constness: hir::Constness::NotConst, - abi: Abi::Rust, - asyncness: hir::IsAsync::NotAsync, - }, - Some(item.ident.name), - generics, - &item.vis, - arg_names, - None)?; + self.print_fn( + decl, + hir::FnHeader { + unsafety: hir::Unsafety::Normal, + constness: hir::Constness::NotConst, + abi: Abi::Rust, + asyncness: hir::IsAsync::NotAsync, + }, + Some(item.ident.name), + generics, + &item.vis, + arg_names, + None, + )?; self.end()?; // end head-ibox self.s.word(";")?; self.end() // end the outer fn box @@ -483,12 +495,13 @@ impl<'a> State<'a> { } } - fn print_associated_const(&mut self, - ident: ast::Ident, - ty: &hir::Ty, - default: Option, - vis: &hir::Visibility) - -> io::Result<()> { + fn print_associated_const( + &mut self, + ident: ast::Ident, + ty: &hir::Ty, + default: Option, + vis: &hir::Visibility, + ) -> io::Result<()> { self.s.word(visibility_qualified(vis, ""))?; self.word_space("const")?; self.print_ident(ident)?; @@ -502,11 +515,12 @@ impl<'a> State<'a> { self.s.word(";") } - fn print_associated_type(&mut self, - ident: ast::Ident, - bounds: Option<&hir::GenericBounds>, - ty: Option<&hir::Ty>) - -> io::Result<()> { + fn print_associated_type( + &mut self, + ident: ast::Ident, + bounds: Option<&hir::GenericBounds>, + ty: Option<&hir::Ty>, + ) -> io::Result<()> { self.word_space("type")?; self.print_ident(ident)?; if let Some(bounds) = bounds { @@ -554,7 +568,7 @@ impl<'a> State<'a> { self.s.word(";")?; } hir::UseKind::Glob => 
self.s.word("::*;")?, - hir::UseKind::ListStem => self.s.word("::{};")? + hir::UseKind::ListStem => self.s.word("::{};")?, } self.end()?; // end inner head-block self.end()?; // end outer head-block @@ -590,13 +604,15 @@ impl<'a> State<'a> { } hir::ItemKind::Fn(ref decl, header, ref typarams, body) => { self.head("")?; - self.print_fn(decl, - header, - Some(item.ident.name), - typarams, - &item.vis, - &[], - Some(body))?; + self.print_fn( + decl, + header, + Some(item.ident.name), + typarams, + &item.vis, + &[], + Some(body), + )?; self.s.word(" ")?; self.end()?; // need to close a box self.end()?; // need to close a box @@ -659,8 +675,13 @@ impl<'a> State<'a> { self.end()?; // end the outer ibox } hir::ItemKind::Enum(ref enum_definition, ref params) => { - self.print_enum_def(enum_definition, params, item.ident.name, item.span, - &item.vis)?; + self.print_enum_def( + enum_definition, + params, + item.ident.name, + item.span, + &item.vis, + )?; } hir::ItemKind::Struct(ref struct_def, ref generics) => { self.head(visibility_qualified(&item.vis, "struct"))?; @@ -670,13 +691,15 @@ impl<'a> State<'a> { self.head(visibility_qualified(&item.vis, "union"))?; self.print_struct(struct_def, generics, item.ident.name, item.span, true)?; } - hir::ItemKind::Impl(unsafety, - polarity, - defaultness, - ref generics, - ref opt_trait, - ref ty, - ref impl_items) => { + hir::ItemKind::Impl( + unsafety, + polarity, + defaultness, + ref generics, + ref opt_trait, + ref ty, + ref impl_items, + ) => { self.head("")?; self.print_visibility(&item.vis)?; self.print_defaultness(defaultness)?; @@ -768,7 +791,7 @@ impl<'a> State<'a> { fn print_formal_generic_params( &mut self, - generic_params: &[hir::GenericParam] + generic_params: &[hir::GenericParam], ) -> io::Result<()> { if !generic_params.is_empty() { self.s.word("for")?; @@ -783,13 +806,14 @@ impl<'a> State<'a> { self.print_trait_ref(&t.trait_ref) } - pub fn print_enum_def(&mut self, - enum_definition: &hir::EnumDef, - generics: &hir::Generics, - name: ast::Name, - span: syntax_pos::Span, - visibility: &hir::Visibility) - -> io::Result<()> { + pub fn print_enum_def( + &mut self, + enum_definition: &hir::EnumDef, + generics: &hir::Generics, + name: ast::Name, + span: syntax_pos::Span, + visibility: &hir::Visibility, + ) -> io::Result<()> { self.head(visibility_qualified(visibility, "enum"))?; self.print_name(name)?; self.print_generic_params(&generics.params)?; @@ -798,10 +822,11 @@ impl<'a> State<'a> { self.print_variants(&enum_definition.variants, span) } - pub fn print_variants(&mut self, - variants: &[hir::Variant], - span: syntax_pos::Span) - -> io::Result<()> { + pub fn print_variants( + &mut self, + variants: &[hir::Variant], + span: syntax_pos::Span, + ) -> io::Result<()> { self.bopen()?; for v in variants { self.space_if_not_bol()?; @@ -820,11 +845,13 @@ impl<'a> State<'a> { match vis.node { hir::VisibilityKind::Public => self.word_nbsp("pub")?, hir::VisibilityKind::Crate(ast::CrateSugar::JustCrate) => self.word_nbsp("crate")?, - hir::VisibilityKind::Crate(ast::CrateSugar::PubCrate) => self.word_nbsp("pub(crate)")?, + hir::VisibilityKind::Crate(ast::CrateSugar::PubCrate) => { + self.word_nbsp("pub(crate)")? + } hir::VisibilityKind::Restricted { ref path, .. } => { self.s.word("pub(")?; - if path.segments.len() == 1 && - path.segments[0].ident.name == keywords::Super.name() { + if path.segments.len() == 1 && path.segments[0].ident.name == keywords::Super.name() + { // Special case: `super` can print like `pub(super)`. 
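Aside: the visibility printing just below special-cases a one-segment `super` path so it renders as `pub(super)`, while any other restriction path is printed inside `pub(...)`. A toy formatter mirroring that branching; the Visibility enum and string output here are simplified stand-ins for the HIR printer, not its actual API.

enum Visibility {
    Public,
    Crate,
    Restricted { path: Vec<String> },
    Inherited,
}

fn visibility_prefix(vis: &Visibility) -> String {
    match vis {
        Visibility::Public => "pub ".to_string(),
        Visibility::Crate => "pub(crate) ".to_string(),
        Visibility::Restricted { path } => {
            if path.len() == 1 && path[0] == "super" {
                // Special case, as in the printer: `pub(super)`.
                "pub(super) ".to_string()
            } else {
                format!("pub({}) ", path.join("::"))
            }
        }
        Visibility::Inherited => String::new(),
    }
}

fn main() {
    let vis = Visibility::Restricted {
        path: vec!["super".to_string()],
    };
    assert_eq!(visibility_prefix(&vis), "pub(super) ");
    let vis = Visibility::Restricted {
        path: vec!["crate".to_string(), "net".to_string()],
    };
    assert_eq!(visibility_prefix(&vis), "pub(crate::net) ");
    assert_eq!(visibility_prefix(&Visibility::Inherited), "");
}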
self.s.word("super")?; } else { @@ -834,7 +861,7 @@ impl<'a> State<'a> { } self.word_nbsp(")")?; } - hir::VisibilityKind::Inherited => () + hir::VisibilityKind::Inherited => (), } Ok(()) @@ -848,13 +875,14 @@ impl<'a> State<'a> { Ok(()) } - pub fn print_struct(&mut self, - struct_def: &hir::VariantData, - generics: &hir::Generics, - name: ast::Name, - span: syntax_pos::Span, - print_finalizer: bool) - -> io::Result<()> { + pub fn print_struct( + &mut self, + struct_def: &hir::VariantData, + generics: &hir::Generics, + name: ast::Name, + span: syntax_pos::Span, + print_finalizer: bool, + ) -> io::Result<()> { self.print_name(name)?; self.print_generic_params(&generics.params)?; if !struct_def.is_struct() { @@ -906,21 +934,24 @@ impl<'a> State<'a> { } Ok(()) } - pub fn print_method_sig(&mut self, - ident: ast::Ident, - m: &hir::MethodSig, - generics: &hir::Generics, - vis: &hir::Visibility, - arg_names: &[ast::Ident], - body_id: Option) - -> io::Result<()> { - self.print_fn(&m.decl, - m.header, - Some(ident.name), - generics, - vis, - arg_names, - body_id) + pub fn print_method_sig( + &mut self, + ident: ast::Ident, + m: &hir::MethodSig, + generics: &hir::Generics, + vis: &hir::Visibility, + arg_names: &[ast::Ident], + body_id: Option, + ) -> io::Result<()> { + self.print_fn( + &m.decl, + m.header, + Some(ident.name), + generics, + vis, + arg_names, + body_id, + ) } pub fn print_trait_item(&mut self, ti: &hir::TraitItem) -> io::Result<()> { @@ -930,19 +961,25 @@ impl<'a> State<'a> { self.print_outer_attributes(&ti.attrs)?; match ti.node { hir::TraitItemKind::Const(ref ty, default) => { - let vis = Spanned { span: syntax_pos::DUMMY_SP, - node: hir::VisibilityKind::Inherited }; + let vis = Spanned { + span: syntax_pos::DUMMY_SP, + node: hir::VisibilityKind::Inherited, + }; self.print_associated_const(ti.ident, &ty, default, &vis)?; } hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Required(ref arg_names)) => { - let vis = Spanned { span: syntax_pos::DUMMY_SP, - node: hir::VisibilityKind::Inherited }; + let vis = Spanned { + span: syntax_pos::DUMMY_SP, + node: hir::VisibilityKind::Inherited, + }; self.print_method_sig(ti.ident, sig, &ti.generics, &vis, arg_names, None)?; self.s.word(";")?; } hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body)) => { - let vis = Spanned { span: syntax_pos::DUMMY_SP, - node: hir::VisibilityKind::Inherited }; + let vis = Spanned { + span: syntax_pos::DUMMY_SP, + node: hir::VisibilityKind::Inherited, + }; self.head("")?; self.print_method_sig(ti.ident, sig, &ti.generics, &vis, &[], Some(body))?; self.nbsp()?; @@ -951,9 +988,11 @@ impl<'a> State<'a> { self.ann.nested(self, Nested::Body(body))?; } hir::TraitItemKind::Type(ref bounds, ref default) => { - self.print_associated_type(ti.ident, - Some(bounds), - default.as_ref().map(|ty| &**ty))?; + self.print_associated_type( + ti.ident, + Some(bounds), + default.as_ref().map(|ty| &**ty), + )?; } } self.ann.post(self, AnnNode::SubItem(ti.id)) @@ -1019,26 +1058,29 @@ impl<'a> State<'a> { self.print_block_unclosed_indent(blk, indent_unit) } - pub fn print_block_unclosed_indent(&mut self, - blk: &hir::Block, - indented: usize) - -> io::Result<()> { + pub fn print_block_unclosed_indent( + &mut self, + blk: &hir::Block, + indented: usize, + ) -> io::Result<()> { self.print_block_maybe_unclosed(blk, indented, &[], false) } - pub fn print_block_with_attrs(&mut self, - blk: &hir::Block, - attrs: &[ast::Attribute]) - -> io::Result<()> { + pub fn print_block_with_attrs( + &mut self, + blk: &hir::Block, + 
attrs: &[ast::Attribute], + ) -> io::Result<()> { self.print_block_maybe_unclosed(blk, indent_unit, attrs, true) } - pub fn print_block_maybe_unclosed(&mut self, - blk: &hir::Block, - indented: usize, - attrs: &[ast::Attribute], - close_box: bool) - -> io::Result<()> { + pub fn print_block_maybe_unclosed( + &mut self, + blk: &hir::Block, + indented: usize, + attrs: &[ast::Attribute], + close_box: bool, + ) -> io::Result<()> { match blk.rules { hir::UnsafeBlock(..) => self.word_space("unsafe")?, hir::PushUnsafeBlock(..) => self.word_space("push_unsafe")?, @@ -1094,11 +1136,12 @@ impl<'a> State<'a> { } } - pub fn print_if(&mut self, - test: &hir::Expr, - blk: &hir::Expr, - elseopt: Option<&hir::Expr>) - -> io::Result<()> { + pub fn print_if( + &mut self, + test: &hir::Expr, + blk: &hir::Expr, + elseopt: Option<&hir::Expr>, + ) -> io::Result<()> { self.head("if")?; self.print_expr_as_cond(test)?; self.s.space()?; @@ -1106,12 +1149,13 @@ impl<'a> State<'a> { self.print_else(elseopt) } - pub fn print_if_let(&mut self, - pat: &hir::Pat, - expr: &hir::Expr, - blk: &hir::Block, - elseopt: Option<&hir::Expr>) - -> io::Result<()> { + pub fn print_if_let( + &mut self, + pat: &hir::Pat, + expr: &hir::Expr, + blk: &hir::Block, + elseopt: Option<&hir::Expr>, + ) -> io::Result<()> { self.head("if let")?; self.print_pat(pat)?; self.s.space()?; @@ -1150,9 +1194,7 @@ impl<'a> State<'a> { let needs_par = match expr.node { // These cases need parens due to the parse error observed in #26461: `if return {}` // parses as the erroneous construct `if (return {})`, not `if (return) {}`. - hir::ExprKind::Closure(..) | - hir::ExprKind::Ret(..) | - hir::ExprKind::Break(..) => true, + hir::ExprKind::Closure(..) | hir::ExprKind::Ret(..) | hir::ExprKind::Break(..) => true, _ => contains_exterior_struct_lit(expr), }; @@ -1185,25 +1227,28 @@ impl<'a> State<'a> { self.end() } - fn print_expr_struct(&mut self, - qpath: &hir::QPath, - fields: &[hir::Field], - wth: &Option>) - -> io::Result<()> { + fn print_expr_struct( + &mut self, + qpath: &hir::QPath, + fields: &[hir::Field], + wth: &Option>, + ) -> io::Result<()> { self.print_qpath(qpath, true)?; self.s.word("{")?; - self.commasep_cmnt(Consistent, - &fields[..], - |s, field| { - s.ibox(indent_unit)?; - if !field.is_shorthand { - s.print_ident(field.ident)?; - s.word_space(":")?; - } - s.print_expr(&field.expr)?; - s.end() - }, - |f| f.span)?; + self.commasep_cmnt( + Consistent, + &fields[..], + |s, field| { + s.ibox(indent_unit)?; + if !field.is_shorthand { + s.print_ident(field.ident)?; + s.word_space(":")?; + } + s.print_expr(&field.expr)?; + s.end() + }, + |f| f.span, + )?; match *wth { Some(ref expr) => { self.ibox(indent_unit)?; @@ -1215,9 +1260,11 @@ impl<'a> State<'a> { self.print_expr(&expr)?; self.end()?; } - _ => if !fields.is_empty() { - self.s.word(",")? - }, + _ => { + if !fields.is_empty() { + self.s.word(",")? + } + } } self.s.word("}")?; Ok(()) @@ -1233,20 +1280,20 @@ impl<'a> State<'a> { } fn print_expr_call(&mut self, func: &hir::Expr, args: &[hir::Expr]) -> io::Result<()> { - let prec = - match func.node { - hir::ExprKind::Field(..) => parser::PREC_FORCE_PAREN, - _ => parser::PREC_POSTFIX, - }; + let prec = match func.node { + hir::ExprKind::Field(..) 
=> parser::PREC_FORCE_PAREN, + _ => parser::PREC_POSTFIX, + }; self.print_expr_maybe_paren(func, prec)?; self.print_call_post(args) } - fn print_expr_method_call(&mut self, - segment: &hir::PathSegment, - args: &[hir::Expr]) - -> io::Result<()> { + fn print_expr_method_call( + &mut self, + segment: &hir::PathSegment, + args: &[hir::Expr], + ) -> io::Result<()> { let base_args = &args[1..]; self.print_expr_maybe_paren(&args[0], parser::PREC_POSTFIX)?; self.s.word(".")?; @@ -1261,11 +1308,12 @@ impl<'a> State<'a> { self.print_call_post(base_args) } - fn print_expr_binary(&mut self, - op: hir::BinOp, - lhs: &hir::Expr, - rhs: &hir::Expr) - -> io::Result<()> { + fn print_expr_binary( + &mut self, + op: hir::BinOp, + lhs: &hir::Expr, + rhs: &hir::Expr, + ) -> io::Result<()> { let assoc_op = bin_op_to_assoc_op(op.node); let prec = assoc_op.precedence() as i8; let fixity = assoc_op.fixity(); @@ -1280,8 +1328,8 @@ impl<'a> State<'a> { // These cases need parens: `x as i32 < y` has the parser thinking that `i32 < y` is // the beginning of a path type. It starts trying to parse `x as (i32 < y ...` instead // of `(x as i32) < ...`. We need to convince it _not_ to do that. - (&hir::ExprKind::Cast { .. }, hir::BinOpKind::Lt) | - (&hir::ExprKind::Cast { .. }, hir::BinOpKind::Shl) => parser::PREC_FORCE_PAREN, + (&hir::ExprKind::Cast { .. }, hir::BinOpKind::Lt) + | (&hir::ExprKind::Cast { .. }, hir::BinOpKind::Shl) => parser::PREC_FORCE_PAREN, _ => left_prec, }; @@ -1296,10 +1344,11 @@ impl<'a> State<'a> { self.print_expr_maybe_paren(expr, parser::PREC_PREFIX) } - fn print_expr_addr_of(&mut self, - mutability: hir::Mutability, - expr: &hir::Expr) - -> io::Result<()> { + fn print_expr_addr_of( + &mut self, + mutability: hir::Mutability, + expr: &hir::Expr, + ) -> io::Result<()> { self.s.word("&")?; self.print_mutability(mutability)?; self.print_expr_maybe_paren(expr, parser::PREC_PREFIX) @@ -1444,9 +1493,7 @@ impl<'a> State<'a> { self.print_expr(&index)?; self.s.word("]")?; } - hir::ExprKind::Path(ref qpath) => { - self.print_qpath(qpath, true)? - } + hir::ExprKind::Path(ref qpath) => self.print_qpath(qpath, true)?, hir::ExprKind::Break(destination, ref opt_expr) => { self.s.word("break")?; self.s.space()?; @@ -1486,8 +1533,7 @@ impl<'a> State<'a> { let mut ch = constraint.chars(); match ch.next() { Some('=') if out.is_rw => { - s.print_string(&format!("+{}", ch.as_str()), - ast::StrStyle::Cooked)? + s.print_string(&format!("+{}", ch.as_str()), ast::StrStyle::Cooked)? } _ => s.print_string(&constraint, ast::StrStyle::Cooked)?, } @@ -1580,9 +1626,7 @@ impl<'a> State<'a> { } self.end() } - hir::DeclKind::Item(item) => { - self.ann.nested(self, Nested::Item(item)) - } + hir::DeclKind::Item(item) => self.ann.nested(self, Nested::Item(item)), } } @@ -1610,10 +1654,7 @@ impl<'a> State<'a> { self.print_expr(coll) } - pub fn print_path(&mut self, - path: &hir::Path, - colons_before_params: bool) - -> io::Result<()> { + pub fn print_path(&mut self, path: &hir::Path, colons_before_params: bool) -> io::Result<()> { self.maybe_print_comment(path.span.lo())?; for (i, segment) in path.segments.iter().enumerate() { @@ -1621,11 +1662,10 @@ impl<'a> State<'a> { self.s.word("::")? 
} if segment.ident.name != keywords::PathRoot.name() { - self.print_ident(segment.ident)?; - segment.with_generic_args(|generic_args| { - self.print_generic_args(generic_args, segment.infer_types, - colons_before_params) - })?; + self.print_ident(segment.ident)?; + segment.with_generic_args(|generic_args| { + self.print_generic_args(generic_args, segment.infer_types, colons_before_params) + })?; } } @@ -1634,22 +1674,21 @@ impl<'a> State<'a> { pub fn print_path_segment(&mut self, segment: &hir::PathSegment) -> io::Result<()> { if segment.ident.name != keywords::PathRoot.name() { - self.print_ident(segment.ident)?; - segment.with_generic_args(|generic_args| { - self.print_generic_args(generic_args, segment.infer_types, false) - })?; + self.print_ident(segment.ident)?; + segment.with_generic_args(|generic_args| { + self.print_generic_args(generic_args, segment.infer_types, false) + })?; } Ok(()) } - pub fn print_qpath(&mut self, - qpath: &hir::QPath, - colons_before_params: bool) - -> io::Result<()> { + pub fn print_qpath( + &mut self, + qpath: &hir::QPath, + colons_before_params: bool, + ) -> io::Result<()> { match *qpath { - hir::QPath::Resolved(None, ref path) => { - self.print_path(path, colons_before_params) - } + hir::QPath::Resolved(None, ref path) => self.print_path(path, colons_before_params), hir::QPath::Resolved(Some(ref qself), ref path) => { self.s.word("<")?; self.print_type(qself)?; @@ -1663,9 +1702,11 @@ impl<'a> State<'a> { if segment.ident.name != keywords::PathRoot.name() { self.print_ident(segment.ident)?; segment.with_generic_args(|generic_args| { - self.print_generic_args(generic_args, - segment.infer_types, - colons_before_params) + self.print_generic_args( + generic_args, + segment.infer_types, + colons_before_params, + ) })?; } } @@ -1675,9 +1716,11 @@ impl<'a> State<'a> { let item_segment = path.segments.last().unwrap(); self.print_ident(item_segment.ident)?; item_segment.with_generic_args(|generic_args| { - self.print_generic_args(generic_args, - item_segment.infer_types, - colons_before_params) + self.print_generic_args( + generic_args, + item_segment.infer_types, + colons_before_params, + ) }) } hir::QPath::TypeRelative(ref qself, ref item_segment) => { @@ -1687,22 +1730,27 @@ impl<'a> State<'a> { self.s.word("::")?; self.print_ident(item_segment.ident)?; item_segment.with_generic_args(|generic_args| { - self.print_generic_args(generic_args, - item_segment.infer_types, - colons_before_params) + self.print_generic_args( + generic_args, + item_segment.infer_types, + colons_before_params, + ) }) } } } - fn print_generic_args(&mut self, - generic_args: &hir::GenericArgs, - infer_types: bool, - colons_before_params: bool) - -> io::Result<()> { + fn print_generic_args( + &mut self, + generic_args: &hir::GenericArgs, + infer_types: bool, + colons_before_params: bool, + ) -> io::Result<()> { if generic_args.parenthesized { self.s.word("(")?; - self.commasep(Inconsistent, generic_args.inputs(), |s, ty| s.print_type(&ty))?; + self.commasep(Inconsistent, generic_args.inputs(), |s, ty| { + s.print_type(&ty) + })?; self.s.word(")")?; self.space_if_not_bol()?; @@ -1736,12 +1784,14 @@ impl<'a> State<'a> { } if !elide_lifetimes { start_or_comma(self)?; - self.commasep(Inconsistent, &generic_args.args, |s, generic_arg| { - match generic_arg { + self.commasep( + Inconsistent, + &generic_args.args, + |s, generic_arg| match generic_arg { GenericArg::Lifetime(lt) => s.print_lifetime(lt), GenericArg::Type(ty) => s.print_type(ty), - } - })?; + }, + )?; } else if !types.is_empty() { 
start_or_comma(self)?; self.commasep(Inconsistent, &types, |s, ty| s.print_type(&ty))?; @@ -1823,18 +1873,20 @@ impl<'a> State<'a> { self.print_qpath(qpath, true)?; self.nbsp()?; self.word_space("{")?; - self.commasep_cmnt(Consistent, - &fields[..], - |s, f| { - s.cbox(indent_unit)?; - if !f.node.is_shorthand { - s.print_ident(f.node.ident)?; - s.word_nbsp(":")?; - } - s.print_pat(&f.node.pat)?; - s.end() - }, - |f| f.node.pat.span)?; + self.commasep_cmnt( + Consistent, + &fields[..], + |s, f| { + s.cbox(indent_unit)?; + if !f.node.is_shorthand { + s.print_ident(f.node.ident)?; + s.word_nbsp(":")?; + } + s.print_pat(&f.node.pat)?; + s.end() + }, + |f| f.node.pat.span, + )?; if etc { if !fields.is_empty() { self.word_space(",")?; @@ -1983,15 +2035,16 @@ impl<'a> State<'a> { self.end() // close enclosing cbox } - pub fn print_fn(&mut self, - decl: &hir::FnDecl, - header: hir::FnHeader, - name: Option, - generics: &hir::Generics, - vis: &hir::Visibility, - arg_names: &[ast::Ident], - body_id: Option) - -> io::Result<()> { + pub fn print_fn( + &mut self, + decl: &hir::FnDecl, + header: hir::FnHeader, + name: Option, + generics: &hir::Generics, + vis: &hir::Visibility, + arg_names: &[ast::Ident], + body_id: Option, + ) -> io::Result<()> { self.print_fn_header_info(header, vis)?; if let Some(name) = name { @@ -2070,8 +2123,11 @@ impl<'a> State<'a> { } } - pub fn print_bounds(&mut self, prefix: &'static str, bounds: &[hir::GenericBound]) - -> io::Result<()> { + pub fn print_bounds( + &mut self, + prefix: &'static str, + bounds: &[hir::GenericBound], + ) -> io::Result<()> { if !bounds.is_empty() { self.s.word(prefix)?; let mut first = true; @@ -2173,9 +2229,11 @@ impl<'a> State<'a> { self.print_type(&bounded_ty)?; self.print_bounds(":", bounds)?; } - &hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate{ref lifetime, - ref bounds, - ..}) => { + &hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate { + ref lifetime, + ref bounds, + .. + }) => { self.print_lifetime(lifetime)?; self.s.word(":")?; @@ -2192,9 +2250,11 @@ impl<'a> State<'a> { } } } - &hir::WherePredicate::EqPredicate(hir::WhereEqPredicate{ref lhs_ty, - ref rhs_ty, - ..}) => { + &hir::WherePredicate::EqPredicate(hir::WhereEqPredicate { + ref lhs_ty, + ref rhs_ty, + .. 
+ }) => { self.print_type(lhs_ty)?; self.s.space()?; self.word_space("=")?; @@ -2238,14 +2298,15 @@ impl<'a> State<'a> { } } - pub fn print_ty_fn(&mut self, - abi: Abi, - unsafety: hir::Unsafety, - decl: &hir::FnDecl, - name: Option, - generic_params: &[hir::GenericParam], - arg_names: &[ast::Ident]) - -> io::Result<()> { + pub fn print_ty_fn( + &mut self, + abi: Abi, + unsafety: hir::Unsafety, + decl: &hir::FnDecl, + name: Option, + generic_params: &[hir::GenericParam], + arg_names: &[ast::Ident], + ) -> io::Result<()> { self.ibox(indent_unit)?; if !generic_params.is_empty() { self.s.word("for")?; @@ -2259,26 +2320,31 @@ impl<'a> State<'a> { }, span: syntax_pos::DUMMY_SP, }; - self.print_fn(decl, - hir::FnHeader { - unsafety, - abi, - constness: hir::Constness::NotConst, - asyncness: hir::IsAsync::NotAsync, - }, - name, - &generics, - &Spanned { span: syntax_pos::DUMMY_SP, - node: hir::VisibilityKind::Inherited }, - arg_names, - None)?; + self.print_fn( + decl, + hir::FnHeader { + unsafety, + abi, + constness: hir::Constness::NotConst, + asyncness: hir::IsAsync::NotAsync, + }, + name, + &generics, + &Spanned { + span: syntax_pos::DUMMY_SP, + node: hir::VisibilityKind::Inherited, + }, + arg_names, + None, + )?; self.end() } - pub fn maybe_print_trailing_comment(&mut self, - span: syntax_pos::Span, - next_pos: Option) - -> io::Result<()> { + pub fn maybe_print_trailing_comment( + &mut self, + span: syntax_pos::Span, + next_pos: Option, + ) -> io::Result<()> { let cm = match self.cm { Some(cm) => cm, _ => return Ok(()), @@ -2293,8 +2359,8 @@ impl<'a> State<'a> { if let Some(p) = next_pos { next = p; } - if span.hi() < (*cmnt).pos && (*cmnt).pos < next && - span_line.line == comment_line.line { + if span.hi() < (*cmnt).pos && (*cmnt).pos < next && span_line.line == comment_line.line + { self.print_comment(cmnt)?; } } @@ -2313,9 +2379,10 @@ impl<'a> State<'a> { Ok(()) } - pub fn print_opt_abi_and_extern_if_nondefault(&mut self, - opt_abi: Option) - -> io::Result<()> { + pub fn print_opt_abi_and_extern_if_nondefault( + &mut self, + opt_abi: Option, + ) -> io::Result<()> { match opt_abi { Some(Abi::Rust) => Ok(()), Some(abi) => { @@ -2336,10 +2403,11 @@ impl<'a> State<'a> { } } - pub fn print_fn_header_info(&mut self, - header: hir::FnHeader, - vis: &hir::Visibility) - -> io::Result<()> { + pub fn print_fn_header_info( + &mut self, + header: hir::FnHeader, + vis: &hir::Visibility, + ) -> io::Result<()> { self.s.word(visibility_qualified(vis, ""))?; match header.constness { @@ -2387,11 +2455,11 @@ impl<'a> State<'a> { /// isn't parsed as (if true {...} else {...} | x) | 5 fn expr_requires_semi_to_be_stmt(e: &hir::Expr) -> bool { match e.node { - hir::ExprKind::If(..) | - hir::ExprKind::Match(..) | - hir::ExprKind::Block(..) | - hir::ExprKind::While(..) | - hir::ExprKind::Loop(..) => false, + hir::ExprKind::If(..) + | hir::ExprKind::Match(..) + | hir::ExprKind::Block(..) + | hir::ExprKind::While(..) + | hir::ExprKind::Loop(..) => false, _ => true, } } @@ -2401,18 +2469,12 @@ fn expr_requires_semi_to_be_stmt(e: &hir::Expr) -> bool { /// seen the semicolon, and thus don't need another. fn stmt_ends_with_semi(stmt: &hir::StmtKind) -> bool { match *stmt { - hir::StmtKind::Decl(ref d, _) => { - match d.node { - hir::DeclKind::Local(_) => true, - hir::DeclKind::Item(_) => false, - } - } - hir::StmtKind::Expr(ref e, _) => { - expr_requires_semi_to_be_stmt(&e) - } - hir::StmtKind::Semi(..) 
=> { - false - } + hir::StmtKind::Decl(ref d, _) => match d.node { + hir::DeclKind::Local(_) => true, + hir::DeclKind::Item(_) => false, + }, + hir::StmtKind::Expr(ref e, _) => expr_requires_semi_to_be_stmt(&e), + hir::StmtKind::Semi(..) => false, } } @@ -2450,17 +2512,17 @@ fn contains_exterior_struct_lit(value: &hir::Expr) -> bool { match value.node { hir::ExprKind::Struct(..) => true, - hir::ExprKind::Assign(ref lhs, ref rhs) | - hir::ExprKind::AssignOp(_, ref lhs, ref rhs) | - hir::ExprKind::Binary(_, ref lhs, ref rhs) => { + hir::ExprKind::Assign(ref lhs, ref rhs) + | hir::ExprKind::AssignOp(_, ref lhs, ref rhs) + | hir::ExprKind::Binary(_, ref lhs, ref rhs) => { // X { y: 1 } + X { y: 2 } contains_exterior_struct_lit(&lhs) || contains_exterior_struct_lit(&rhs) } - hir::ExprKind::Unary(_, ref x) | - hir::ExprKind::Cast(ref x, _) | - hir::ExprKind::Type(ref x, _) | - hir::ExprKind::Field(ref x, _) | - hir::ExprKind::Index(ref x, _) => { + hir::ExprKind::Unary(_, ref x) + | hir::ExprKind::Cast(ref x, _) + | hir::ExprKind::Type(ref x, _) + | hir::ExprKind::Field(ref x, _) + | hir::ExprKind::Index(ref x, _) => { // &X { y: 1 }, X { y: 1 }.y contains_exterior_struct_lit(&x) } diff --git a/src/librustc/ich/caching_source_map_view.rs b/src/librustc/ich/caching_source_map_view.rs index bfe2ca6dd09d1..1038d61e13e0a 100644 --- a/src/librustc/ich/caching_source_map_view.rs +++ b/src/librustc/ich/caching_source_map_view.rs @@ -39,9 +39,10 @@ impl<'cm> CachingSourceMapView<'cm> { } } - pub fn byte_pos_to_line_and_col(&mut self, - pos: BytePos) - -> Option<(Lrc, usize, BytePos)> { + pub fn byte_pos_to_line_and_col( + &mut self, + pos: BytePos, + ) -> Option<(Lrc, usize, BytePos)> { self.time_stamp += 1; // Check if the position is in one of the cached lines @@ -49,15 +50,17 @@ impl<'cm> CachingSourceMapView<'cm> { if pos >= cache_entry.line_start && pos < cache_entry.line_end { cache_entry.time_stamp = self.time_stamp; - return Some((cache_entry.file.clone(), - cache_entry.line_number, - pos - cache_entry.line_start)); + return Some(( + cache_entry.file.clone(), + cache_entry.line_number, + pos - cache_entry.line_start, + )); } } // No cache hit ... let mut oldest = 0; - for index in 1 .. 
self.line_cache.len() { + for index in 1..self.line_cache.len() { if self.line_cache[index].time_stamp < self.line_cache[oldest].time_stamp { oldest = index; } @@ -96,8 +99,10 @@ impl<'cm> CachingSourceMapView<'cm> { cache_entry.line_end = line_bounds.1; cache_entry.time_stamp = self.time_stamp; - return Some((cache_entry.file.clone(), - cache_entry.line_number, - pos - cache_entry.line_start)); + return Some(( + cache_entry.file.clone(), + cache_entry.line_number, + pos - cache_entry.line_start, + )); } } diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs index d5c9d9ff16dcb..3812f71cdeb48 100644 --- a/src/librustc/ich/hcx.rs +++ b/src/librustc/ich/hcx.rs @@ -1,35 +1,38 @@ use hir; use hir::def_id::{DefId, DefIndex}; -use hir::map::DefPathHash; use hir::map::definitions::Definitions; +use hir::map::DefPathHash; use ich::{self, CachingSourceMapView, Fingerprint}; use middle::cstore::CrateStore; -use ty::{TyCtxt, fast_reject}; use session::Session; +use ty::{fast_reject, TyCtxt}; +use std::cell::RefCell; use std::cmp::Ord; -use std::hash as std_hash; use std::collections::HashMap; -use std::cell::RefCell; +use std::hash as std_hash; use syntax::ast; -use syntax::source_map::SourceMap; use syntax::ext::hygiene::SyntaxContext; +use syntax::source_map::SourceMap; use syntax::symbol::Symbol; use syntax::tokenstream::DelimSpan; -use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::hygiene; +use syntax_pos::{Span, DUMMY_SP}; -use rustc_data_structures::stable_hasher::{HashStable, - StableHasher, StableHasherResult, - ToStableHashKey}; -use rustc_data_structures::fx::{FxHashSet, FxHashMap}; +use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_data_structures::stable_hasher::{ + HashStable, StableHasher, StableHasherResult, ToStableHashKey, +}; use smallvec::SmallVec; fn compute_ignored_attr_names() -> FxHashSet { debug_assert!(ich::IGNORED_ATTRIBUTES.len() > 0); - ich::IGNORED_ATTRIBUTES.iter().map(|&s| Symbol::intern(s)).collect() + ich::IGNORED_ATTRIBUTES + .iter() + .map(|&s| Symbol::intern(s)) + .collect() } /// This is the context state available during incr. comp. hashing. It contains @@ -77,11 +80,12 @@ impl<'a> StableHashingContext<'a> { // Don't use it for anything else or you'll run the risk of // leaking data out of the tracking system. 
#[inline] - pub fn new(sess: &'a Session, - krate: &'a hir::Crate, - definitions: &'a Definitions, - cstore: &'a dyn CrateStore) - -> Self { + pub fn new( + sess: &'a Session, + krate: &'a hir::Crate, + definitions: &'a Definitions, + cstore: &'a dyn CrateStore, + ) -> Self { let hash_spans_initial = !sess.opts.debugging_opts.incremental_ignore_spans; StableHashingContext { @@ -103,9 +107,7 @@ impl<'a> StableHashingContext<'a> { } #[inline] - pub fn while_hashing_hir_bodies(&mut self, - hash_bodies: bool, - f: F) { + pub fn while_hashing_hir_bodies(&mut self, hash_bodies: bool, f: F) { let prev_hash_bodies = self.hash_bodies; self.hash_bodies = hash_bodies; f(self); @@ -113,9 +115,7 @@ impl<'a> StableHashingContext<'a> { } #[inline] - pub fn while_hashing_spans(&mut self, - hash_spans: bool, - f: F) { + pub fn while_hashing_spans(&mut self, hash_spans: bool, f: F) { let prev_hash_spans = self.hash_spans; self.hash_spans = hash_spans; f(self); @@ -123,9 +123,11 @@ impl<'a> StableHashingContext<'a> { } #[inline] - pub fn with_node_id_hashing_mode(&mut self, - mode: NodeIdHashingMode, - f: F) { + pub fn with_node_id_hashing_mode( + &mut self, + mode: NodeIdHashingMode, + f: F, + ) { let prev = self.node_id_hashing_mode; self.node_id_hashing_mode = mode; f(self); @@ -159,9 +161,7 @@ impl<'a> StableHashingContext<'a> { #[inline] pub fn source_map(&mut self) -> &mut CachingSourceMapView<'a> { match self.caching_source_map { - Some(ref mut cm) => { - cm - } + Some(ref mut cm) => cm, ref mut none => { *none = Some(CachingSourceMapView::new(self.raw_source_map)); none.as_mut().unwrap() @@ -192,15 +192,13 @@ pub trait StableHashingContextProvider<'a> { fn get_stable_hashing_context(&self) -> StableHashingContext<'a>; } -impl<'a, 'b, T: StableHashingContextProvider<'a>> StableHashingContextProvider<'a> -for &'b T { +impl<'a, 'b, T: StableHashingContextProvider<'a>> StableHashingContextProvider<'a> for &'b T { fn get_stable_hashing_context(&self) -> StableHashingContext<'a> { (**self).get_stable_hashing_context() } } -impl<'a, 'b, T: StableHashingContextProvider<'a>> StableHashingContextProvider<'a> -for &'b mut T { +impl<'a, 'b, T: StableHashingContextProvider<'a>> StableHashingContextProvider<'a> for &'b mut T { fn get_stable_hashing_context(&self) -> StableHashingContext<'a> { (**self).get_stable_hashing_context() } @@ -218,14 +216,14 @@ impl<'a> StableHashingContextProvider<'a> for StableHashingContext<'a> { } } -impl<'a> ::dep_graph::DepGraphSafe for StableHashingContext<'a> { -} - +impl<'a> ::dep_graph::DepGraphSafe for StableHashingContext<'a> {} impl<'a> HashStable> for hir::BodyId { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { if hcx.hash_bodies() { hcx.body_resolver.body(*self).hash_stable(hcx, hasher); } @@ -234,18 +232,17 @@ impl<'a> HashStable> for hir::BodyId { impl<'a> HashStable> for hir::HirId { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { match hcx.node_id_hashing_mode { NodeIdHashingMode::Ignore => { // Don't do anything. 
} NodeIdHashingMode::HashDefPath => { - let hir::HirId { - owner, - local_id, - } = *self; + let hir::HirId { owner, local_id } = *self; hcx.local_def_path_hash(owner).hash_stable(hcx, hasher); local_id.hash_stable(hcx, hasher); @@ -258,24 +255,29 @@ impl<'a> ToStableHashKey> for hir::HirId { type KeyType = (DefPathHash, hir::ItemLocalId); #[inline] - fn to_stable_hash_key(&self, - hcx: &StableHashingContext<'a>) - -> (DefPathHash, hir::ItemLocalId) { + fn to_stable_hash_key( + &self, + hcx: &StableHashingContext<'a>, + ) -> (DefPathHash, hir::ItemLocalId) { let def_path_hash = hcx.local_def_path_hash(self.owner); (def_path_hash, self.local_id) } } impl<'a> HashStable> for ast::NodeId { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { match hcx.node_id_hashing_mode { NodeIdHashingMode::Ignore => { // Don't do anything. } NodeIdHashingMode::HashDefPath => { - hcx.definitions.node_to_hir_id(*self).hash_stable(hcx, hasher); + hcx.definitions + .node_to_hir_id(*self) + .hash_stable(hcx, hasher); } } } @@ -285,15 +287,17 @@ impl<'a> ToStableHashKey> for ast::NodeId { type KeyType = (DefPathHash, hir::ItemLocalId); #[inline] - fn to_stable_hash_key(&self, - hcx: &StableHashingContext<'a>) - -> (DefPathHash, hir::ItemLocalId) { - hcx.definitions.node_to_hir_id(*self).to_stable_hash_key(hcx) + fn to_stable_hash_key( + &self, + hcx: &StableHashingContext<'a>, + ) -> (DefPathHash, hir::ItemLocalId) { + hcx.definitions + .node_to_hir_id(*self) + .to_stable_hash_key(hcx) } } impl<'a> HashStable> for Span { - // Hash a span in a stable way. We can't directly hash the span's BytePos // fields (that would be similar to hashing pointers, since those are just // offsets into the SourceMap). Instead, we hash the (file name, line, column) @@ -303,16 +307,18 @@ impl<'a> HashStable> for Span { // codepoint offsets. For the purpose of the hash that's sufficient. // Also, hashing filenames is expensive so we avoid doing it twice when the // span starts and ends in the same file, which is almost always the case. 
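To make the stable span hashing described in the comment block above concrete: the idea is to hash a span as (file, line, column) data rather than as raw byte offsets, so that unrelated edits earlier in a file, which only shift offsets, do not perturb the hash. The following is a minimal, self-contained sketch of that idea only; FileLines, name_hash, and hash_span_stably are hypothetical stand-ins, not rustc's actual types, and the real implementation differs in details (macro-expansion info, how the end position is folded in).

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hypothetical stand-in for a source file: a stable hash of its name plus the
// byte offset at which every line starts (assumes line_starts[0] == 0).
struct FileLines {
    name_hash: u64,
    line_starts: Vec<usize>,
}

impl FileLines {
    // Resolve an absolute byte offset to a (line, column) pair.
    fn line_col(&self, pos: usize) -> (usize, usize) {
        let line = match self.line_starts.binary_search(&pos) {
            Ok(line) => line,
            Err(insert_at) => insert_at - 1,
        };
        (line, pos - self.line_starts[line])
    }
}

// Hash the span (lo, hi) position-independently: only the file's name hash and
// the (line, column) coordinates feed the hasher, so edits elsewhere in the
// file that merely shift byte offsets leave the hash unchanged.
fn hash_span_stably(file: &FileLines, lo: usize, hi: usize, hasher: &mut impl Hasher) {
    const TAG_VALID_SPAN: u8 = 0;
    TAG_VALID_SPAN.hash(hasher);
    file.name_hash.hash(hasher);
    let (line_lo, col_lo) = file.line_col(lo);
    let (line_hi, col_hi) = file.line_col(hi);
    (line_lo, col_lo, line_hi, col_hi).hash(hasher);
}

fn main() {
    let file = FileLines {
        name_hash: 0xDEAD_BEEF,
        line_starts: vec![0, 10, 25, 40],
    };
    let mut hasher = DefaultHasher::new();
    hash_span_stably(&file, 12, 30, &mut hasher);
    println!("stable span hash: {:x}", hasher.finish());
}

The key design point is that only position information that survives unrelated edits feeds the hasher; anything that amounts to a raw pointer into the SourceMap is kept out.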
- fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { const TAG_VALID_SPAN: u8 = 0; const TAG_INVALID_SPAN: u8 = 1; const TAG_EXPANSION: u8 = 0; const TAG_NO_EXPANSION: u8 = 1; if !hcx.hash_spans { - return + return; } if *self == DUMMY_SP { @@ -328,8 +334,7 @@ impl<'a> HashStable> for Span { return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher); } - let (file_lo, line_lo, col_lo) = match hcx.source_map() - .byte_pos_to_line_and_col(span.lo) { + let (file_lo, line_lo, col_lo) = match hcx.source_map().byte_pos_to_line_and_col(span.lo) { Some(pos) => pos, None => { return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher); @@ -398,9 +403,10 @@ pub fn hash_stable_trait_impls<'a, 'gcx, W, R>( hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher, blanket_impls: &[DefId], - non_blanket_impls: &HashMap, R>) - where W: StableHasherResult, - R: std_hash::BuildHasher, + non_blanket_impls: &HashMap, R>, +) where + W: StableHasherResult, + R: std_hash::BuildHasher, { { let mut blanket_impls: SmallVec<[_; 8]> = blanket_impls @@ -416,15 +422,15 @@ pub fn hash_stable_trait_impls<'a, 'gcx, W, R>( } { - let mut keys: SmallVec<[_; 8]> = - non_blanket_impls.keys() - .map(|k| (k, k.map_def(|d| hcx.def_path_hash(d)))) - .collect(); + let mut keys: SmallVec<[_; 8]> = non_blanket_impls + .keys() + .map(|k| (k, k.map_def(|d| hcx.def_path_hash(d)))) + .collect(); keys.sort_unstable_by(|&(_, ref k1), &(_, ref k2)| k1.cmp(k2)); keys.len().hash_stable(hcx, hasher); for (key, ref stable_key) in keys { stable_key.hash_stable(hcx, hasher); - let mut impls : SmallVec<[_; 8]> = non_blanket_impls[key] + let mut impls: SmallVec<[_; 8]> = non_blanket_impls[key] .iter() .map(|&impl_id| hcx.def_path_hash(impl_id)) .collect(); @@ -437,4 +443,3 @@ pub fn hash_stable_trait_impls<'a, 'gcx, W, R>( } } } - diff --git a/src/librustc/ich/impls_hir.rs b/src/librustc/ich/impls_hir.rs index 8ff60e5f56225..2829934c796fd 100644 --- a/src/librustc/ich/impls_hir.rs +++ b/src/librustc/ich/impls_hir.rs @@ -2,20 +2,23 @@ //! types in no particular order. 
use hir; +use hir::def_id::{CrateNum, DefId, LocalDefId, CRATE_DEF_INDEX}; use hir::map::DefPathHash; -use hir::def_id::{DefId, LocalDefId, CrateNum, CRATE_DEF_INDEX}; -use ich::{StableHashingContext, NodeIdHashingMode, Fingerprint}; -use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, - StableHasher, StableHasherResult}; +use ich::{Fingerprint, NodeIdHashingMode, StableHashingContext}; +use rustc_data_structures::stable_hasher::{ + HashStable, StableHasher, StableHasherResult, ToStableHashKey, +}; use std::mem; use syntax::ast; use syntax::attr; impl<'a> HashStable> for DefId { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { hcx.def_path_hash(*self).hash_stable(hcx, hasher); } } @@ -31,9 +34,11 @@ impl<'a> ToStableHashKey> for DefId { impl<'a> HashStable> for LocalDefId { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { hcx.def_path_hash(self.to_def_id()).hash_stable(hcx, hasher); } } @@ -49,13 +54,16 @@ impl<'a> ToStableHashKey> for LocalDefId { impl<'a> HashStable> for CrateNum { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { hcx.def_path_hash(DefId { krate: *self, - index: CRATE_DEF_INDEX - }).hash_stable(hcx, hasher); + index: CRATE_DEF_INDEX, + }) + .hash_stable(hcx, hasher); } } @@ -64,28 +72,30 @@ impl<'a> ToStableHashKey> for CrateNum { #[inline] fn to_stable_hash_key(&self, hcx: &StableHashingContext<'a>) -> DefPathHash { - let def_id = DefId { krate: *self, index: CRATE_DEF_INDEX }; + let def_id = DefId { + krate: *self, + index: CRATE_DEF_INDEX, + }; def_id.to_stable_hash_key(hcx) } } impl<'a> HashStable> for hir::ItemLocalId { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.as_u32().hash_stable(hcx, hasher); } } -impl<'a> ToStableHashKey> -for hir::ItemLocalId { +impl<'a> ToStableHashKey> for hir::ItemLocalId { type KeyType = hir::ItemLocalId; #[inline] - fn to_stable_hash_key(&self, - _: &StableHashingContext<'a>) - -> hir::ItemLocalId { + fn to_stable_hash_key(&self, _: &StableHashingContext<'a>) -> hir::ItemLocalId { *self } } @@ -98,12 +108,12 @@ for hir::ItemLocalId { // in "DefPath Mode". 
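For the "DefPath mode" hashing referred to just above, the essential move is to hash a cross-referenced item through a path-derived hash that is stable across compilation sessions, never through its raw numeric ID. A minimal hedged sketch follows, using hypothetical NodeId/DefPathHash/HashingCtx stand-ins rather than rustc's own API.

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

// Hypothetical stand-ins, not rustc's types: a per-session numeric ID and a
// path-derived hash that stays the same across compilation sessions.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct NodeId(u32);

#[derive(Clone, Copy, Hash)]
struct DefPathHash(u64);

struct HashingCtx {
    def_path_hashes: HashMap<NodeId, DefPathHash>,
}

impl HashingCtx {
    // "DefPath mode": a reference to another item is hashed through that item's
    // stable path hash, never through the raw NodeId, which can be renumbered
    // whenever unrelated code is added or removed.
    fn hash_item_reference(&self, id: NodeId, hasher: &mut impl Hasher) {
        self.def_path_hashes[&id].hash(hasher);
    }
}

fn main() {
    let mut def_path_hashes = HashMap::new();
    def_path_hashes.insert(NodeId(7), DefPathHash(0x1234_5678_9abc_def0));
    let ctx = HashingCtx { def_path_hashes };

    let mut hasher = DefaultHasher::new();
    ctx.hash_item_reference(NodeId(7), &mut hasher);
    println!("hash of the referenced item: {:x}", hasher.finish());
}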
impl<'a> HashStable> for hir::ItemId { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { - let hir::ItemId { - id - } = *self; + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { + let hir::ItemId { id } = *self; hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { id.hash_stable(hcx, hasher); @@ -112,12 +122,12 @@ impl<'a> HashStable> for hir::ItemId { } impl<'a> HashStable> for hir::TraitItemId { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { - let hir::TraitItemId { - node_id - } = * self; + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { + let hir::TraitItemId { node_id } = *self; hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { node_id.hash_stable(hcx, hasher); @@ -126,12 +136,12 @@ impl<'a> HashStable> for hir::TraitItemId { } impl<'a> HashStable> for hir::ImplItemId { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { - let hir::ImplItemId { - node_id - } = * self; + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { + let hir::ImplItemId { node_id } = *self; hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { node_id.hash_stable(hcx, hasher); @@ -216,15 +226,20 @@ impl_stable_hash_for!(enum hir::LifetimeParamKind { }); impl<'a> HashStable> for hir::GenericParamKind { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match self { hir::GenericParamKind::Lifetime { kind } => { kind.hash_stable(hcx, hasher); } - hir::GenericParamKind::Type { ref default, synthetic } => { + hir::GenericParamKind::Type { + ref default, + synthetic, + } => { default.hash_stable(hcx, hasher); synthetic.hash_stable(hcx, hasher); } @@ -298,9 +313,11 @@ impl_stable_hash_for!(struct hir::FnHeader { }); impl<'a> HashStable> for hir::Ty { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { hcx.while_hashing_hir_bodies(true, |hcx| { let hir::Ty { id: _, @@ -523,7 +540,6 @@ impl_stable_hash_for!(struct hir::Field { impl_stable_hash_for_spanned!(ast::Name); - impl_stable_hash_for!(enum hir::BlockCheckMode { DefaultBlock, UnsafeBlock(src), @@ -543,16 +559,18 @@ impl_stable_hash_for!(struct hir::AnonConst { }); impl<'a> HashStable> for hir::Expr { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { hcx.while_hashing_hir_bodies(true, |hcx| { let hir::Expr { id: _, hir_id: _, ref span, ref node, - ref attrs + ref attrs, } = *self; span.hash_stable(hcx, hasher); @@ -607,20 +625,24 @@ impl_stable_hash_for!(enum hir::LoopSource { }); impl<'a> HashStable> for hir::MatchSource { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { use hir::MatchSource; mem::discriminant(self).hash_stable(hcx, hasher); match *self { - MatchSource::Normal | - MatchSource::WhileLetDesugar | - 
MatchSource::ForLoopDesugar | - MatchSource::TryDesugar => { + MatchSource::Normal + | MatchSource::WhileLetDesugar + | MatchSource::ForLoopDesugar + | MatchSource::TryDesugar => { // No fields to hash. } - MatchSource::IfLetDesugar { contains_else_clause } => { + MatchSource::IfLetDesugar { + contains_else_clause, + } => { contains_else_clause.hash_stable(hcx, hasher); } } @@ -658,9 +680,11 @@ impl_stable_hash_for!(struct ast::Ident { }); impl<'a> HashStable> for hir::TraitItem { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let hir::TraitItem { id: _, hir_id: _, @@ -668,7 +692,7 @@ impl<'a> HashStable> for hir::TraitItem { ref attrs, ref generics, ref node, - span + span, } = *self; hcx.hash_hir_item_like(|hcx| { @@ -693,9 +717,11 @@ impl_stable_hash_for!(enum hir::TraitItemKind { }); impl<'a> HashStable> for hir::ImplItem { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let hir::ImplItem { id: _, hir_id: _, @@ -705,7 +731,7 @@ impl<'a> HashStable> for hir::ImplItem { ref attrs, ref generics, ref node, - span + span, } = *self; hcx.hash_hir_item_like(|hcx| { @@ -733,19 +759,24 @@ impl_stable_hash_for!(enum ::syntax::ast::CrateSugar { }); impl<'a> HashStable> for hir::VisibilityKind { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { - hir::VisibilityKind::Public | - hir::VisibilityKind::Inherited => { + hir::VisibilityKind::Public | hir::VisibilityKind::Inherited => { // No fields to hash. 
} hir::VisibilityKind::Crate(sugar) => { sugar.hash_stable(hcx, hasher); } - hir::VisibilityKind::Restricted { ref path, id, hir_id } => { + hir::VisibilityKind::Restricted { + ref path, + id, + hir_id, + } => { hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { id.hash_stable(hcx, hasher); hir_id.hash_stable(hcx, hasher); @@ -759,9 +790,11 @@ impl<'a> HashStable> for hir::VisibilityKind { impl_stable_hash_for_spanned!(hir::VisibilityKind); impl<'a> HashStable> for hir::Defaultness { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { hir::Defaultness::Final => { @@ -780,9 +813,11 @@ impl_stable_hash_for!(enum hir::ImplPolarity { }); impl<'a> HashStable> for hir::Mod { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let hir::Mod { inner: ref inner_span, ref item_ids, @@ -799,9 +834,8 @@ impl<'a> HashStable> for hir::Mod { let (def_path_hash, local_id) = id.id.to_stable_hash_key(hcx); debug_assert_eq!(local_id, hir::ItemLocalId::from_u32(0)); def_path_hash.0 - }).fold(Fingerprint::ZERO, |a, b| { - a.combine_commutative(b) - }); + }) + .fold(Fingerprint::ZERO, |a, b| a.combine_commutative(b)); item_ids.len().hash_stable(hcx, hasher); item_ids_hash.hash_stable(hcx, hasher); @@ -848,9 +882,11 @@ impl_stable_hash_for!(enum hir::VariantData { }); impl<'a> HashStable> for hir::Item { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let hir::Item { ident, ref attrs, @@ -858,7 +894,7 @@ impl<'a> HashStable> for hir::Item { hir_id: _, ref node, ref vis, - span + span, } = *self; hcx.hash_hir_item_like(|hcx| { @@ -908,14 +944,16 @@ impl_stable_hash_for!(struct hir::ImplItemRef { }); impl<'a> HashStable> for hir::AssociatedItemKind { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { - hir::AssociatedItemKind::Const | - hir::AssociatedItemKind::Existential | - hir::AssociatedItemKind::Type => { + hir::AssociatedItemKind::Const + | hir::AssociatedItemKind::Existential + | hir::AssociatedItemKind::Type => { // No fields to hash. 
} hir::AssociatedItemKind::Method { has_self } => { @@ -953,9 +991,11 @@ impl_stable_hash_for!(struct hir::Arg { }); impl<'a> HashStable> for hir::Body { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let hir::Body { ref arguments, ref value, @@ -974,9 +1014,10 @@ impl<'a> ToStableHashKey> for hir::BodyId { type KeyType = (DefPathHash, hir::ItemLocalId); #[inline] - fn to_stable_hash_key(&self, - hcx: &StableHashingContext<'a>) - -> (DefPathHash, hir::ItemLocalId) { + fn to_stable_hash_key( + &self, + hcx: &StableHashingContext<'a>, + ) -> (DefPathHash, hir::ItemLocalId) { let hir::BodyId { node_id } = *self; node_id.to_stable_hash_key(hcx) } @@ -1079,21 +1120,21 @@ impl_stable_hash_for!(enum hir::Constness { }); impl<'a> HashStable> for hir::def_id::DefIndex { - - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { hcx.local_def_path_hash(*self).hash_stable(hcx, hasher); } } -impl<'a> ToStableHashKey> -for hir::def_id::DefIndex { +impl<'a> ToStableHashKey> for hir::def_id::DefIndex { type KeyType = DefPathHash; #[inline] fn to_stable_hash_key(&self, hcx: &StableHashingContext<'a>) -> DefPathHash { - hcx.local_def_path_hash(*self) + hcx.local_def_path_hash(*self) } } @@ -1110,9 +1151,11 @@ impl_stable_hash_for!(struct ::middle::lib_features::LibFeatures { }); impl<'a> HashStable> for ::middle::lang_items::LangItem { - fn hash_stable(&self, - _: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + _: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { ::std::hash::Hash::hash(self, hasher); } } @@ -1123,14 +1166,13 @@ impl_stable_hash_for!(struct ::middle::lang_items::LanguageItems { }); impl<'a> HashStable> for hir::TraitCandidate { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { - let hir::TraitCandidate { - def_id, - import_id, - } = *self; + let hir::TraitCandidate { def_id, import_id } = *self; def_id.hash_stable(hcx, hasher); import_id.hash_stable(hcx, hasher); @@ -1141,17 +1183,12 @@ impl<'a> HashStable> for hir::TraitCandidate { impl<'a> ToStableHashKey> for hir::TraitCandidate { type KeyType = (DefPathHash, Option<(DefPathHash, hir::ItemLocalId)>); - fn to_stable_hash_key(&self, - hcx: &StableHashingContext<'a>) - -> Self::KeyType { - let hir::TraitCandidate { - def_id, - import_id, - } = *self; + fn to_stable_hash_key(&self, hcx: &StableHashingContext<'a>) -> Self::KeyType { + let hir::TraitCandidate { def_id, import_id } = *self; - let import_id = import_id.map(|node_id| hcx.node_to_hir_id(node_id)) - .map(|hir_id| (hcx.local_def_path_hash(hir_id.owner), - hir_id.local_id)); + let import_id = import_id + .map(|node_id| hcx.node_to_hir_id(node_id)) + .map(|hir_id| (hcx.local_def_path_hash(hir_id.owner), hir_id.local_id)); (hcx.def_path_hash(def_id), import_id) } } @@ -1166,19 +1203,22 @@ impl_stable_hash_for!(struct hir::CodegenFnAttrs { link_section, }); -impl<'hir> HashStable> for hir::CodegenFnAttrFlags -{ - fn hash_stable(&self, - hcx: &mut StableHashingContext<'hir>, - hasher: &mut StableHasher) { +impl<'hir> HashStable> for 
hir::CodegenFnAttrFlags { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'hir>, + hasher: &mut StableHasher, + ) { self.bits().hash_stable(hcx, hasher); } } impl<'hir> HashStable> for attr::InlineAttr { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'hir>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'hir>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); } } diff --git a/src/librustc/ich/impls_mir.rs b/src/librustc/ich/impls_mir.rs index 002ac7cc7a9bb..ccb44bcfe2eca 100644 --- a/src/librustc/ich/impls_mir.rs +++ b/src/librustc/ich/impls_mir.rs @@ -3,8 +3,7 @@ use ich::StableHashingContext; use mir; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, - StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; use std::mem; impl_stable_hash_for!(struct mir::GeneratorLayout<'tcx> { fields }); @@ -55,125 +54,155 @@ impl_stable_hash_for!( impl<'a> HashStable> for mir::Local { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.index().hash_stable(hcx, hasher); } } impl<'a> HashStable> for mir::BasicBlock { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.index().hash_stable(hcx, hasher); } } impl<'a> HashStable> for mir::Field { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.index().hash_stable(hcx, hasher); } } -impl<'a> HashStable> -for mir::SourceScope { +impl<'a> HashStable> for mir::SourceScope { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.index().hash_stable(hcx, hasher); } } impl<'a> HashStable> for mir::Promoted { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.index().hash_stable(hcx, hasher); } } -impl<'a, 'gcx> HashStable> -for mir::TerminatorKind<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx> HashStable> for mir::TerminatorKind<'gcx> { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { mir::TerminatorKind::Goto { ref target } => { target.hash_stable(hcx, hasher); } - mir::TerminatorKind::SwitchInt { ref discr, - switch_ty, - ref values, - ref targets } => { + mir::TerminatorKind::SwitchInt { + ref discr, + switch_ty, + ref values, + ref targets, + } => { discr.hash_stable(hcx, hasher); switch_ty.hash_stable(hcx, hasher); values.hash_stable(hcx, hasher); targets.hash_stable(hcx, hasher); } - mir::TerminatorKind::Resume | - mir::TerminatorKind::Abort | - mir::TerminatorKind::Return | - mir::TerminatorKind::GeneratorDrop | - mir::TerminatorKind::Unreachable => {} - mir::TerminatorKind::Drop { ref location, target, unwind } => { + 
mir::TerminatorKind::Resume + | mir::TerminatorKind::Abort + | mir::TerminatorKind::Return + | mir::TerminatorKind::GeneratorDrop + | mir::TerminatorKind::Unreachable => {} + mir::TerminatorKind::Drop { + ref location, + target, + unwind, + } => { location.hash_stable(hcx, hasher); target.hash_stable(hcx, hasher); unwind.hash_stable(hcx, hasher); } - mir::TerminatorKind::DropAndReplace { ref location, - ref value, - target, - unwind, } => { + mir::TerminatorKind::DropAndReplace { + ref location, + ref value, + target, + unwind, + } => { location.hash_stable(hcx, hasher); value.hash_stable(hcx, hasher); target.hash_stable(hcx, hasher); unwind.hash_stable(hcx, hasher); } - mir::TerminatorKind::Yield { ref value, - resume, - drop } => { + mir::TerminatorKind::Yield { + ref value, + resume, + drop, + } => { value.hash_stable(hcx, hasher); resume.hash_stable(hcx, hasher); drop.hash_stable(hcx, hasher); } - mir::TerminatorKind::Call { ref func, - ref args, - ref destination, - cleanup, - from_hir_call, } => { + mir::TerminatorKind::Call { + ref func, + ref args, + ref destination, + cleanup, + from_hir_call, + } => { func.hash_stable(hcx, hasher); args.hash_stable(hcx, hasher); destination.hash_stable(hcx, hasher); cleanup.hash_stable(hcx, hasher); from_hir_call.hash_stable(hcx, hasher); } - mir::TerminatorKind::Assert { ref cond, - expected, - ref msg, - target, - cleanup } => { + mir::TerminatorKind::Assert { + ref cond, + expected, + ref msg, + target, + cleanup, + } => { cond.hash_stable(hcx, hasher); expected.hash_stable(hcx, hasher); msg.hash_stable(hcx, hasher); target.hash_stable(hcx, hasher); cleanup.hash_stable(hcx, hasher); } - mir::TerminatorKind::FalseEdges { ref real_target, ref imaginary_targets } => { + mir::TerminatorKind::FalseEdges { + ref real_target, + ref imaginary_targets, + } => { real_target.hash_stable(hcx, hasher); for target in imaginary_targets { target.hash_stable(hcx, hasher); } } - mir::TerminatorKind::FalseUnwind { ref real_target, ref unwind } => { + mir::TerminatorKind::FalseUnwind { + ref real_target, + ref unwind, + } => { real_target.hash_stable(hcx, hasher); unwind.hash_stable(hcx, hasher); } @@ -199,9 +228,11 @@ impl_stable_hash_for!(enum mir::RetagKind { FnEntry, TwoPhase, Raw, Default }); impl_stable_hash_for!(enum mir::FakeReadCause { ForMatchGuard, ForMatchedPlace, ForLet }); impl<'a, 'gcx> HashStable> for mir::Place<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { mir::Place::Local(ref local) => { @@ -220,33 +251,34 @@ impl<'a, 'gcx> HashStable> for mir::Place<'gcx> { } } -impl<'a, 'gcx, B, V, T> HashStable> -for mir::Projection<'gcx, B, V, T> - where B: HashStable>, - V: HashStable>, - T: HashStable> +impl<'a, 'gcx, B, V, T> HashStable> for mir::Projection<'gcx, B, V, T> +where + B: HashStable>, + V: HashStable>, + T: HashStable>, { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { - let mir::Projection { - ref base, - ref elem, - } = *self; + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { + let mir::Projection { ref base, ref elem } = *self; base.hash_stable(hcx, hasher); elem.hash_stable(hcx, hasher); } } -impl<'a, 'gcx, V, T> HashStable> -for mir::ProjectionElem<'gcx, V, T> - where V: HashStable>, - T: HashStable> +impl<'a, 'gcx, V, T> 
HashStable> for mir::ProjectionElem<'gcx, V, T> +where + V: HashStable>, + T: HashStable>, { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { mir::ProjectionElem::Deref => {} @@ -257,7 +289,11 @@ for mir::ProjectionElem<'gcx, V, T> mir::ProjectionElem::Index(ref value) => { value.hash_stable(hcx, hasher); } - mir::ProjectionElem::ConstantIndex { offset, min_length, from_end } => { + mir::ProjectionElem::ConstantIndex { + offset, + min_length, + from_end, + } => { offset.hash_stable(hcx, hasher); min_length.hash_stable(hcx, hasher); from_end.hash_stable(hcx, hasher); @@ -280,15 +316,15 @@ impl_stable_hash_for!(struct mir::SourceScopeLocalData { }); impl<'a> HashStable> for mir::Safety { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { - mir::Safety::Safe | - mir::Safety::BuiltinUnsafe | - mir::Safety::FnUnsafe => {} + mir::Safety::Safe | mir::Safety::BuiltinUnsafe | mir::Safety::FnUnsafe => {} mir::Safety::ExplicitUnsafe(node_id) => { node_id.hash_stable(hcx, hasher); } @@ -297,9 +333,11 @@ impl<'a> HashStable> for mir::Safety { } impl<'a, 'gcx> HashStable> for mir::Operand<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { @@ -317,9 +355,11 @@ impl<'a, 'gcx> HashStable> for mir::Operand<'gcx> { } impl<'a, 'gcx> HashStable> for mir::Rvalue<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { @@ -343,8 +383,8 @@ impl<'a, 'gcx> HashStable> for mir::Rvalue<'gcx> { operand.hash_stable(hcx, hasher); ty.hash_stable(hcx, hasher); } - mir::Rvalue::BinaryOp(op, ref operand1, ref operand2) | - mir::Rvalue::CheckedBinaryOp(op, ref operand1, ref operand2) => { + mir::Rvalue::BinaryOp(op, ref operand1, ref operand2) + | mir::Rvalue::CheckedBinaryOp(op, ref operand1, ref operand2) => { op.hash_stable(hcx, hasher); operand1.hash_stable(hcx, hasher); operand2.hash_stable(hcx, hasher); @@ -376,11 +416,12 @@ impl_stable_hash_for!(enum mir::CastKind { Unsize }); -impl<'a, 'gcx> HashStable> -for mir::AggregateKind<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx> HashStable> for mir::AggregateKind<'gcx> { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { mir::AggregateKind::Tuple => {} @@ -477,9 +518,11 @@ impl_stable_hash_for!(enum mir::ConstraintCategory { }); impl<'a, 'gcx> HashStable> for mir::ClosureOutlivesSubject<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { 
mir::ClosureOutlivesSubject::Ty(ref ty) => { diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index 70ec72d73bc6c..46b361fd2c94c 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -13,17 +13,20 @@ use syntax::symbol::{InternedString, LocalInternedString}; use syntax::tokenstream; use syntax_pos::SourceFile; -use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX}; +use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX}; +use rustc_data_structures::stable_hasher::{ + HashStable, StableHasher, StableHasherResult, ToStableHashKey, +}; use smallvec::SmallVec; -use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, - StableHasher, StableHasherResult}; impl<'a> HashStable> for InternedString { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.with(|s| s.hash_stable(hcx, hasher)) } } @@ -32,18 +35,18 @@ impl<'a> ToStableHashKey> for InternedString { type KeyType = InternedString; #[inline] - fn to_stable_hash_key(&self, - _: &StableHashingContext<'a>) - -> InternedString { + fn to_stable_hash_key(&self, _: &StableHashingContext<'a>) -> InternedString { self.clone() } } impl<'a> HashStable> for LocalInternedString { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let s: &str = &**self; s.hash_stable(hcx, hasher); } @@ -53,18 +56,18 @@ impl<'a> ToStableHashKey> for LocalInternedString { type KeyType = LocalInternedString; #[inline] - fn to_stable_hash_key(&self, - _: &StableHashingContext<'a>) - -> LocalInternedString { + fn to_stable_hash_key(&self, _: &StableHashingContext<'a>) -> LocalInternedString { self.clone() } } impl<'a> HashStable> for ast::Name { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.as_str().hash_stable(hcx, hasher); } } @@ -73,9 +76,7 @@ impl<'a> ToStableHashKey> for ast::Name { type KeyType = InternedString; #[inline] - fn to_stable_hash_key(&self, - _: &StableHashingContext<'a>) - -> InternedString { + fn to_stable_hash_key(&self, _: &StableHashingContext<'a>) -> InternedString { self.as_interned_str() } } @@ -92,7 +93,6 @@ impl_stable_hash_for!(enum ::syntax::ext::base::MacroKind { ProcMacroStub, }); - impl_stable_hash_for!(enum ::rustc_target::spec::abi::Abi { Cdecl, Stdcall, @@ -129,14 +129,18 @@ impl_stable_hash_for!(enum ::syntax::edition::Edition { Edition2018, }); -impl<'a> HashStable> -for ::syntax::attr::StabilityLevel { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a> HashStable> for ::syntax::attr::StabilityLevel { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { - ::syntax::attr::StabilityLevel::Unstable { ref reason, ref issue } => { + ::syntax::attr::StabilityLevel::Unstable { + ref reason, + ref issue, + } => { reason.hash_stable(hcx, hasher); issue.hash_stable(hcx, hasher); } @@ -149,7 +153,6 @@ for ::syntax::attr::StabilityLevel { impl_stable_hash_for!(struct ::syntax::attr::RustcDeprecation { since, reason }); - impl_stable_hash_for!(enum 
::syntax::attr::IntType { SignedInt(int_ty), UnsignedInt(uint_ty) @@ -184,20 +187,20 @@ impl_stable_hash_for!(enum ::syntax::ast::StrStyle { Cooked, Raw(pounds) }); impl_stable_hash_for!(enum ::syntax::ast::AttrStyle { Outer, Inner }); impl<'a> HashStable> for [ast::Attribute] { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { if self.len() == 0 { self.len().hash_stable(hcx, hasher); - return + return; } // Some attributes are always ignored during hashing. let filtered: SmallVec<[&ast::Attribute; 8]> = self .iter() - .filter(|attr| { - !attr.is_sugared_doc && !hcx.is_ignored_attr(attr.name()) - }) + .filter(|attr| !attr.is_sugared_doc && !hcx.is_ignored_attr(attr.name())) .collect(); filtered.len().hash_stable(hcx, hasher); @@ -208,9 +211,11 @@ impl<'a> HashStable> for [ast::Attribute] { } impl<'a> HashStable> for ast::Path { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.segments.len().hash_stable(hcx, hasher); for segment in &self.segments { segment.ident.name.hash_stable(hcx, hasher); @@ -219,9 +224,11 @@ impl<'a> HashStable> for ast::Path { } impl<'a> HashStable> for ast::Attribute { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { // Make sure that these have been filtered out. debug_assert!(!hcx.is_ignored_attr(self.name())); debug_assert!(!self.is_sugared_doc); @@ -244,11 +251,12 @@ impl<'a> HashStable> for ast::Attribute { } } -impl<'a> HashStable> -for tokenstream::TokenTree { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a> HashStable> for tokenstream::TokenTree { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { tokenstream::TokenTree::Token(span, ref token) => { @@ -266,11 +274,12 @@ for tokenstream::TokenTree { } } -impl<'a> HashStable> -for tokenstream::TokenStream { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a> HashStable> for tokenstream::TokenStream { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { for sub_tt in self.trees() { sub_tt.hash_stable(hcx, hasher); } @@ -284,57 +293,54 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>( ) { mem::discriminant(token).hash_stable(hcx, hasher); match *token { - token::Token::Eq | - token::Token::Lt | - token::Token::Le | - token::Token::EqEq | - token::Token::Ne | - token::Token::Ge | - token::Token::Gt | - token::Token::AndAnd | - token::Token::OrOr | - token::Token::Not | - token::Token::Tilde | - token::Token::At | - token::Token::Dot | - token::Token::DotDot | - token::Token::DotDotDot | - token::Token::DotDotEq | - token::Token::Comma | - token::Token::Semi | - token::Token::Colon | - token::Token::ModSep | - token::Token::RArrow | - token::Token::LArrow | - token::Token::FatArrow | - token::Token::Pound | - token::Token::Dollar | - token::Token::Question | - token::Token::SingleQuote | - token::Token::Whitespace | - token::Token::Comment | - token::Token::Eof => {} - - token::Token::BinOp(bin_op_token) | - 
token::Token::BinOpEq(bin_op_token) => { + token::Token::Eq + | token::Token::Lt + | token::Token::Le + | token::Token::EqEq + | token::Token::Ne + | token::Token::Ge + | token::Token::Gt + | token::Token::AndAnd + | token::Token::OrOr + | token::Token::Not + | token::Token::Tilde + | token::Token::At + | token::Token::Dot + | token::Token::DotDot + | token::Token::DotDotDot + | token::Token::DotDotEq + | token::Token::Comma + | token::Token::Semi + | token::Token::Colon + | token::Token::ModSep + | token::Token::RArrow + | token::Token::LArrow + | token::Token::FatArrow + | token::Token::Pound + | token::Token::Dollar + | token::Token::Question + | token::Token::SingleQuote + | token::Token::Whitespace + | token::Token::Comment + | token::Token::Eof => {} + + token::Token::BinOp(bin_op_token) | token::Token::BinOpEq(bin_op_token) => { std_hash::Hash::hash(&bin_op_token, hasher); } - token::Token::OpenDelim(delim_token) | - token::Token::CloseDelim(delim_token) => { + token::Token::OpenDelim(delim_token) | token::Token::CloseDelim(delim_token) => { std_hash::Hash::hash(&delim_token, hasher); } token::Token::Literal(ref lit, ref opt_name) => { mem::discriminant(lit).hash_stable(hcx, hasher); match *lit { - token::Lit::Byte(val) | - token::Lit::Char(val) | - token::Lit::Integer(val) | - token::Lit::Float(val) | - token::Lit::Str_(val) | - token::Lit::ByteStr(val) => val.hash_stable(hcx, hasher), - token::Lit::StrRaw(val, n) | - token::Lit::ByteStrRaw(val, n) => { + token::Lit::Byte(val) + | token::Lit::Char(val) + | token::Lit::Integer(val) + | token::Lit::Float(val) + | token::Lit::Str_(val) + | token::Lit::ByteStr(val) => val.hash_stable(hcx, hasher), + token::Lit::StrRaw(val, n) | token::Lit::ByteStrRaw(val, n) => { val.hash_stable(hcx, hasher); n.hash_stable(hcx, hasher); } @@ -352,8 +358,7 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>( bug!("interpolated tokens should not be present in the HIR") } - token::Token::DocComment(val) | - token::Token::Shebang(val) => val.hash_stable(hcx, hasher), + token::Token::DocComment(val) | token::Token::Shebang(val) => val.hash_stable(hcx, hasher), } } @@ -414,9 +419,11 @@ impl_stable_hash_for!(enum ::syntax_pos::FileName { }); impl<'a> HashStable> for SourceFile { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let SourceFile { name: _, // We hash the smaller name_hash instead of this name_hash, @@ -440,7 +447,8 @@ impl<'a> HashStable> for SourceFile { DefId { krate: CrateNum::from_u32(crate_of_origin), index: CRATE_DEF_INDEX, - }.hash_stable(hcx, hasher); + } + .hash_stable(hcx, hasher); src_hash.hash_stable(hcx, hasher); @@ -463,38 +471,35 @@ impl<'a> HashStable> for SourceFile { } } -fn stable_byte_pos(pos: ::syntax_pos::BytePos, - source_file_start: ::syntax_pos::BytePos) - -> u32 { +fn stable_byte_pos(pos: ::syntax_pos::BytePos, source_file_start: ::syntax_pos::BytePos) -> u32 { pos.0 - source_file_start.0 } -fn stable_multibyte_char(mbc: ::syntax_pos::MultiByteChar, - source_file_start: ::syntax_pos::BytePos) - -> (u32, u32) { - let ::syntax_pos::MultiByteChar { - pos, - bytes, - } = mbc; +fn stable_multibyte_char( + mbc: ::syntax_pos::MultiByteChar, + source_file_start: ::syntax_pos::BytePos, +) -> (u32, u32) { + let ::syntax_pos::MultiByteChar { pos, bytes } = mbc; (pos.0 - source_file_start.0, bytes as u32) } -fn stable_non_narrow_char(swc: ::syntax_pos::NonNarrowChar, - source_file_start: 
::syntax_pos::BytePos) - -> (u32, u32) { +fn stable_non_narrow_char( + swc: ::syntax_pos::NonNarrowChar, + source_file_start: ::syntax_pos::BytePos, +) -> (u32, u32) { let pos = swc.pos(); let width = swc.width(); (pos.0 - source_file_start.0, width as u32) } - - impl<'gcx> HashStable> for feature_gate::Features { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'gcx>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'gcx>, + hasher: &mut StableHasher, + ) { // Unfortunately we cannot exhaustively list fields here, since the // struct is macro generated. self.declared_lang_features.hash_stable(hcx, hasher); diff --git a/src/librustc/ich/impls_ty.rs b/src/librustc/ich/impls_ty.rs index b2fe4b7561c5a..9869a0173a0d0 100644 --- a/src/librustc/ich/impls_ty.rs +++ b/src/librustc/ich/impls_ty.rs @@ -1,25 +1,29 @@ //! This module contains `HashStable` implementations for various data types //! from rustc::ty in no particular order. -use ich::{Fingerprint, StableHashingContext, NodeIdHashingMode}; +use ich::{Fingerprint, NodeIdHashingMode, StableHashingContext}; +use infer; +use middle::region; +use mir; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, - StableHasher, StableHasherResult}; +use rustc_data_structures::stable_hasher::{ + HashStable, StableHasher, StableHasherResult, ToStableHashKey, +}; use std::cell::RefCell; use std::hash as std_hash; use std::mem; -use middle::region; -use infer; use traits; use ty; -use mir; -impl<'a, 'gcx, T> HashStable> -for &'gcx ty::List - where T: HashStable> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx, T> HashStable> for &'gcx ty::List +where + T: HashStable>, +{ + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { thread_local! { static CACHE: RefCell> = RefCell::new(Default::default()); @@ -44,7 +48,8 @@ for &'gcx ty::List } impl<'a, 'gcx, T> ToStableHashKey> for &'gcx ty::List - where T: HashStable> +where + T: HashStable>, { type KeyType = Fingerprint; @@ -58,18 +63,21 @@ impl<'a, 'gcx, T> ToStableHashKey> for &'gcx ty::List HashStable> for ty::subst::Kind<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.unpack().hash_stable(hcx, hasher); } } -impl<'a, 'gcx> HashStable> -for ty::subst::UnpackedKind<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx> HashStable> for ty::subst::UnpackedKind<'gcx> { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match self { ty::subst::UnpackedKind::Lifetime(lt) => lt.hash_stable(hcx, hasher), @@ -78,16 +86,15 @@ for ty::subst::UnpackedKind<'gcx> { } } -impl<'a> HashStable> -for ty::RegionKind { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a> HashStable> for ty::RegionKind { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { - ty::ReErased | - ty::ReStatic | - ty::ReEmpty => { + ty::ReErased | ty::ReStatic | ty::ReEmpty => { // No variant fields to hash for these ... 
} ty::ReLateBound(db, ty::BrAnon(i)) => { @@ -102,7 +109,11 @@ for ty::RegionKind { ty::ReLateBound(db, ty::BrEnv) => { db.hash_stable(hcx, hasher); } - ty::ReEarlyBound(ty::EarlyBoundRegion { def_id, index, name }) => { + ty::ReEarlyBound(ty::EarlyBoundRegion { + def_id, + index, + name, + }) => { def_id.hash_stable(hcx, hasher); index.hash_stable(hcx, hasher); name.hash_stable(hcx, hasher); @@ -116,9 +127,7 @@ for ty::RegionKind { ty::ReClosureBound(vid) => { vid.hash_stable(hcx, hasher); } - ty::ReLateBound(..) | - ty::ReVar(..) | - ty::RePlaceholder(..) => { + ty::ReLateBound(..) | ty::ReVar(..) | ty::RePlaceholder(..) => { bug!("StableHasher: unexpected region {:?}", *self) } } @@ -127,27 +136,32 @@ for ty::RegionKind { impl<'a> HashStable> for ty::RegionVid { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.index().hash_stable(hcx, hasher); } } impl<'gcx> HashStable> for ty::BoundVar { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'gcx>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'gcx>, + hasher: &mut StableHasher, + ) { self.index().hash_stable(hcx, hasher); } } -impl<'a, 'gcx> HashStable> -for ty::adjustment::AutoBorrow<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx> HashStable> for ty::adjustment::AutoBorrow<'gcx> { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { ty::adjustment::AutoBorrow::Ref(ref region, mutability) => { @@ -161,19 +175,20 @@ for ty::adjustment::AutoBorrow<'gcx> { } } -impl<'a, 'gcx> HashStable> -for ty::adjustment::Adjust<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx> HashStable> for ty::adjustment::Adjust<'gcx> { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { - ty::adjustment::Adjust::NeverToAny | - ty::adjustment::Adjust::ReifyFnPointer | - ty::adjustment::Adjust::UnsafeFnPointer | - ty::adjustment::Adjust::ClosureFnPointer | - ty::adjustment::Adjust::MutToConstPointer | - ty::adjustment::Adjust::Unsize => {} + ty::adjustment::Adjust::NeverToAny + | ty::adjustment::Adjust::ReifyFnPointer + | ty::adjustment::Adjust::UnsafeFnPointer + | ty::adjustment::Adjust::ClosureFnPointer + | ty::adjustment::Adjust::MutToConstPointer + | ty::adjustment::Adjust::Unsize => {} ty::adjustment::Adjust::Deref(ref overloaded) => { overloaded.hash_stable(hcx, hasher); } @@ -193,12 +208,16 @@ impl_stable_hash_for!(enum ty::adjustment::AllowTwoPhase { }); impl<'gcx> HashStable> for ty::adjustment::AutoBorrowMutability { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'gcx>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'gcx>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { - ty::adjustment::AutoBorrowMutability::Mutable { ref allow_two_phase_borrow } => { + ty::adjustment::AutoBorrowMutability::Mutable { + ref allow_two_phase_borrow, + } => { allow_two_phase_borrow.hash_stable(hcx, hasher); } ty::adjustment::AutoBorrowMutability::Immutable => {} @@ -234,11 +253,14 @@ 
impl_stable_hash_for!(struct ty::FnSig<'tcx> { }); impl<'a, 'gcx, T> HashStable> for ty::Binder - where T: HashStable> +where + T: HashStable>, { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.skip_binder().hash_stable(hcx, hasher); } } @@ -273,17 +295,21 @@ impl_stable_hash_for!( ); impl<'a> HashStable> for ty::AdtFlags { - fn hash_stable(&self, - _: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + _: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { std_hash::Hash::hash(self, hasher); } } impl<'a> HashStable> for ty::VariantFlags { - fn hash_stable(&self, - _: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + _: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { std_hash::Hash::hash(self, hasher); } } @@ -594,21 +620,17 @@ impl_stable_hash_for!(enum ty::BoundRegion { BrEnv }); -impl<'a, 'gcx> HashStable> -for ty::TyKind<'gcx> -{ - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx> HashStable> for ty::TyKind<'gcx> { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { use ty::TyKind::*; mem::discriminant(self).hash_stable(hcx, hasher); match *self { - Bool | - Char | - Str | - Error | - Never => { + Bool | Char | Str | Error | Never => { // Nothing more to hash. } Int(int_ty) => { @@ -617,7 +639,7 @@ for ty::TyKind<'gcx> Uint(uint_ty) => { uint_ty.hash_stable(hcx, hasher); } - Float(float_ty) => { + Float(float_ty) => { float_ty.hash_stable(hcx, hasher); } Adt(adt_def, substs) => { @@ -659,9 +681,7 @@ for ty::TyKind<'gcx> generator_substs.hash_stable(hcx, hasher); movability.hash_stable(hcx, hasher); } - GeneratorWitness(types) => { - types.hash_stable(hcx, hasher) - } + GeneratorWitness(types) => types.hash_stable(hcx, hasher), Tuple(inner_tys) => { inner_tys.hash_stable(hcx, hasher); } @@ -701,39 +721,48 @@ impl_stable_hash_for!(enum ty::InferTy { FreshFloatTy(a), }); -impl<'a, 'gcx> HashStable> -for ty::TyVid -{ - fn hash_stable(&self, - _hcx: &mut StableHashingContext<'a>, - _hasher: &mut StableHasher) { +impl<'a, 'gcx> HashStable> for ty::TyVid { + fn hash_stable( + &self, + _hcx: &mut StableHashingContext<'a>, + _hasher: &mut StableHasher, + ) { // TyVid values are confined to an inference context and hence // should not be hashed. - bug!("ty::TyKind::hash_stable() - can't hash a TyVid {:?}.", *self) + bug!( + "ty::TyKind::hash_stable() - can't hash a TyVid {:?}.", + *self + ) } } -impl<'a, 'gcx> HashStable> -for ty::IntVid -{ - fn hash_stable(&self, - _hcx: &mut StableHashingContext<'a>, - _hasher: &mut StableHasher) { +impl<'a, 'gcx> HashStable> for ty::IntVid { + fn hash_stable( + &self, + _hcx: &mut StableHashingContext<'a>, + _hasher: &mut StableHasher, + ) { // IntVid values are confined to an inference context and hence // should not be hashed. 
- bug!("ty::TyKind::hash_stable() - can't hash an IntVid {:?}.", *self) + bug!( + "ty::TyKind::hash_stable() - can't hash an IntVid {:?}.", + *self + ) } } -impl<'a, 'gcx> HashStable> -for ty::FloatVid -{ - fn hash_stable(&self, - _hcx: &mut StableHashingContext<'a>, - _hasher: &mut StableHasher) { +impl<'a, 'gcx> HashStable> for ty::FloatVid { + fn hash_stable( + &self, + _hcx: &mut StableHashingContext<'a>, + _hasher: &mut StableHasher, + ) { // FloatVid values are confined to an inference context and hence // should not be hashed. - bug!("ty::TyKind::hash_stable() - can't hash a FloatVid {:?}.", *self) + bug!( + "ty::TyKind::hash_stable() - can't hash a FloatVid {:?}.", + *self + ) } } @@ -747,12 +776,12 @@ impl_stable_hash_for!(struct ty::TypeAndMut<'tcx> { mutbl }); -impl<'a, 'gcx> HashStable> -for ty::ExistentialPredicate<'gcx> -{ - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx> HashStable> for ty::ExistentialPredicate<'gcx> { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { ty::ExistentialPredicate::Trait(ref trait_ref) => { @@ -785,9 +814,11 @@ impl_stable_hash_for!(struct ty::Instance<'tcx> { }); impl<'a, 'gcx> HashStable> for ty::InstanceDef<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { @@ -871,14 +902,15 @@ impl_stable_hash_for!(enum ty::AssociatedItemContainer { ImplContainer(def_id) }); - -impl<'a, 'gcx, T> HashStable> -for ty::steal::Steal - where T: HashStable> +impl<'a, 'gcx, T> HashStable> for ty::steal::Steal +where + T: HashStable>, { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.borrow().hash_stable(hcx, hasher); } } @@ -901,15 +933,14 @@ impl_stable_hash_for!(enum ::middle::privacy::AccessLevel { Public }); -impl<'a> HashStable> -for ::middle::privacy::AccessLevels { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a> HashStable> for ::middle::privacy::AccessLevels { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { - let ::middle::privacy::AccessLevels { - ref map - } = *self; + let ::middle::privacy::AccessLevels { ref map } = *self; map.hash_stable(hcx, hasher); }); @@ -927,15 +958,17 @@ impl_stable_hash_for!(enum ::session::CompileIncomplete { impl_stable_hash_for!(struct ::util::common::ErrorReported {}); -impl_stable_hash_for!(tuple_struct ::middle::reachable::ReachableSet { - reachable_set -}); +impl_stable_hash_for!(tuple_struct::middle::reachable::ReachableSet { reachable_set }); -impl<'a, 'gcx, N> HashStable> -for traits::Vtable<'gcx, N> where N: HashStable> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx, N> HashStable> for traits::Vtable<'gcx, N> +where + N: HashStable>, +{ + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { use traits::Vtable::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -954,11 +987,15 @@ for traits::Vtable<'gcx, N> 
where N: HashStable> { } } -impl<'a, 'gcx, N> HashStable> -for traits::VtableImplData<'gcx, N> where N: HashStable> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx, N> HashStable> for traits::VtableImplData<'gcx, N> +where + N: HashStable>, +{ + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let traits::VtableImplData { impl_def_id, substs, @@ -970,11 +1007,15 @@ for traits::VtableImplData<'gcx, N> where N: HashStable } } -impl<'a, 'gcx, N> HashStable> -for traits::VtableAutoImplData where N: HashStable> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx, N> HashStable> for traits::VtableAutoImplData +where + N: HashStable>, +{ + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let traits::VtableAutoImplData { trait_def_id, ref nested, @@ -984,11 +1025,15 @@ for traits::VtableAutoImplData where N: HashStable> } } -impl<'a, 'gcx, N> HashStable> -for traits::VtableObjectData<'gcx, N> where N: HashStable> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx, N> HashStable> for traits::VtableObjectData<'gcx, N> +where + N: HashStable>, +{ + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let traits::VtableObjectData { upcast_trait_ref, vtable_base, @@ -1000,23 +1045,29 @@ for traits::VtableObjectData<'gcx, N> where N: HashStable HashStable> -for traits::VtableBuiltinData where N: HashStable> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { - let traits::VtableBuiltinData { - ref nested, - } = *self; +impl<'a, 'gcx, N> HashStable> for traits::VtableBuiltinData +where + N: HashStable>, +{ + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { + let traits::VtableBuiltinData { ref nested } = *self; nested.hash_stable(hcx, hasher); } } -impl<'a, 'gcx, N> HashStable> -for traits::VtableClosureData<'gcx, N> where N: HashStable> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx, N> HashStable> for traits::VtableClosureData<'gcx, N> +where + N: HashStable>, +{ + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let traits::VtableClosureData { closure_def_id, substs, @@ -1028,25 +1079,30 @@ for traits::VtableClosureData<'gcx, N> where N: HashStable HashStable> -for traits::VtableFnPointerData<'gcx, N> where N: HashStable> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { - let traits::VtableFnPointerData { - fn_ty, - ref nested, - } = *self; +impl<'a, 'gcx, N> HashStable> for traits::VtableFnPointerData<'gcx, N> +where + N: HashStable>, +{ + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { + let traits::VtableFnPointerData { fn_ty, ref nested } = *self; fn_ty.hash_stable(hcx, hasher); nested.hash_stable(hcx, hasher); } } -impl<'a, 'gcx, N> HashStable> -for traits::VtableGeneratorData<'gcx, N> where N: HashStable> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx, N> HashStable> for traits::VtableGeneratorData<'gcx, N> +where + N: HashStable>, +{ + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut 
StableHasher, + ) { let traits::VtableGeneratorData { generator_def_id, substs, @@ -1058,11 +1114,15 @@ for traits::VtableGeneratorData<'gcx, N> where N: HashStable HashStable> -for traits::VtableTraitAliasData<'gcx, N> where N: HashStable> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx, N> HashStable> for traits::VtableTraitAliasData<'gcx, N> +where + N: HashStable>, +{ + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let traits::VtableTraitAliasData { alias_def_id, substs, @@ -1114,9 +1174,11 @@ impl_stable_hash_for!(enum infer::canonical::Certainty { }); impl<'a, 'tcx> HashStable> for traits::WhereClause<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { use traits::WhereClause::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -1130,9 +1192,11 @@ impl<'a, 'tcx> HashStable> for traits::WhereClause<'tcx } impl<'a, 'tcx> HashStable> for traits::WellFormed<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { use traits::WellFormed::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -1144,9 +1208,11 @@ impl<'a, 'tcx> HashStable> for traits::WellFormed<'tcx> } impl<'a, 'tcx> HashStable> for traits::FromEnv<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { use traits::FromEnv::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -1158,9 +1224,11 @@ impl<'a, 'tcx> HashStable> for traits::FromEnv<'tcx> { } impl<'a, 'tcx> HashStable> for traits::DomainGoal<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { use traits::DomainGoal::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -1174,9 +1242,11 @@ impl<'a, 'tcx> HashStable> for traits::DomainGoal<'tcx> } impl<'a, 'tcx> HashStable> for traits::Goal<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { use traits::GoalKind::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -1184,7 +1254,7 @@ impl<'a, 'tcx> HashStable> for traits::Goal<'tcx> { Implies(hypotheses, goal) => { hypotheses.hash_stable(hcx, hasher); goal.hash_stable(hcx, hasher); - }, + } And(goal1, goal2) => { goal1.hash_stable(hcx, hasher); goal2.hash_stable(hcx, hasher); @@ -1194,12 +1264,12 @@ impl<'a, 'tcx> HashStable> for traits::Goal<'tcx> { Quantified(quantifier, goal) => { quantifier.hash_stable(hcx, hasher); goal.hash_stable(hcx, hasher); - }, + } Subtype(a, b) => { a.hash_stable(hcx, hasher); b.hash_stable(hcx, hasher); } - CannotProve => { }, + CannotProve => {} } } } @@ -1217,9 +1287,11 @@ impl_stable_hash_for!(enum traits::ProgramClauseCategory { }); impl<'a, 'tcx> HashStable> for traits::Clause<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { use 
traits::Clause::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -1253,9 +1325,11 @@ impl_stable_hash_for!( ); impl<'a, 'gcx> HashStable> for ty::UserTypeAnnotation<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { ty::UserTypeAnnotation::Ty(ref ty) => { @@ -1271,9 +1345,11 @@ impl<'a, 'gcx> HashStable> for ty::UserTypeAnnotation<' impl<'a> HashStable> for ty::UserTypeAnnotationIndex { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.index().hash_stable(hcx, hasher); } } diff --git a/src/librustc/ich/mod.rs b/src/librustc/ich/mod.rs index a3a4c54178057..36d6b8c841d6b 100644 --- a/src/librustc/ich/mod.rs +++ b/src/librustc/ich/mod.rs @@ -1,9 +1,10 @@ //! ICH - Incremental Compilation Hash -crate use rustc_data_structures::fingerprint::Fingerprint; pub use self::caching_source_map_view::CachingSourceMapView; -pub use self::hcx::{StableHashingContextProvider, StableHashingContext, NodeIdHashingMode, - hash_stable_trait_impls}; +pub use self::hcx::{ + hash_stable_trait_impls, NodeIdHashingMode, StableHashingContext, StableHashingContextProvider, +}; +crate use rustc_data_structures::fingerprint::Fingerprint; mod caching_source_map_view; mod hcx; @@ -11,8 +12,8 @@ mod impls_cstore; mod impls_hir; mod impls_mir; mod impls_misc; -mod impls_ty; mod impls_syntax; +mod impls_ty; pub const ATTR_DIRTY: &str = "rustc_dirty"; pub const ATTR_CLEAN: &str = "rustc_clean"; diff --git a/src/librustc/infer/at.rs b/src/librustc/infer/at.rs index 328d518ca66aa..64415bc919a30 100644 --- a/src/librustc/infer/at.rs +++ b/src/librustc/infer/at.rs @@ -43,48 +43,47 @@ pub struct Trace<'a, 'gcx: 'tcx, 'tcx: 'a> { impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { #[inline] - pub fn at(&'a self, - cause: &'a ObligationCause<'tcx>, - param_env: ty::ParamEnv<'tcx>) - -> At<'a, 'gcx, 'tcx> - { - At { infcx: self, cause, param_env } + pub fn at( + &'a self, + cause: &'a ObligationCause<'tcx>, + param_env: ty::ParamEnv<'tcx>, + ) -> At<'a, 'gcx, 'tcx> { + At { + infcx: self, + cause, + param_env, + } } } pub trait ToTrace<'tcx>: Relate<'tcx> + Copy { - fn to_trace(cause: &ObligationCause<'tcx>, - a_is_expected: bool, - a: Self, - b: Self) - -> TypeTrace<'tcx>; + fn to_trace( + cause: &ObligationCause<'tcx>, + a_is_expected: bool, + a: Self, + b: Self, + ) -> TypeTrace<'tcx>; } impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { /// Hacky routine for equating two impl headers in coherence. 
- pub fn eq_impl_headers(self, - expected: &ty::ImplHeader<'tcx>, - actual: &ty::ImplHeader<'tcx>) - -> InferResult<'tcx, ()> - { + pub fn eq_impl_headers( + self, + expected: &ty::ImplHeader<'tcx>, + actual: &ty::ImplHeader<'tcx>, + ) -> InferResult<'tcx, ()> { debug!("eq_impl_header({:?} = {:?})", expected, actual); match (expected.trait_ref, actual.trait_ref) { - (Some(a_ref), Some(b_ref)) => - self.eq(a_ref, b_ref), - (None, None) => - self.eq(expected.self_ty, actual.self_ty), - _ => - bug!("mk_eq_impl_headers given mismatched impl kinds"), + (Some(a_ref), Some(b_ref)) => self.eq(a_ref, b_ref), + (None, None) => self.eq(expected.self_ty, actual.self_ty), + _ => bug!("mk_eq_impl_headers given mismatched impl kinds"), } } /// Make `a <: b` where `a` may or may not be expected - pub fn sub_exp(self, - a_is_expected: bool, - a: T, - b: T) - -> InferResult<'tcx, ()> - where T: ToTrace<'tcx> + pub fn sub_exp(self, a_is_expected: bool, a: T, b: T) -> InferResult<'tcx, ()> + where + T: ToTrace<'tcx>, { self.trace_exp(a_is_expected, a, b).sub(&a, &b) } @@ -93,53 +92,40 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { /// call like `foo(x)`, where `foo: fn(i32)`, you might have /// `sup(i32, x)`, since the "expected" type is the type that /// appears in the signature. - pub fn sup(self, - expected: T, - actual: T) - -> InferResult<'tcx, ()> - where T: ToTrace<'tcx> + pub fn sup(self, expected: T, actual: T) -> InferResult<'tcx, ()> + where + T: ToTrace<'tcx>, { self.sub_exp(false, actual, expected) } /// Make `expected <: actual` - pub fn sub(self, - expected: T, - actual: T) - -> InferResult<'tcx, ()> - where T: ToTrace<'tcx> + pub fn sub(self, expected: T, actual: T) -> InferResult<'tcx, ()> + where + T: ToTrace<'tcx>, { self.sub_exp(true, expected, actual) } /// Make `expected <: actual` - pub fn eq_exp(self, - a_is_expected: bool, - a: T, - b: T) - -> InferResult<'tcx, ()> - where T: ToTrace<'tcx> + pub fn eq_exp(self, a_is_expected: bool, a: T, b: T) -> InferResult<'tcx, ()> + where + T: ToTrace<'tcx>, { self.trace_exp(a_is_expected, a, b).eq(&a, &b) } /// Make `expected <: actual` - pub fn eq(self, - expected: T, - actual: T) - -> InferResult<'tcx, ()> - where T: ToTrace<'tcx> + pub fn eq(self, expected: T, actual: T) -> InferResult<'tcx, ()> + where + T: ToTrace<'tcx>, { self.trace(expected, actual).eq(&expected, &actual) } - pub fn relate( - self, - expected: T, - variance: ty::Variance, - actual: T, - ) -> InferResult<'tcx, ()> - where T: ToTrace<'tcx> + pub fn relate(self, expected: T, variance: ty::Variance, actual: T) -> InferResult<'tcx, ()> + where + T: ToTrace<'tcx>, { match variance { ty::Variance::Covariant => self.sub(expected, actual), @@ -160,11 +146,9 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { /// this can result in an error (e.g., if asked to compute LUB of /// u32 and i32), it is meaningful to call one of them the /// "expected type". - pub fn lub(self, - expected: T, - actual: T) - -> InferResult<'tcx, T> - where T: ToTrace<'tcx> + pub fn lub(self, expected: T, actual: T) -> InferResult<'tcx, T> + where + T: ToTrace<'tcx>, { self.trace(expected, actual).lub(&expected, &actual) } @@ -172,11 +156,9 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { /// Compute the greatest-lower-bound, or mutual subtype, of two /// values. As with `lub` order doesn't matter, except for error /// cases. 
- pub fn glb(self, - expected: T, - actual: T) - -> InferResult<'tcx, T> - where T: ToTrace<'tcx> + pub fn glb(self, expected: T, actual: T) -> InferResult<'tcx, T> + where + T: ToTrace<'tcx>, { self.trace(expected, actual).glb(&expected, &actual) } @@ -185,11 +167,9 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { /// error-reporting, but doesn't actually perform any operation /// yet (this is useful when you want to set the trace using /// distinct values from those you wish to operate upon). - pub fn trace(self, - expected: T, - actual: T) - -> Trace<'a, 'gcx, 'tcx> - where T: ToTrace<'tcx> + pub fn trace(self, expected: T, actual: T) -> Trace<'a, 'gcx, 'tcx> + where + T: ToTrace<'tcx>, { self.trace_exp(true, expected, actual) } @@ -197,15 +177,16 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { /// Like `trace`, but the expected value is determined by the /// boolean argument (if true, then the first argument `a` is the /// "expected" value). - pub fn trace_exp(self, - a_is_expected: bool, - a: T, - b: T) - -> Trace<'a, 'gcx, 'tcx> - where T: ToTrace<'tcx> + pub fn trace_exp(self, a_is_expected: bool, a: T, b: T) -> Trace<'a, 'gcx, 'tcx> + where + T: ToTrace<'tcx>, { let trace = ToTrace::to_trace(self.cause, a_is_expected, a, b); - Trace { at: self, trace: trace, a_is_expected } + Trace { + at: self, + trace: trace, + a_is_expected, + } } } @@ -213,125 +194,149 @@ impl<'a, 'gcx, 'tcx> Trace<'a, 'gcx, 'tcx> { /// Make `a <: b` where `a` may or may not be expected (if /// `a_is_expected` is true, then `a` is expected). /// Make `expected <: actual` - pub fn sub(self, - a: &T, - b: &T) - -> InferResult<'tcx, ()> - where T: Relate<'tcx> + pub fn sub(self, a: &T, b: &T) -> InferResult<'tcx, ()> + where + T: Relate<'tcx>, { debug!("sub({:?} <: {:?})", a, b); - let Trace { at, trace, a_is_expected } = self; + let Trace { + at, + trace, + a_is_expected, + } = self; at.infcx.commit_if_ok(|_| { let mut fields = at.infcx.combine_fields(trace, at.param_env); - fields.sub(a_is_expected) - .relate(a, b) - .map(move |_| InferOk { value: (), obligations: fields.obligations }) + fields + .sub(a_is_expected) + .relate(a, b) + .map(move |_| InferOk { + value: (), + obligations: fields.obligations, + }) }) } /// Make `a == b`; the expectation is set by the call to /// `trace()`. 
- pub fn eq(self, - a: &T, - b: &T) - -> InferResult<'tcx, ()> - where T: Relate<'tcx> + pub fn eq(self, a: &T, b: &T) -> InferResult<'tcx, ()> + where + T: Relate<'tcx>, { debug!("eq({:?} == {:?})", a, b); - let Trace { at, trace, a_is_expected } = self; + let Trace { + at, + trace, + a_is_expected, + } = self; at.infcx.commit_if_ok(|_| { let mut fields = at.infcx.combine_fields(trace, at.param_env); - fields.equate(a_is_expected) - .relate(a, b) - .map(move |_| InferOk { value: (), obligations: fields.obligations }) + fields + .equate(a_is_expected) + .relate(a, b) + .map(move |_| InferOk { + value: (), + obligations: fields.obligations, + }) }) } - pub fn lub(self, - a: &T, - b: &T) - -> InferResult<'tcx, T> - where T: Relate<'tcx> + pub fn lub(self, a: &T, b: &T) -> InferResult<'tcx, T> + where + T: Relate<'tcx>, { debug!("lub({:?} \\/ {:?})", a, b); - let Trace { at, trace, a_is_expected } = self; + let Trace { + at, + trace, + a_is_expected, + } = self; at.infcx.commit_if_ok(|_| { let mut fields = at.infcx.combine_fields(trace, at.param_env); - fields.lub(a_is_expected) - .relate(a, b) - .map(move |t| InferOk { value: t, obligations: fields.obligations }) + fields + .lub(a_is_expected) + .relate(a, b) + .map(move |t| InferOk { + value: t, + obligations: fields.obligations, + }) }) } - pub fn glb(self, - a: &T, - b: &T) - -> InferResult<'tcx, T> - where T: Relate<'tcx> + pub fn glb(self, a: &T, b: &T) -> InferResult<'tcx, T> + where + T: Relate<'tcx>, { debug!("glb({:?} /\\ {:?})", a, b); - let Trace { at, trace, a_is_expected } = self; + let Trace { + at, + trace, + a_is_expected, + } = self; at.infcx.commit_if_ok(|_| { let mut fields = at.infcx.combine_fields(trace, at.param_env); - fields.glb(a_is_expected) - .relate(a, b) - .map(move |t| InferOk { value: t, obligations: fields.obligations }) + fields + .glb(a_is_expected) + .relate(a, b) + .map(move |t| InferOk { + value: t, + obligations: fields.obligations, + }) }) } } impl<'tcx> ToTrace<'tcx> for Ty<'tcx> { - fn to_trace(cause: &ObligationCause<'tcx>, - a_is_expected: bool, - a: Self, - b: Self) - -> TypeTrace<'tcx> - { + fn to_trace( + cause: &ObligationCause<'tcx>, + a_is_expected: bool, + a: Self, + b: Self, + ) -> TypeTrace<'tcx> { TypeTrace { cause: cause.clone(), - values: Types(ExpectedFound::new(a_is_expected, a, b)) + values: Types(ExpectedFound::new(a_is_expected, a, b)), } } } impl<'tcx> ToTrace<'tcx> for ty::Region<'tcx> { - fn to_trace(cause: &ObligationCause<'tcx>, - a_is_expected: bool, - a: Self, - b: Self) - -> TypeTrace<'tcx> - { + fn to_trace( + cause: &ObligationCause<'tcx>, + a_is_expected: bool, + a: Self, + b: Self, + ) -> TypeTrace<'tcx> { TypeTrace { cause: cause.clone(), - values: Regions(ExpectedFound::new(a_is_expected, a, b)) + values: Regions(ExpectedFound::new(a_is_expected, a, b)), } } } impl<'tcx> ToTrace<'tcx> for ty::TraitRef<'tcx> { - fn to_trace(cause: &ObligationCause<'tcx>, - a_is_expected: bool, - a: Self, - b: Self) - -> TypeTrace<'tcx> - { + fn to_trace( + cause: &ObligationCause<'tcx>, + a_is_expected: bool, + a: Self, + b: Self, + ) -> TypeTrace<'tcx> { TypeTrace { cause: cause.clone(), - values: TraitRefs(ExpectedFound::new(a_is_expected, a, b)) + values: TraitRefs(ExpectedFound::new(a_is_expected, a, b)), } } } impl<'tcx> ToTrace<'tcx> for ty::PolyTraitRef<'tcx> { - fn to_trace(cause: &ObligationCause<'tcx>, - a_is_expected: bool, - a: Self, - b: Self) - -> TypeTrace<'tcx> - { + fn to_trace( + cause: &ObligationCause<'tcx>, + a_is_expected: bool, + a: Self, + b: Self, + ) -> 
TypeTrace<'tcx> { TypeTrace { cause: cause.clone(), - values: PolyTraitRefs(ExpectedFound::new(a_is_expected, a, b)) + values: PolyTraitRefs(ExpectedFound::new(a_is_expected, a, b)), } } } diff --git a/src/librustc/infer/canonical/canonicalizer.rs b/src/librustc/infer/canonical/canonicalizer.rs index 408cba42ae04b..948fb23529349 100644 --- a/src/librustc/infer/canonical/canonicalizer.rs +++ b/src/librustc/infer/canonical/canonicalizer.rs @@ -280,7 +280,8 @@ impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> } fn fold_binder(&mut self, t: &ty::Binder) -> ty::Binder - where T: TypeFoldable<'tcx> + where + T: TypeFoldable<'tcx>, { self.binder_index.shift_in(1); let t = t.super_fold_with(self); @@ -299,7 +300,8 @@ impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> } ty::ReVar(vid) => { - let r = self.infcx + let r = self + .infcx .unwrap() .borrow_region_constraints() .opportunistic_resolve_var(self.tcx, vid); @@ -318,7 +320,8 @@ impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> | ty::ReScope(_) | ty::RePlaceholder(..) | ty::ReEmpty - | ty::ReErased => self.canonicalize_region_mode + | ty::ReErased => self + .canonicalize_region_mode .canonicalize_free_region(self, r), ty::ReClosureBound(..) => { @@ -347,9 +350,9 @@ impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> } self.canonicalize_ty_var( CanonicalVarInfo { - kind: CanonicalVarKind::Ty(CanonicalTyVarKind::General(ui)) + kind: CanonicalVarKind::Ty(CanonicalTyVarKind::General(ui)), }, - t + t, ) } } @@ -357,16 +360,16 @@ impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> ty::Infer(ty::IntVar(_)) => self.canonicalize_ty_var( CanonicalVarInfo { - kind: CanonicalVarKind::Ty(CanonicalTyVarKind::Int) + kind: CanonicalVarKind::Ty(CanonicalTyVarKind::Int), }, - t + t, ), ty::Infer(ty::FloatVar(_)) => self.canonicalize_ty_var( CanonicalVarInfo { - kind: CanonicalVarKind::Ty(CanonicalTyVarKind::Float) + kind: CanonicalVarKind::Ty(CanonicalTyVarKind::Float), }, - t + t, ), ty::Infer(ty::FreshTy(_)) @@ -377,9 +380,9 @@ impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> ty::Placeholder(placeholder) => self.canonicalize_ty_var( CanonicalVarInfo { - kind: CanonicalVarKind::PlaceholderTy(placeholder) + kind: CanonicalVarKind::PlaceholderTy(placeholder), }, - t + t, ), ty::Bound(debruijn, _) => { @@ -443,21 +446,18 @@ impl<'cx, 'gcx, 'tcx> Canonicalizer<'cx, 'gcx, 'tcx> { TypeFlags::HAS_FREE_REGIONS | // `HAS_RE_PLACEHOLDER` implies `HAS_FREE_REGIONS` TypeFlags::HAS_TY_PLACEHOLDER } else { - TypeFlags::KEEP_IN_LOCAL_TCX | - TypeFlags::HAS_RE_PLACEHOLDER | - TypeFlags::HAS_TY_PLACEHOLDER + TypeFlags::KEEP_IN_LOCAL_TCX + | TypeFlags::HAS_RE_PLACEHOLDER + | TypeFlags::HAS_TY_PLACEHOLDER }; let gcx = tcx.global_tcx(); // Fast path: nothing that needs to be canonicalized. 
if !value.has_type_flags(needs_canonical_flags) { - let out_value = gcx.lift(value).unwrap_or_else(|| { - bug!( - "failed to lift `{:?}` (nothing to canonicalize)", - value - ) - }); + let out_value = gcx + .lift(value) + .unwrap_or_else(|| bug!("failed to lift `{:?}` (nothing to canonicalize)", value)); let canon_value = Canonical { max_universe: ty::UniverseIndex::ROOT, variables: List::empty(), @@ -603,10 +603,7 @@ impl<'cx, 'gcx, 'tcx> Canonicalizer<'cx, 'gcx, 'tcx> { r: ty::Region<'tcx>, ) -> ty::Region<'tcx> { let var = self.canonical_var(info, r.into()); - let region = ty::ReLateBound( - self.binder_index, - ty::BoundRegion::BrAnon(var.as_u32()) - ); + let region = ty::ReLateBound(self.binder_index, ty::BoundRegion::BrAnon(var.as_u32())); self.tcx().mk_region(region) } diff --git a/src/librustc/infer/canonical/mod.rs b/src/librustc/infer/canonical/mod.rs index eaf72f5a68710..5bb7dd1cc48fc 100644 --- a/src/librustc/infer/canonical/mod.rs +++ b/src/librustc/infer/canonical/mod.rs @@ -145,7 +145,7 @@ impl CanonicalVarKind { CanonicalVarKind::Ty(kind) => match kind { CanonicalTyVarKind::General(ui) => ui, CanonicalTyVarKind::Float | CanonicalTyVarKind::Int => ty::UniverseIndex::ROOT, - } + }, CanonicalVarKind::PlaceholderTy(placeholder) => placeholder.universe, CanonicalVarKind::Region(ui) => ui, @@ -352,12 +352,10 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { match cv_info.kind { CanonicalVarKind::Ty(ty_kind) => { let ty = match ty_kind { - CanonicalTyVarKind::General(ui) => { - self.next_ty_var_in_universe( - TypeVariableOrigin::MiscVariable(span), - universe_map(ui) - ) - } + CanonicalTyVarKind::General(ui) => self.next_ty_var_in_universe( + TypeVariableOrigin::MiscVariable(span), + universe_map(ui), + ), CanonicalTyVarKind::Int => self.tcx.mk_int_var(self.next_int_var_id()), @@ -375,10 +373,12 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { self.tcx.mk_ty(ty::Placeholder(placeholder_mapped)).into() } - CanonicalVarKind::Region(ui) => self.next_region_var_in_universe( - RegionVariableOrigin::MiscVariable(span), - universe_map(ui), - ).into(), + CanonicalVarKind::Region(ui) => self + .next_region_var_in_universe( + RegionVariableOrigin::MiscVariable(span), + universe_map(ui), + ) + .into(), CanonicalVarKind::PlaceholderRegion(ty::PlaceholderRegion { universe, name }) => { let universe_mapped = universe_map(universe); @@ -386,7 +386,9 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { universe: universe_mapped, name, }; - self.tcx.mk_region(ty::RePlaceholder(placeholder_mapped)).into() + self.tcx + .mk_region(ty::RePlaceholder(placeholder_mapped)) + .into() } } } @@ -434,17 +436,19 @@ impl<'tcx> CanonicalVarValues<'tcx> { use ty::subst::UnpackedKind; CanonicalVarValues { - var_values: self.var_values.iter() + var_values: self + .var_values + .iter() .zip(0..) .map(|(kind, i)| match kind.unpack() { - UnpackedKind::Type(..) => tcx.mk_ty( - ty::Bound(ty::INNERMOST, ty::BoundVar::from_u32(i).into()) - ).into(), - UnpackedKind::Lifetime(..) => tcx.mk_region( - ty::ReLateBound(ty::INNERMOST, ty::BoundRegion::BrAnon(i)) - ).into(), + UnpackedKind::Type(..) => tcx + .mk_ty(ty::Bound(ty::INNERMOST, ty::BoundVar::from_u32(i).into())) + .into(), + UnpackedKind::Lifetime(..) 
=> tcx + .mk_region(ty::ReLateBound(ty::INNERMOST, ty::BoundRegion::BrAnon(i))) + .into(), }) - .collect() + .collect(), } } } diff --git a/src/librustc/infer/canonical/query_response.rs b/src/librustc/infer/canonical/query_response.rs index 7f113f07276d8..2e35da5401031 100644 --- a/src/librustc/infer/canonical/query_response.rs +++ b/src/librustc/infer/canonical/query_response.rs @@ -9,8 +9,8 @@ use infer::canonical::substitute::substitute_value; use infer::canonical::{ - Canonical, CanonicalVarValues, CanonicalizedQueryResponse, Certainty, - OriginalQueryValues, QueryRegionConstraint, QueryResponse, + Canonical, CanonicalVarValues, CanonicalizedQueryResponse, Certainty, OriginalQueryValues, + QueryRegionConstraint, QueryResponse, }; use infer::region_constraints::{Constraint, RegionConstraintData}; use infer::InferCtxtBuilder; @@ -48,8 +48,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxtBuilder<'cx, 'gcx, 'tcx> { pub fn enter_canonical_trait_query( &'tcx mut self, canonical_key: &Canonical<'tcx, K>, - operation: impl FnOnce(&InferCtxt<'_, 'gcx, 'tcx>, &mut dyn TraitEngine<'tcx>, K) - -> Fallible, + operation: impl FnOnce(&InferCtxt<'_, 'gcx, 'tcx>, &mut dyn TraitEngine<'tcx>, K) -> Fallible, ) -> Fallible> where K: TypeFoldable<'tcx>, @@ -64,7 +63,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxtBuilder<'cx, 'gcx, 'tcx> { infcx.make_canonicalized_query_response( canonical_inference_vars, value, - &mut *fulfill_cx + &mut *fulfill_cx, ) }, ) @@ -123,7 +122,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { pub fn make_query_response_ignoring_pending_obligations( &self, inference_vars: CanonicalVarValues<'tcx>, - answer: T + answer: T, ) -> Canonical<'gcx, QueryResponse<'gcx, >::Lifted>> where T: Debug + Lift<'gcx> + TypeFoldable<'tcx>, @@ -157,7 +156,10 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { ); // Select everything, returning errors. - let true_errors = fulfill_cx.select_where_possible(self).err().unwrap_or_else(Vec::new); + let true_errors = fulfill_cx + .select_where_possible(self) + .err() + .unwrap_or_else(Vec::new); debug!("true_errors = {:#?}", true_errors); if !true_errors.is_empty() { @@ -167,7 +169,10 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { } // Anything left unselected *now* must be an ambiguity. - let ambig_errors = fulfill_cx.select_all_or_error(self).err().unwrap_or_else(Vec::new); + let ambig_errors = fulfill_cx + .select_all_or_error(self) + .err() + .unwrap_or_else(Vec::new); debug!("ambig_errors = {:#?}", ambig_errors); let region_obligations = self.take_registered_region_obligations(); @@ -327,19 +332,23 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { // ...also include the other query region constraints from the query. output_query_region_constraints.extend( - query_response.value.region_constraints.iter().filter_map(|r_c| { - let r_c = substitute_value(self.tcx, &result_subst, r_c); - - // Screen out `'a: 'a` cases -- we skip the binder here but - // only care the inner values to one another, so they are still at - // consistent binding levels. - let &ty::OutlivesPredicate(k1, r2) = r_c.skip_binder(); - if k1 != r2.into() { - Some(r_c) - } else { - None - } - }) + query_response + .value + .region_constraints + .iter() + .filter_map(|r_c| { + let r_c = substitute_value(self.tcx, &result_subst, r_c); + + // Screen out `'a: 'a` cases -- we skip the binder here but + // only care the inner values to one another, so they are still at + // consistent binding levels. 
+ let &ty::OutlivesPredicate(k1, r2) = r_c.skip_binder(); + if k1 != r2.into() { + Some(r_c) + } else { + None + } + }), ); let user_result: R = @@ -379,13 +388,14 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { let result_subst = self.query_response_substitution_guess(cause, original_values, query_response); - let obligations = self.unify_query_response_substitution_guess( - cause, - param_env, - original_values, - &result_subst, - query_response, - )? + let obligations = self + .unify_query_response_substitution_guess( + cause, + param_env, + original_values, + &result_subst, + query_response, + )? .into_obligations(); Ok(InferOk { @@ -559,16 +569,12 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { param_env, match k1.unpack() { UnpackedKind::Lifetime(r1) => ty::Predicate::RegionOutlives( - ty::Binder::bind( - ty::OutlivesPredicate(r1, r2) - ) - ), - UnpackedKind::Type(t1) => ty::Predicate::TypeOutlives( - ty::Binder::bind( - ty::OutlivesPredicate(t1, r2) - ) + ty::Binder::bind(ty::OutlivesPredicate(r1, r2)), ), - } + UnpackedKind::Type(t1) => ty::Predicate::TypeOutlives(ty::Binder::bind( + ty::OutlivesPredicate(t1, r2), + )), + }, ) }) } @@ -652,7 +658,7 @@ pub fn make_query_outlives<'tcx>( .chain( outlives_obligations .map(|(ty, r)| ty::OutlivesPredicate(ty.into(), r)) - .map(ty::Binder::dummy) // no bound vars in the code above + .map(ty::Binder::dummy), // no bound vars in the code above ) .collect(); diff --git a/src/librustc/infer/canonical/substitute.rs b/src/librustc/infer/canonical/substitute.rs index d3ed00481dcee..381d5f7d237ad 100644 --- a/src/librustc/infer/canonical/substitute.rs +++ b/src/librustc/infer/canonical/substitute.rs @@ -56,18 +56,15 @@ where if var_values.var_values.is_empty() { value.clone() } else { - let fld_r = |br: ty::BoundRegion| { - match var_values.var_values[br.assert_bound_var()].unpack() { + let fld_r = + |br: ty::BoundRegion| match var_values.var_values[br.assert_bound_var()].unpack() { UnpackedKind::Lifetime(l) => l, r => bug!("{:?} is a region but value is {:?}", br, r), - } - }; + }; - let fld_t = |bound_ty: ty::BoundTy| { - match var_values.var_values[bound_ty.var].unpack() { - UnpackedKind::Type(ty) => ty, - r => bug!("{:?} is a type but value is {:?}", bound_ty, r), - } + let fld_t = |bound_ty: ty::BoundTy| match var_values.var_values[bound_ty.var].unpack() { + UnpackedKind::Type(ty) => ty, + r => bug!("{:?} is a type but value is {:?}", bound_ty, r), }; tcx.replace_escaping_bound_vars(value, fld_r, fld_t).0 diff --git a/src/librustc/infer/combine.rs b/src/librustc/infer/combine.rs index ed251d81e625e..6f3f2472089fd 100644 --- a/src/librustc/infer/combine.rs +++ b/src/librustc/infer/combine.rs @@ -24,24 +24,24 @@ use super::equate::Equate; use super::glb::Glb; -use super::{InferCtxt, MiscVariable, TypeTrace}; use super::lub::Lub; use super::sub::Sub; use super::type_variable::TypeVariableValue; +use super::{InferCtxt, MiscVariable, TypeTrace}; use hir::def_id::DefId; -use ty::{IntType, UintType}; -use ty::{self, Ty, TyCtxt}; +use traits::{Obligation, PredicateObligations}; use ty::error::TypeError; use ty::relate::{self, Relate, RelateResult, TypeRelation}; use ty::subst::Substs; -use traits::{Obligation, PredicateObligations}; +use ty::{self, Ty, TyCtxt}; +use ty::{IntType, UintType}; use syntax::ast; use syntax_pos::Span; #[derive(Clone)] -pub struct CombineFields<'infcx, 'gcx: 'infcx+'tcx, 'tcx: 'infcx> { +pub struct CombineFields<'infcx, 'gcx: 'infcx + 'tcx, 'tcx: 'infcx> { pub infcx: &'infcx InferCtxt<'infcx, 'gcx, 'tcx>, pub 
trace: TypeTrace<'tcx>, pub cause: Option, @@ -51,16 +51,20 @@ pub struct CombineFields<'infcx, 'gcx: 'infcx+'tcx, 'tcx: 'infcx> { #[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] pub enum RelationDir { - SubtypeOf, SupertypeOf, EqTo + SubtypeOf, + SupertypeOf, + EqTo, } impl<'infcx, 'gcx, 'tcx> InferCtxt<'infcx, 'gcx, 'tcx> { - pub fn super_combine_tys(&self, - relation: &mut R, - a: Ty<'tcx>, - b: Ty<'tcx>) - -> RelateResult<'tcx, Ty<'tcx>> - where R: TypeRelation<'infcx, 'gcx, 'tcx> + pub fn super_combine_tys( + &self, + relation: &mut R, + a: Ty<'tcx>, + b: Ty<'tcx>, + ) -> RelateResult<'tcx, Ty<'tcx>> + where + R: TypeRelation<'infcx, 'gcx, 'tcx>, { let a_is_expected = relation.a_is_expected(); @@ -102,24 +106,20 @@ impl<'infcx, 'gcx, 'tcx> InferCtxt<'infcx, 'gcx, 'tcx> { } // All other cases of inference are errors - (&ty::Infer(_), _) | - (_, &ty::Infer(_)) => { - Err(TypeError::Sorts(ty::relate::expected_found(relation, &a, &b))) - } + (&ty::Infer(_), _) | (_, &ty::Infer(_)) => Err(TypeError::Sorts( + ty::relate::expected_found(relation, &a, &b), + )), - - _ => { - ty::relate::super_relate_tys(relation, a, b) - } + _ => ty::relate::super_relate_tys(relation, a, b), } } - fn unify_integral_variable(&self, - vid_is_expected: bool, - vid: ty::IntVid, - val: ty::IntVarValue) - -> RelateResult<'tcx, Ty<'tcx>> - { + fn unify_integral_variable( + &self, + vid_is_expected: bool, + vid: ty::IntVid, + val: ty::IntVarValue, + ) -> RelateResult<'tcx, Ty<'tcx>> { self.int_unification_table .borrow_mut() .unify_var_value(vid, Some(val)) @@ -130,12 +130,12 @@ impl<'infcx, 'gcx, 'tcx> InferCtxt<'infcx, 'gcx, 'tcx> { } } - fn unify_float_variable(&self, - vid_is_expected: bool, - vid: ty::FloatVid, - val: ast::FloatTy) - -> RelateResult<'tcx, Ty<'tcx>> - { + fn unify_float_variable( + &self, + vid_is_expected: bool, + vid: ty::FloatVid, + val: ast::FloatTy, + ) -> RelateResult<'tcx, Ty<'tcx>> { self.float_unification_table .borrow_mut() .unify_var_value(vid, Some(ty::FloatVarValue(val))) @@ -174,19 +174,27 @@ impl<'infcx, 'gcx, 'tcx> CombineFields<'infcx, 'gcx, 'tcx> { /// will first instantiate `b_vid` with a *generalized* version /// of `a_ty`. Generalization introduces other inference /// variables wherever subtyping could occur. - pub fn instantiate(&mut self, - a_ty: Ty<'tcx>, - dir: RelationDir, - b_vid: ty::TyVid, - a_is_expected: bool) - -> RelateResult<'tcx, ()> - { + pub fn instantiate( + &mut self, + a_ty: Ty<'tcx>, + dir: RelationDir, + b_vid: ty::TyVid, + a_is_expected: bool, + ) -> RelateResult<'tcx, ()> { use self::RelationDir::*; // Get the actual variable that b_vid has been inferred to - debug_assert!(self.infcx.type_variables.borrow_mut().probe(b_vid).is_unknown()); + debug_assert!(self + .infcx + .type_variables + .borrow_mut() + .probe(b_vid) + .is_unknown()); - debug!("instantiate(a_ty={:?} dir={:?} b_vid={:?})", a_ty, dir, b_vid); + debug!( + "instantiate(a_ty={:?} dir={:?} b_vid={:?})", + a_ty, dir, b_vid + ); // Generalize type of `a_ty` appropriately depending on the // direction. As an example, assume: @@ -200,14 +208,21 @@ impl<'infcx, 'gcx, 'tcx> CombineFields<'infcx, 'gcx, 'tcx> { // variables. (Down below, we will relate `a_ty <: b_ty`, // adding constraints like `'x: '?2` and `?1 <: ?3`.) 
let Generalization { ty: b_ty, needs_wf } = self.generalize(a_ty, b_vid, dir)?; - debug!("instantiate(a_ty={:?}, dir={:?}, b_vid={:?}, generalized b_ty={:?})", - a_ty, dir, b_vid, b_ty); - self.infcx.type_variables.borrow_mut().instantiate(b_vid, b_ty); + debug!( + "instantiate(a_ty={:?}, dir={:?}, b_vid={:?}, generalized b_ty={:?})", + a_ty, dir, b_vid, b_ty + ); + self.infcx + .type_variables + .borrow_mut() + .instantiate(b_vid, b_ty); if needs_wf { - self.obligations.push(Obligation::new(self.trace.cause.clone(), - self.param_env, - ty::Predicate::WellFormed(b_ty))); + self.obligations.push(Obligation::new( + self.trace.cause.clone(), + self.param_env, + ty::Predicate::WellFormed(b_ty), + )); } // Finally, relate `b_ty` to `a_ty`, as described in previous comment. @@ -219,8 +234,10 @@ impl<'infcx, 'gcx, 'tcx> CombineFields<'infcx, 'gcx, 'tcx> { match dir { EqTo => self.equate(a_is_expected).relate(&a_ty, &b_ty), SubtypeOf => self.sub(a_is_expected).relate(&a_ty, &b_ty), - SupertypeOf => self.sub(a_is_expected).relate_with_variance( - ty::Contravariant, &a_ty, &b_ty), + SupertypeOf => { + self.sub(a_is_expected) + .relate_with_variance(ty::Contravariant, &a_ty, &b_ty) + } }?; Ok(()) @@ -235,13 +252,16 @@ impl<'infcx, 'gcx, 'tcx> CombineFields<'infcx, 'gcx, 'tcx> { /// Preconditions: /// /// - `for_vid` is a "root vid" - fn generalize(&self, - ty: Ty<'tcx>, - for_vid: ty::TyVid, - dir: RelationDir) - -> RelateResult<'tcx, Generalization<'tcx>> - { - debug!("generalize(ty={:?}, for_vid={:?}, dir={:?}", ty, for_vid, dir); + fn generalize( + &self, + ty: Ty<'tcx>, + for_vid: ty::TyVid, + dir: RelationDir, + ) -> RelateResult<'tcx, Generalization<'tcx>> { + debug!( + "generalize(ty={:?}, for_vid={:?}, dir={:?}", + ty, for_vid, dir + ); // Determine the ambient variance within which `ty` appears. // The surrounding equation is: // @@ -277,7 +297,7 @@ impl<'infcx, 'gcx, 'tcx> CombineFields<'infcx, 'gcx, 'tcx> { } } -struct Generalizer<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> { +struct Generalizer<'cx, 'gcx: 'cx + 'tcx, 'tcx: 'cx> { infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>, /// Span, used when creating new type variables and things. 
@@ -349,19 +369,25 @@ impl<'cx, 'gcx, 'tcx> TypeRelation<'cx, 'gcx, 'tcx> for Generalizer<'cx, 'gcx, ' true } - fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) - -> RelateResult<'tcx, ty::Binder> - where T: Relate<'tcx> + fn binders( + &mut self, + a: &ty::Binder, + b: &ty::Binder, + ) -> RelateResult<'tcx, ty::Binder> + where + T: Relate<'tcx>, { - Ok(ty::Binder::bind(self.relate(a.skip_binder(), b.skip_binder())?)) + Ok(ty::Binder::bind( + self.relate(a.skip_binder(), b.skip_binder())?, + )) } - fn relate_item_substs(&mut self, - item_def_id: DefId, - a_subst: &'tcx Substs<'tcx>, - b_subst: &'tcx Substs<'tcx>) - -> RelateResult<'tcx, &'tcx Substs<'tcx>> - { + fn relate_item_substs( + &mut self, + item_def_id: DefId, + a_subst: &'tcx Substs<'tcx>, + b_subst: &'tcx Substs<'tcx>, + ) -> RelateResult<'tcx, &'tcx Substs<'tcx>> { if self.ambient_variance == ty::Variance::Invariant { // Avoid fetching the variance if we are in an invariant // context; no need, and it can induce dependency cycles @@ -373,12 +399,12 @@ impl<'cx, 'gcx, 'tcx> TypeRelation<'cx, 'gcx, 'tcx> for Generalizer<'cx, 'gcx, ' } } - fn relate_with_variance>(&mut self, - variance: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T> - { + fn relate_with_variance>( + &mut self, + variance: ty::Variance, + a: &T, + b: &T, + ) -> RelateResult<'tcx, T> { let old_ambient_variance = self.ambient_variance; self.ambient_variance = self.ambient_variance.xform(variance); @@ -428,63 +454,58 @@ impl<'cx, 'gcx, 'tcx> TypeRelation<'cx, 'gcx, 'tcx> for Generalizer<'cx, 'gcx, ' let origin = *variables.var_origin(vid); let new_var_id = variables.new_var(universe, false, origin); let u = self.tcx().mk_var(new_var_id); - debug!("generalize: replacing original vid={:?} with new={:?}", - vid, u); + debug!( + "generalize: replacing original vid={:?} with new={:?}", + vid, u + ); return Ok(u); } } } } - ty::Infer(ty::IntVar(_)) | - ty::Infer(ty::FloatVar(_)) => { + ty::Infer(ty::IntVar(_)) | ty::Infer(ty::FloatVar(_)) => { // No matter what mode we are in, // integer/floating-point types must be equal to be // relatable. Ok(t) } - _ => { - relate::super_relate_tys(self, t, t) - } + _ => relate::super_relate_tys(self, t, t), } } - fn regions(&mut self, r: ty::Region<'tcx>, r2: ty::Region<'tcx>) - -> RelateResult<'tcx, ty::Region<'tcx>> { + fn regions( + &mut self, + r: ty::Region<'tcx>, + r2: ty::Region<'tcx>, + ) -> RelateResult<'tcx, ty::Region<'tcx>> { assert_eq!(r, r2); // we are abusing TypeRelation here; both LHS and RHS ought to be == match *r { // Never make variables for regions bound within the type itself, // nor for erased regions. - ty::ReLateBound(..) | - ty::ReErased => { + ty::ReLateBound(..) | ty::ReErased => { return Ok(r); } // Always make a fresh region variable for placeholder // regions; the higher-ranked decision procedures rely on // this. - ty::RePlaceholder(..) => { } + ty::RePlaceholder(..) => {} // For anything else, we make a region variable, unless we // are *equating*, in which case it's just wasteful. - ty::ReEmpty | - ty::ReStatic | - ty::ReScope(..) | - ty::ReVar(..) | - ty::ReEarlyBound(..) | - ty::ReFree(..) => { - match self.ambient_variance { - ty::Invariant => return Ok(r), - ty::Bivariant | ty::Covariant | ty::Contravariant => (), - } - } + ty::ReEmpty + | ty::ReStatic + | ty::ReScope(..) + | ty::ReVar(..) + | ty::ReEarlyBound(..) + | ty::ReFree(..) 
=> match self.ambient_variance { + ty::Invariant => return Ok(r), + ty::Bivariant | ty::Covariant | ty::Contravariant => (), + }, ty::ReClosureBound(..) => { - span_bug!( - self.span, - "encountered unexpected ReClosureBound: {:?}", - r, - ); + span_bug!(self.span, "encountered unexpected ReClosureBound: {:?}", r,); } } @@ -495,35 +516,33 @@ impl<'cx, 'gcx, 'tcx> TypeRelation<'cx, 'gcx, 'tcx> for Generalizer<'cx, 'gcx, ' } pub trait RelateResultCompare<'tcx, T> { - fn compare(&self, t: T, f: F) -> RelateResult<'tcx, T> where + fn compare(&self, t: T, f: F) -> RelateResult<'tcx, T> + where F: FnOnce() -> TypeError<'tcx>; } -impl<'tcx, T:Clone + PartialEq> RelateResultCompare<'tcx, T> for RelateResult<'tcx, T> { - fn compare(&self, t: T, f: F) -> RelateResult<'tcx, T> where +impl<'tcx, T: Clone + PartialEq> RelateResultCompare<'tcx, T> for RelateResult<'tcx, T> { + fn compare(&self, t: T, f: F) -> RelateResult<'tcx, T> + where F: FnOnce() -> TypeError<'tcx>, { - self.clone().and_then(|s| { - if s == t { - self.clone() - } else { - Err(f()) - } - }) + self.clone() + .and_then(|s| if s == t { self.clone() } else { Err(f()) }) } } -fn int_unification_error<'tcx>(a_is_expected: bool, v: (ty::IntVarValue, ty::IntVarValue)) - -> TypeError<'tcx> -{ +fn int_unification_error<'tcx>( + a_is_expected: bool, + v: (ty::IntVarValue, ty::IntVarValue), +) -> TypeError<'tcx> { let (a, b) = v; TypeError::IntMismatch(ty::relate::expected_found_bool(a_is_expected, &a, &b)) } -fn float_unification_error<'tcx>(a_is_expected: bool, - v: (ty::FloatVarValue, ty::FloatVarValue)) - -> TypeError<'tcx> -{ +fn float_unification_error<'tcx>( + a_is_expected: bool, + v: (ty::FloatVarValue, ty::FloatVarValue), +) -> TypeError<'tcx> { let (ty::FloatVarValue(a), ty::FloatVarValue(b)) = v; TypeError::FloatMismatch(ty::relate::expected_found_bool(a_is_expected, &a, &b)) } diff --git a/src/librustc/infer/equate.rs b/src/librustc/infer/equate.rs index 38d3258d140e1..15c79274f096a 100644 --- a/src/librustc/infer/equate.rs +++ b/src/librustc/infer/equate.rs @@ -1,46 +1,56 @@ use super::combine::{CombineFields, RelationDir}; -use super::{Subtype}; +use super::Subtype; use hir::def_id::DefId; -use ty::{self, Ty, TyCtxt}; -use ty::TyVar; -use ty::subst::Substs; use ty::relate::{self, Relate, RelateResult, TypeRelation}; +use ty::subst::Substs; +use ty::TyVar; +use ty::{self, Ty, TyCtxt}; /// Ensures `a` is made equal to `b`. Returns `a` on success. 
-pub struct Equate<'combine, 'infcx: 'combine, 'gcx: 'infcx+'tcx, 'tcx: 'infcx> { +pub struct Equate<'combine, 'infcx: 'combine, 'gcx: 'infcx + 'tcx, 'tcx: 'infcx> { fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, a_is_expected: bool, } impl<'combine, 'infcx, 'gcx, 'tcx> Equate<'combine, 'infcx, 'gcx, 'tcx> { - pub fn new(fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, a_is_expected: bool) - -> Equate<'combine, 'infcx, 'gcx, 'tcx> - { - Equate { fields: fields, a_is_expected: a_is_expected } + pub fn new( + fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, + a_is_expected: bool, + ) -> Equate<'combine, 'infcx, 'gcx, 'tcx> { + Equate { + fields: fields, + a_is_expected: a_is_expected, + } } } impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> for Equate<'combine, 'infcx, 'gcx, 'tcx> { - fn tag(&self) -> &'static str { "Equate" } + fn tag(&self) -> &'static str { + "Equate" + } - fn tcx(&self) -> TyCtxt<'infcx, 'gcx, 'tcx> { self.fields.tcx() } + fn tcx(&self) -> TyCtxt<'infcx, 'gcx, 'tcx> { + self.fields.tcx() + } fn trait_object_mode(&self) -> relate::TraitObjectMode { self.fields.infcx.trait_object_mode() } - fn a_is_expected(&self) -> bool { self.a_is_expected } + fn a_is_expected(&self) -> bool { + self.a_is_expected + } - fn relate_item_substs(&mut self, - _item_def_id: DefId, - a_subst: &'tcx Substs<'tcx>, - b_subst: &'tcx Substs<'tcx>) - -> RelateResult<'tcx, &'tcx Substs<'tcx>> - { + fn relate_item_substs( + &mut self, + _item_def_id: DefId, + a_subst: &'tcx Substs<'tcx>, + b_subst: &'tcx Substs<'tcx>, + ) -> RelateResult<'tcx, &'tcx Substs<'tcx>> { // N.B., once we are equating types, we don't care about // variance, so don't try to lookup the variance here. This // also avoids some cycles (e.g., #41849) since looking up @@ -51,19 +61,20 @@ impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> relate::relate_substs(self, None, a_subst, b_subst) } - fn relate_with_variance>(&mut self, - _: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T> - { + fn relate_with_variance>( + &mut self, + _: ty::Variance, + a: &T, + b: &T, + ) -> RelateResult<'tcx, T> { self.relate(a, b) } fn tys(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> { - debug!("{}.tys({:?}, {:?})", self.tag(), - a, b); - if a == b { return Ok(a); } + debug!("{}.tys({:?}, {:?})", self.tag(), a, b); + if a == b { + return Ok(a); + } let infcx = self.fields.infcx; let a = infcx.type_variables.borrow_mut().replace_if_possible(a); @@ -77,11 +88,13 @@ impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> } (&ty::Infer(TyVar(a_id)), _) => { - self.fields.instantiate(b, RelationDir::EqTo, a_id, self.a_is_expected)?; + self.fields + .instantiate(b, RelationDir::EqTo, a_id, self.a_is_expected)?; } (_, &ty::Infer(TyVar(b_id))) => { - self.fields.instantiate(a, RelationDir::EqTo, b_id, self.a_is_expected)?; + self.fields + .instantiate(a, RelationDir::EqTo, b_id, self.a_is_expected)?; } _ => { @@ -92,21 +105,27 @@ impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> Ok(a) } - fn regions(&mut self, a: ty::Region<'tcx>, b: ty::Region<'tcx>) - -> RelateResult<'tcx, ty::Region<'tcx>> { - debug!("{}.regions({:?}, {:?})", - self.tag(), - a, - b); + fn regions( + &mut self, + a: ty::Region<'tcx>, + b: ty::Region<'tcx>, + ) -> RelateResult<'tcx, ty::Region<'tcx>> { + debug!("{}.regions({:?}, {:?})", self.tag(), a, b); let origin = Subtype(self.fields.trace.clone()); - self.fields.infcx.borrow_region_constraints() - 
.make_eqregion(origin, a, b); + self.fields + .infcx + .borrow_region_constraints() + .make_eqregion(origin, a, b); Ok(a) } - fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) - -> RelateResult<'tcx, ty::Binder> - where T: Relate<'tcx> + fn binders( + &mut self, + a: &ty::Binder, + b: &ty::Binder, + ) -> RelateResult<'tcx, ty::Binder> + where + T: Relate<'tcx>, { self.fields.higher_ranked_sub(a, b, self.a_is_expected)?; self.fields.higher_ranked_sub(b, a, self.a_is_expected) diff --git a/src/librustc/infer/error_reporting/mod.rs b/src/librustc/infer/error_reporting/mod.rs index 4cce8343c02c8..1020c6adb9ad1 100644 --- a/src/librustc/infer/error_reporting/mod.rs +++ b/src/librustc/infer/error_reporting/mod.rs @@ -193,7 +193,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { let (prefix, span) = match *region { ty::ReEarlyBound(ref br) => { let mut sp = cm.def_span(self.hir().span(node)); - if let Some(param) = self.hir() + if let Some(param) = self + .hir() .get_generics(scope) .and_then(|generics| generics.get_named(&br.name)) { @@ -206,7 +207,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { .. }) => { let mut sp = cm.def_span(self.hir().span(node)); - if let Some(param) = self.hir() + if let Some(param) = self + .hir() .get_generics(scope) .and_then(|generics| generics.get_named(&name)) { @@ -360,7 +362,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { sub_r, sup_r, ) - .emit(); + .emit(); } else if sup_r.is_placeholder() { self.report_placeholder_failure( region_scope_tree, @@ -368,7 +370,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { sub_r, sup_r, ) - .emit(); + .emit(); } else { self.report_sub_sup_conflict( region_scope_tree, @@ -423,10 +425,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { errors.clone() } else { errors - .iter() - .filter(|&e| !is_bound_failure(e)) - .cloned() - .collect() + .iter() + .filter(|&e| !is_bound_failure(e)) + .cloned() + .collect() }; // sort the errors by span, for better error message stability. 
@@ -472,8 +474,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { TypeError::Sorts(ref exp_found) => { // if they are both "path types", there's a chance of ambiguity // due to different versions of the same crate - if let (&ty::Adt(exp_adt, _), &ty::Adt(found_adt, _)) - = (&exp_found.expected.sty, &exp_found.found.sty) + if let (&ty::Adt(exp_adt, _), &ty::Adt(found_adt, _)) = + (&exp_found.expected.sty, &exp_found.found.sty) { report_path_match(err, exp_adt.did, found_adt.did); } @@ -541,7 +543,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } // Output the lifetimes for the first type - let lifetimes = sub.regions() + let lifetimes = sub + .regions() .map(|lifetime| { let s = lifetime.to_string(); if s.is_empty() { @@ -821,14 +824,16 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // Foo // ------- this type argument is exactly the same as the other type // Bar - if self.cmp_type_arg( - &mut values.0, - &mut values.1, - path1.clone(), - sub_no_defaults_1, - path2.clone(), - &t2, - ).is_some() + if self + .cmp_type_arg( + &mut values.0, + &mut values.1, + path1.clone(), + sub_no_defaults_1, + path2.clone(), + &t2, + ) + .is_some() { return values; } @@ -837,14 +842,16 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // Bar // Foo> // ------- this type argument is exactly the same as the other type - if self.cmp_type_arg( - &mut values.1, - &mut values.0, - path2, - sub_no_defaults_2, - path1, - &t1, - ).is_some() + if self + .cmp_type_arg( + &mut values.1, + &mut values.0, + path2, + sub_no_defaults_2, + path1, + &t1, + ) + .is_some() { return values; } @@ -1022,22 +1029,22 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { ("std::result::Result", result_msg), ("core::result::Result", result_msg), ]; - if let Some(msg) = have_as_ref.iter() - .filter_map(|(path, msg)| if &path_str == path { - Some(msg) - } else { - None - }).next() + if let Some(msg) = have_as_ref + .iter() + .filter_map( + |(path, msg)| if &path_str == path { Some(msg) } else { None }, + ) + .next() { let mut show_suggestion = true; for (exp_ty, found_ty) in exp_substs.types().zip(found_substs.types()) { match exp_ty.sty { TyKind::Ref(_, exp_ty, _) => { match (&exp_ty.sty, &found_ty.sty) { - (_, TyKind::Param(_)) | - (_, TyKind::Infer(_)) | - (TyKind::Param(_), _) | - (TyKind::Infer(_), _) => {} + (_, TyKind::Param(_)) + | (_, TyKind::Infer(_)) + | (TyKind::Param(_), _) + | (TyKind::Infer(_), _) => {} _ if ty::TyS::same_type(exp_ty, found_ty) => {} _ => show_suggestion = false, }; @@ -1180,7 +1187,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // `T:` when appropriate let is_impl_trait = bound_kind.to_string().starts_with("impl "); let sp = if has_bounds && !is_impl_trait { - sp.to(self.tcx + sp.to(self + .tcx .sess .source_map() .next_point(self.tcx.sess.source_map().next_point(sp))) @@ -1226,7 +1234,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { ) { let consider = format!( "consider adding an explicit lifetime bound {}", - if type_param_span.map(|(_, _, is_impl_trait)| is_impl_trait).unwrap_or(false) { + if type_param_span + .map(|(_, _, is_impl_trait)| is_impl_trait) + .unwrap_or(false) + { format!(" `{}` to `{}`...", sub, bound_kind) } else { format!("`{}: {}`...", bound_kind, sub) @@ -1339,8 +1350,14 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { debug!("report_sub_sup_conflict: sup_origin={:?}", sup_origin); debug!("report_sub_sup_conflict: sup_trace={:?}", sup_trace); debug!("report_sub_sup_conflict: sub_trace={:?}", sub_trace); - debug!("report_sub_sup_conflict: 
sup_trace.values={:?}", sup_trace.values); - debug!("report_sub_sup_conflict: sub_trace.values={:?}", sub_trace.values); + debug!( + "report_sub_sup_conflict: sup_trace.values={:?}", + sup_trace.values + ); + debug!( + "report_sub_sup_conflict: sub_trace.values={:?}", + sub_trace.values + ); if let (Some((sup_expected, sup_found)), Some((sub_expected, sub_found))) = ( self.values_str(&sup_trace.values), diff --git a/src/librustc/infer/error_reporting/need_type_info.rs b/src/librustc/infer/error_reporting/need_type_info.rs index 8ee367c87c3ea..7819abac4bb9b 100644 --- a/src/librustc/infer/error_reporting/need_type_info.rs +++ b/src/librustc/infer/error_reporting/need_type_info.rs @@ -1,11 +1,11 @@ -use hir::{self, Local, Pat, Body, HirId}; -use hir::intravisit::{self, Visitor, NestedVisitorMap}; -use infer::InferCtxt; +use errors::DiagnosticBuilder; +use hir::intravisit::{self, NestedVisitorMap, Visitor}; +use hir::{self, Body, HirId, Local, Pat}; use infer::type_variable::TypeVariableOrigin; -use ty::{self, Ty, Infer, TyVar}; +use infer::InferCtxt; use syntax::source_map::CompilerDesugaringKind; use syntax_pos::Span; -use errors::DiagnosticBuilder; +use ty::{self, Infer, Ty, TyVar}; struct FindLocalByTypeVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, @@ -17,22 +17,23 @@ struct FindLocalByTypeVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { impl<'a, 'gcx, 'tcx> FindLocalByTypeVisitor<'a, 'gcx, 'tcx> { fn node_matches_type(&mut self, node_id: HirId) -> bool { - let ty_opt = self.infcx.in_progress_tables.and_then(|tables| { - tables.borrow().node_id_to_type_opt(node_id) - }); + let ty_opt = self + .infcx + .in_progress_tables + .and_then(|tables| tables.borrow().node_id_to_type_opt(node_id)); match ty_opt { Some(ty) => { let ty = self.infcx.resolve_type_vars_if_possible(&ty); ty.walk().any(|inner_ty| { - inner_ty == *self.target_ty || match (&inner_ty.sty, &self.target_ty.sty) { - (&Infer(TyVar(a_vid)), &Infer(TyVar(b_vid))) => { - self.infcx + inner_ty == *self.target_ty + || match (&inner_ty.sty, &self.target_ty.sty) { + (&Infer(TyVar(a_vid)), &Infer(TyVar(b_vid))) => self + .infcx .type_variables .borrow_mut() - .sub_unified(a_vid, b_vid) + .sub_unified(a_vid, b_vid), + _ => false, } - _ => false, - } }) } None => false, @@ -62,13 +63,13 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for FindLocalByTypeVisitor<'a, 'gcx, 'tcx> { } } - impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { pub fn extract_type_name(&self, ty: &'a Ty<'tcx>) -> String { if let ty::Infer(ty::TyVar(ty_vid)) = (*ty).sty { let ty_vars = self.type_variables.borrow(); if let TypeVariableOrigin::TypeParameterDefinition(_, name) = - *ty_vars.var_origin(ty_vid) { + *ty_vars.var_origin(ty_vid) + { name.to_string() } else { ty.to_string() @@ -78,11 +79,12 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } } - pub fn need_type_info_err(&self, - body_id: Option, - span: Span, - ty: Ty<'tcx>) - -> DiagnosticBuilder<'gcx> { + pub fn need_type_info_err( + &self, + body_id: Option, + span: Span, + ty: Ty<'tcx>, + ) -> DiagnosticBuilder<'gcx> { let ty = self.resolve_type_vars_if_possible(&ty); let name = self.extract_type_name(&ty); @@ -127,13 +129,17 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // ^ consider giving this closure parameter a type // ``` labels.clear(); - labels.push( - (pattern.span, "consider giving this closure parameter a type".to_owned())); + labels.push(( + pattern.span, + "consider giving this closure parameter a type".to_owned(), + )); } else if let Some(pattern) = 
local_visitor.found_local_pattern { if let Some(simple_ident) = pattern.simple_ident() { match pattern.span.compiler_desugaring_kind() { - None => labels.push((pattern.span, - format!("consider giving `{}` a type", simple_ident))), + None => labels.push(( + pattern.span, + format!("consider giving `{}` a type", simple_ident), + )), Some(CompilerDesugaringKind::ForLoop) => labels.push(( pattern.span, "the element type for this iterator is not specified".to_owned(), @@ -141,14 +147,14 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { _ => {} } } else { - labels.push((pattern.span, "consider giving the pattern a type".to_owned())); + labels.push(( + pattern.span, + "consider giving the pattern a type".to_owned(), + )); } } - let mut err = struct_span_err!(self.tcx.sess, - err_span, - E0282, - "type annotations needed"); + let mut err = struct_span_err!(self.tcx.sess, err_span, E0282, "type annotations needed"); for (target_span, label_message) in labels { err.span_label(target_span, label_message); diff --git a/src/librustc/infer/error_reporting/nice_region_error/different_lifetimes.rs b/src/librustc/infer/error_reporting/nice_region_error/different_lifetimes.rs index bfb5b61d0aa1f..d0dcecf83a03b 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/different_lifetimes.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/different_lifetimes.rs @@ -1,8 +1,8 @@ //! Error Reporting for Anonymous Region Lifetime Errors //! where both the regions are anonymous. -use infer::error_reporting::nice_region_error::NiceRegionError; use infer::error_reporting::nice_region_error::util::AnonymousArgInfo; +use infer::error_reporting::nice_region_error::NiceRegionError; use util::common::ErrorReported; impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { @@ -60,15 +60,11 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { debug!( "try_report_anon_anon_conflict: found_arg1={:?} sup={:?} br1={:?}", - ty_sub, - sup, - bregion_sup + ty_sub, sup, bregion_sup ); debug!( "try_report_anon_anon_conflict: found_arg2={:?} sub={:?} br2={:?}", - ty_sup, - sub, - bregion_sub + ty_sup, sub, bregion_sub ); let (ty_sup, ty_fndecl_sup) = ty_sup; @@ -98,21 +94,19 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { String::new() }; - let (span_1, span_2, main_label, span_label) = match (sup_is_ret_type, sub_is_ret_type) { (None, None) => { let (main_label_1, span_label_1) = if ty_sup.id == ty_sub.id { ( "this type is declared with multiple lifetimes...".to_owned(), - "...but data with one lifetime flows into the other here".to_owned() + "...but data with one lifetime flows into the other here".to_owned(), ) } else { ( "these two types are declared with different lifetimes...".to_owned(), format!( "...but data{} flows{} here", - span_label_var1, - span_label_var2 + span_label_var1, span_label_var2 ), ) }; @@ -123,21 +117,20 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { ty_sub.span, ret_span, "this parameter and the return type are declared \ - with different lifetimes...".to_owned() - , + with different lifetimes..." + .to_owned(), format!("...but data{} is returned here", span_label_var1), ), (_, Some(ret_span)) => ( ty_sup.span, ret_span, "this parameter and the return type are declared \ - with different lifetimes...".to_owned() - , + with different lifetimes..." 
+ .to_owned(), format!("...but data{} is returned here", span_label_var1), ), }; - struct_span_err!(self.tcx.sess, span, E0623, "lifetime mismatch") .span_label(span_1, main_label) .span_label(span_2, String::new()) diff --git a/src/librustc/infer/error_reporting/nice_region_error/find_anon_type.rs b/src/librustc/infer/error_reporting/nice_region_error/find_anon_type.rs index d230ce55471e9..c2304ffc4ce02 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/find_anon_type.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/find_anon_type.rs @@ -1,9 +1,9 @@ use hir; -use ty::{self, Region, TyCtxt}; -use hir::Node; -use middle::resolve_lifetime as rl; use hir::intravisit::{self, NestedVisitorMap, Visitor}; +use hir::Node; use infer::error_reporting::nice_region_error::NiceRegionError; +use middle::resolve_lifetime as rl; +use ty::{self, Region, TyCtxt}; impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { /// This function calls the `visit_ty` method for the parameters @@ -106,11 +106,13 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for FindNestedTypeVisitor<'a, 'gcx, 'tcx> { return; } - hir::TyKind::TraitObject(ref bounds, _) => for bound in bounds { - self.current_index.shift_in(1); - self.visit_poly_trait_ref(bound, hir::TraitBoundModifier::None); - self.current_index.shift_out(1); - }, + hir::TyKind::TraitObject(ref bounds, _) => { + for bound in bounds { + self.current_index.shift_in(1); + self.visit_poly_trait_ref(bound, hir::TraitBoundModifier::None); + self.current_index.shift_out(1); + } + } hir::TyKind::Rptr(ref lifetime, _) => { // the lifetime of the TyRptr @@ -125,9 +127,7 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for FindNestedTypeVisitor<'a, 'gcx, 'tcx> { ) => { debug!( "LateBoundAnon depth = {:?} anon_index = {:?} br_index={:?}", - debruijn_index, - anon_index, - br_index + debruijn_index, anon_index, br_index ); if debruijn_index == self.current_index && anon_index == br_index { self.found_type = Some(arg); @@ -142,8 +142,7 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for FindNestedTypeVisitor<'a, 'gcx, 'tcx> { debug!( "EarlyBound self.infcx.tcx.hir().local_def_id(id)={:?} \ def_id={:?}", - id, - def_id + id, def_id ); if id == def_id { self.found_type = Some(arg); @@ -235,8 +234,7 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for TyPathVisitor<'a, 'gcx, 'tcx> { debug!( "EarlyBound self.infcx.tcx.hir().local_def_id(id)={:?} \ def_id={:?}", - id, - def_id + id, def_id ); if id == def_id { self.found_it = true; diff --git a/src/librustc/infer/error_reporting/nice_region_error/mod.rs b/src/librustc/infer/error_reporting/nice_region_error/mod.rs index f7ba546fa7f3b..ab73ebb251c93 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/mod.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/mod.rs @@ -1,6 +1,6 @@ -use infer::InferCtxt; use infer::lexical_region_resolve::RegionResolutionError; use infer::lexical_region_resolve::RegionResolutionError::*; +use infer::InferCtxt; use syntax::source_map::Span; use ty::{self, TyCtxt}; use util::common::ErrorReported; @@ -8,8 +8,8 @@ use util::common::ErrorReported; mod different_lifetimes; mod find_anon_type; mod named_anon_conflict; -mod placeholder_error; mod outlives_closure; +mod placeholder_error; mod static_impl_trait; mod util; @@ -17,14 +17,18 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { pub fn try_report_nice_region_error(&self, error: &RegionResolutionError<'tcx>) -> bool { match *error { ConcreteFailure(..) | SubSupConflict(..) 
=> {} - _ => return false, // inapplicable + _ => return false, // inapplicable } if let Some(tables) = self.in_progress_tables { let tables = tables.borrow(); - NiceRegionError::new(self.tcx, error.clone(), Some(&tables)).try_report().is_some() + NiceRegionError::new(self.tcx, error.clone(), Some(&tables)) + .try_report() + .is_some() } else { - NiceRegionError::new(self.tcx, error.clone(), None).try_report().is_some() + NiceRegionError::new(self.tcx, error.clone(), None) + .try_report() + .is_some() } } } @@ -42,7 +46,12 @@ impl<'cx, 'gcx, 'tcx> NiceRegionError<'cx, 'gcx, 'tcx> { error: RegionResolutionError<'tcx>, tables: Option<&'cx ty::TypeckTables<'tcx>>, ) -> Self { - Self { tcx, error: Some(error), regions: None, tables } + Self { + tcx, + error: Some(error), + regions: None, + tables, + } } pub fn new_from_span( @@ -52,7 +61,12 @@ impl<'cx, 'gcx, 'tcx> NiceRegionError<'cx, 'gcx, 'tcx> { sup: ty::Region<'tcx>, tables: Option<&'cx ty::TypeckTables<'tcx>>, ) -> Self { - Self { tcx, error: None, regions: Some((span, sub, sup)), tables } + Self { + tcx, + error: None, + regions: Some((span, sub, sup)), + tables, + } } pub fn try_report_from_nll(&self) -> Option { diff --git a/src/librustc/infer/error_reporting/nice_region_error/named_anon_conflict.rs b/src/librustc/infer/error_reporting/nice_region_error/named_anon_conflict.rs index daab0a8e96263..a981fc69ffb19 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/named_anon_conflict.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/named_anon_conflict.rs @@ -1,9 +1,9 @@ //! Error Reporting for Anonymous Region Lifetime Errors //! where one region is named and the other is anonymous. +use errors::Applicability; use infer::error_reporting::nice_region_error::NiceRegionError; use ty; use util::common::ErrorReported; -use errors::Applicability; impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { /// When given a `ConcreteFailure` for a function with arguments containing a named region and @@ -13,8 +13,7 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { debug!( "try_report_named_anon_conflict(sub={:?}, sup={:?})", - sub, - sup + sub, sup ); // Determine whether the sub and sup consist of one named region ('a) @@ -33,7 +32,8 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { self.find_arg_with_region(sup, sub).unwrap(), self.tcx.is_suitable_region(sup).unwrap(), ) - } else if self.is_named_region(sup) && self.tcx.is_suitable_region(sub).is_some() + } else if self.is_named_region(sup) + && self.tcx.is_suitable_region(sub).is_some() && self.find_arg_with_region(sub, sup).is_some() { ( @@ -102,7 +102,8 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { E0621, "explicit lifetime required in {}", error_var - ).span_suggestion_with_applicability( + ) + .span_suggestion_with_applicability( new_ty_span, &format!("add explicit lifetime `{}` to {}", named, span_label_var), new_ty.to_string(), diff --git a/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs b/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs index c4c71037d8b35..b8e47bfb3acc2 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs @@ -1,13 +1,13 @@ //! Error Reporting for Anonymous Region Lifetime Errors //! where both the regions are anonymous. 
+use hir::Node; +use hir::{Expr, ExprKind::Closure}; use infer::error_reporting::nice_region_error::NiceRegionError; +use infer::lexical_region_resolve::RegionResolutionError::SubSupConflict; use infer::SubregionOrigin; use ty::RegionKind; -use hir::{Expr, ExprKind::Closure}; -use hir::Node; use util::common::ErrorReported; -use infer::lexical_region_resolve::RegionResolutionError::SubSupConflict; impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { /// Print the error message for lifetime errors when binding escapes a closure. @@ -36,69 +36,77 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { /// ...because it cannot outlive this closure /// ``` pub(super) fn try_report_outlives_closure(&self) -> Option { - if let Some(SubSupConflict(_, - origin, - ref sub_origin, - _, - ref sup_origin, - sup_region)) = self.error { - + if let Some(SubSupConflict(_, origin, ref sub_origin, _, ref sup_origin, sup_region)) = + self.error + { // #45983: when trying to assign the contents of an argument to a binding outside of a // closure, provide a specific message pointing this out. - if let (&SubregionOrigin::BindingTypeIsNotValidAtDecl(ref external_span), - &RegionKind::ReFree(ref free_region)) = (&sub_origin, sup_region) { + if let ( + &SubregionOrigin::BindingTypeIsNotValidAtDecl(ref external_span), + &RegionKind::ReFree(ref free_region), + ) = (&sub_origin, sup_region) + { let hir = &self.tcx.hir(); if let Some(node_id) = hir.as_local_node_id(free_region.scope) { if let Node::Expr(Expr { node: Closure(_, _, _, closure_span, None), .. - }) = hir.get(node_id) { + }) = hir.get(node_id) + { let sup_sp = sup_origin.span(); let origin_sp = origin.span(); let mut err = self.tcx.sess.struct_span_err( sup_sp, - "borrowed data cannot be stored outside of its closure"); + "borrowed data cannot be stored outside of its closure", + ); err.span_label(sup_sp, "cannot be stored outside of its closure"); if origin_sp == sup_sp || origin_sp.contains(sup_sp) { -// // sup_sp == origin.span(): -// -// let mut x = None; -// ----- borrowed data cannot be stored into here... -// with_int(|y| x = Some(y)); -// --- ^ cannot be stored outside of its closure -// | -// ...because it cannot outlive this closure -// -// // origin.contains(&sup_sp): -// -// let mut f: Option<&u32> = None; -// ----- borrowed data cannot be stored into here... -// closure_expecting_bound(|x: &'x u32| { -// ------------ ... because it cannot outlive this closure -// f = Some(x); -// ^ cannot be stored outside of its closure - err.span_label(*external_span, - "borrowed data cannot be stored into here..."); - err.span_label(*closure_span, - "...because it cannot outlive this closure"); + // // sup_sp == origin.span(): + // + // let mut x = None; + // ----- borrowed data cannot be stored into here... + // with_int(|y| x = Some(y)); + // --- ^ cannot be stored outside of its closure + // | + // ...because it cannot outlive this closure + // + // // origin.contains(&sup_sp): + // + // let mut f: Option<&u32> = None; + // ----- borrowed data cannot be stored into here... + // closure_expecting_bound(|x: &'x u32| { + // ------------ ... 
because it cannot outlive this closure + // f = Some(x); + // ^ cannot be stored outside of its closure + err.span_label( + *external_span, + "borrowed data cannot be stored into here...", + ); + err.span_label( + *closure_span, + "...because it cannot outlive this closure", + ); } else { -// FIXME: the wording for this case could be much improved -// -// let mut lines_to_use: Vec<&CrateId> = Vec::new(); -// - cannot infer an appropriate lifetime... -// let push_id = |installed_id: &CrateId| { -// ------- ------------------------ borrowed data cannot outlive this closure -// | -// ...so that variable is valid at time of its declaration -// lines_to_use.push(installed_id); -// ^^^^^^^^^^^^ cannot be stored outside of its closure - err.span_label(origin_sp, - "cannot infer an appropriate lifetime..."); - err.span_label(*external_span, - "...so that variable is valid at time of its \ - declaration"); - err.span_label(*closure_span, - "borrowed data cannot outlive this closure"); + // FIXME: the wording for this case could be much improved + // + // let mut lines_to_use: Vec<&CrateId> = Vec::new(); + // - cannot infer an appropriate lifetime... + // let push_id = |installed_id: &CrateId| { + // ------- ------------------------ borrowed data cannot outlive this closure + // | + // ...so that variable is valid at time of its declaration + // lines_to_use.push(installed_id); + // ^^^^^^^^^^^^ cannot be stored outside of its closure + err.span_label(origin_sp, "cannot infer an appropriate lifetime..."); + err.span_label( + *external_span, + "...so that variable is valid at time of its \ + declaration", + ); + err.span_label( + *closure_span, + "borrowed data cannot outlive this closure", + ); } err.emit(); return Some(ErrorReported); @@ -109,4 +117,3 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { None } } - diff --git a/src/librustc/infer/error_reporting/nice_region_error/placeholder_error.rs b/src/librustc/infer/error_reporting/nice_region_error/placeholder_error.rs index 0dda636a9bd53..859fb10b25cd1 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/placeholder_error.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/placeholder_error.rs @@ -34,19 +34,15 @@ impl NiceRegionError<'me, 'gcx, 'tcx> { sub_placeholder @ ty::RePlaceholder(_), _, sup_placeholder @ ty::RePlaceholder(_), - )) - if expected.def_id == found.def_id => - { - Some(self.try_report_placeholders_trait( - Some(self.tcx.mk_region(ty::ReVar(*vid))), - cause, - Some(sub_placeholder), - Some(sup_placeholder), - expected.def_id, - expected.substs, - found.substs, - )) - } + )) if expected.def_id == found.def_id => Some(self.try_report_placeholders_trait( + Some(self.tcx.mk_region(ty::ReVar(*vid))), + cause, + Some(sub_placeholder), + Some(sup_placeholder), + expected.def_id, + expected.substs, + found.substs, + )), Some(RegionResolutionError::SubSupConflict( vid, @@ -58,19 +54,15 @@ impl NiceRegionError<'me, 'gcx, 'tcx> { sub_placeholder @ ty::RePlaceholder(_), _, _, - )) - if expected.def_id == found.def_id => - { - Some(self.try_report_placeholders_trait( - Some(self.tcx.mk_region(ty::ReVar(*vid))), - cause, - Some(sub_placeholder), - None, - expected.def_id, - expected.substs, - found.substs, - )) - } + )) if expected.def_id == found.def_id => Some(self.try_report_placeholders_trait( + Some(self.tcx.mk_region(ty::ReVar(*vid))), + cause, + Some(sub_placeholder), + None, + expected.def_id, + expected.substs, + found.substs, + )), Some(RegionResolutionError::SubSupConflict( vid, @@ -82,19 +74,15 @@ 
impl NiceRegionError<'me, 'gcx, 'tcx> { _, _, sup_placeholder @ ty::RePlaceholder(_), - )) - if expected.def_id == found.def_id => - { - Some(self.try_report_placeholders_trait( - Some(self.tcx.mk_region(ty::ReVar(*vid))), - cause, - None, - Some(*sup_placeholder), - expected.def_id, - expected.substs, - found.substs, - )) - } + )) if expected.def_id == found.def_id => Some(self.try_report_placeholders_trait( + Some(self.tcx.mk_region(ty::ReVar(*vid))), + cause, + None, + Some(*sup_placeholder), + expected.def_id, + expected.substs, + found.substs, + )), Some(RegionResolutionError::ConcreteFailure( SubregionOrigin::Subtype(TypeTrace { @@ -103,19 +91,15 @@ impl NiceRegionError<'me, 'gcx, 'tcx> { }), sub_region @ ty::RePlaceholder(_), sup_region @ ty::RePlaceholder(_), - )) - if expected.def_id == found.def_id => - { - Some(self.try_report_placeholders_trait( - None, - cause, - Some(*sub_region), - Some(*sup_region), - expected.def_id, - expected.substs, - found.substs, - )) - } + )) if expected.def_id == found.def_id => Some(self.try_report_placeholders_trait( + None, + cause, + Some(*sub_region), + Some(*sup_region), + expected.def_id, + expected.substs, + found.substs, + )), Some(RegionResolutionError::ConcreteFailure( SubregionOrigin::Subtype(TypeTrace { @@ -124,19 +108,15 @@ impl NiceRegionError<'me, 'gcx, 'tcx> { }), sub_region @ ty::RePlaceholder(_), sup_region, - )) - if expected.def_id == found.def_id => - { - Some(self.try_report_placeholders_trait( - Some(sup_region), - cause, - Some(*sub_region), - None, - expected.def_id, - expected.substs, - found.substs, - )) - } + )) if expected.def_id == found.def_id => Some(self.try_report_placeholders_trait( + Some(sup_region), + cause, + Some(*sub_region), + None, + expected.def_id, + expected.substs, + found.substs, + )), Some(RegionResolutionError::ConcreteFailure( SubregionOrigin::Subtype(TypeTrace { @@ -145,19 +125,15 @@ impl NiceRegionError<'me, 'gcx, 'tcx> { }), sub_region, sup_region @ ty::RePlaceholder(_), - )) - if expected.def_id == found.def_id => - { - Some(self.try_report_placeholders_trait( - Some(sub_region), - cause, - None, - Some(*sup_region), - expected.def_id, - expected.substs, - found.substs, - )) - } + )) if expected.def_id == found.def_id => Some(self.try_report_placeholders_trait( + Some(sub_region), + cause, + None, + Some(*sup_region), + expected.def_id, + expected.substs, + found.substs, + )), _ => None, } diff --git a/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs b/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs index 9fc3bb05cdab1..f830b82407542 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs @@ -1,54 +1,52 @@ //! Error Reporting for static impl Traits. +use errors::Applicability; use infer::error_reporting::nice_region_error::NiceRegionError; use infer::lexical_region_resolve::RegionResolutionError; use ty::{BoundRegion, FreeRegion, RegionKind}; use util::common::ErrorReported; -use errors::Applicability; impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { /// Print the error message for lifetime errors when the return type is a static impl Trait. 
pub(super) fn try_report_static_impl_trait(&self) -> Option { if let Some(ref error) = self.error { if let RegionResolutionError::SubSupConflict( - _, - var_origin, - sub_origin, - sub_r, - sup_origin, - sup_r, - ) = error.clone() + _, + var_origin, + sub_origin, + sub_r, + sup_origin, + sup_r, + ) = error.clone() { let anon_reg_sup = self.tcx.is_suitable_region(sup_r)?; - if sub_r == &RegionKind::ReStatic && - self.tcx.return_type_impl_trait(anon_reg_sup.def_id).is_some() + if sub_r == &RegionKind::ReStatic + && self + .tcx + .return_type_impl_trait(anon_reg_sup.def_id) + .is_some() { let sp = var_origin.span(); let return_sp = sub_origin.span(); - let mut err = self.tcx.sess.struct_span_err( - sp, - "cannot infer an appropriate lifetime", - ); + let mut err = self + .tcx + .sess + .struct_span_err(sp, "cannot infer an appropriate lifetime"); err.span_label( return_sp, "this return type evaluates to the `'static` lifetime...", ); - err.span_label( - sup_origin.span(), - "...but this borrow...", - ); + err.span_label(sup_origin.span(), "...but this borrow..."); let (lifetime, lt_sp_opt) = self.tcx.msg_span_from_free_region(sup_r); if let Some(lifetime_sp) = lt_sp_opt { - err.span_note( - lifetime_sp, - &format!("...can't outlive {}", lifetime), - ); + err.span_note(lifetime_sp, &format!("...can't outlive {}", lifetime)); } let lifetime_name = match sup_r { RegionKind::ReFree(FreeRegion { - bound_region: BoundRegion::BrNamed(_, ref name), .. + bound_region: BoundRegion::BrNamed(_, ref name), + .. }) => name.to_string(), _ => "'_".to_owned(), }; diff --git a/src/librustc/infer/error_reporting/nice_region_error/util.rs b/src/librustc/infer/error_reporting/nice_region_error/util.rs index 43590a606ae90..bd00569cb8379 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/util.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/util.rs @@ -2,10 +2,10 @@ //! anonymous regions. use hir; -use infer::error_reporting::nice_region_error::NiceRegionError; -use ty::{self, Region, Ty}; use hir::def_id::DefId; +use infer::error_reporting::nice_region_error::NiceRegionError; use syntax_pos::Span; +use ty::{self, Region, Ty}; // The struct contains the information about the anonymous region // we are searching for. @@ -18,7 +18,7 @@ pub(super) struct AnonymousArgInfo<'tcx> { // the ty::BoundRegion corresponding to the anonymous region pub bound_region: ty::BoundRegion, // arg_ty_span contains span of argument type - pub arg_ty_span : Span, + pub arg_ty_span: Span, // corresponds to id the argument is the first parameter // in the declaration pub is_first: bool, @@ -79,7 +79,7 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { Some(AnonymousArgInfo { arg: arg, arg_ty: new_arg_ty, - arg_ty_span : arg_ty_span, + arg_ty_span: arg_ty_span, bound_region: bound_region, is_first: is_first, }) @@ -111,7 +111,8 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { let ret_ty = self.tcx.type_of(scope_def_id); if let ty::FnDef(_, _) = ret_ty.sty { let sig = ret_ty.fn_sig(self.tcx); - let late_bound_regions = self.tcx + let late_bound_regions = self + .tcx .collect_referenced_late_bound_regions(&sig.output()); if late_bound_regions.iter().any(|r| *r == br) { return Some(decl.output.span()); @@ -126,9 +127,10 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { // enable E0621 for it. 
pub(super) fn is_self_anon(&self, is_first: bool, scope_def_id: DefId) -> bool { is_first - && self.tcx - .opt_associated_item(scope_def_id) - .map(|i| i.method_has_self_argument) == Some(true) + && self + .tcx + .opt_associated_item(scope_def_id) + .map(|i| i.method_has_self_argument) + == Some(true) } - } diff --git a/src/librustc/infer/error_reporting/note.rs b/src/librustc/infer/error_reporting/note.rs index e45a4b17cdd9c..83720266fbb45 100644 --- a/src/librustc/infer/error_reporting/note.rs +++ b/src/librustc/infer/error_reporting/note.rs @@ -1,57 +1,75 @@ +use errors::DiagnosticBuilder; use infer::{self, InferCtxt, SubregionOrigin}; use middle::region; -use ty::{self, Region}; use ty::error::TypeError; -use errors::DiagnosticBuilder; +use ty::{self, Region}; impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { - pub(super) fn note_region_origin(&self, - err: &mut DiagnosticBuilder<'_>, - origin: &SubregionOrigin<'tcx>) { + pub(super) fn note_region_origin( + &self, + err: &mut DiagnosticBuilder<'_>, + origin: &SubregionOrigin<'tcx>, + ) { match *origin { infer::Subtype(ref trace) => { if let Some((expected, found)) = self.values_str(&trace.values) { let expected = expected.content(); let found = found.content(); - err.note(&format!("...so that the {}:\nexpected {}\n found {}", - trace.cause.as_requirement_str(), - expected, - found)); + err.note(&format!( + "...so that the {}:\nexpected {}\n found {}", + trace.cause.as_requirement_str(), + expected, + found + )); } else { // FIXME: this really should be handled at some earlier stage. Our // handling of region checking when type errors are present is // *terrible*. - err.span_note(trace.cause.span, - &format!("...so that {}", trace.cause.as_requirement_str())); + err.span_note( + trace.cause.span, + &format!("...so that {}", trace.cause.as_requirement_str()), + ); } } infer::Reborrow(span) => { - err.span_note(span, - "...so that reference does not outlive borrowed content"); + err.span_note( + span, + "...so that reference does not outlive borrowed content", + ); } infer::ReborrowUpvar(span, ref upvar_id) => { let var_node_id = self.tcx.hir().hir_to_node_id(upvar_id.var_path.hir_id); let var_name = self.tcx.hir().name(var_node_id); - err.span_note(span, - &format!("...so that closure can access `{}`", var_name)); + err.span_note( + span, + &format!("...so that closure can access `{}`", var_name), + ); } infer::InfStackClosure(span) => { err.span_note(span, "...so that closure does not outlive its stack frame"); } infer::InvokeClosure(span) => { - err.span_note(span, - "...so that closure is not invoked outside its lifetime"); + err.span_note( + span, + "...so that closure is not invoked outside its lifetime", + ); } infer::DerefPointer(span) => { - err.span_note(span, - "...so that pointer is not dereferenced outside its lifetime"); + err.span_note( + span, + "...so that pointer is not dereferenced outside its lifetime", + ); } infer::FreeVariable(span, id) => { - err.span_note(span, - &format!("...so that captured variable `{}` does not outlive the \ - enclosing closure", - self.tcx.hir().name(id))); + err.span_note( + span, + &format!( + "...so that captured variable `{}` does not outlive the \ + enclosing closure", + self.tcx.hir().name(id) + ), + ); } infer::IndexSlice(span) => { err.span_note(span, "...so that slice is not indexed outside the lifetime"); @@ -60,8 +78,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { err.span_note(span, "...so that it can be closed over into an object"); } infer::CallRcvr(span) => { - 
err.span_note(span, - "...so that method receiver is valid for the method call"); + err.span_note( + span, + "...so that method receiver is valid for the method call", + ); } infer::CallArg(span) => { err.span_note(span, "...so that argument is valid for the call"); @@ -76,370 +96,586 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { err.span_note(span, "...so that reference is valid at the time of borrow"); } infer::AutoBorrow(span) => { - err.span_note(span, - "...so that auto-reference is valid at the time of borrow"); + err.span_note( + span, + "...so that auto-reference is valid at the time of borrow", + ); } infer::ExprTypeIsNotInScope(t, span) => { - err.span_note(span, - &format!("...so type `{}` of expression is valid during the \ - expression", - self.ty_to_string(t))); + err.span_note( + span, + &format!( + "...so type `{}` of expression is valid during the \ + expression", + self.ty_to_string(t) + ), + ); } infer::BindingTypeIsNotValidAtDecl(span) => { - err.span_note(span, - "...so that variable is valid at time of its declaration"); + err.span_note( + span, + "...so that variable is valid at time of its declaration", + ); } infer::ParameterInScope(_, span) => { - err.span_note(span, - "...so that a type/lifetime parameter is in scope here"); + err.span_note( + span, + "...so that a type/lifetime parameter is in scope here", + ); } infer::DataBorrowed(ty, span) => { - err.span_note(span, - &format!("...so that the type `{}` is not borrowed for too long", - self.ty_to_string(ty))); + err.span_note( + span, + &format!( + "...so that the type `{}` is not borrowed for too long", + self.ty_to_string(ty) + ), + ); } infer::ReferenceOutlivesReferent(ty, span) => { - err.span_note(span, - &format!("...so that the reference type `{}` does not outlive the \ - data it points at", - self.ty_to_string(ty))); + err.span_note( + span, + &format!( + "...so that the reference type `{}` does not outlive the \ + data it points at", + self.ty_to_string(ty) + ), + ); } infer::RelateParamBound(span, t) => { - err.span_note(span, - &format!("...so that the type `{}` will meet its required \ - lifetime bounds", - self.ty_to_string(t))); + err.span_note( + span, + &format!( + "...so that the type `{}` will meet its required \ + lifetime bounds", + self.ty_to_string(t) + ), + ); } infer::RelateDefaultParamBound(span, t) => { - err.span_note(span, - &format!("...so that type parameter instantiated with `{}`, will \ - meet its declared lifetime bounds", - self.ty_to_string(t))); + err.span_note( + span, + &format!( + "...so that type parameter instantiated with `{}`, will \ + meet its declared lifetime bounds", + self.ty_to_string(t) + ), + ); } infer::RelateRegionParamBound(span) => { - err.span_note(span, - "...so that the declared lifetime parameter bounds are satisfied"); + err.span_note( + span, + "...so that the declared lifetime parameter bounds are satisfied", + ); } infer::SafeDestructor(span) => { - err.span_note(span, - "...so that references are valid when the destructor runs"); + err.span_note( + span, + "...so that references are valid when the destructor runs", + ); } infer::CompareImplMethodObligation { span, .. 
} => { - err.span_note(span, - "...so that the definition in impl matches the definition from the \ - trait"); + err.span_note( + span, + "...so that the definition in impl matches the definition from the \ + trait", + ); } } } - pub(super) fn report_concrete_failure(&self, - region_scope_tree: ®ion::ScopeTree, - origin: SubregionOrigin<'tcx>, - sub: Region<'tcx>, - sup: Region<'tcx>) - -> DiagnosticBuilder<'tcx> { + pub(super) fn report_concrete_failure( + &self, + region_scope_tree: ®ion::ScopeTree, + origin: SubregionOrigin<'tcx>, + sub: Region<'tcx>, + sup: Region<'tcx>, + ) -> DiagnosticBuilder<'tcx> { match origin { infer::Subtype(trace) => { let terr = TypeError::RegionsDoesNotOutlive(sup, sub); let mut err = self.report_and_explain_type_error(trace, &terr); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, "", sup, "..."); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "...does not necessarily outlive ", sub, ""); + self.tcx + .note_and_explain_region(region_scope_tree, &mut err, "", sup, "..."); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "...does not necessarily outlive ", + sub, + "", + ); err } infer::Reborrow(span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0312, - "lifetime of reference outlives lifetime of \ - borrowed content..."); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "...the reference is valid for ", - sub, - "..."); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "...but the borrowed content is only valid for ", - sup, - ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0312, + "lifetime of reference outlives lifetime of \ + borrowed content..." + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "...the reference is valid for ", + sub, + "...", + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "...but the borrowed content is only valid for ", + sup, + "", + ); err } infer::ReborrowUpvar(span, ref upvar_id) => { let var_node_id = self.tcx.hir().hir_to_node_id(upvar_id.var_path.hir_id); let var_name = self.tcx.hir().name(var_node_id); - let mut err = struct_span_err!(self.tcx.sess, - span, - E0313, - "lifetime of borrowed pointer outlives lifetime \ - of captured variable `{}`...", - var_name); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "...the borrowed pointer is valid for ", - sub, - "..."); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0313, + "lifetime of borrowed pointer outlives lifetime \ + of captured variable `{}`...", + var_name + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "...the borrowed pointer is valid for ", + sub, + "...", + ); self.tcx.note_and_explain_region( region_scope_tree, &mut err, &format!("...but `{}` is only valid for ", var_name), sup, - ""); + "", + ); err } infer::InfStackClosure(span) => { let mut err = struct_span_err!(self.tcx.sess, span, E0314, "closure outlives stack frame"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "...the closure must be valid for ", - sub, - "..."); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "...but the closure's stack frame is only valid \ - for ", - sup, - ""); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "...the closure must be valid for ", + sub, + "...", + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "...but the closure's stack frame is only valid \ + 
for ", + sup, + "", + ); err } infer::InvokeClosure(span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0315, - "cannot invoke closure outside of its lifetime"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "the closure is only valid for ", sup, ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0315, + "cannot invoke closure outside of its lifetime" + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "the closure is only valid for ", + sup, + "", + ); err } infer::DerefPointer(span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0473, - "dereference of reference outside its lifetime"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "the reference is only valid for ", sup, ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0473, + "dereference of reference outside its lifetime" + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "the reference is only valid for ", + sup, + "", + ); err } infer::FreeVariable(span, id) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0474, - "captured variable `{}` does not outlive the \ - enclosing closure", - self.tcx.hir().name(id)); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "captured variable is valid for ", sup, ""); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "closure is valid for ", sub, ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0474, + "captured variable `{}` does not outlive the \ + enclosing closure", + self.tcx.hir().name(id) + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "captured variable is valid for ", + sup, + "", + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "closure is valid for ", + sub, + "", + ); err } infer::IndexSlice(span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0475, - "index of slice outside its lifetime"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "the slice is only valid for ", sup, ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0475, + "index of slice outside its lifetime" + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "the slice is only valid for ", + sup, + "", + ); err } infer::RelateObjectBound(span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0476, - "lifetime of the source pointer does not outlive \ - lifetime bound of the object type"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "object type is valid for ", sub, ""); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "source pointer is only valid for ", - sup, - ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0476, + "lifetime of the source pointer does not outlive \ + lifetime bound of the object type" + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "object type is valid for ", + sub, + "", + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "source pointer is only valid for ", + sup, + "", + ); err } infer::RelateParamBound(span, ty) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0477, - "the type `{}` does not fulfill the required \ - lifetime", - self.ty_to_string(ty)); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0477, + "the type `{}` does not fulfill the required \ + lifetime", + self.ty_to_string(ty) + ); match *sub 
{ - ty::ReStatic => { - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "type must satisfy ", sub, "") - } - _ => { - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "type must outlive ", sub, "") - } + ty::ReStatic => self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "type must satisfy ", + sub, + "", + ), + _ => self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "type must outlive ", + sub, + "", + ), } err } infer::RelateRegionParamBound(span) => { let mut err = struct_span_err!(self.tcx.sess, span, E0478, "lifetime bound not satisfied"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "lifetime parameter instantiated with ", - sup, - ""); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "but lifetime parameter must outlive ", - sub, - ""); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "lifetime parameter instantiated with ", + sup, + "", + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "but lifetime parameter must outlive ", + sub, + "", + ); err } infer::RelateDefaultParamBound(span, ty) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0479, - "the type `{}` (provided as the value of a type \ - parameter) is not valid at this point", - self.ty_to_string(ty)); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "type must outlive ", sub, ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0479, + "the type `{}` (provided as the value of a type \ + parameter) is not valid at this point", + self.ty_to_string(ty) + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "type must outlive ", + sub, + "", + ); err } infer::CallRcvr(span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0480, - "lifetime of method receiver does not outlive the \ - method call"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "the receiver is only valid for ", sup, ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0480, + "lifetime of method receiver does not outlive the \ + method call" + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "the receiver is only valid for ", + sup, + "", + ); err } infer::CallArg(span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0481, - "lifetime of function argument does not outlive \ - the function call"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "the function argument is only valid for ", - sup, - ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0481, + "lifetime of function argument does not outlive \ + the function call" + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "the function argument is only valid for ", + sup, + "", + ); err } infer::CallReturn(span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0482, - "lifetime of return value does not outlive the \ - function call"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "the return value is only valid for ", - sup, - ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0482, + "lifetime of return value does not outlive the \ + function call" + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "the return value is only valid for ", + sup, + "", + ); err } infer::Operand(span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0483, - "lifetime of operand 
does not outlive the \ - operation"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "the operand is only valid for ", sup, ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0483, + "lifetime of operand does not outlive the \ + operation" + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "the operand is only valid for ", + sup, + "", + ); err } infer::AddrOf(span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0484, - "reference is not valid at the time of borrow"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "the borrow is only valid for ", sup, ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0484, + "reference is not valid at the time of borrow" + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "the borrow is only valid for ", + sup, + "", + ); err } infer::AutoBorrow(span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0485, - "automatically reference is not valid at the time \ - of borrow"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "the automatic borrow is only valid for ", - sup, - ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0485, + "automatically reference is not valid at the time \ + of borrow" + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "the automatic borrow is only valid for ", + sup, + "", + ); err } infer::ExprTypeIsNotInScope(t, span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0486, - "type of expression contains references that are \ - not valid during the expression: `{}`", - self.ty_to_string(t)); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "type is only valid for ", sup, ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0486, + "type of expression contains references that are \ + not valid during the expression: `{}`", + self.ty_to_string(t) + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "type is only valid for ", + sup, + "", + ); err } infer::SafeDestructor(span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0487, - "unsafe use of destructor: destructor might be \ - called while references are dead"); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0487, + "unsafe use of destructor: destructor might be \ + called while references are dead" + ); // FIXME (22171): terms "super/subregion" are suboptimal - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "superregion: ", sup, ""); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "subregion: ", sub, ""); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "superregion: ", + sup, + "", + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "subregion: ", + sub, + "", + ); err } infer::BindingTypeIsNotValidAtDecl(span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0488, - "lifetime of variable does not enclose its \ - declaration"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "the variable is only valid for ", sup, ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0488, + "lifetime of variable does not enclose its \ + declaration" + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "the variable is only valid for ", + sup, + "", + ); err } infer::ParameterInScope(_, span) => { - let mut err = 
struct_span_err!(self.tcx.sess, - span, - E0489, - "type/lifetime parameter not in scope here"); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "the parameter is only valid for ", sub, ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0489, + "type/lifetime parameter not in scope here" + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "the parameter is only valid for ", + sub, + "", + ); err } infer::DataBorrowed(ty, span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0490, - "a value of type `{}` is borrowed for too long", - self.ty_to_string(ty)); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "the type is valid for ", sub, ""); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "but the borrow lasts for ", sup, ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0490, + "a value of type `{}` is borrowed for too long", + self.ty_to_string(ty) + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "the type is valid for ", + sub, + "", + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "but the borrow lasts for ", + sup, + "", + ); err } infer::ReferenceOutlivesReferent(ty, span) => { - let mut err = struct_span_err!(self.tcx.sess, - span, - E0491, - "in type `{}`, reference has a longer lifetime \ - than the data it references", - self.ty_to_string(ty)); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "the pointer is valid for ", sub, ""); - self.tcx.note_and_explain_region(region_scope_tree, &mut err, - "but the referenced data is only valid for ", - sup, - ""); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0491, + "in type `{}`, reference has a longer lifetime \ + than the data it references", + self.ty_to_string(ty) + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "the pointer is valid for ", + sub, + "", + ); + self.tcx.note_and_explain_region( + region_scope_tree, + &mut err, + "but the referenced data is only valid for ", + sup, + "", + ); err } - infer::CompareImplMethodObligation { span, - item_name, - impl_item_def_id, - trait_item_def_id } => { - self.report_extra_impl_obligation(span, - item_name, - impl_item_def_id, - trait_item_def_id, - &format!("`{}: {}`", sup, sub)) - } + infer::CompareImplMethodObligation { + span, + item_name, + impl_item_def_id, + trait_item_def_id, + } => self.report_extra_impl_obligation( + span, + item_name, + impl_item_def_id, + trait_item_def_id, + &format!("`{}: {}`", sup, sub), + ), } } @@ -457,9 +693,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { self.report_and_explain_type_error(trace, &terr) } - _ => { - self.report_concrete_failure(region_scope_tree, placeholder_origin, sub, sup) - } + _ => self.report_concrete_failure(region_scope_tree, placeholder_origin, sub, sup), } } } diff --git a/src/librustc/infer/freshen.rs b/src/librustc/infer/freshen.rs index 74abcf82529cb..9d17a9a68b290 100644 --- a/src/librustc/infer/freshen.rs +++ b/src/librustc/infer/freshen.rs @@ -31,24 +31,23 @@ //! variable only once, and it does so as soon as it can, so it is reasonable to ask what the type //! inferencer knows "so far". 
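The error-reporting arms reformatted earlier in this diff (E0473 through E0491) all describe the same family of failure: some value or type is required to outlive a region that it does not. As a point of reference only, here is a minimal, intentionally rejected program in that spirit; the struct name is made up, and the exact code emitted (E0477/E0478 under the lexical checker, or an NLL-era "lifetime may not live long enough" message) depends on the compiler version, so the comment is illustrative rather than authoritative.

// Sketch: a concrete type failing a required lifetime bound, the situation the
// "does not fulfill the required lifetime" / "lifetime bound not satisfied"
// diagnostics above report.
struct NeedsStatic<T: 'static>(T);

fn wrap<'a>(x: &'a u32) -> NeedsStatic<&'a u32> {
    NeedsStatic(x) // rejected: `&'a u32` must outlive `'static`, but `'a` is shorter
}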
-use ty::{self, Ty, TyCtxt, TypeFoldable}; use ty::fold::TypeFolder; +use ty::{self, Ty, TyCtxt, TypeFoldable}; use util::nodemap::FxHashMap; use std::collections::hash_map::Entry; -use super::InferCtxt; use super::unify_key::ToType; +use super::InferCtxt; -pub struct TypeFreshener<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +pub struct TypeFreshener<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, freshen_count: u32, freshen_map: FxHashMap>, } impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> { - pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) - -> TypeFreshener<'a, 'gcx, 'tcx> { + pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> TypeFreshener<'a, 'gcx, 'tcx> { TypeFreshener { infcx, freshen_count: 0, @@ -56,11 +55,8 @@ impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> { } } - fn freshen(&mut self, - opt_ty: Option>, - key: ty::InferTy, - freshener: F) - -> Ty<'tcx> where + fn freshen(&mut self, opt_ty: Option>, key: ty::InferTy, freshener: F) -> Ty<'tcx> + where F: FnOnce(u32) -> ty::InferTy, { if let Some(ty) = opt_ty { @@ -92,30 +88,29 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for TypeFreshener<'a, 'gcx, 'tcx> { r } - ty::ReStatic | - ty::ReEarlyBound(..) | - ty::ReFree(_) | - ty::ReScope(_) | - ty::ReVar(_) | - ty::RePlaceholder(..) | - ty::ReEmpty | - ty::ReErased => { + ty::ReStatic + | ty::ReEarlyBound(..) + | ty::ReFree(_) + | ty::ReScope(_) + | ty::ReVar(_) + | ty::RePlaceholder(..) + | ty::ReEmpty + | ty::ReErased => { // replace all free regions with 'erased self.tcx().types.re_erased } ty::ReClosureBound(..) => { - bug!( - "encountered unexpected region: {:?}", - r, - ); + bug!("encountered unexpected region: {:?}", r,); } } } fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { - if !t.needs_infer() && !t.has_erasable_regions() && - !(t.has_closure_types() && self.infcx.in_progress_tables.is_some()) { + if !t.needs_infer() + && !t.has_erasable_regions() + && !(t.has_closure_types() && self.infcx.in_progress_tables.is_some()) + { return t; } @@ -124,72 +119,70 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for TypeFreshener<'a, 'gcx, 'tcx> { match t.sty { ty::Infer(ty::TyVar(v)) => { let opt_ty = self.infcx.type_variables.borrow_mut().probe(v).known(); - self.freshen( - opt_ty, - ty::TyVar(v), - ty::FreshTy) - } - - ty::Infer(ty::IntVar(v)) => { - self.freshen( - self.infcx.int_unification_table.borrow_mut() - .probe_value(v) - .map(|v| v.to_type(tcx)), - ty::IntVar(v), - ty::FreshIntTy) - } - - ty::Infer(ty::FloatVar(v)) => { - self.freshen( - self.infcx.float_unification_table.borrow_mut() - .probe_value(v) - .map(|v| v.to_type(tcx)), - ty::FloatVar(v), - ty::FreshFloatTy) + self.freshen(opt_ty, ty::TyVar(v), ty::FreshTy) } - ty::Infer(ty::FreshTy(c)) | - ty::Infer(ty::FreshIntTy(c)) | - ty::Infer(ty::FreshFloatTy(c)) => { + ty::Infer(ty::IntVar(v)) => self.freshen( + self.infcx + .int_unification_table + .borrow_mut() + .probe_value(v) + .map(|v| v.to_type(tcx)), + ty::IntVar(v), + ty::FreshIntTy, + ), + + ty::Infer(ty::FloatVar(v)) => self.freshen( + self.infcx + .float_unification_table + .borrow_mut() + .probe_value(v) + .map(|v| v.to_type(tcx)), + ty::FloatVar(v), + ty::FreshFloatTy, + ), + + ty::Infer(ty::FreshTy(c)) + | ty::Infer(ty::FreshIntTy(c)) + | ty::Infer(ty::FreshFloatTy(c)) => { if c >= self.freshen_count { - bug!("Encountered a freshend type with id {} \ - but our counter is only at {}", - c, - self.freshen_count); + bug!( + "Encountered a freshend type with id {} \ + but our counter is only at {}", + c, + self.freshen_count + ); } t } - 
ty::Generator(..) | - ty::Bool | - ty::Char | - ty::Int(..) | - ty::Uint(..) | - ty::Float(..) | - ty::Adt(..) | - ty::Str | - ty::Error | - ty::Array(..) | - ty::Slice(..) | - ty::RawPtr(..) | - ty::Ref(..) | - ty::FnDef(..) | - ty::FnPtr(_) | - ty::Dynamic(..) | - ty::Never | - ty::Tuple(..) | - ty::Projection(..) | - ty::UnnormalizedProjection(..) | - ty::Foreign(..) | - ty::Param(..) | - ty::Closure(..) | - ty::GeneratorWitness(..) | - ty::Opaque(..) => { - t.super_fold_with(self) - } - - ty::Placeholder(..) | - ty::Bound(..) => bug!("unexpected type {:?}", t), + ty::Generator(..) + | ty::Bool + | ty::Char + | ty::Int(..) + | ty::Uint(..) + | ty::Float(..) + | ty::Adt(..) + | ty::Str + | ty::Error + | ty::Array(..) + | ty::Slice(..) + | ty::RawPtr(..) + | ty::Ref(..) + | ty::FnDef(..) + | ty::FnPtr(_) + | ty::Dynamic(..) + | ty::Never + | ty::Tuple(..) + | ty::Projection(..) + | ty::UnnormalizedProjection(..) + | ty::Foreign(..) + | ty::Param(..) + | ty::Closure(..) + | ty::GeneratorWitness(..) + | ty::Opaque(..) => t.super_fold_with(self), + + ty::Placeholder(..) | ty::Bound(..) => bug!("unexpected type {:?}", t), } } } diff --git a/src/librustc/infer/fudge.rs b/src/librustc/infer/fudge.rs index a38db5d210f7b..ae483feae69b5 100644 --- a/src/librustc/infer/fudge.rs +++ b/src/librustc/infer/fudge.rs @@ -1,6 +1,6 @@ use infer::type_variable::TypeVariableMap; -use ty::{self, Ty, TyCtxt}; use ty::fold::{TypeFoldable, TypeFolder}; +use ty::{self, Ty, TyCtxt}; use super::InferCtxt; use super::RegionVariableOrigin; @@ -45,9 +45,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { /// the actual types (`?T`, `Option(&self, - origin: &RegionVariableOrigin, - f: F) -> Result where + pub fn fudge_regions_if_ok(&self, origin: &RegionVariableOrigin, f: F) -> Result + where F: FnOnce() -> Result, T: TypeFoldable<'tcx>, { @@ -64,12 +63,13 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // going to be popped, so we will have to // eliminate any references to them. - let type_variables = - self.type_variables.borrow_mut().types_created_since_snapshot( - &snapshot.type_snapshot); - let region_vars = - self.borrow_region_constraints().vars_created_since_snapshot( - &snapshot.region_constraints_snapshot); + let type_variables = self + .type_variables + .borrow_mut() + .types_created_since_snapshot(&snapshot.type_snapshot); + let region_vars = self + .borrow_region_constraints() + .vars_created_since_snapshot(&snapshot.region_constraints_snapshot); Ok((type_variables, region_vars, value)) } @@ -99,7 +99,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } } -pub struct RegionFudger<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +pub struct RegionFudger<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, type_variables: &'a TypeVariableMap, region_vars: &'a Vec, @@ -121,9 +121,12 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionFudger<'a, 'gcx, 'tcx> { // variables to their binding anyhow, we know // that it is unbound, so we can just return // it. 
- debug_assert!(self.infcx.type_variables.borrow_mut() - .probe(vid) - .is_unknown()); + debug_assert!(self + .infcx + .type_variables + .borrow_mut() + .probe(vid) + .is_unknown()); ty } @@ -144,9 +147,7 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionFudger<'a, 'gcx, 'tcx> { ty::ReVar(v) if self.region_vars.contains(&v) => { self.infcx.next_region_var(self.origin.clone()) } - _ => { - r - } + _ => r, } } } diff --git a/src/librustc/infer/glb.rs b/src/librustc/infer/glb.rs index aebacd28f4581..e72024918511d 100644 --- a/src/librustc/infer/glb.rs +++ b/src/librustc/infer/glb.rs @@ -1,45 +1,55 @@ use super::combine::CombineFields; -use super::InferCtxt; use super::lattice::{self, LatticeDir}; +use super::InferCtxt; use super::Subtype; use traits::ObligationCause; -use ty::{self, Ty, TyCtxt}; use ty::relate::{self, Relate, RelateResult, TypeRelation}; +use ty::{self, Ty, TyCtxt}; /// "Greatest lower bound" (common subtype) -pub struct Glb<'combine, 'infcx: 'combine, 'gcx: 'infcx+'tcx, 'tcx: 'infcx> { +pub struct Glb<'combine, 'infcx: 'combine, 'gcx: 'infcx + 'tcx, 'tcx: 'infcx> { fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, a_is_expected: bool, } impl<'combine, 'infcx, 'gcx, 'tcx> Glb<'combine, 'infcx, 'gcx, 'tcx> { - pub fn new(fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, a_is_expected: bool) - -> Glb<'combine, 'infcx, 'gcx, 'tcx> - { - Glb { fields: fields, a_is_expected: a_is_expected } + pub fn new( + fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, + a_is_expected: bool, + ) -> Glb<'combine, 'infcx, 'gcx, 'tcx> { + Glb { + fields: fields, + a_is_expected: a_is_expected, + } } } impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> for Glb<'combine, 'infcx, 'gcx, 'tcx> { - fn tag(&self) -> &'static str { "Glb" } + fn tag(&self) -> &'static str { + "Glb" + } fn trait_object_mode(&self) -> relate::TraitObjectMode { self.fields.infcx.trait_object_mode() } - fn tcx(&self) -> TyCtxt<'infcx, 'gcx, 'tcx> { self.fields.tcx() } + fn tcx(&self) -> TyCtxt<'infcx, 'gcx, 'tcx> { + self.fields.tcx() + } - fn a_is_expected(&self) -> bool { self.a_is_expected } + fn a_is_expected(&self) -> bool { + self.a_is_expected + } - fn relate_with_variance>(&mut self, - variance: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T> - { + fn relate_with_variance>( + &mut self, + variance: ty::Variance, + a: &T, + b: &T, + ) -> RelateResult<'tcx, T> { match variance { ty::Invariant => self.fields.equate(self.a_is_expected).relate(a, b), ty::Covariant => self.relate(a, b), @@ -53,20 +63,28 @@ impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> lattice::super_lattice_tys(self, a, b) } - fn regions(&mut self, a: ty::Region<'tcx>, b: ty::Region<'tcx>) - -> RelateResult<'tcx, ty::Region<'tcx>> { - debug!("{}.regions({:?}, {:?})", - self.tag(), - a, - b); + fn regions( + &mut self, + a: ty::Region<'tcx>, + b: ty::Region<'tcx>, + ) -> RelateResult<'tcx, ty::Region<'tcx>> { + debug!("{}.regions({:?}, {:?})", self.tag(), a, b); let origin = Subtype(self.fields.trace.clone()); - Ok(self.fields.infcx.borrow_region_constraints().glb_regions(self.tcx(), origin, a, b)) + Ok(self + .fields + .infcx + .borrow_region_constraints() + .glb_regions(self.tcx(), origin, a, b)) } - fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) - -> RelateResult<'tcx, ty::Binder> - where T: Relate<'tcx> + fn binders( + &mut self, + a: &ty::Binder, + b: &ty::Binder, + ) -> RelateResult<'tcx, ty::Binder> + where + T: Relate<'tcx>, { debug!("binders(a={:?}, b={:?})", a, b); 
diff --git a/src/librustc/infer/higher_ranked/mod.rs b/src/librustc/infer/higher_ranked/mod.rs index 709e8c0ba9b24..ba9b43c8e71c8 100644 --- a/src/librustc/infer/higher_ranked/mod.rs +++ b/src/librustc/infer/higher_ranked/mod.rs @@ -37,9 +37,9 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> { // with a fresh region variable. These region variables -- // but no other pre-existing region variables -- can name // the placeholders. - let (a_prime, _) = - self.infcx - .replace_bound_vars_with_fresh_vars(span, HigherRankedType, a); + let (a_prime, _) = self + .infcx + .replace_bound_vars_with_fresh_vars(span, HigherRankedType, a); debug!("a_prime={:?}", a_prime); debug!("b_prime={:?}", b_prime); @@ -72,10 +72,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { /// [rustc guide]: https://rust-lang.github.io/rustc-guide/traits/hrtb.html pub fn replace_bound_vars_with_placeholders( &self, - binder: &ty::Binder + binder: &ty::Binder, ) -> (T, PlaceholderMap<'tcx>) where - T: TypeFoldable<'tcx> + T: TypeFoldable<'tcx>, { let next_universe = self.create_next_universe(); @@ -97,9 +97,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { debug!( "replace_bound_vars_with_placeholders(binder={:?}, result={:?}, map={:?})", - binder, - result, - map + binder, result, map ); (result, map) diff --git a/src/librustc/infer/lattice.rs b/src/librustc/infer/lattice.rs index a8794b4076a9d..0aba41f604221 100644 --- a/src/librustc/infer/lattice.rs +++ b/src/librustc/infer/lattice.rs @@ -19,15 +19,15 @@ //! over a `LatticeValue`, which is a value defined with respect to //! a lattice. -use super::InferCtxt; use super::type_variable::TypeVariableOrigin; +use super::InferCtxt; use traits::ObligationCause; +use ty::relate::{RelateResult, TypeRelation}; use ty::TyVar; use ty::{self, Ty}; -use ty::relate::{RelateResult, TypeRelation}; -pub trait LatticeDir<'f, 'gcx: 'f+'tcx, 'tcx: 'f> : TypeRelation<'f, 'gcx, 'tcx> { +pub trait LatticeDir<'f, 'gcx: 'f + 'tcx, 'tcx: 'f>: TypeRelation<'f, 'gcx, 'tcx> { fn infcx(&self) -> &'f InferCtxt<'f, 'gcx, 'tcx>; fn cause(&self) -> &ObligationCause<'tcx>; @@ -41,16 +41,17 @@ pub trait LatticeDir<'f, 'gcx: 'f+'tcx, 'tcx: 'f> : TypeRelation<'f, 'gcx, 'tcx> fn relate_bound(&mut self, v: Ty<'tcx>, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, ()>; } -pub fn super_lattice_tys<'a, 'gcx, 'tcx, L>(this: &mut L, - a: Ty<'tcx>, - b: Ty<'tcx>) - -> RelateResult<'tcx, Ty<'tcx>> - where L: LatticeDir<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a +pub fn super_lattice_tys<'a, 'gcx, 'tcx, L>( + this: &mut L, + a: Ty<'tcx>, + b: Ty<'tcx>, +) -> RelateResult<'tcx, Ty<'tcx>> +where + L: LatticeDir<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { - debug!("{}.lattice_tys({:?}, {:?})", - this.tag(), - a, - b); + debug!("{}.lattice_tys({:?}, {:?})", this.tag(), a, b); if a == b { return Ok(a); @@ -89,8 +90,6 @@ pub fn super_lattice_tys<'a, 'gcx, 'tcx, L>(this: &mut L, Ok(v) } - _ => { - infcx.super_combine_tys(this, a, b) - } + _ => infcx.super_combine_tys(this, a, b), } } diff --git a/src/librustc/infer/lexical_region_resolve/graphviz.rs b/src/librustc/infer/lexical_region_resolve/graphviz.rs index 7ce2aba54f5cc..33db0a75d8c08 100644 --- a/src/librustc/infer/lexical_region_resolve/graphviz.rs +++ b/src/librustc/infer/lexical_region_resolve/graphviz.rs @@ -8,46 +8,48 @@ /// For clarity, rename the graphviz crate locally to dot. 
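The `Glb`, `Lub`, and `lattice` hunks above compute common sub- and supertypes of partially inferred types. As a compiling surface-level illustration of the least upper bound (a sketch only, with made-up lifetime names, and unrelated to the graphviz file whose diff follows), the two arms of an `if` are unified to their LUB:

// LUB(&'short str, &'long str) = &'short str, because 'long: 'short.
fn pick<'short, 'long: 'short>(flag: bool, a: &'short str, b: &'long str) -> &'short str {
    if flag { a } else { b } // both arms coerce to the common supertype &'short str
}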
use graphviz as dot; +use super::Constraint; use hir::def_id::DefIndex; -use ty; +use infer::region_constraints::RegionConstraintData; +use infer::SubregionOrigin; use middle::free_region::RegionRelations; use middle::region; -use super::Constraint; -use infer::SubregionOrigin; -use infer::region_constraints::RegionConstraintData; +use ty; use util::nodemap::{FxHashMap, FxHashSet}; use std::borrow::Cow; -use std::collections::hash_map::Entry::Vacant; use std::collections::btree_map::BTreeMap; +use std::collections::hash_map::Entry::Vacant; use std::env; use std::fs; use std::io; use std::sync::atomic::{AtomicBool, Ordering}; fn print_help_message() { - println!("\ --Z print-region-graph by default prints a region constraint graph for every \n\ -function body, to the path `constraints.nodeXXX.dot`, where the XXX is \n\ -replaced with the node id of the function under analysis. \n\ - \n\ -To select one particular function body, set `RUST_REGION_GRAPH_NODE=XXX`, \n\ -where XXX is the node id desired. \n\ - \n\ -To generate output to some path other than the default \n\ -`constraints.nodeXXX.dot`, set `RUST_REGION_GRAPH=/path/desired.dot`; \n\ -occurrences of the character `%` in the requested path will be replaced with\n\ -the node id of the function under analysis. \n\ - \n\ -(Since you requested help via RUST_REGION_GRAPH=help, no region constraint \n\ -graphs will be printed. \n\ -"); + println!( + "\ + -Z print-region-graph by default prints a region constraint graph for every \n\ + function body, to the path `constraints.nodeXXX.dot`, where the XXX is \n\ + replaced with the node id of the function under analysis. \n\ + \n\ + To select one particular function body, set `RUST_REGION_GRAPH_NODE=XXX`, \n\ + where XXX is the node id desired. \n\ + \n\ + To generate output to some path other than the default \n\ + `constraints.nodeXXX.dot`, set `RUST_REGION_GRAPH=/path/desired.dot`; \n\ + occurrences of the character `%` in the requested path will be replaced with\n\ + the node id of the function under analysis. \n\ + \n\ + (Since you requested help via RUST_REGION_GRAPH=help, no region constraint \n\ + graphs will be printed. 
\n\ + " + ); } pub fn maybe_print_constraints_for<'a, 'gcx, 'tcx>( region_data: &RegionConstraintData<'tcx>, - region_rels: &RegionRelations<'a, 'gcx, 'tcx>) -{ + region_rels: &RegionRelations<'a, 'gcx, 'tcx>, +) { let tcx = region_rels.tcx; let context = region_rels.context; @@ -56,16 +58,18 @@ pub fn maybe_print_constraints_for<'a, 'gcx, 'tcx>( } let requested_node = env::var("RUST_REGION_GRAPH_NODE") - .ok().and_then(|s| s.parse().map(DefIndex::from_raw_u32).ok()); + .ok() + .and_then(|s| s.parse().map(DefIndex::from_raw_u32).ok()); if requested_node.is_some() && requested_node != Some(context.index) { return; } let requested_output = env::var("RUST_REGION_GRAPH"); - debug!("requested_output: {:?} requested_node: {:?}", - requested_output, - requested_node); + debug!( + "requested_output: {:?} requested_node: {:?}", + requested_output, requested_node + ); let output_path = { let output_template = match requested_output { @@ -107,7 +111,7 @@ pub fn maybe_print_constraints_for<'a, 'gcx, 'tcx>( } } -struct ConstraintGraph<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +struct ConstraintGraph<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { graph_name: String, region_rels: &'a RegionRelations<'a, 'gcx, 'tcx>, map: &'a BTreeMap, SubregionOrigin<'tcx>>, @@ -127,10 +131,11 @@ enum Edge<'tcx> { } impl<'a, 'gcx, 'tcx> ConstraintGraph<'a, 'gcx, 'tcx> { - fn new(name: String, - region_rels: &'a RegionRelations<'a, 'gcx, 'tcx>, - map: &'a ConstraintMap<'tcx>) - -> ConstraintGraph<'a, 'gcx, 'tcx> { + fn new( + name: String, + region_rels: &'a RegionRelations<'a, 'gcx, 'tcx>, + map: &'a ConstraintMap<'tcx>, + ) -> ConstraintGraph<'a, 'gcx, 'tcx> { let mut i = 0; let mut node_ids = FxHashMap::default(); { @@ -174,8 +179,8 @@ impl<'a, 'gcx, 'tcx> dot::Labeller<'a> for ConstraintGraph<'a, 'gcx, 'tcx> { }; let name = || format!("node_{}", node_id); - dot::Id::new(name()).unwrap_or_else(|_| - bug!("failed to create graphviz node identified by {}", name())) + dot::Id::new(name()) + .unwrap_or_else(|_| bug!("failed to create graphviz node identified by {}", name())) } fn node_label(&self, n: &Node) -> dot::LabelText<'_> { match *n { @@ -185,8 +190,9 @@ impl<'a, 'gcx, 'tcx> dot::Labeller<'a> for ConstraintGraph<'a, 'gcx, 'tcx> { } fn edge_label(&self, e: &Edge<'_>) -> dot::LabelText<'_> { match *e { - Edge::Constraint(ref c) => - dot::LabelText::label(format!("{:?}", self.map.get(c).unwrap())), + Edge::Constraint(ref c) => { + dot::LabelText::label(format!("{:?}", self.map.get(c).unwrap())) + } Edge::EnclScope(..) 
=> dot::LabelText::label("(enclosed)".to_owned()), } } @@ -194,24 +200,20 @@ impl<'a, 'gcx, 'tcx> dot::Labeller<'a> for ConstraintGraph<'a, 'gcx, 'tcx> { fn constraint_to_nodes(c: &Constraint<'_>) -> (Node, Node) { match *c { - Constraint::VarSubVar(rv_1, rv_2) => - (Node::RegionVid(rv_1), Node::RegionVid(rv_2)), - Constraint::RegSubVar(r_1, rv_2) => - (Node::Region(*r_1), Node::RegionVid(rv_2)), - Constraint::VarSubReg(rv_1, r_2) => - (Node::RegionVid(rv_1), Node::Region(*r_2)), - Constraint::RegSubReg(r_1, r_2) => - (Node::Region(*r_1), Node::Region(*r_2)), + Constraint::VarSubVar(rv_1, rv_2) => (Node::RegionVid(rv_1), Node::RegionVid(rv_2)), + Constraint::RegSubVar(r_1, rv_2) => (Node::Region(*r_1), Node::RegionVid(rv_2)), + Constraint::VarSubReg(rv_1, r_2) => (Node::RegionVid(rv_1), Node::Region(*r_2)), + Constraint::RegSubReg(r_1, r_2) => (Node::Region(*r_1), Node::Region(*r_2)), } } fn edge_to_nodes(e: &Edge<'_>) -> (Node, Node) { match *e { Edge::Constraint(ref c) => constraint_to_nodes(c), - Edge::EnclScope(sub, sup) => { - (Node::Region(ty::ReScope(sub)), - Node::Region(ty::ReScope(sup))) - } + Edge::EnclScope(sub, sup) => ( + Node::Region(ty::ReScope(sub)), + Node::Region(ty::ReScope(sup)), + ), } } @@ -226,9 +228,9 @@ impl<'a, 'gcx, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'gcx, 'tcx> { fn edges(&self) -> dot::Edges<'_, Edge<'tcx>> { debug!("constraint graph has {} edges", self.map.len()); let mut v: Vec<_> = self.map.keys().map(|e| Edge::Constraint(*e)).collect(); - self.region_rels.region_scope_tree.each_encl_scope(|sub, sup| { - v.push(Edge::EnclScope(sub, sup)) - }); + self.region_rels + .region_scope_tree + .each_encl_scope(|sub, sup| v.push(Edge::EnclScope(sub, sup))); debug!("region graph has {} edges", v.len()); Cow::Owned(v) } @@ -246,13 +248,12 @@ impl<'a, 'gcx, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'gcx, 'tcx> { pub type ConstraintMap<'tcx> = BTreeMap, SubregionOrigin<'tcx>>; -fn dump_region_data_to<'a, 'gcx, 'tcx>(region_rels: &RegionRelations<'a, 'gcx, 'tcx>, - map: &ConstraintMap<'tcx>, - path: &str) - -> io::Result<()> { - debug!("dump_region_data map (len: {}) path: {}", - map.len(), - path); +fn dump_region_data_to<'a, 'gcx, 'tcx>( + region_rels: &RegionRelations<'a, 'gcx, 'tcx>, + map: &ConstraintMap<'tcx>, + path: &str, +) -> io::Result<()> { + debug!("dump_region_data map (len: {}) path: {}", map.len(), path); let g = ConstraintGraph::new("region_data".to_string(), region_rels, map); debug!("dump_region_data calling render"); let mut v = Vec::new(); diff --git a/src/librustc/infer/lexical_region_resolve/mod.rs b/src/librustc/infer/lexical_region_resolve/mod.rs index dbf8f270ab0c9..d5de34af6a426 100644 --- a/src/librustc/infer/lexical_region_resolve/mod.rs +++ b/src/librustc/infer/lexical_region_resolve/mod.rs @@ -18,7 +18,7 @@ use std::u32; use ty::fold::TypeFoldable; use ty::{self, Ty, TyCtxt}; use ty::{ReEarlyBound, ReEmpty, ReErased, ReFree, ReStatic}; -use ty::{ReLateBound, ReScope, RePlaceholder, ReVar}; +use ty::{ReLateBound, RePlaceholder, ReScope, ReVar}; use ty::{Region, RegionVid}; mod graphviz; @@ -138,7 +138,7 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> { fn construct_var_data(&self, tcx: TyCtxt<'_, '_, 'tcx>) -> LexicalRegionResolutions<'tcx> { LexicalRegionResolutions { error_region: tcx.types.re_static, - values: IndexVec::from_elem_n(VarValue::Value(tcx.types.re_empty), self.num_vars()) + values: IndexVec::from_elem_n(VarValue::Value(tcx.types.re_empty), self.num_vars()), } } @@ -218,16 +218,16 @@ impl<'cx, 
'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> { match *a_region { // Check if this relationship is implied by a given. - ty::ReEarlyBound(_) | ty::ReFree(_) => if self.data.givens.contains(&(a_region, b_vid)) - { - debug!("given"); - return false; - }, + ty::ReEarlyBound(_) | ty::ReFree(_) => { + if self.data.givens.contains(&(a_region, b_vid)) { + debug!("given"); + return false; + } + } _ => {} } - match *b_data { VarValue::Value(cur_region) => { let mut lub = self.lub_concrete_regions(a_region, cur_region); @@ -304,15 +304,18 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> { // at least as big as fr.scope". So, we can // reasonably compare free regions and scopes: let fr_scope = match (a, b) { - (&ReEarlyBound(ref br), _) | (_, &ReEarlyBound(ref br)) => self.region_rels + (&ReEarlyBound(ref br), _) | (_, &ReEarlyBound(ref br)) => self + .region_rels .region_scope_tree .early_free_scope(self.tcx(), br), - (&ReFree(ref fr), _) | (_, &ReFree(ref fr)) => self.region_rels + (&ReFree(ref fr), _) | (_, &ReFree(ref fr)) => self + .region_rels .region_scope_tree .free_scope(self.tcx(), fr), _ => bug!(), }; - let r_id = self.region_rels + let r_id = self + .region_rels .region_scope_tree .nearest_common_ancestor(fr_scope, s_id); if r_id == fr_scope { @@ -335,7 +338,8 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> { // The region corresponding to an outer block is a // subtype of the region corresponding to an inner // block. - let lub = self.region_rels + let lub = self + .region_rels .region_scope_tree .nearest_common_ancestor(a_id, b_id); tcx.mk_region(ReScope(lub)) @@ -348,11 +352,13 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> { // For these types, we cannot define any additional // relationship: - (&RePlaceholder(..), _) | (_, &RePlaceholder(..)) => if a == b { - a - } else { - tcx.types.re_static - }, + (&RePlaceholder(..), _) | (_, &RePlaceholder(..)) => { + if a == b { + a + } else { + tcx.types.re_static + } + } } } @@ -473,27 +479,27 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> { VarValue::Value(_) => { /* Inference successful */ } VarValue::ErrorValue => { /* Inference impossible, this value contains - inconsistent constraints. - - I think that in this case we should report an - error now---unlike the case above, we can't - wait to see whether the user needs the result - of this variable. The reason is that the mere - existence of this variable implies that the - region graph is inconsistent, whether or not it - is used. - - For example, we may have created a region - variable that is the GLB of two other regions - which do not have a GLB. Even if that variable - is not used, it implies that those two regions - *should* have a GLB. - - At least I think this is true. It may be that - the mere existence of a conflict in a region variable - that is not used is not a problem, so if this rule - starts to create problems we'll have to revisit - this portion of the code and think hard about it. =) */ + inconsistent constraints. + + I think that in this case we should report an + error now---unlike the case above, we can't + wait to see whether the user needs the result + of this variable. The reason is that the mere + existence of this variable implies that the + region graph is inconsistent, whether or not it + is used. + + For example, we may have created a region + variable that is the GLB of two other regions + which do not have a GLB. Even if that variable + is not used, it implies that those two regions + *should* have a GLB. 
+ + At least I think this is true. It may be that + the mere existence of a conflict in a region variable + that is not used is not a problem, so if this rule + starts to create problems we'll have to revisit + this portion of the code and think hard about it. =) */ self.collect_error_for_expanding_node(graph, &mut dup_vec, node_vid, errors); } } @@ -587,7 +593,8 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> { }; for upper_bound in &upper_bounds { - if !self.region_rels + if !self + .region_rels .is_subregion_of(effective_lower_bound, upper_bound.region) { let origin = self.var_infos[node_idx].origin.clone(); @@ -742,16 +749,16 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> { && self.bound_is_met(b, var_values, generic_ty, min) } - VerifyBound::OutlivedBy(r) => - self.region_rels.is_subregion_of( - min, - var_values.normalize(self.tcx(), r), - ), + VerifyBound::OutlivedBy(r) => self + .region_rels + .is_subregion_of(min, var_values.normalize(self.tcx(), r)), - VerifyBound::AnyBound(bs) => bs.iter() + VerifyBound::AnyBound(bs) => bs + .iter() .any(|b| self.bound_is_met(b, var_values, generic_ty, min)), - VerifyBound::AllBounds(bs) => bs.iter() + VerifyBound::AllBounds(bs) => bs + .iter() .all(|b| self.bound_is_met(b, var_values, generic_ty, min)), } } diff --git a/src/librustc/infer/lub.rs b/src/librustc/infer/lub.rs index c6af8e4967150..b1feab98d1a6d 100644 --- a/src/librustc/infer/lub.rs +++ b/src/librustc/infer/lub.rs @@ -1,45 +1,55 @@ use super::combine::CombineFields; -use super::InferCtxt; use super::lattice::{self, LatticeDir}; +use super::InferCtxt; use super::Subtype; use traits::ObligationCause; -use ty::{self, Ty, TyCtxt}; use ty::relate::{self, Relate, RelateResult, TypeRelation}; +use ty::{self, Ty, TyCtxt}; /// "Least upper bound" (common supertype) -pub struct Lub<'combine, 'infcx: 'combine, 'gcx: 'infcx+'tcx, 'tcx: 'infcx> { +pub struct Lub<'combine, 'infcx: 'combine, 'gcx: 'infcx + 'tcx, 'tcx: 'infcx> { fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, a_is_expected: bool, } impl<'combine, 'infcx, 'gcx, 'tcx> Lub<'combine, 'infcx, 'gcx, 'tcx> { - pub fn new(fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, a_is_expected: bool) - -> Lub<'combine, 'infcx, 'gcx, 'tcx> - { - Lub { fields: fields, a_is_expected: a_is_expected } + pub fn new( + fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, + a_is_expected: bool, + ) -> Lub<'combine, 'infcx, 'gcx, 'tcx> { + Lub { + fields: fields, + a_is_expected: a_is_expected, + } } } impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> for Lub<'combine, 'infcx, 'gcx, 'tcx> { - fn tag(&self) -> &'static str { "Lub" } + fn tag(&self) -> &'static str { + "Lub" + } fn trait_object_mode(&self) -> relate::TraitObjectMode { self.fields.infcx.trait_object_mode() } - fn tcx(&self) -> TyCtxt<'infcx, 'gcx, 'tcx> { self.fields.tcx() } + fn tcx(&self) -> TyCtxt<'infcx, 'gcx, 'tcx> { + self.fields.tcx() + } - fn a_is_expected(&self) -> bool { self.a_is_expected } + fn a_is_expected(&self) -> bool { + self.a_is_expected + } - fn relate_with_variance>(&mut self, - variance: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T> - { + fn relate_with_variance>( + &mut self, + variance: ty::Variance, + a: &T, + b: &T, + ) -> RelateResult<'tcx, T> { match variance { ty::Invariant => self.fields.equate(self.a_is_expected).relate(a, b), ty::Covariant => self.relate(a, b), @@ -53,20 +63,28 @@ impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> lattice::super_lattice_tys(self, a, b) } - 
fn regions(&mut self, a: ty::Region<'tcx>, b: ty::Region<'tcx>) - -> RelateResult<'tcx, ty::Region<'tcx>> { - debug!("{}.regions({:?}, {:?})", - self.tag(), - a, - b); + fn regions( + &mut self, + a: ty::Region<'tcx>, + b: ty::Region<'tcx>, + ) -> RelateResult<'tcx, ty::Region<'tcx>> { + debug!("{}.regions({:?}, {:?})", self.tag(), a, b); let origin = Subtype(self.fields.trace.clone()); - Ok(self.fields.infcx.borrow_region_constraints().lub_regions(self.tcx(), origin, a, b)) + Ok(self + .fields + .infcx + .borrow_region_constraints() + .lub_regions(self.tcx(), origin, a, b)) } - fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) - -> RelateResult<'tcx, ty::Binder> - where T: Relate<'tcx> + fn binders( + &mut self, + a: &ty::Binder, + b: &ty::Binder, + ) -> RelateResult<'tcx, ty::Binder> + where + T: Relate<'tcx>, { debug!("binders(a={:?}, b={:?})", a, b); diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs index 2d3fb137fafdd..381e2c9ededc5 100644 --- a/src/librustc/infer/mod.rs +++ b/src/librustc/infer/mod.rs @@ -27,7 +27,7 @@ use ty::error::{ExpectedFound, TypeError, UnconstrainedNumeric}; use ty::fold::TypeFoldable; use ty::relate::{RelateResult, TraitObjectMode}; use ty::subst::{Kind, Substs}; -use ty::{self, GenericParamDefKind, Ty, TyCtxt, CtxtInterners}; +use ty::{self, CtxtInterners, GenericParamDefKind, Ty, TyCtxt}; use ty::{FloatVid, IntVid, TyVid}; use util::nodemap::FxHashMap; @@ -638,7 +638,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { use ty::error::UnconstrainedNumeric::{UnconstrainedFloat, UnconstrainedInt}; match ty.sty { ty::Infer(ty::IntVar(vid)) => { - if self.int_unification_table + if self + .int_unification_table .borrow_mut() .probe_value(vid) .is_some() @@ -649,7 +650,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } } ty::Infer(ty::FloatVar(vid)) => { - if self.float_unification_table + if self + .float_unification_table .borrow_mut() .probe_value(vid) .is_some() @@ -876,9 +878,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { &self, snapshot: &CombinedSnapshot<'a, 'tcx>, ) -> Option { - self.borrow_region_constraints().region_constraints_added_in_snapshot( - &snapshot.region_constraints_snapshot, - ) + self.borrow_region_constraints() + .region_constraints_added_in_snapshot(&snapshot.region_constraints_snapshot) } pub fn add_given(&self, sub: ty::Region<'tcx>, sup: ty::RegionVid) { @@ -993,9 +994,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { pub fn next_ty_var_in_universe( &self, origin: TypeVariableOrigin, - universe: ty::UniverseIndex + universe: ty::UniverseIndex, ) -> Ty<'tcx> { - let vid = self.type_variables + let vid = self + .type_variables .borrow_mut() .new_var(universe, false, origin); self.tcx.mk_var(vid) @@ -1028,7 +1030,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { origin: RegionVariableOrigin, universe: ty::UniverseIndex, ) -> ty::Region<'tcx> { - let region_var = self.borrow_region_constraints() + let region_var = self + .borrow_region_constraints() .new_region_var(universe, origin); self.tcx.mk_region(ty::ReVar(region_var)) } @@ -1136,7 +1139,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { region_map, outlives_env.free_region_map(), ); - let (var_infos, data) = self.region_constraints + let (var_infos, data) = self + .region_constraints .borrow_mut() .take() .expect("regions already resolved") @@ -1144,7 +1148,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let (lexical_region_resolutions, errors) = lexical_region_resolve::resolve(region_rels, var_infos, data); - let old_value = 
self.lexical_region_resolutions + let old_value = self + .lexical_region_resolutions .replace(Some(lexical_region_resolutions)); assert!(old_value.is_none()); @@ -1194,7 +1199,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { /// called. This is used only during NLL processing to "hand off" ownership /// of the set of region variables into the NLL region context. pub fn take_region_var_origins(&self) -> VarInfos { - let (var_infos, data) = self.region_constraints + let (var_infos, data) = self + .region_constraints .borrow_mut() .take() .expect("regions already resolved") @@ -1240,13 +1246,15 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { .unwrap_or(typ) } - ty::Infer(ty::IntVar(v)) => self.int_unification_table + ty::Infer(ty::IntVar(v)) => self + .int_unification_table .borrow_mut() .probe_value(v) .map(|v| v.to_type(self.tcx)) .unwrap_or(typ), - ty::Infer(ty::FloatVar(v)) => self.float_unification_table + ty::Infer(ty::FloatVar(v)) => self + .float_unification_table .borrow_mut() .probe_value(v) .map(|v| v.to_type(self.tcx)) @@ -1376,10 +1384,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { &self, span: Span, lbrct: LateBoundRegionConversionTime, - value: &ty::Binder + value: &ty::Binder, ) -> (T, BTreeMap>) where - T: TypeFoldable<'tcx> + T: TypeFoldable<'tcx>, { let fld_r = |br| self.next_region_var(LateBoundRegion(span, br, lbrct)); let fld_t = |_| self.next_ty_var(TypeVariableOrigin::MiscVariable(span)); diff --git a/src/librustc/infer/nll_relate/mod.rs b/src/librustc/infer/nll_relate/mod.rs index db5ec3c1c0c46..eee9830ac0dd5 100644 --- a/src/librustc/infer/nll_relate/mod.rs +++ b/src/librustc/infer/nll_relate/mod.rs @@ -22,12 +22,12 @@ //! constituents) use crate::infer::InferCtxt; +use crate::traits::DomainGoal; +use crate::ty::error::TypeError; use crate::ty::fold::{TypeFoldable, TypeVisitor}; use crate::ty::relate::{self, Relate, RelateResult, TypeRelation}; use crate::ty::subst::Kind; use crate::ty::{self, Ty, TyCtxt}; -use crate::ty::error::TypeError; -use crate::traits::DomainGoal; use rustc_data_structures::fx::FxHashMap; #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] @@ -265,7 +265,7 @@ where fn relate_projection_ty( &mut self, projection_ty: ty::ProjectionTy<'tcx>, - value_ty: ty::Ty<'tcx> + value_ty: ty::Ty<'tcx>, ) -> Ty<'tcx> { use crate::infer::type_variable::TypeVariableOrigin; use crate::traits::WhereClause; @@ -273,7 +273,9 @@ where match value_ty.sty { ty::Projection(other_projection_ty) => { - let var = self.infcx.next_ty_var(TypeVariableOrigin::MiscVariable(DUMMY_SP)); + let var = self + .infcx + .next_ty_var(TypeVariableOrigin::MiscVariable(DUMMY_SP)); self.relate_projection_ty(projection_ty, var); self.relate_projection_ty(other_projection_ty, var); var @@ -284,9 +286,8 @@ where projection_ty, ty: value_ty, }; - self.delegate.push_domain_goal( - DomainGoal::Holds(WhereClause::ProjectionEq(projection)) - ); + self.delegate + .push_domain_goal(DomainGoal::Holds(WhereClause::ProjectionEq(projection))); value_ty } } @@ -296,20 +297,21 @@ where fn relate_ty_var( &mut self, vid: ty::TyVid, - value_ty: Ty<'tcx> + value_ty: Ty<'tcx>, ) -> RelateResult<'tcx, Ty<'tcx>> { debug!("relate_ty_var(vid={:?}, value_ty={:?})", vid, value_ty); match value_ty.sty { ty::Infer(ty::TyVar(value_vid)) => { // Two type variables: just equate them. 
- self.infcx.type_variables.borrow_mut().equate(vid, value_vid); + self.infcx + .type_variables + .borrow_mut() + .equate(vid, value_vid); return Ok(value_ty); } - ty::Projection(projection_ty) - if D::normalization() == NormalizationStrategy::Lazy => - { + ty::Projection(projection_ty) if D::normalization() == NormalizationStrategy::Lazy => { return Ok(self.relate_projection_ty(projection_ty, self.infcx.tcx.mk_var(vid))); } @@ -326,7 +328,10 @@ where assert!(!generalized_ty.has_infer_types()); } - self.infcx.type_variables.borrow_mut().instantiate(vid, generalized_ty); + self.infcx + .type_variables + .borrow_mut() + .instantiate(vid, generalized_ty); // The generalized values we extract from `canonical_var_values` have // been fully instantiated and hence the set of scopes we have @@ -347,7 +352,7 @@ where fn generalize_value>( &mut self, value: T, - for_vid: ty::TyVid + for_vid: ty::TyVid, ) -> RelateResult<'tcx, T> { let universe = self.infcx.probe_ty_var(for_vid).unwrap_err(); @@ -374,8 +379,10 @@ where fn trait_object_mode(&self) -> relate::TraitObjectMode { // squashing should only be done in coherence, not NLL - assert_eq!(self.infcx.trait_object_mode(), - relate::TraitObjectMode::NoSquash); + assert_eq!( + self.infcx.trait_object_mode(), + relate::TraitObjectMode::NoSquash + ); relate::TraitObjectMode::NoSquash } @@ -695,8 +702,10 @@ where fn trait_object_mode(&self) -> relate::TraitObjectMode { // squashing should only be done in coherence, not NLL - assert_eq!(self.infcx.trait_object_mode(), - relate::TraitObjectMode::NoSquash); + assert_eq!( + self.infcx.trait_object_mode(), + relate::TraitObjectMode::NoSquash + ); relate::TraitObjectMode::NoSquash } @@ -766,7 +775,9 @@ where drop(variables); self.relate(&u, &u) } - TypeVariableValue::Unknown { universe: _universe } => { + TypeVariableValue::Unknown { + universe: _universe, + } => { if self.ambient_variance == ty::Bivariant { // FIXME: we may need a WF predicate (related to #54105). } @@ -781,8 +792,7 @@ where let u = self.tcx().mk_var(new_var_id); debug!( "generalize: replacing original vid={:?} with new={:?}", - vid, - u + vid, u ); return Ok(u); } @@ -790,8 +800,7 @@ where } } - ty::Infer(ty::IntVar(_)) | - ty::Infer(ty::FloatVar(_)) => { + ty::Infer(ty::IntVar(_)) | ty::Infer(ty::FloatVar(_)) => { // No matter what mode we are in, // integer/floating-point types must be equal to be // relatable. 
@@ -802,9 +811,8 @@ where if self.universe.cannot_name(placeholder.universe) { debug!( "TypeGeneralizer::tys: root universe {:?} cannot name\ - placeholder in universe {:?}", - self.universe, - placeholder.universe + placeholder in universe {:?}", + self.universe, placeholder.universe ); Err(TypeError::Mismatch) } else { @@ -812,9 +820,7 @@ where } } - _ => { - relate::super_relate_tys(self, a, a) - } + _ => relate::super_relate_tys(self, a, a), } } diff --git a/src/librustc/infer/opaque_types/mod.rs b/src/librustc/infer/opaque_types/mod.rs index 56c6da83ed9a2..ea47535e2d4c5 100644 --- a/src/librustc/infer/opaque_types/mod.rs +++ b/src/librustc/infer/opaque_types/mod.rs @@ -1,15 +1,15 @@ -use hir::def_id::DefId; use hir; +use hir::def_id::DefId; use hir::Node; -use infer::{self, InferCtxt, InferOk, TypeVariableOrigin}; use infer::outlives::free_region_map::FreeRegionRelations; +use infer::{self, InferCtxt, InferOk, TypeVariableOrigin}; use rustc_data_structures::fx::FxHashMap; use syntax::ast; use traits::{self, PredicateObligation}; -use ty::{self, Ty, TyCtxt, GenericParamDefKind}; use ty::fold::{BottomUpFolder, TypeFoldable, TypeFolder}; use ty::outlives::Component; use ty::subst::{Kind, Substs, UnpackedKind}; +use ty::{self, GenericParamDefKind, Ty, TyCtxt}; use util::nodemap::DefIdMap; pub type OpaqueTypeMap<'tcx> = DefIdMap>; @@ -102,9 +102,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { param_env: ty::ParamEnv<'tcx>, value: &T, ) -> InferOk<'tcx, (T, OpaqueTypeMap<'tcx>)> { - debug!("instantiate_opaque_types(value={:?}, parent_def_id={:?}, body_id={:?}, \ - param_env={:?})", - value, parent_def_id, body_id, param_env, + debug!( + "instantiate_opaque_types(value={:?}, parent_def_id={:?}, body_id={:?}, \ + param_env={:?})", + value, parent_def_id, body_id, param_env, ); let mut instantiator = Instantiator { infcx: self, @@ -307,7 +308,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { for param in &abstract_type_generics.params { match param.kind { GenericParamDefKind::Lifetime => {} - _ => continue + _ => continue, } // Get the value supplied for this region from the substs. let subst_arg = opaque_defn.substs.region_at(param.index as usize); @@ -448,14 +449,13 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // Convert the type from the function into a type valid outside // the function, by replacing invalid regions with 'static, // after producing an error for each of them. - let definition_ty = - instantiated_ty.fold_with(&mut ReverseMapper::new( - self.tcx, - self.is_tainted_by_errors(), - def_id, - map, - instantiated_ty, - )); + let definition_ty = instantiated_ty.fold_with(&mut ReverseMapper::new( + self.tcx, + self.is_tainted_by_errors(), + def_id, + map, + instantiated_ty, + )); debug!( "infer_opaque_definition_from_instantiation: definition_ty={:?}", definition_ty @@ -570,14 +570,14 @@ impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for ReverseMapper<'cx, 'gcx, 'tcx> &mut err, &format!("hidden type `{}` captures ", hidden_ty), r, - "" + "", ); err.emit(); } } self.tcx.types.re_empty - }, + } } } @@ -609,17 +609,17 @@ impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for ReverseMapper<'cx, 'gcx, 'tcx> // during codegen. let generics = self.tcx.generics_of(def_id); - let substs = self.tcx.mk_substs(substs.substs.iter().enumerate().map( - |(index, &kind)| { - if index < generics.parent_count { - // Accommodate missing regions in the parent kinds... - self.fold_kind_mapping_missing_regions_to_empty(kind) - } else { - // ...but not elsewhere. 
- self.fold_kind_normally(kind) - } - }, - )); + let substs = + self.tcx + .mk_substs(substs.substs.iter().enumerate().map(|(index, &kind)| { + if index < generics.parent_count { + // Accommodate missing regions in the parent kinds... + self.fold_kind_mapping_missing_regions_to_empty(kind) + } else { + // ...but not elsewhere. + self.fold_kind_normally(kind) + } + })); self.tcx.mk_closure(def_id, ty::ClosureSubsts { substs }) } @@ -759,10 +759,7 @@ impl<'a, 'gcx, 'tcx> Instantiator<'a, 'gcx, 'tcx> { let ty_var = infcx.next_ty_var(TypeVariableOrigin::TypeInference(span)); let predicates_of = tcx.predicates_of(def_id); - debug!( - "instantiate_opaque_types: predicates: {:#?}", - predicates_of, - ); + debug!("instantiate_opaque_types: predicates: {:#?}", predicates_of,); let bounds = predicates_of.instantiate(tcx, substs); debug!("instantiate_opaque_types: bounds={:?}", bounds); @@ -775,10 +772,7 @@ impl<'a, 'gcx, 'tcx> Instantiator<'a, 'gcx, 'tcx> { // make sure that we are in fact defining the *entire* type // e.g., `existential type Foo: Bar;` needs to be // defined by a function like `fn foo() -> Foo`. - debug!( - "instantiate_opaque_types: param_env: {:#?}", - self.param_env, - ); + debug!("instantiate_opaque_types: param_env: {:#?}", self.param_env,); debug!( "instantiate_opaque_types: generics: {:#?}", tcx.generics_of(def_id), @@ -837,10 +831,7 @@ pub fn may_define_existential_type( def_id: DefId, opaque_node_id: ast::NodeId, ) -> bool { - let mut node_id = tcx - .hir() - .as_local_node_id(def_id) - .unwrap(); + let mut node_id = tcx.hir().as_local_node_id(def_id).unwrap(); // named existential types can be defined by any siblings or // children of siblings let mod_id = tcx.hir().get_parent(opaque_node_id); diff --git a/src/librustc/infer/outlives/env.rs b/src/librustc/infer/outlives/env.rs index 677b6136ea03b..79897ab0cff98 100644 --- a/src/librustc/infer/outlives/env.rs +++ b/src/librustc/infer/outlives/env.rs @@ -177,10 +177,9 @@ impl<'a, 'gcx: 'tcx, 'tcx: 'a> OutlivesEnvironment<'tcx> { /// Save the current set of region-bound pairs under the given `body_id`. pub fn save_implied_bounds(&mut self, body_id: ast::NodeId) { - let old = self.region_bound_pairs_map.insert( - body_id, - self.region_bound_pairs_accum.clone(), - ); + let old = self + .region_bound_pairs_map + .insert(body_id, self.region_bound_pairs_accum.clone()); assert!(old.is_none()); } diff --git a/src/librustc/infer/outlives/free_region_map.rs b/src/librustc/infer/outlives/free_region_map.rs index a6703c9d679da..600b0a93a79ce 100644 --- a/src/librustc/infer/outlives/free_region_map.rs +++ b/src/librustc/infer/outlives/free_region_map.rs @@ -1,5 +1,5 @@ -use ty::{self, Lift, TyCtxt, Region}; use rustc_data_structures::transitive_relation::TransitiveRelation; +use ty::{self, Lift, Region, TyCtxt}; #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Default)] pub struct FreeRegionMap<'tcx> { @@ -7,7 +7,7 @@ pub struct FreeRegionMap<'tcx> { // // Invariant: only free regions like `'x` or `'static` are stored // in this relation, not scopes. - relation: TransitiveRelation> + relation: TransitiveRelation>, } impl<'tcx> FreeRegionMap<'tcx> { @@ -28,21 +28,27 @@ impl<'tcx> FreeRegionMap<'tcx> { /// cases, this is more conservative than necessary, in order to /// avoid making arbitrary choices. See /// `TransitiveRelation::postdom_upper_bound` for more details. 
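Looking back at the `opaque_types` hunks above: that machinery infers the hidden type behind an `impl Trait` or `existential type` from its defining function body. A minimal compiling example of the situation it handles (a sketch, not tied to any particular hunk, with a made-up function name):

// The body pins down the hidden type behind the opaque return type; callers
// only ever see the `impl Iterator<Item = u32>` signature.
fn evens() -> impl Iterator<Item = u32> {
    (0..10u32).filter(|n| n % 2 == 0) // hidden type: Filter<Range<u32>, {closure}>
}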
- pub fn lub_free_regions<'a, 'gcx>(&self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - r_a: Region<'tcx>, - r_b: Region<'tcx>) - -> Region<'tcx> { + pub fn lub_free_regions<'a, 'gcx>( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + r_a: Region<'tcx>, + r_b: Region<'tcx>, + ) -> Region<'tcx> { debug!("lub_free_regions(r_a={:?}, r_b={:?})", r_a, r_b); assert!(is_free(r_a)); assert!(is_free(r_b)); - let result = if r_a == r_b { r_a } else { + let result = if r_a == r_b { + r_a + } else { match self.relation.postdom_upper_bound(&r_a, &r_b) { None => tcx.mk_region(ty::ReStatic), Some(r) => *r, } }; - debug!("lub_free_regions(r_a={:?}, r_b={:?}) = {:?}", r_a, r_b, result); + debug!( + "lub_free_regions(r_a={:?}, r_b={:?}) = {:?}", + r_a, r_b, result + ); result } } @@ -57,10 +63,7 @@ pub trait FreeRegionRelations<'tcx> { } impl<'tcx> FreeRegionRelations<'tcx> for FreeRegionMap<'tcx> { - fn sub_free_regions(&self, - r_a: Region<'tcx>, - r_b: Region<'tcx>) - -> bool { + fn sub_free_regions(&self, r_a: Region<'tcx>, r_b: Region<'tcx>) -> bool { assert!(is_free_or_static(r_a) && is_free_or_static(r_b)); if let ty::ReStatic = r_b { true // `'a <= 'static` is just always true, and not stored in the relation explicitly @@ -73,7 +76,7 @@ impl<'tcx> FreeRegionRelations<'tcx> for FreeRegionMap<'tcx> { fn is_free(r: Region<'_>) -> bool { match *r { ty::ReEarlyBound(_) | ty::ReFree(_) => true, - _ => false + _ => false, } } @@ -91,7 +94,8 @@ impl_stable_hash_for!(struct FreeRegionMap<'tcx> { impl<'a, 'tcx> Lift<'tcx> for FreeRegionMap<'a> { type Lifted = FreeRegionMap<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option> { - self.relation.maybe_map(|&fr| fr.lift_to_tcx(tcx)) - .map(|relation| FreeRegionMap { relation }) + self.relation + .maybe_map(|&fr| fr.lift_to_tcx(tcx)) + .map(|relation| FreeRegionMap { relation }) } } diff --git a/src/librustc/infer/outlives/obligations.rs b/src/librustc/infer/outlives/obligations.rs index abe835c9211a5..d2505a57c5f34 100644 --- a/src/librustc/infer/outlives/obligations.rs +++ b/src/librustc/infer/outlives/obligations.rs @@ -382,14 +382,16 @@ where // Compute the bounds we can derive from the trait definition. // These are guaranteed to apply, no matter the inference // results. - let trait_bounds: Vec<_> = self.verify_bound + let trait_bounds: Vec<_> = self + .verify_bound .projection_declared_bounds_from_trait(projection_ty) .collect(); // Compute the bounds we can derive from the environment. This // is an "approximate" match -- in some cases, these bounds // may not apply. - let mut approx_env_bounds = self.verify_bound + let mut approx_env_bounds = self + .verify_bound .projection_approx_declared_bounds_from_env(projection_ty); debug!( "projection_must_outlive: approx_env_bounds={:?}", @@ -401,15 +403,13 @@ where // #55756) in cases where you have e.g., `>::Item: // 'a` in the environment but `trait Foo<'b> { type Item: 'b // }` in the trait definition. 
- approx_env_bounds.retain(|bound| { - match bound.0.sty { - ty::Projection(projection_ty) => { - self.verify_bound.projection_declared_bounds_from_trait(projection_ty) - .all(|r| r != bound.1) - } + approx_env_bounds.retain(|bound| match bound.0.sty { + ty::Projection(projection_ty) => self + .verify_bound + .projection_declared_bounds_from_trait(projection_ty) + .all(|r| r != bound.1), - _ => panic!("expected only projection types from env, not {:?}", bound.0), - } + _ => panic!("expected only projection types from env, not {:?}", bound.0), }); // If declared bounds list is empty, the only applicable rule is diff --git a/src/librustc/infer/outlives/verify.rs b/src/librustc/infer/outlives/verify.rs index 4e9a8e9ded899..2b374523dc6ef 100644 --- a/src/librustc/infer/outlives/verify.rs +++ b/src/librustc/infer/outlives/verify.rs @@ -56,7 +56,8 @@ impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> { // Start with anything like `T: 'a` we can scrape from the // environment - let param_bounds = self.declared_generic_bounds_from_env(GenericKind::Param(param_ty)) + let param_bounds = self + .declared_generic_bounds_from_env(GenericKind::Param(param_ty)) .into_iter() .map(|outlives| outlives.1); @@ -109,11 +110,13 @@ impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> { pub fn projection_bound(&self, projection_ty: ty::ProjectionTy<'tcx>) -> VerifyBound<'tcx> { debug!("projection_bound(projection_ty={:?})", projection_ty); - let projection_ty_as_ty = - self.tcx.mk_projection(projection_ty.item_def_id, projection_ty.substs); + let projection_ty_as_ty = self + .tcx + .mk_projection(projection_ty.item_def_id, projection_ty.substs); // Search the env for where clauses like `P: 'a`. - let env_bounds = self.projection_approx_declared_bounds_from_env(projection_ty) + let env_bounds = self + .projection_approx_declared_bounds_from_env(projection_ty) .into_iter() .map(|ty::OutlivesPredicate(ty, r)| { let vb = VerifyBound::OutlivedBy(r); @@ -128,12 +131,14 @@ impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> { }); // Extend with bounds that we can find from the trait. - let trait_bounds = self.projection_declared_bounds_from_trait(projection_ty) + let trait_bounds = self + .projection_declared_bounds_from_trait(projection_ty) .into_iter() .map(|r| VerifyBound::OutlivedBy(r)); // see the extensive comment in projection_must_outlive - let ty = self.tcx + let ty = self + .tcx .mk_projection(projection_ty.item_def_id, projection_ty.substs); let recursive_bound = self.recursive_type_bound(ty); @@ -141,7 +146,8 @@ impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> { } fn recursive_type_bound(&self, ty: Ty<'tcx>) -> VerifyBound<'tcx> { - let mut bounds = ty.walk_shallow() + let mut bounds = ty + .walk_shallow() .map(|subty| self.type_bound(subty)) .collect::>(); @@ -288,7 +294,9 @@ impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> { let tcx = self.tcx; let assoc_item = tcx.associated_item(assoc_item_def_id); let trait_def_id = assoc_item.container.assert_trait(); - let trait_predicates = tcx.predicates_of(trait_def_id).predicates + let trait_predicates = tcx + .predicates_of(trait_def_id) + .predicates .iter() .map(|(p, _)| *p) .collect(); @@ -297,7 +305,8 @@ impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> { self.collect_outlives_from_predicate_list( move |ty| ty == identity_proj, traits::elaborate_predicates(tcx, trait_predicates), - ).map(|b| b.1) + ) + .map(|b| b.1) } /// Searches through a predicate list for a predicate `T: 'a`. 
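The `verify.rs` hunks above gather `T: 'a`-style bounds both from the caller's environment and from trait definitions. A small compiling example of the second source (a sketch; the trait and its names are invented for illustration):

// The `type Item: 'b;` declaration is the kind of trait-declared projection
// bound that the "declared bounds from trait" path above picks up.
trait Producer<'b> {
    type Item: 'b;
    fn produce(&self) -> Self::Item;
}

fn keep<'b, P: Producer<'b>>(p: &P) -> P::Item {
    p.produce() // known to outlive 'b without any extra where-clause
}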
diff --git a/src/librustc/infer/region_constraints/mod.rs b/src/librustc/infer/region_constraints/mod.rs index 56ae850226c91..21e0ac2c8ba4f 100644 --- a/src/librustc/infer/region_constraints/mod.rs +++ b/src/librustc/infer/region_constraints/mod.rs @@ -834,7 +834,8 @@ impl<'tcx> RegionConstraintCollector<'tcx> { .filter_map(|&elt| match elt { AddVar(vid) => Some(vid), _ => None, - }).collect() + }) + .collect() } /// See [`RegionInference::region_constraints_added_in_snapshot`] @@ -844,7 +845,8 @@ impl<'tcx> RegionConstraintCollector<'tcx> { .map(|&elt| match elt { AddConstraint(constraint) => Some(constraint.involves_placeholders()), _ => None, - }).max() + }) + .max() .unwrap_or(None) } } diff --git a/src/librustc/infer/resolve.rs b/src/librustc/infer/resolve.rs index f6131c01b372f..4c48e62d66c3b 100644 --- a/src/librustc/infer/resolve.rs +++ b/src/librustc/infer/resolve.rs @@ -1,6 +1,6 @@ -use super::{InferCtxt, FixupError, FixupResult}; -use ty::{self, Ty, TyCtxt, TypeFoldable}; +use super::{FixupError, FixupResult, InferCtxt}; use ty::fold::{TypeFolder, TypeVisitor}; +use ty::{self, Ty, TyCtxt, TypeFoldable}; /////////////////////////////////////////////////////////////////////////// // OPPORTUNISTIC TYPE RESOLVER @@ -10,7 +10,7 @@ use ty::fold::{TypeFolder, TypeVisitor}; /// been unified with (similar to `shallow_resolve`, but deep). This is /// useful for printing messages etc but also required at various /// points for correctness. -pub struct OpportunisticTypeResolver<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +pub struct OpportunisticTypeResolver<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, } @@ -39,7 +39,7 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for OpportunisticTypeResolver<'a, 'g /// The opportunistic type and region resolver is similar to the /// opportunistic type resolver, but also opportunistically resolves /// regions. It is useful for canonicalization. -pub struct OpportunisticTypeAndRegionResolver<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +pub struct OpportunisticTypeAndRegionResolver<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, } @@ -65,11 +65,11 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for OpportunisticTypeAndRegionResolv fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { match *r { - ty::ReVar(rid) => - self.infcx.borrow_region_constraints() - .opportunistic_resolve_var(self.tcx(), rid), - _ => - r, + ty::ReVar(rid) => self + .infcx + .borrow_region_constraints() + .opportunistic_resolve_var(self.tcx(), rid), + _ => r, } } } @@ -81,7 +81,7 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for OpportunisticTypeAndRegionResolv /// type variables that don't yet have a value. They get pushed into a /// vector. It does not construct the fully resolved type (which might /// involve some hashing and so forth). -pub struct UnresolvedTypeFinder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +pub struct UnresolvedTypeFinder<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, } @@ -117,11 +117,17 @@ impl<'a, 'gcx, 'tcx> TypeVisitor<'tcx> for UnresolvedTypeFinder<'a, 'gcx, 'tcx> /// Full type resolution replaces all type and region variables with /// their concrete results. If any variable cannot be replaced (never unified, etc) /// then an `Err` result is returned. 
-pub fn fully_resolve<'a, 'gcx, 'tcx, T>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, - value: &T) -> FixupResult - where T : TypeFoldable<'tcx> +pub fn fully_resolve<'a, 'gcx, 'tcx, T>( + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + value: &T, +) -> FixupResult +where + T: TypeFoldable<'tcx>, { - let mut full_resolver = FullTypeResolver { infcx: infcx, err: None }; + let mut full_resolver = FullTypeResolver { + infcx: infcx, + err: None, + }; let result = value.fold_with(&mut full_resolver); match full_resolver.err { None => Ok(result), @@ -131,7 +137,7 @@ pub fn fully_resolve<'a, 'gcx, 'tcx, T>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, // N.B. This type is not public because the protocol around checking the // `err` field is not enforcable otherwise. -struct FullTypeResolver<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +struct FullTypeResolver<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, err: Option, } @@ -164,20 +170,20 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for FullTypeResolver<'a, 'gcx, 'tcx> ty::Infer(_) => { bug!("Unexpected type in full type resolver: {:?}", t); } - _ => { - t.super_fold_with(self) - } + _ => t.super_fold_with(self), } } } fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { match *r { - ty::ReVar(rid) => self.infcx.lexical_region_resolutions - .borrow() - .as_ref() - .expect("region resolution not performed") - .resolve_var(rid), + ty::ReVar(rid) => self + .infcx + .lexical_region_resolutions + .borrow() + .as_ref() + .expect("region resolution not performed") + .resolve_var(rid), _ => r, } } diff --git a/src/librustc/infer/sub.rs b/src/librustc/infer/sub.rs index 1d4842a6d5626..2b8f4cd6fcfe1 100644 --- a/src/librustc/infer/sub.rs +++ b/src/librustc/infer/sub.rs @@ -1,24 +1,28 @@ -use super::SubregionOrigin; use super::combine::{CombineFields, RelationDir}; +use super::SubregionOrigin; +use std::mem; use traits::Obligation; -use ty::{self, Ty, TyCtxt}; -use ty::TyVar; use ty::fold::TypeFoldable; use ty::relate::{self, Cause, Relate, RelateResult, TypeRelation}; -use std::mem; +use ty::TyVar; +use ty::{self, Ty, TyCtxt}; /// Ensures `a` is made a subtype of `b`. Returns `a` on success. 
-pub struct Sub<'combine, 'infcx: 'combine, 'gcx: 'infcx+'tcx, 'tcx: 'infcx> { +pub struct Sub<'combine, 'infcx: 'combine, 'gcx: 'infcx + 'tcx, 'tcx: 'infcx> { fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, a_is_expected: bool, } impl<'combine, 'infcx, 'gcx, 'tcx> Sub<'combine, 'infcx, 'gcx, 'tcx> { - pub fn new(f: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, a_is_expected: bool) - -> Sub<'combine, 'infcx, 'gcx, 'tcx> - { - Sub { fields: f, a_is_expected: a_is_expected } + pub fn new( + f: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, + a_is_expected: bool, + ) -> Sub<'combine, 'infcx, 'gcx, 'tcx> { + Sub { + fields: f, + a_is_expected: a_is_expected, + } } fn with_expected_switched R>(&mut self, f: F) -> R { @@ -32,16 +36,23 @@ impl<'combine, 'infcx, 'gcx, 'tcx> Sub<'combine, 'infcx, 'gcx, 'tcx> { impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> for Sub<'combine, 'infcx, 'gcx, 'tcx> { - fn tag(&self) -> &'static str { "Sub" } + fn tag(&self) -> &'static str { + "Sub" + } fn trait_object_mode(&self) -> relate::TraitObjectMode { self.fields.infcx.trait_object_mode() } - fn tcx(&self) -> TyCtxt<'infcx, 'gcx, 'tcx> { self.fields.infcx.tcx } - fn a_is_expected(&self) -> bool { self.a_is_expected } + fn tcx(&self) -> TyCtxt<'infcx, 'gcx, 'tcx> { + self.fields.infcx.tcx + } + fn a_is_expected(&self) -> bool { + self.a_is_expected + } - fn with_cause(&mut self, cause: Cause, f: F) -> R - where F: FnOnce(&mut Self) -> R + fn with_cause(&mut self, cause: Cause, f: F) -> R + where + F: FnOnce(&mut Self) -> R, { debug!("sub with_cause={:?}", cause); let old_cause = mem::replace(&mut self.fields.cause, Some(cause)); @@ -51,24 +62,26 @@ impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> r } - fn relate_with_variance>(&mut self, - variance: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T> - { + fn relate_with_variance>( + &mut self, + variance: ty::Variance, + a: &T, + b: &T, + ) -> RelateResult<'tcx, T> { match variance { ty::Invariant => self.fields.equate(self.a_is_expected).relate(a, b), ty::Covariant => self.relate(a, b), ty::Bivariant => Ok(a.clone()), - ty::Contravariant => self.with_expected_switched(|this| { this.relate(b, a) }), + ty::Contravariant => self.with_expected_switched(|this| this.relate(b, a)), } } fn tys(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> { debug!("{}.tys({:?}, {:?})", self.tag(), a, b); - if a == b { return Ok(a); } + if a == b { + return Ok(a); + } let infcx = self.fields.infcx; let a = infcx.type_variables.borrow_mut().replace_if_possible(a); @@ -87,16 +100,15 @@ impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> // the two variables are equal modulo subtyping, which // is important to the occurs check later on. 
infcx.type_variables.borrow_mut().sub(a_vid, b_vid); - self.fields.obligations.push( - Obligation::new( - self.fields.trace.cause.clone(), - self.fields.param_env, - ty::Predicate::Subtype( - ty::Binder::dummy(ty::SubtypePredicate { - a_is_expected: self.a_is_expected, - a, - b, - })))); + self.fields.obligations.push(Obligation::new( + self.fields.trace.cause.clone(), + self.fields.param_env, + ty::Predicate::Subtype(ty::Binder::dummy(ty::SubtypePredicate { + a_is_expected: self.a_is_expected, + a, + b, + })), + )); Ok(a) } @@ -106,7 +118,8 @@ impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> Ok(a) } (_, &ty::Infer(TyVar(b_id))) => { - self.fields.instantiate(a, RelationDir::SubtypeOf, b_id, self.a_is_expected)?; + self.fields + .instantiate(a, RelationDir::SubtypeOf, b_id, self.a_is_expected)?; Ok(a) } @@ -122,24 +135,38 @@ impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx> } } - fn regions(&mut self, a: ty::Region<'tcx>, b: ty::Region<'tcx>) - -> RelateResult<'tcx, ty::Region<'tcx>> { - debug!("{}.regions({:?}, {:?}) self.cause={:?}", - self.tag(), a, b, self.fields.cause); + fn regions( + &mut self, + a: ty::Region<'tcx>, + b: ty::Region<'tcx>, + ) -> RelateResult<'tcx, ty::Region<'tcx>> { + debug!( + "{}.regions({:?}, {:?}) self.cause={:?}", + self.tag(), + a, + b, + self.fields.cause + ); // FIXME -- we have more fine-grained information available // from the "cause" field, we could perhaps give more tailored // error messages. let origin = SubregionOrigin::Subtype(self.fields.trace.clone()); - self.fields.infcx.borrow_region_constraints() - .make_subregion(origin, a, b); + self.fields + .infcx + .borrow_region_constraints() + .make_subregion(origin, a, b); Ok(a) } - fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) - -> RelateResult<'tcx, ty::Binder> - where T: Relate<'tcx> + fn binders( + &mut self, + a: &ty::Binder, + b: &ty::Binder, + ) -> RelateResult<'tcx, ty::Binder> + where + T: Relate<'tcx>, { self.fields.higher_ranked_sub(a, b, self.a_is_expected) } diff --git a/src/librustc/infer/type_variable.rs b/src/librustc/infer/type_variable.rs index 3ec27bdcf1bcd..0946497d0ec56 100644 --- a/src/librustc/infer/type_variable.rs +++ b/src/librustc/infer/type_variable.rs @@ -2,12 +2,12 @@ use syntax::symbol::InternedString; use syntax_pos::Span; use ty::{self, Ty}; -use std::cmp; -use std::marker::PhantomData; -use std::u32; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::snapshot_vec as sv; use rustc_data_structures::unify as ut; +use std::cmp; +use std::marker::PhantomData; +use std::u32; pub struct TypeVariableTable<'tcx> { values: sv::SnapshotVec, @@ -151,10 +151,15 @@ impl<'tcx> TypeVariableTable<'tcx> { pub fn instantiate(&mut self, vid: ty::TyVid, ty: Ty<'tcx>) { let vid = self.root_var(vid); debug_assert!(self.probe(vid).is_unknown()); - debug_assert!(self.eq_relations.probe_value(vid).is_unknown(), - "instantiating type variable `{:?}` twice: new-value = {:?}, old-value={:?}", - vid, ty, self.eq_relations.probe_value(vid)); - self.eq_relations.union_value(vid, TypeVariableValue::Known { value: ty }); + debug_assert!( + self.eq_relations.probe_value(vid).is_unknown(), + "instantiating type variable `{:?}` twice: new-value = {:?}, old-value={:?}", + vid, + ty, + self.eq_relations.probe_value(vid) + ); + self.eq_relations + .union_value(vid, TypeVariableValue::Known { value: ty }); // Hack: we only need this so that `types_escaping_snapshot` // can see what has been unified; see the Delegate impl for @@ -172,23 +177,26 @@ 
impl<'tcx> TypeVariableTable<'tcx> { /// - `origin`: indicates *why* the type variable was created. /// The code in this module doesn't care, but it can be useful /// for improving error messages. - pub fn new_var(&mut self, - universe: ty::UniverseIndex, - diverging: bool, - origin: TypeVariableOrigin) - -> ty::TyVid { - let eq_key = self.eq_relations.new_key(TypeVariableValue::Unknown { universe }); + pub fn new_var( + &mut self, + universe: ty::UniverseIndex, + diverging: bool, + origin: TypeVariableOrigin, + ) -> ty::TyVid { + let eq_key = self + .eq_relations + .new_key(TypeVariableValue::Unknown { universe }); let sub_key = self.sub_relations.new_key(()); assert_eq!(eq_key.vid, sub_key); - let index = self.values.push(TypeVariableData { - origin, - diverging, - }); + let index = self.values.push(TypeVariableData { origin, diverging }); assert_eq!(eq_key.vid.index, index as u32); - debug!("new_var(index={:?}, diverging={:?}, origin={:?}", eq_key.vid, diverging, origin); + debug!( + "new_var(index={:?}, diverging={:?}, origin={:?}", + eq_key.vid, diverging, origin + ); eq_key.vid } @@ -235,12 +243,10 @@ impl<'tcx> TypeVariableTable<'tcx> { /// instantiated. Otherwise, returns `t`. pub fn replace_if_possible(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { match t.sty { - ty::Infer(ty::TyVar(v)) => { - match self.probe(v) { - TypeVariableValue::Unknown { .. } => t, - TypeVariableValue::Known { value } => value, - } - } + ty::Infer(ty::TyVar(v)) => match self.probe(v) { + TypeVariableValue::Unknown { .. } => t, + TypeVariableValue::Known { value } => value, + }, _ => t, } } @@ -269,7 +275,11 @@ impl<'tcx> TypeVariableTable<'tcx> { } }); - let Snapshot { snapshot, eq_snapshot, sub_snapshot } = s; + let Snapshot { + snapshot, + eq_snapshot, + sub_snapshot, + } = s; self.values.rollback_to(snapshot); self.eq_relations.rollback_to(eq_snapshot); self.sub_relations.rollback_to(sub_snapshot); @@ -280,7 +290,11 @@ impl<'tcx> TypeVariableTable<'tcx> { /// another snapshot). Any snapshots created since that point /// must already have been committed or rolled back. pub fn commit(&mut self, s: Snapshot<'tcx>) { - let Snapshot { snapshot, eq_snapshot, sub_snapshot } = s; + let Snapshot { + snapshot, + eq_snapshot, + sub_snapshot, + } = s; self.values.commit(snapshot); self.eq_relations.commit(eq_snapshot); self.sub_relations.commit(sub_snapshot); @@ -296,7 +310,9 @@ impl<'tcx> TypeVariableTable<'tcx> { actions_since_snapshot .iter() .filter_map(|action| match action { - &sv::UndoLog::NewElem(index) => Some(ty::TyVid { index: index as u32 }), + &sv::UndoLog::NewElem(index) => Some(ty::TyVid { + index: index as u32, + }), _ => None, }) .map(|vid| { @@ -316,7 +332,10 @@ impl<'tcx> TypeVariableTable<'tcx> { let mut new_elem_threshold = u32::MAX; let mut escaping_types = Vec::new(); let actions_since_snapshot = self.values.actions_since_snapshot(&s.snapshot); - debug!("actions_since_snapshot.len() = {}", actions_since_snapshot.len()); + debug!( + "actions_since_snapshot.len() = {}", + actions_since_snapshot.len() + ); for action in actions_since_snapshot { match *action { sv::UndoLog::NewElem(index) => { @@ -326,7 +345,10 @@ impl<'tcx> TypeVariableTable<'tcx> { // action must precede those variables being // specified. new_elem_threshold = cmp::min(new_elem_threshold, index as u32); - debug!("NewElem({}) new_elem_threshold={}", index, new_elem_threshold); + debug!( + "NewElem({}) new_elem_threshold={}", + index, new_elem_threshold + ); } sv::UndoLog::Other(Instantiate { vid, .. 
}) => { @@ -339,10 +361,13 @@ impl<'tcx> TypeVariableTable<'tcx> { }; escaping_types.push(escaping_type); } - debug!("SpecifyVar({:?}) new_elem_threshold={}", vid, new_elem_threshold); + debug!( + "SpecifyVar({:?}) new_elem_threshold={}", + vid, new_elem_threshold + ); } - _ => { } + _ => {} } } @@ -399,15 +424,24 @@ struct TyVidEqKey<'tcx> { impl<'tcx> From for TyVidEqKey<'tcx> { fn from(vid: ty::TyVid) -> Self { - TyVidEqKey { vid, phantom: PhantomData } + TyVidEqKey { + vid, + phantom: PhantomData, + } } } impl<'tcx> ut::UnifyKey for TyVidEqKey<'tcx> { type Value = TypeVariableValue<'tcx>; - fn index(&self) -> u32 { self.vid.index } - fn from_index(i: u32) -> Self { TyVidEqKey::from(ty::TyVid { index: i }) } - fn tag() -> &'static str { "TyVidEqKey" } + fn index(&self) -> u32 { + self.vid.index + } + fn from_index(i: u32) -> Self { + TyVidEqKey::from(ty::TyVid { index: i }) + } + fn tag() -> &'static str { + "TyVidEqKey" + } } impl<'tcx> ut::UnifyValue for TypeVariableValue<'tcx> { @@ -427,8 +461,14 @@ impl<'tcx> ut::UnifyValue for TypeVariableValue<'tcx> { (&TypeVariableValue::Unknown { .. }, &TypeVariableValue::Known { .. }) => Ok(*value2), // If both sides are *unknown*, it hardly matters, does it? - (&TypeVariableValue::Unknown { universe: universe1 }, - &TypeVariableValue::Unknown { universe: universe2 }) => { + ( + &TypeVariableValue::Unknown { + universe: universe1, + }, + &TypeVariableValue::Unknown { + universe: universe2, + }, + ) => { // If we unify two unbound variables, ?T and ?U, then whatever // value they wind up taking (which must be the same value) must // be nameable by both universes. Therefore, the resulting @@ -445,8 +485,13 @@ impl<'tcx> ut::UnifyValue for TypeVariableValue<'tcx> { /// they carry no values. impl ut::UnifyKey for ty::TyVid { type Value = (); - fn index(&self) -> u32 { self.index } - fn from_index(i: u32) -> ty::TyVid { ty::TyVid { index: i } } - fn tag() -> &'static str { "TyVid" } + fn index(&self) -> u32 { + self.index + } + fn from_index(i: u32) -> ty::TyVid { + ty::TyVid { index: i } + } + fn tag() -> &'static str { + "TyVid" + } } - diff --git a/src/librustc/infer/unify_key.rs b/src/librustc/infer/unify_key.rs index 068ff0c90e7fc..b909cb6000e90 100644 --- a/src/librustc/infer/unify_key.rs +++ b/src/librustc/infer/unify_key.rs @@ -1,5 +1,5 @@ +use rustc_data_structures::unify::{EqUnifyValue, NoError, UnifyKey, UnifyValue}; use ty::{self, FloatVarValue, IntVarValue, Ty, TyCtxt}; -use rustc_data_structures::unify::{NoError, EqUnifyValue, UnifyKey, UnifyValue}; pub trait ToType { fn to_type<'a, 'gcx, 'tcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx>; @@ -7,20 +7,25 @@ pub trait ToType { impl UnifyKey for ty::IntVid { type Value = Option; - fn index(&self) -> u32 { self.index } - fn from_index(i: u32) -> ty::IntVid { ty::IntVid { index: i } } - fn tag() -> &'static str { "IntVid" } + fn index(&self) -> u32 { + self.index + } + fn from_index(i: u32) -> ty::IntVid { + ty::IntVid { index: i } + } + fn tag() -> &'static str { + "IntVid" + } } -impl EqUnifyValue for IntVarValue { -} +impl EqUnifyValue for IntVarValue {} #[derive(PartialEq, Copy, Clone, Debug)] pub struct RegionVidKey { /// The minimum region vid in the unification set. This is needed /// to have a canonical name for a type to prevent infinite /// recursion. 
- pub min_vid: ty::RegionVid + pub min_vid: ty::RegionVid, } impl UnifyValue for RegionVidKey { @@ -39,9 +44,15 @@ impl UnifyValue for RegionVidKey { impl UnifyKey for ty::RegionVid { type Value = RegionVidKey; - fn index(&self) -> u32 { u32::from(*self) } - fn from_index(i: u32) -> ty::RegionVid { ty::RegionVid::from(i) } - fn tag() -> &'static str { "RegionVid" } + fn index(&self) -> u32 { + u32::from(*self) + } + fn from_index(i: u32) -> ty::RegionVid { + ty::RegionVid::from(i) + } + fn tag() -> &'static str { + "RegionVid" + } } impl ToType for IntVarValue { @@ -57,13 +68,18 @@ impl ToType for IntVarValue { impl UnifyKey for ty::FloatVid { type Value = Option; - fn index(&self) -> u32 { self.index } - fn from_index(i: u32) -> ty::FloatVid { ty::FloatVid { index: i } } - fn tag() -> &'static str { "FloatVid" } + fn index(&self) -> u32 { + self.index + } + fn from_index(i: u32) -> ty::FloatVid { + ty::FloatVid { index: i } + } + fn tag() -> &'static str { + "FloatVid" + } } -impl EqUnifyValue for FloatVarValue { -} +impl EqUnifyValue for FloatVarValue {} impl ToType for FloatVarValue { fn to_type<'a, 'gcx, 'tcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> { diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index b2a924ac19895..a20ecfe0f763d 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -26,10 +26,11 @@ //! //! This API is completely unstable and subject to change. -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] - +#![doc( + html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/" +)] #![feature(box_patterns)] #![feature(box_syntax)] #![feature(core_intrinsics)] @@ -61,42 +62,46 @@ #![feature(in_band_lifetimes)] #![feature(crate_visibility_modifier)] #![feature(transpose_result)] - -#![recursion_limit="512"] - +#![recursion_limit = "512"] #![warn(elided_lifetimes_in_paths)] extern crate arena; -#[macro_use] extern crate bitflags; +#[macro_use] +extern crate bitflags; extern crate core; extern crate fmt_macros; extern crate getopts; extern crate graphviz; -#[macro_use] extern crate lazy_static; -#[macro_use] extern crate scoped_tls; +#[macro_use] +extern crate lazy_static; +#[macro_use] +extern crate scoped_tls; #[cfg(windows)] extern crate libc; extern crate polonius_engine; extern crate rustc_target; -#[macro_use] extern crate rustc_data_structures; -extern crate serialize; +#[macro_use] +extern crate rustc_data_structures; extern crate parking_lot; extern crate rustc_errors as errors; extern crate rustc_rayon as rayon; extern crate rustc_rayon_core as rayon_core; -#[macro_use] extern crate log; -#[macro_use] extern crate syntax; -extern crate syntax_pos; +extern crate serialize; +#[macro_use] +extern crate log; +#[macro_use] +extern crate syntax; +extern crate chalk_engine; extern crate jobserver; extern crate proc_macro; -extern crate chalk_engine; extern crate rustc_fs_util; +extern crate syntax_pos; extern crate serialize as rustc_serialize; // used by deriving -extern crate rustc_apfloat; -extern crate byteorder; extern crate backtrace; +extern crate byteorder; +extern crate rustc_apfloat; #[macro_use] extern crate smallvec; @@ -125,22 +130,22 @@ pub mod lint; pub mod middle { pub mod allocator; pub mod borrowck; - pub mod expr_use_visitor; pub mod cstore; pub mod 
dead; pub mod dependency_format; pub mod entry; pub mod exported_symbols; + pub mod expr_use_visitor; pub mod free_region; pub mod intrinsicck; - pub mod lib_features; pub mod lang_items; + pub mod lib_features; pub mod liveness; pub mod mem_categorization; pub mod privacy; pub mod reachable; - pub mod region; pub mod recursion_limit; + pub mod region; pub mod resolve_lifetime; pub mod stability; pub mod weak_lang_items; @@ -152,13 +157,13 @@ pub mod traits; pub mod ty; pub mod util { + pub mod bug; pub mod captures; pub mod common; - pub mod ppaux; pub mod nodemap; - pub mod time_graph; + pub mod ppaux; pub mod profiling; - pub mod bug; + pub mod time_graph; } // A private module so that macro-expanded idents like @@ -182,6 +187,5 @@ fn noop() { rustc_data_structures::__noop_fix_for_27438(); } - // Build the diagnostics array at the end so that the metadata includes error use sites. __build_diagnostic_array! { librustc, DIAGNOSTICS } diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 22854382df15c..e0d59a8b8038c 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -5,7 +5,7 @@ //! lints are all available in `rustc_lint::builtin`. use errors::{Applicability, DiagnosticBuilder}; -use lint::{LintPass, LateLintPass, LintArray}; +use lint::{LateLintPass, LintArray, LintPass}; use session::Session; use syntax::ast; use syntax::source_map::Span; @@ -162,10 +162,10 @@ declare_lint! { } declare_lint! { - pub LEGACY_DIRECTORY_OWNERSHIP, - Deny, - "non-inline, non-`#[path]` modules (e.g., `mod foo;`) were erroneously allowed in some files \ - not named `mod.rs`" +pub LEGACY_DIRECTORY_OWNERSHIP, +Deny, +"non-inline, non-`#[path]` modules (e.g., `mod foo;`) were erroneously allowed in some files \ + not named `mod.rs`" } declare_lint! { @@ -266,10 +266,10 @@ declare_lint! { } declare_lint! { - pub ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE, - Allow, - "fully qualified paths that start with a module name \ - instead of `crate`, `self`, or an extern crate name" +pub ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE, +Allow, +"fully qualified paths that start with a module name \ + instead of `crate`, `self`, or an extern crate name" } declare_lint! { @@ -333,17 +333,17 @@ declare_lint! { } declare_lint! { - pub MACRO_USE_EXTERN_CRATE, - Allow, - "the `#[macro_use]` attribute is now deprecated in favor of using macros \ - via the module system" +pub MACRO_USE_EXTERN_CRATE, +Allow, +"the `#[macro_use]` attribute is now deprecated in favor of using macros \ + via the module system" } declare_lint! { - pub MACRO_EXPANDED_MACRO_EXPORTS_ACCESSED_BY_ABSOLUTE_PATHS, - Deny, - "macro-expanded `macro_export` macros from the current crate \ - cannot be referred to by absolute paths" +pub MACRO_EXPANDED_MACRO_EXPORTS_ACCESSED_BY_ABSOLUTE_PATHS, +Deny, +"macro-expanded `macro_export` macros from the current crate \ + cannot be referred to by absolute paths" } declare_lint! 
{ @@ -456,10 +456,11 @@ impl BuiltinLintDiagnostics { BuiltinLintDiagnostics::Normal => (), BuiltinLintDiagnostics::BareTraitObject(span, is_global) => { let (sugg, app) = match sess.source_map().span_to_snippet(span) { - Ok(ref s) if is_global => (format!("dyn ({})", s), - Applicability::MachineApplicable), + Ok(ref s) if is_global => { + (format!("dyn ({})", s), Applicability::MachineApplicable) + } Ok(s) => (format!("dyn {}", s), Applicability::MachineApplicable), - Err(_) => ("dyn ".to_string(), Applicability::HasPlaceholders) + Err(_) => ("dyn ".to_string(), Applicability::HasPlaceholders), }; db.span_suggestion_with_applicability(span, "use `dyn`", sugg, app); } @@ -474,9 +475,12 @@ impl BuiltinLintDiagnostics { "::" }; - (format!("crate{}{}", opt_colon, s), Applicability::MachineApplicable) + ( + format!("crate{}{}", opt_colon, s), + Applicability::MachineApplicable, + ) } - Err(_) => ("crate::".to_string(), Applicability::HasPlaceholders) + Err(_) => ("crate::".to_string(), Applicability::HasPlaceholders), }; db.span_suggestion_with_applicability(span, "use `crate`", sugg, app); } @@ -485,14 +489,21 @@ impl BuiltinLintDiagnostics { db.span_note(earlier_span, "previous macro export is now shadowed"); } BuiltinLintDiagnostics::ProcMacroDeriveResolutionFallback(span) => { - db.span_label(span, "names from parent modules are not \ - accessible without an explicit import"); + db.span_label( + span, + "names from parent modules are not \ + accessible without an explicit import", + ); } BuiltinLintDiagnostics::MacroExpandedMacroExportsAccessedByAbsolutePaths(span_def) => { db.span_note(span_def, "the macro is defined here"); } BuiltinLintDiagnostics::ElidedLifetimesInPaths( - n, path_span, incl_angl_brckt, insertion_span, anon_lts + n, + path_span, + incl_angl_brckt, + insertion_span, + anon_lts, ) => { let (replace_span, suggestion) = if incl_angl_brckt { (insertion_span, anon_lts) @@ -521,9 +532,12 @@ impl BuiltinLintDiagnostics { }; db.span_suggestion_with_applicability( replace_span, - &format!("indicate the anonymous lifetime{}", if n >= 2 { "s" } else { "" }), + &format!( + "indicate the anonymous lifetime{}", + if n >= 2 { "s" } else { "" } + ), suggestion, - Applicability::MachineApplicable + Applicability::MachineApplicable, ); } BuiltinLintDiagnostics::UnknownCrateTypes(span, note, sugg) => { @@ -531,7 +545,7 @@ impl BuiltinLintDiagnostics { span, ¬e, sugg, - Applicability::MaybeIncorrect + Applicability::MaybeIncorrect, ); } } diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index f5a7919ef09c8..3d653e60ad85a 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -16,29 +16,32 @@ use self::TargetLint::*; -use std::slice; -use rustc_data_structures::sync::ReadGuard; -use lint::{EarlyLintPassObject, LateLintPassObject}; -use lint::{Level, Lint, LintId, LintPass, LintBuffer}; use lint::builtin::BuiltinLintDiagnostics; use lint::levels::{LintLevelSets, LintLevelsBuilder}; +use lint::{EarlyLintPassObject, LateLintPassObject}; +use lint::{Level, Lint, LintBuffer, LintId, LintPass}; use middle::privacy::AccessLevels; -use rustc_serialize::{Decoder, Decodable, Encoder, Encodable}; +use rustc_data_structures::sync::ReadGuard; +use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use session::{config, early_error, Session}; -use ty::{self, TyCtxt, Ty}; +use std::slice; use ty::layout::{LayoutError, LayoutOf, TyLayout}; +use ty::{self, Ty, TyCtxt}; use util::nodemap::FxHashMap; -use std::default::Default as StdDefault; -use 
syntax::ast; -use syntax::edition; -use syntax_pos::{MultiSpan, Span, symbol::{LocalInternedString, Symbol}}; use errors::DiagnosticBuilder; use hir; use hir::def_id::LOCAL_CRATE; use hir::intravisit as hir_visit; +use std::default::Default as StdDefault; +use syntax::ast; +use syntax::edition; use syntax::util::lev_distance::find_best_match_for_name; use syntax::visit as ast_visit; +use syntax_pos::{ + symbol::{LocalInternedString, Symbol}, + MultiSpan, Span, +}; /// Information about the registered lints. /// @@ -74,7 +77,6 @@ pub struct LintSession<'a, PassObject> { passes: Option>, } - /// Lints that are buffered up early on in the `Session` before the /// `LintLevels` is calculated #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)] @@ -161,21 +163,31 @@ impl LintStore { } pub fn get_lint_groups<'t>(&'t self) -> Vec<(&'static str, Vec, bool)> { - self.lint_groups.iter() + self.lint_groups + .iter() .filter(|(_, LintGroup { depr, .. })| { // Don't display deprecated lint groups. depr.is_none() }) - .map(|(k, LintGroup { lint_ids, from_plugin, .. })| { - (*k, lint_ids.clone(), *from_plugin) - }) + .map( + |( + k, + LintGroup { + lint_ids, + from_plugin, + .. + }, + )| { (*k, lint_ids.clone(), *from_plugin) }, + ) .collect() } - pub fn register_early_pass(&mut self, - sess: Option<&Session>, - from_plugin: bool, - pass: EarlyLintPassObject) { + pub fn register_early_pass( + &mut self, + sess: Option<&Session>, + from_plugin: bool, + pass: EarlyLintPassObject, + ) { self.push_pass(sess, from_plugin, &pass); self.early_passes.as_mut().unwrap().push(pass); } @@ -189,19 +201,23 @@ impl LintStore { self.pre_expansion_passes.as_mut().unwrap().push(pass); } - pub fn register_late_pass(&mut self, - sess: Option<&Session>, - from_plugin: bool, - pass: LateLintPassObject) { + pub fn register_late_pass( + &mut self, + sess: Option<&Session>, + from_plugin: bool, + pass: LateLintPassObject, + ) { self.push_pass(sess, from_plugin, &pass); self.late_passes.as_mut().unwrap().push(pass); } // Helper method for register_early/late_pass - fn push_pass(&mut self, - sess: Option<&Session>, - from_plugin: bool, - pass: &Box
) { + fn push_pass( + sess: Option<&Session>, + from_plugin: bool, + pass: &Box
, + ) { for lint in pass.get_lints() { self.lints.push((lint, from_plugin)); @@ -215,19 +231,23 @@ impl LintStore { (Some(_), false) => bug!("{}", msg), // A duplicate name from a plugin is a user error. - (Some(sess), true) => sess.err(&msg[..]), + (Some(sess), true) => sess.err(&msg[..]), } } } } - pub fn register_future_incompatible(&mut self, - sess: Option<&Session>, - lints: Vec) { - + pub fn register_future_incompatible( + &mut self, + sess: Option<&Session>, + lints: Vec, + ) { for edition in edition::ALL_EDITIONS { - let lints = lints.iter().filter(|f| f.edition == Some(*edition)).map(|f| f.id) - .collect::>(); + let lints = lints + .iter() + .filter(|f| f.edition == Some(*edition)) + .map(|f| f.id) + .collect::>(); if !lints.is_empty() { self.register_group(sess, false, edition.lint_name(), None, lints) } @@ -252,16 +272,18 @@ impl LintStore { self.future_incompatible.get(&id) } - pub fn register_group_alias( - &mut self, - lint_name: &'static str, - alias: &'static str, - ) { - self.lint_groups.insert(alias, LintGroup { - lint_ids: vec![], - from_plugin: false, - depr: Some(LintAlias { name: lint_name, silent: true }), - }); + pub fn register_group_alias(&mut self, lint_name: &'static str, alias: &'static str) { + self.lint_groups.insert( + alias, + LintGroup { + lint_ids: vec![], + from_plugin: false, + depr: Some(LintAlias { + name: lint_name, + silent: true, + }), + }, + ); } pub fn register_group( @@ -274,18 +296,27 @@ impl LintStore { ) { let new = self .lint_groups - .insert(name, LintGroup { - lint_ids: to, - from_plugin, - depr: None, - }) + .insert( + name, + LintGroup { + lint_ids: to, + from_plugin, + depr: None, + }, + ) .is_none(); if let Some(deprecated) = deprecated_name { - self.lint_groups.insert(deprecated, LintGroup { - lint_ids: vec![], - from_plugin, - depr: Some(LintAlias { name, silent: false }), - }); + self.lint_groups.insert( + deprecated, + LintGroup { + lint_ids: vec![], + from_plugin, + depr: Some(LintAlias { + name, + silent: false, + }), + }, + ); } if !new { @@ -297,7 +328,7 @@ impl LintStore { (Some(_), false) => bug!("{}", msg), // A duplicate name from a plugin is a user error. - (Some(sess), true) => sess.err(&msg[..]), + (Some(sess), true) => sess.err(&msg[..]), } } } @@ -305,9 +336,10 @@ impl LintStore { pub fn register_renamed(&mut self, old_name: &str, new_name: &str) { let target = match self.by_name.get(new_name) { Some(&Id(lint_id)) => lint_id.clone(), - _ => bug!("invalid lint renaming of {} to {}", old_name, new_name) + _ => bug!("invalid lint renaming of {} to {}", old_name, new_name), }; - self.by_name.insert(old_name.to_string(), Renamed(new_name.to_string(), target)); + self.by_name + .insert(old_name.to_string(), Renamed(new_name.to_string(), target)); } pub fn register_removed(&mut self, name: &str, reason: &str) { @@ -317,39 +349,28 @@ impl LintStore { pub fn find_lints(&self, mut lint_name: &str) -> Result, FindLintError> { match self.by_name.get(lint_name) { Some(&Id(lint_id)) => Ok(vec![lint_id]), - Some(&Renamed(_, lint_id)) => { - Ok(vec![lint_id]) - }, - Some(&Removed(_)) => { - Err(FindLintError::Removed) - }, - None => { - loop { - return match self.lint_groups.get(lint_name) { - Some(LintGroup {lint_ids, depr, .. }) => { - if let Some(LintAlias { name, .. 
}) = depr { - lint_name = name; - continue; - } - Ok(lint_ids.clone()) + Some(&Renamed(_, lint_id)) => Ok(vec![lint_id]), + Some(&Removed(_)) => Err(FindLintError::Removed), + None => loop { + return match self.lint_groups.get(lint_name) { + Some(LintGroup { lint_ids, depr, .. }) => { + if let Some(LintAlias { name, .. }) = depr { + lint_name = name; + continue; } - None => Err(FindLintError::Removed) - }; - } - } + Ok(lint_ids.clone()) + } + None => Err(FindLintError::Removed), + }; + }, } } /// Checks the validity of lint names derived from the command line - pub fn check_lint_name_cmdline(&self, - sess: &Session, - lint_name: &str, - level: Level) { + pub fn check_lint_name_cmdline(&self, sess: &Session, lint_name: &str, level: Level) { let db = match self.check_lint_name(lint_name, None) { CheckLintNameResult::Ok(_) => None, - CheckLintNameResult::Warning(ref msg, _) => { - Some(sess.struct_warn(msg)) - }, + CheckLintNameResult::Warning(ref msg, _) => Some(sess.struct_warn(msg)), CheckLintNameResult::NoLint(suggestion) => { let mut err = struct_err!(sess, E0602, "unknown lint: `{}`", lint_name); @@ -371,14 +392,16 @@ impl LintStore { }; if let Some(mut db) = db { - let msg = format!("requested on the command line with `{} {}`", - match level { - Level::Allow => "-A", - Level::Warn => "-W", - Level::Deny => "-D", - Level::Forbid => "-F", - }, - lint_name); + let msg = format!( + "requested on the command line with `{} {}`", + match level { + Level::Allow => "-A", + Level::Warn => "-W", + Level::Deny => "-D", + Level::Forbid => "-F", + }, + lint_name + ); db.note(&msg); db.emit(); } @@ -439,10 +462,7 @@ impl LintStore { return if *silent { CheckLintNameResult::Ok(&lint_ids) } else { - CheckLintNameResult::Tool(Err(( - Some(&lint_ids), - name.to_string(), - ))) + CheckLintNameResult::Tool(Err((Some(&lint_ids), name.to_string()))) }; } CheckLintNameResult::Ok(&lint_ids) @@ -462,7 +482,9 @@ impl LintStore { None => match self.lint_groups.get(&*complete_name) { // Now we are sure, that this lint exists nowhere None => { - let symbols = self.by_name.keys() + let symbols = self + .by_name + .keys() .map(|name| Symbol::intern(&name)) .collect::>(); @@ -478,10 +500,7 @@ impl LintStore { return if *silent { CheckLintNameResult::Tool(Err((Some(&lint_ids), complete_name))) } else { - CheckLintNameResult::Tool(Err(( - Some(&lint_ids), - name.to_string(), - ))) + CheckLintNameResult::Tool(Err((Some(&lint_ids), name.to_string()))) }; } CheckLintNameResult::Tool(Err((Some(&lint_ids), complete_name))) @@ -552,7 +571,6 @@ impl LintPassObject for EarlyLintPassObject {} impl LintPassObject for LateLintPassObject {} - pub trait LintContext<'tcx>: Sized { type PassObject: LintPassObject; @@ -563,45 +581,52 @@ pub trait LintContext<'tcx>: Sized { fn enter_attrs(&mut self, attrs: &'tcx [ast::Attribute]); fn exit_attrs(&mut self, attrs: &'tcx [ast::Attribute]); - fn lookup_and_emit>(&self, - lint: &'static Lint, - span: Option, - msg: &str) { + fn lookup_and_emit>(&self, lint: &'static Lint, span: Option, msg: &str) { self.lookup(lint, span, msg).emit(); } - fn lookup_and_emit_with_diagnostics>(&self, - lint: &'static Lint, - span: Option, - msg: &str, - diagnostic: BuiltinLintDiagnostics) { + fn lookup_and_emit_with_diagnostics>( + &self, + lint: &'static Lint, + span: Option, + msg: &str, + diagnostic: BuiltinLintDiagnostics, + ) { let mut db = self.lookup(lint, span, msg); diagnostic.run(self.sess(), &mut db); db.emit(); } - fn lookup>(&self, - lint: &'static Lint, - span: Option, - msg: &str) - -> 
DiagnosticBuilder<'_>; + fn lookup>( + &self, + lint: &'static Lint, + span: Option, + msg: &str, + ) -> DiagnosticBuilder<'_>; /// Emit a lint at the appropriate level, for a particular span. fn span_lint>(&self, lint: &'static Lint, span: S, msg: &str) { self.lookup_and_emit(lint, Some(span), msg); } - fn struct_span_lint>(&self, - lint: &'static Lint, - span: S, - msg: &str) - -> DiagnosticBuilder<'_> { + fn struct_span_lint>( + &self, + lint: &'static Lint, + span: S, + msg: &str, + ) -> DiagnosticBuilder<'_> { self.lookup(lint, Some(span), msg) } /// Emit a lint and note at the appropriate level, for a particular span. - fn span_lint_note(&self, lint: &'static Lint, span: Span, msg: &str, - note_span: Span, note: &str) { + fn span_lint_note( + &self, + lint: &'static Lint, + span: Span, + msg: &str, + note_span: Span, + note: &str, + ) { let mut err = self.lookup(lint, Some(span), msg); if note_span == span { err.note(note); @@ -612,8 +637,7 @@ pub trait LintContext<'tcx>: Sized { } /// Emit a lint and help at the appropriate level, for a particular span. - fn span_lint_help(&self, lint: &'static Lint, span: Span, - msg: &str, help: &str) { + fn span_lint_help(&self, lint: &'static Lint, span: Span, msg: &str, help: &str) { let mut err = self.lookup(lint, Some(span), msg); self.span_lint(lint, span, msg); err.span_help(span, help); @@ -628,14 +652,11 @@ pub trait LintContext<'tcx>: Sized { /// Merge the lints specified by any lint attributes into the /// current lint context, call the provided function, then reset the /// lints in effect to their previous state. - fn with_lint_attrs(&mut self, - id: ast::NodeId, - attrs: &'tcx [ast::Attribute], - f: F) - where F: FnOnce(&mut Self); + fn with_lint_attrs(&mut self, id: ast::NodeId, attrs: &'tcx [ast::Attribute], f: F) + where + F: FnOnce(&mut Self); } - impl<'a> EarlyContext<'a> { fn new( sess: &'a Session, @@ -657,10 +678,12 @@ impl<'a> EarlyContext<'a> { fn check_id(&mut self, id: ast::NodeId) { for early_lint in self.buffered.take(id) { - self.lookup_and_emit_with_diagnostics(early_lint.lint_id.lint, - Some(early_lint.span.clone()), - &early_lint.msg, - early_lint.diagnostic); + self.lookup_and_emit_with_diagnostics( + early_lint.lint_id.lint, + Some(early_lint.span.clone()), + &early_lint.msg, + early_lint.diagnostic, + ); } } } @@ -695,11 +718,12 @@ impl<'a, 'tcx> LintContext<'tcx> for LateContext<'a, 'tcx> { run_lints!(self, exit_lint_attrs, attrs); } - fn lookup>(&self, - lint: &'static Lint, - span: Option, - msg: &str) - -> DiagnosticBuilder<'_> { + fn lookup>( + &self, + lint: &'static Lint, + span: Option, + msg: &str, + ) -> DiagnosticBuilder<'_> { let id = self.last_ast_node_with_lint_attrs; match span { Some(s) => self.tcx.struct_span_lint_node(lint, id, s, msg), @@ -707,11 +731,9 @@ impl<'a, 'tcx> LintContext<'tcx> for LateContext<'a, 'tcx> { } } - fn with_lint_attrs(&mut self, - id: ast::NodeId, - attrs: &'tcx [ast::Attribute], - f: F) - where F: FnOnce(&mut Self) + fn with_lint_attrs(&mut self, id: ast::NodeId, attrs: &'tcx [ast::Attribute], f: F) + where + F: FnOnce(&mut Self), { let prev = self.last_ast_node_with_lint_attrs; self.last_ast_node_with_lint_attrs = id; @@ -752,19 +774,18 @@ impl<'a> LintContext<'a> for EarlyContext<'a> { run_lints!(self, exit_lint_attrs, attrs); } - fn lookup>(&self, - lint: &'static Lint, - span: Option, - msg: &str) - -> DiagnosticBuilder<'_> { + fn lookup>( + &self, + lint: &'static Lint, + span: Option, + msg: &str, + ) -> DiagnosticBuilder<'_> { self.builder.struct_lint(lint, 
span.map(|s| s.into()), msg) } - fn with_lint_attrs(&mut self, - id: ast::NodeId, - attrs: &'a [ast::Attribute], - f: F) - where F: FnOnce(&mut Self) + fn with_lint_attrs(&mut self, id: ast::NodeId, attrs: &'a [ast::Attribute], f: F) + where + F: FnOnce(&mut Self), { let push = self.builder.push(attrs); self.check_id(id); @@ -777,7 +798,8 @@ impl<'a> LintContext<'a> for EarlyContext<'a> { impl<'a, 'tcx> LateContext<'a, 'tcx> { fn with_param_env(&mut self, id: ast::NodeId, f: F) - where F: FnOnce(&mut Self), + where + F: FnOnce(&mut Self), { let old_param_env = self.param_env; self.param_env = self.tcx.param_env(self.tcx.hir().local_def_id(id)); @@ -866,8 +888,14 @@ impl<'a, 'tcx> hir_visit::Visitor<'tcx> for LateContext<'a, 'tcx> { hir_visit::walk_stmt(self, s); } - fn visit_fn(&mut self, fk: hir_visit::FnKind<'tcx>, decl: &'tcx hir::FnDecl, - body_id: hir::BodyId, span: Span, id: ast::NodeId) { + fn visit_fn( + &mut self, + fk: hir_visit::FnKind<'tcx>, + decl: &'tcx hir::FnDecl, + body_id: hir::BodyId, + span: Span, + id: ast::NodeId, + ) { // Wrap in tables here, not just in visit_nested_body, // in order for `check_fn` to be able to use them. let old_tables = self.tables; @@ -879,12 +907,14 @@ impl<'a, 'tcx> hir_visit::Visitor<'tcx> for LateContext<'a, 'tcx> { self.tables = old_tables; } - fn visit_variant_data(&mut self, - s: &'tcx hir::VariantData, - name: ast::Name, - g: &'tcx hir::Generics, - item_id: ast::NodeId, - _: Span) { + fn visit_variant_data( + &mut self, + s: &'tcx hir::VariantData, + name: ast::Name, + g: &'tcx hir::Generics, + item_id: ast::NodeId, + _: Span, + ) { run_lints!(self, check_struct_def, s, name, g, item_id); hir_visit::walk_struct_def(self, s); run_lints!(self, check_struct_def_post, s, name, g, item_id); @@ -897,10 +927,12 @@ impl<'a, 'tcx> hir_visit::Visitor<'tcx> for LateContext<'a, 'tcx> { }) } - fn visit_variant(&mut self, - v: &'tcx hir::Variant, - g: &'tcx hir::Generics, - item_id: ast::NodeId) { + fn visit_variant( + &mut self, + v: &'tcx hir::Variant, + g: &'tcx hir::Generics, + item_id: ast::NodeId, + ) { self.with_lint_attrs(v.node.data.id(), &v.node.attrs, |cx| { run_lints!(cx, check_variant, v, g); hir_visit::walk_variant(cx, v, g, item_id); @@ -961,8 +993,7 @@ impl<'a, 'tcx> hir_visit::Visitor<'tcx> for LateContext<'a, 'tcx> { hir_visit::walk_where_predicate(self, p); } - fn visit_poly_trait_ref(&mut self, t: &'tcx hir::PolyTraitRef, - m: hir::TraitBoundModifier) { + fn visit_poly_trait_ref(&mut self, t: &'tcx hir::PolyTraitRef, m: hir::TraitBoundModifier) { run_lints!(self, check_poly_trait_ref, t, m); hir_visit::walk_poly_trait_ref(self, t, m); } @@ -1047,20 +1078,27 @@ impl<'a> ast_visit::Visitor<'a> for EarlyContext<'a> { ast_visit::walk_stmt(self, s); } - fn visit_fn(&mut self, fk: ast_visit::FnKind<'a>, decl: &'a ast::FnDecl, - span: Span, id: ast::NodeId) { + fn visit_fn( + &mut self, + fk: ast_visit::FnKind<'a>, + decl: &'a ast::FnDecl, + span: Span, + id: ast::NodeId, + ) { run_lints!(self, check_fn, fk, decl, span, id); self.check_id(id); ast_visit::walk_fn(self, fk, decl, span); run_lints!(self, check_fn_post, fk, decl, span, id); } - fn visit_variant_data(&mut self, - s: &'a ast::VariantData, - ident: ast::Ident, - g: &'a ast::Generics, - item_id: ast::NodeId, - _: Span) { + fn visit_variant_data( + &mut self, + s: &'a ast::VariantData, + ident: ast::Ident, + g: &'a ast::Generics, + item_id: ast::NodeId, + _: Span, + ) { run_lints!(self, check_struct_def, s, ident, g, item_id); self.check_id(s.id()); ast_visit::walk_struct_def(self, 
s); @@ -1190,7 +1228,6 @@ impl<'a> ast_visit::Visitor<'a> for EarlyContext<'a> { } } - /// Perform lint checking on a crate. /// /// Consumes the `lint_store` field of the `Session`. @@ -1231,11 +1268,7 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { tcx.sess.lint_store.borrow_mut().late_passes = passes; } -pub fn check_ast_crate( - sess: &Session, - krate: &ast::Crate, - pre_expansion: bool, -) { +pub fn check_ast_crate(sess: &Session, krate: &ast::Crate, pre_expansion: bool) { let (passes, buffered) = if pre_expansion { ( sess.lint_store.borrow_mut().pre_expansion_passes.take(), @@ -1298,16 +1331,14 @@ impl Decodable for LintId { #[inline] fn decode(d: &mut D) -> Result { let s = d.read_str()?; - ty::tls::with(|tcx| { - match tcx.sess.lint_store.borrow().find_lints(&s) { - Ok(ids) => { - if ids.len() != 0 { - panic!("invalid lint-id `{}`", s); - } - Ok(ids[0]) + ty::tls::with(|tcx| match tcx.sess.lint_store.borrow().find_lints(&s) { + Ok(ids) => { + if ids.len() != 0 { + panic!("invalid lint-id `{}`", s); } - Err(_) => panic!("invalid lint-id `{}`", s), + Ok(ids[0]) } + Err(_) => panic!("invalid lint-id `{}`", s), }) } } diff --git a/src/librustc/lint/levels.rs b/src/librustc/lint/levels.rs index 1ae12fec50661..2100b3993fe2b 100644 --- a/src/librustc/lint/levels.rs +++ b/src/librustc/lint/levels.rs @@ -5,9 +5,10 @@ use hir::HirId; use ich::StableHashingContext; use lint::builtin; use lint::context::CheckLintNameResult; -use lint::{self, Lint, LintId, Level, LintSource}; -use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, - StableHasher, StableHasherResult}; +use lint::{self, Level, Lint, LintId, LintSource}; +use rustc_data_structures::stable_hasher::{ + HashStable, StableHasher, StableHasherResult, ToStableHashKey, +}; use session::Session; use syntax::ast; use syntax::attr; @@ -41,7 +42,7 @@ impl LintLevelSets { lint_cap: Level::Forbid, }; me.process_command_line(sess); - return me + return me; } pub fn builder(sess: &Session) -> LintLevelsBuilder<'_> { @@ -72,18 +73,16 @@ impl LintLevelSets { } } - self.list.push(LintSet::CommandLine { - specs: specs, - }); + self.list.push(LintSet::CommandLine { specs: specs }); } - fn get_lint_level(&self, - lint: &'static Lint, - idx: u32, - aux: Option<&FxHashMap>, - sess: &Session) - -> (Level, LintSource) - { + fn get_lint_level( + &self, + lint: &'static Lint, + idx: u32, + aux: Option<&FxHashMap>, + sess: &Session, + ) -> (Level, LintSource) { let (level, mut src) = self.get_lint_id_level(LintId::of(lint), idx, aux); // If `level` is none then we actually assume the default level for this @@ -95,9 +94,7 @@ impl LintLevelSets { // `allow(warnings)` in scope then we want to respect that instead. 
if level == Level::Warn { let (warnings_level, warnings_src) = - self.get_lint_id_level(LintId::of(lint::builtin::WARNINGS), - idx, - aux); + self.get_lint_id_level(LintId::of(lint::builtin::WARNINGS), idx, aux); if let Some(configured_warning_level) = warnings_level { if configured_warning_level != Level::Warn { level = configured_warning_level; @@ -114,31 +111,31 @@ impl LintLevelSets { level = cmp::min(*driver_level, level); } - return (level, src) + return (level, src); } - fn get_lint_id_level(&self, - id: LintId, - mut idx: u32, - aux: Option<&FxHashMap>) - -> (Option, LintSource) - { + fn get_lint_id_level( + &self, + id: LintId, + mut idx: u32, + aux: Option<&FxHashMap>, + ) -> (Option, LintSource) { if let Some(specs) = aux { if let Some(&(level, src)) = specs.get(&id) { - return (Some(level), src) + return (Some(level), src); } } loop { match self.list[idx as usize] { LintSet::CommandLine { ref specs } => { if let Some(&(level, src)) = specs.get(&id) { - return (Some(level), src) + return (Some(level), src); } - return (None, LintSource::Default) + return (None, LintSource::Default); } LintSet::Node { ref specs, parent } => { if let Some(&(level, src)) = specs.get(&id) { - return (Some(level), src) + return (Some(level), src); } idx = parent; } @@ -189,9 +186,7 @@ impl<'a> LintLevelsBuilder<'a> { let mut specs = FxHashMap::default(); let store = self.sess.lint_store.borrow(); let sess = self.sess; - let bad_attr = |span| { - struct_span_err!(sess, span, E0452, "malformed lint attribute") - }; + let bad_attr = |span| struct_span_err!(sess, span, E0452, "malformed lint attribute"); for attr in attrs { let level = match Level::from_str(&attr.name().as_str()) { None => continue, @@ -217,14 +212,14 @@ impl<'a> LintLevelsBuilder<'a> { // Before processing the lint names, look for a reason (RFC 2383) // at the end. 
let mut reason = None; - let tail_li = &metas[metas.len()-1]; + let tail_li = &metas[metas.len() - 1]; if let Some(item) = tail_li.meta_item() { match item.node { - ast::MetaItemKind::Word => {} // actual lint names handled later + ast::MetaItemKind::Word => {} // actual lint names handled later ast::MetaItemKind::NameValue(ref name_value) => { if item.ident == "reason" { // found reason, reslice meta list to exclude it - metas = &metas[0..metas.len()-1]; + metas = &metas[0..metas.len() - 1]; // FIXME (#55112): issue unused-attributes lint if we thereby // don't have any lint names (`#[level(reason = "foo")]`) if let ast::LitKind::Str(rationale, _) = name_value.node { @@ -234,7 +229,7 @@ impl<'a> LintLevelsBuilder<'a> { "lint_reasons", item.span, feature_gate::GateIssue::Language, - "lint reasons are experimental" + "lint reasons are experimental", ); } reason = Some(rationale); @@ -247,7 +242,7 @@ impl<'a> LintLevelsBuilder<'a> { let mut err = bad_attr(item.span); err.emit(); } - }, + } ast::MetaItemKind::List(_) => { let mut err = bad_attr(item.span); err.emit(); @@ -301,7 +296,9 @@ impl<'a> LintLevelsBuilder<'a> { Ok(ids) => { let complete_name = &format!("{}::{}", tool_name.unwrap(), name); let src = LintSource::Node( - Symbol::intern(complete_name), li.span, reason + Symbol::intern(complete_name), + li.span, + reason, ); for id in ids { specs.insert(*id, (level, src)); @@ -331,10 +328,13 @@ impl<'a> LintLevelsBuilder<'a> { "change it to", new_lint_name.to_string(), Applicability::MachineApplicable, - ).emit(); + ) + .emit(); let src = LintSource::Node( - Symbol::intern(&new_lint_name), li.span, reason + Symbol::intern(&new_lint_name), + li.span, + reason, ); for id in ids { specs.insert(*id, (level, src)); @@ -353,39 +353,41 @@ impl<'a> LintLevelsBuilder<'a> { CheckLintNameResult::Warning(msg, renamed) => { let lint = builtin::RENAMED_AND_REMOVED_LINTS; - let (level, src) = self.sets.get_lint_level(lint, - self.cur, - Some(&specs), - &sess); - let mut err = lint::struct_lint_level(self.sess, - lint, - level, - src, - Some(li.span.into()), - &msg); + let (level, src) = + self.sets + .get_lint_level(lint, self.cur, Some(&specs), &sess); + let mut err = lint::struct_lint_level( + self.sess, + lint, + level, + src, + Some(li.span.into()), + &msg, + ); if let Some(new_name) = renamed { err.span_suggestion_with_applicability( li.span, "use the new name", new_name, - Applicability::MachineApplicable + Applicability::MachineApplicable, ); } err.emit(); } CheckLintNameResult::NoLint(suggestion) => { let lint = builtin::UNKNOWN_LINTS; - let (level, src) = self.sets.get_lint_level(lint, - self.cur, - Some(&specs), - self.sess); + let (level, src) = + self.sets + .get_lint_level(lint, self.cur, Some(&specs), self.sess); let msg = format!("unknown lint: `{}`", name); - let mut db = lint::struct_lint_level(self.sess, - lint, - level, - src, - Some(li.span.into()), - &msg); + let mut db = lint::struct_lint_level( + self.sess, + lint, + level, + src, + Some(li.span.into()), + &msg, + ); if let Some(suggestion) = suggestion { db.span_suggestion_with_applicability( @@ -404,7 +406,7 @@ impl<'a> LintLevelsBuilder<'a> { for (id, &(level, ref src)) in specs.iter() { if level == Level::Forbid { - continue + continue; } let forbid_src = match self.sets.get_lint_id_level(*id, self.cur, None) { (Some(Level::Forbid), src) => src, @@ -419,30 +421,31 @@ impl<'a> LintLevelsBuilder<'a> { LintSource::Node(name, span, _) => (name, span), _ => continue, }; - let mut diag_builder = struct_span_err!(self.sess, - 
lint_attr_span, - E0453, - "{}({}) overruled by outer forbid({})", - level.as_str(), - lint_attr_name, - forbidden_lint_name); + let mut diag_builder = struct_span_err!( + self.sess, + lint_attr_span, + E0453, + "{}({}) overruled by outer forbid({})", + level.as_str(), + lint_attr_name, + forbidden_lint_name + ); diag_builder.span_label(lint_attr_span, "overruled by previous forbid"); match forbid_src { - LintSource::Default => {}, + LintSource::Default => {} LintSource::Node(_, forbid_source_span, reason) => { - diag_builder.span_label(forbid_source_span, - "`forbid` level set here"); + diag_builder.span_label(forbid_source_span, "`forbid` level set here"); if let Some(rationale) = reason { diag_builder.note(&rationale.as_str()); } - }, + } LintSource::CommandLine(_) => { diag_builder.note("`forbid` lint level was set on command line"); } } diag_builder.emit(); // don't set a separate error for every lint in the group - break + break; } let prev = self.cur; @@ -454,9 +457,7 @@ impl<'a> LintLevelsBuilder<'a> { }); } - BuilderPush { - prev: prev, - } + BuilderPush { prev: prev } } /// Called after `push` when the scope of a set of attributes are exited. @@ -466,12 +467,12 @@ impl<'a> LintLevelsBuilder<'a> { /// Used to emit a lint-related diagnostic based on the current state of /// this lint context. - pub fn struct_lint(&self, - lint: &'static Lint, - span: Option, - msg: &str) - -> DiagnosticBuilder<'a> - { + pub fn struct_lint( + &self, + lint: &'static Lint, + span: Option, + msg: &str, + ) -> DiagnosticBuilder<'a> { let (level, src) = self.sets.get_lint_level(lint, self.cur, None, self.sess); lint::struct_lint_level(self.sess, lint, level, src, span, msg) } @@ -507,12 +508,15 @@ impl LintLevelMap { /// If the `id` was not previously registered, returns `None`. If `None` is /// returned then the parent of `id` should be acquired and this function /// should be called again. - pub fn level_and_source(&self, lint: &'static Lint, id: HirId, session: &Session) - -> Option<(Level, LintSource)> - { - self.id_to_set.get(&id).map(|idx| { - self.sets.get_lint_level(lint, *idx, None, session) - }) + pub fn level_and_source( + &self, + lint: &'static Lint, + id: HirId, + session: &Session, + ) -> Option<(Level, LintSource)> { + self.id_to_set + .get(&id) + .map(|idx| self.sets.get_lint_level(lint, *idx, None, session)) } /// Returns if this `id` has lint level information. 
@@ -523,9 +527,11 @@ impl LintLevelMap { impl<'a> HashStable> for LintLevelMap { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let LintLevelMap { ref sets, ref id_to_set, @@ -533,10 +539,7 @@ impl<'a> HashStable> for LintLevelMap { id_to_set.hash_stable(hcx, hasher); - let LintLevelSets { - ref list, - lint_cap, - } = *sets; + let LintLevelSets { ref list, lint_cap } = *sets; lint_cap.hash_stable(hcx, hasher); @@ -564,9 +567,7 @@ impl<'a> HashStable> for LintLevelMap { impl HashStable for LintId { #[inline] - fn hash_stable(&self, - hcx: &mut HCX, - hasher: &mut StableHasher) { + fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) { self.lint_name_raw().hash_stable(hcx, hasher); } } diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index a373faab3a204..98cba3de00b9a 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -24,27 +24,28 @@ pub use self::LintSource::*; use rustc_data_structures::sync::{self, Lrc}; use errors::{DiagnosticBuilder, DiagnosticId}; +use hir; use hir::def_id::{CrateNum, LOCAL_CRATE}; use hir::intravisit; -use hir; -use lint::builtin::BuiltinLintDiagnostics; use lint::builtin::parser::QUESTION_MARK_MACRO_SEP; -use session::{Session, DiagnosticMessageId}; +use lint::builtin::BuiltinLintDiagnostics; +use session::{DiagnosticMessageId, Session}; use std::{hash, ptr}; use syntax::ast; -use syntax::source_map::{MultiSpan, ExpnFormat}; use syntax::early_buffered_lints::BufferedEarlyLintId; use syntax::edition::Edition; +use syntax::source_map::{ExpnFormat, MultiSpan}; use syntax::symbol::Symbol; use syntax::visit as ast_visit; use syntax_pos::Span; -use ty::TyCtxt; use ty::query::Providers; +use ty::TyCtxt; use util::nodemap::NodeMap; -pub use lint::context::{LateContext, EarlyContext, LintContext, LintStore, - check_crate, check_ast_crate, CheckLintNameResult, - FutureIncompatibleInfo, BufferedEarlyLint}; +pub use lint::context::{ + check_ast_crate, check_crate, BufferedEarlyLint, CheckLintNameResult, EarlyContext, + FutureIncompatibleInfo, LateContext, LintContext, LintStore, +}; /// Specification of a single lint. #[derive(Copy, Clone, Debug)] @@ -317,64 +318,95 @@ macro_rules! 
declare_combined_late_lint_pass { } pub trait EarlyLintPass: LintPass { - fn check_ident(&mut self, _: &EarlyContext<'_>, _: ast::Ident) { } - fn check_crate(&mut self, _: &EarlyContext<'_>, _: &ast::Crate) { } - fn check_crate_post(&mut self, _: &EarlyContext<'_>, _: &ast::Crate) { } - fn check_mod(&mut self, _: &EarlyContext<'_>, _: &ast::Mod, _: Span, _: ast::NodeId) { } - fn check_mod_post(&mut self, _: &EarlyContext<'_>, _: &ast::Mod, _: Span, _: ast::NodeId) { } - fn check_foreign_item(&mut self, _: &EarlyContext<'_>, _: &ast::ForeignItem) { } - fn check_foreign_item_post(&mut self, _: &EarlyContext<'_>, _: &ast::ForeignItem) { } - fn check_item(&mut self, _: &EarlyContext<'_>, _: &ast::Item) { } - fn check_item_post(&mut self, _: &EarlyContext<'_>, _: &ast::Item) { } - fn check_local(&mut self, _: &EarlyContext<'_>, _: &ast::Local) { } - fn check_block(&mut self, _: &EarlyContext<'_>, _: &ast::Block) { } - fn check_block_post(&mut self, _: &EarlyContext<'_>, _: &ast::Block) { } - fn check_stmt(&mut self, _: &EarlyContext<'_>, _: &ast::Stmt) { } - fn check_arm(&mut self, _: &EarlyContext<'_>, _: &ast::Arm) { } - fn check_pat(&mut self, _: &EarlyContext<'_>, _: &ast::Pat, _: &mut bool) { } - fn check_expr(&mut self, _: &EarlyContext<'_>, _: &ast::Expr) { } - fn check_expr_post(&mut self, _: &EarlyContext<'_>, _: &ast::Expr) { } - fn check_ty(&mut self, _: &EarlyContext<'_>, _: &ast::Ty) { } - fn check_generic_param(&mut self, _: &EarlyContext<'_>, _: &ast::GenericParam) { } - fn check_generics(&mut self, _: &EarlyContext<'_>, _: &ast::Generics) { } - fn check_where_predicate(&mut self, _: &EarlyContext<'_>, _: &ast::WherePredicate) { } - fn check_poly_trait_ref(&mut self, _: &EarlyContext<'_>, _: &ast::PolyTraitRef, - _: &ast::TraitBoundModifier) { } - fn check_fn(&mut self, _: &EarlyContext<'_>, - _: ast_visit::FnKind<'_>, _: &ast::FnDecl, _: Span, _: ast::NodeId) { } - fn check_fn_post(&mut self, _: &EarlyContext<'_>, - _: ast_visit::FnKind<'_>, _: &ast::FnDecl, _: Span, _: ast::NodeId) { } - fn check_trait_item(&mut self, _: &EarlyContext<'_>, _: &ast::TraitItem) { } - fn check_trait_item_post(&mut self, _: &EarlyContext<'_>, _: &ast::TraitItem) { } - fn check_impl_item(&mut self, _: &EarlyContext<'_>, _: &ast::ImplItem) { } - fn check_impl_item_post(&mut self, _: &EarlyContext<'_>, _: &ast::ImplItem) { } - fn check_struct_def(&mut self, _: &EarlyContext<'_>, - _: &ast::VariantData, _: ast::Ident, _: &ast::Generics, _: ast::NodeId) { } - fn check_struct_def_post(&mut self, _: &EarlyContext<'_>, - _: &ast::VariantData, _: ast::Ident, _: &ast::Generics, _: ast::NodeId) { } - fn check_struct_field(&mut self, _: &EarlyContext<'_>, _: &ast::StructField) { } - fn check_variant(&mut self, _: &EarlyContext<'_>, _: &ast::Variant, _: &ast::Generics) { } - fn check_variant_post(&mut self, _: &EarlyContext<'_>, _: &ast::Variant, _: &ast::Generics) { } - fn check_lifetime(&mut self, _: &EarlyContext<'_>, _: &ast::Lifetime) { } - fn check_path(&mut self, _: &EarlyContext<'_>, _: &ast::Path, _: ast::NodeId) { } - fn check_attribute(&mut self, _: &EarlyContext<'_>, _: &ast::Attribute) { } - fn check_mac_def(&mut self, _: &EarlyContext<'_>, _: &ast::MacroDef, _id: ast::NodeId) { } - fn check_mac(&mut self, _: &EarlyContext<'_>, _: &ast::Mac) { } + fn check_ident(&mut self, _: &EarlyContext<'_>, _: ast::Ident) {} + fn check_crate(&mut self, _: &EarlyContext<'_>, _: &ast::Crate) {} + fn check_crate_post(&mut self, _: &EarlyContext<'_>, _: &ast::Crate) {} + fn check_mod(&mut self, _: 
&EarlyContext<'_>, _: &ast::Mod, _: Span, _: ast::NodeId) {} + fn check_mod_post(&mut self, _: &EarlyContext<'_>, _: &ast::Mod, _: Span, _: ast::NodeId) {} + fn check_foreign_item(&mut self, _: &EarlyContext<'_>, _: &ast::ForeignItem) {} + fn check_foreign_item_post(&mut self, _: &EarlyContext<'_>, _: &ast::ForeignItem) {} + fn check_item(&mut self, _: &EarlyContext<'_>, _: &ast::Item) {} + fn check_item_post(&mut self, _: &EarlyContext<'_>, _: &ast::Item) {} + fn check_local(&mut self, _: &EarlyContext<'_>, _: &ast::Local) {} + fn check_block(&mut self, _: &EarlyContext<'_>, _: &ast::Block) {} + fn check_block_post(&mut self, _: &EarlyContext<'_>, _: &ast::Block) {} + fn check_stmt(&mut self, _: &EarlyContext<'_>, _: &ast::Stmt) {} + fn check_arm(&mut self, _: &EarlyContext<'_>, _: &ast::Arm) {} + fn check_pat(&mut self, _: &EarlyContext<'_>, _: &ast::Pat, _: &mut bool) {} + fn check_expr(&mut self, _: &EarlyContext<'_>, _: &ast::Expr) {} + fn check_expr_post(&mut self, _: &EarlyContext<'_>, _: &ast::Expr) {} + fn check_ty(&mut self, _: &EarlyContext<'_>, _: &ast::Ty) {} + fn check_generic_param(&mut self, _: &EarlyContext<'_>, _: &ast::GenericParam) {} + fn check_generics(&mut self, _: &EarlyContext<'_>, _: &ast::Generics) {} + fn check_where_predicate(&mut self, _: &EarlyContext<'_>, _: &ast::WherePredicate) {} + fn check_poly_trait_ref( + &mut self, + _: &EarlyContext<'_>, + _: &ast::PolyTraitRef, + _: &ast::TraitBoundModifier, + ) { + } + fn check_fn( + &mut self, + _: &EarlyContext<'_>, + _: ast_visit::FnKind<'_>, + _: &ast::FnDecl, + _: Span, + _: ast::NodeId, + ) { + } + fn check_fn_post( + &mut self, + _: &EarlyContext<'_>, + _: ast_visit::FnKind<'_>, + _: &ast::FnDecl, + _: Span, + _: ast::NodeId, + ) { + } + fn check_trait_item(&mut self, _: &EarlyContext<'_>, _: &ast::TraitItem) {} + fn check_trait_item_post(&mut self, _: &EarlyContext<'_>, _: &ast::TraitItem) {} + fn check_impl_item(&mut self, _: &EarlyContext<'_>, _: &ast::ImplItem) {} + fn check_impl_item_post(&mut self, _: &EarlyContext<'_>, _: &ast::ImplItem) {} + fn check_struct_def( + &mut self, + _: &EarlyContext<'_>, + _: &ast::VariantData, + _: ast::Ident, + _: &ast::Generics, + _: ast::NodeId, + ) { + } + fn check_struct_def_post( + &mut self, + _: &EarlyContext<'_>, + _: &ast::VariantData, + _: ast::Ident, + _: &ast::Generics, + _: ast::NodeId, + ) { + } + fn check_struct_field(&mut self, _: &EarlyContext<'_>, _: &ast::StructField) {} + fn check_variant(&mut self, _: &EarlyContext<'_>, _: &ast::Variant, _: &ast::Generics) {} + fn check_variant_post(&mut self, _: &EarlyContext<'_>, _: &ast::Variant, _: &ast::Generics) {} + fn check_lifetime(&mut self, _: &EarlyContext<'_>, _: &ast::Lifetime) {} + fn check_path(&mut self, _: &EarlyContext<'_>, _: &ast::Path, _: ast::NodeId) {} + fn check_attribute(&mut self, _: &EarlyContext<'_>, _: &ast::Attribute) {} + fn check_mac_def(&mut self, _: &EarlyContext<'_>, _: &ast::MacroDef, _id: ast::NodeId) {} + fn check_mac(&mut self, _: &EarlyContext<'_>, _: &ast::Mac) {} /// Called when entering a syntax node that can have lint attributes such /// as `#[allow(...)]`. Called with *all* the attributes of that node. - fn enter_lint_attrs(&mut self, _: &EarlyContext<'_>, _: &[ast::Attribute]) { } + fn enter_lint_attrs(&mut self, _: &EarlyContext<'_>, _: &[ast::Attribute]) {} /// Counterpart to `enter_lint_attrs`. 
- fn exit_lint_attrs(&mut self, _: &EarlyContext<'_>, _: &[ast::Attribute]) { } + fn exit_lint_attrs(&mut self, _: &EarlyContext<'_>, _: &[ast::Attribute]) {} } /// A lint pass boxed up as a trait object. pub type EarlyLintPassObject = Box; -pub type LateLintPassObject = Box LateLintPass<'a, 'tcx> + sync::Send - + sync::Sync + 'static>; - - +pub type LateLintPassObject = + Box LateLintPass<'a, 'tcx> + sync::Send + sync::Sync + 'static>; /// Identifies a lint known to the compiler. #[derive(Clone, Copy, Debug)] @@ -389,7 +421,7 @@ impl PartialEq for LintId { } } -impl Eq for LintId { } +impl Eq for LintId {} impl hash::Hash for LintId { fn hash(&self, state: &mut H) { @@ -401,9 +433,7 @@ impl hash::Hash for LintId { impl LintId { /// Get the `LintId` for a `Lint`. pub fn of(lint: &'static Lint) -> LintId { - LintId { - lint, - } + LintId { lint } } pub fn lint_name_raw(&self) -> &'static str { @@ -419,7 +449,10 @@ impl LintId { /// Setting for how to handle a lint. #[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)] pub enum Level { - Allow, Warn, Deny, Forbid, + Allow, + Warn, + Deny, + Forbid, } impl_stable_hash_for!(enum self::Level { @@ -478,7 +511,7 @@ pub mod builtin; mod context; mod levels; -pub use self::levels::{LintLevelSets, LintLevelMap}; +pub use self::levels::{LintLevelMap, LintLevelSets}; #[derive(Default)] pub struct LintBuffer { @@ -486,18 +519,20 @@ pub struct LintBuffer { } impl LintBuffer { - pub fn add_lint(&mut self, - lint: &'static Lint, - id: ast::NodeId, - sp: MultiSpan, - msg: &str, - diagnostic: BuiltinLintDiagnostics) { + pub fn add_lint( + &mut self, + lint: &'static Lint, + id: ast::NodeId, + sp: MultiSpan, + msg: &str, + diagnostic: BuiltinLintDiagnostics, + ) { let early_lint = BufferedEarlyLint { lint_id: LintId::of(lint), ast_id: id, span: sp, msg: msg.to_string(), - diagnostic + diagnostic, }; let arr = self.map.entry(id).or_default(); if !arr.contains(&early_lint) { @@ -515,22 +550,20 @@ impl LintBuffer { } } -pub fn struct_lint_level<'a>(sess: &'a Session, - lint: &'static Lint, - level: Level, - src: LintSource, - span: Option, - msg: &str) - -> DiagnosticBuilder<'a> -{ +pub fn struct_lint_level<'a>( + sess: &'a Session, + lint: &'static Lint, + level: Level, + src: LintSource, + span: Option, + msg: &str, +) -> DiagnosticBuilder<'a> { let mut err = match (level, span) { (Level::Allow, _) => return sess.diagnostic().struct_dummy(), (Level::Warn, Some(span)) => sess.struct_span_warn(span, msg), (Level::Warn, None) => sess.struct_warn(msg), - (Level::Deny, Some(span)) | - (Level::Forbid, Some(span)) => sess.struct_span_err(span, msg), - (Level::Deny, None) | - (Level::Forbid, None) => sess.struct_err(msg), + (Level::Deny, Some(span)) | (Level::Forbid, Some(span)) => sess.struct_span_err(span, msg), + (Level::Deny, None) | (Level::Forbid, None) => sess.struct_err(msg), }; let name = lint.name_lower(); @@ -539,7 +572,8 @@ pub fn struct_lint_level<'a>(sess: &'a Session, sess.diag_note_once( &mut err, DiagnosticMessageId::from(lint), - &format!("#[{}({})] on by default", level.as_str(), name)); + &format!("#[{}({})] on by default", level.as_str(), name), + ); } LintSource::CommandLine(lint_flag_val) => { let flag = match level { @@ -553,29 +587,43 @@ pub fn struct_lint_level<'a>(sess: &'a Session, sess.diag_note_once( &mut err, DiagnosticMessageId::from(lint), - &format!("requested on the command line with `{} {}`", - flag, hyphen_case_lint_name)); + &format!( + "requested on the command line with `{} {}`", + flag, hyphen_case_lint_name + ), 
+                );
             } else {
                 let hyphen_case_flag_val = lint_flag_val.as_str().replace("_", "-");
                 sess.diag_note_once(
                     &mut err,
                     DiagnosticMessageId::from(lint),
-                    &format!("`{} {}` implied by `{} {}`",
-                             flag, hyphen_case_lint_name, flag,
-                             hyphen_case_flag_val));
+                    &format!(
+                        "`{} {}` implied by `{} {}`",
+                        flag, hyphen_case_lint_name, flag, hyphen_case_flag_val
+                    ),
+                );
             }
         }
         LintSource::Node(lint_attr_name, src, reason) => {
             if let Some(rationale) = reason {
                 err.note(&rationale.as_str());
             }
-            sess.diag_span_note_once(&mut err, DiagnosticMessageId::from(lint),
-                                     src, "lint level defined here");
+            sess.diag_span_note_once(
+                &mut err,
+                DiagnosticMessageId::from(lint),
+                src,
+                "lint level defined here",
+            );
             if lint_attr_name.as_str() != name {
                 let level_str = level.as_str();
-                sess.diag_note_once(&mut err, DiagnosticMessageId::from(lint),
-                                    &format!("#[{}({})] implied by #[{}({})]",
-                                             level_str, name, level_str, lint_attr_name));
+                sess.diag_note_once(
+                    &mut err,
+                    DiagnosticMessageId::from(lint),
+                    &format!(
+                        "#[{}({})] implied by #[{}({})]",
+                        level_str, name, level_str, lint_attr_name
+                    ),
+                );
             }
         }
     }
@@ -600,8 +648,10 @@ pub fn struct_lint_level<'a>(sess: &'a Session,
         } else {
             format!("{} in a future release!", STANDARD_MESSAGE)
         };
-        let citation = format!("for more information, see {}",
-                               future_incompatible.reference);
+        let citation = format!(
+            "for more information, see {}",
+            future_incompatible.reference
+        );
         err.warn(&explanation);
         err.note(&citation);
     }
@@ -609,7 +659,12 @@ pub fn struct_lint_level<'a>(sess: &'a Session,
     // If this code originates in a foreign macro, aka something that this crate
     // did not itself author, then it's likely that there's nothing this crate
     // can do about it. We probably want to skip the lint entirely.
-    if err.span.primary_spans().iter().any(|s| in_external_macro(sess, *s)) {
+    if err
+        .span
+        .primary_spans()
+        .iter()
+        .any(|s| in_external_macro(sess, *s))
+    {
         // Any suggestions made here are likely to be incorrect, so anything we
         // emit shouldn't be automatically fixed by rustfix.
err.allow_suggestions(false); @@ -622,12 +677,10 @@ pub fn struct_lint_level<'a>(sess: &'a Session, } } - return err + return err; } -fn lint_levels<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, cnum: CrateNum) - -> Lrc -{ +fn lint_levels<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, cnum: CrateNum) -> Lrc { assert_eq!(cnum, LOCAL_CRATE); let mut builder = LintLevelMapBuilder { levels: LintLevelSets::builder(tcx.sess), @@ -648,14 +701,13 @@ struct LintLevelMapBuilder<'a, 'tcx: 'a> { } impl<'a, 'tcx> LintLevelMapBuilder<'a, 'tcx> { - fn with_lint_attrs(&mut self, - id: ast::NodeId, - attrs: &[ast::Attribute], - f: F) - where F: FnOnce(&mut Self) + fn with_lint_attrs(&mut self, id: ast::NodeId, attrs: &[ast::Attribute], f: F) + where + F: FnOnce(&mut Self), { let push = self.levels.push(attrs); - self.levels.register_id(self.tcx.hir().definitions().node_to_hir_id(id)); + self.levels + .register_id(self.tcx.hir().definitions().node_to_hir_id(id)); f(self); self.levels.pop(push); } @@ -690,10 +742,12 @@ impl<'a, 'tcx> intravisit::Visitor<'tcx> for LintLevelMapBuilder<'a, 'tcx> { }) } - fn visit_variant(&mut self, - v: &'tcx hir::Variant, - g: &'tcx hir::Generics, - item_id: ast::NodeId) { + fn visit_variant( + &mut self, + v: &'tcx hir::Variant, + g: &'tcx hir::Generics, + item_id: ast::NodeId, + ) { self.with_lint_attrs(v.node.data.id(), &v.node.attrs, |builder| { intravisit::walk_variant(builder, v, g, item_id); }) @@ -735,7 +789,7 @@ pub fn in_external_macro(sess: &Session, span: Span) -> bool { match info.format { ExpnFormat::MacroAttribute(..) => return true, // definitely a plugin ExpnFormat::CompilerDesugaring(_) => return true, // well, it's "external" - ExpnFormat::MacroBang(..) => {} // check below + ExpnFormat::MacroBang(..) => {} // check below } let def_site = match info.def_site { diff --git a/src/librustc/macros.rs b/src/librustc/macros.rs index 2978b35319944..625fae54f8d07 100644 --- a/src/librustc/macros.rs +++ b/src/librustc/macros.rs @@ -54,9 +54,15 @@ macro_rules! span_bug { #[macro_export] macro_rules! __impl_stable_hash_field { - ($field:ident, $ctx:expr, $hasher:expr) => ($field.hash_stable($ctx, $hasher)); - ($field:ident, $ctx:expr, $hasher:expr, _) => ({ let _ = $field; }); - ($field:ident, $ctx:expr, $hasher:expr, $delegate:expr) => ($delegate.hash_stable($ctx, $hasher)); + ($field:ident, $ctx:expr, $hasher:expr) => { + $field.hash_stable($ctx, $hasher) + }; + ($field:ident, $ctx:expr, $hasher:expr, _) => {{ + let _ = $field; + }}; + ($field:ident, $ctx:expr, $hasher:expr, $delegate:expr) => { + $delegate.hash_stable($ctx, $hasher) + }; } #[macro_export] @@ -172,19 +178,19 @@ macro_rules! impl_stable_hash_for { #[macro_export] macro_rules! impl_stable_hash_for_spanned { - ($T:path) => ( - - impl<'a, 'tcx> HashStable> for ::syntax::source_map::Spanned<$T> - { + ($T:path) => { + impl<'a, 'tcx> HashStable> for ::syntax::source_map::Spanned<$T> { #[inline] - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { self.node.hash_stable(hcx, hasher); self.span.hash_stable(hcx, hasher); } } - ); + }; } /////////////////////////////////////////////////////////////////////////// @@ -513,4 +519,3 @@ macro_rules! 
EnumTypeFoldableImpl { ) }; } - diff --git a/src/librustc/middle/borrowck.rs b/src/librustc/middle/borrowck.rs index 120ba6b1d4e9f..a5a1c29c63c24 100644 --- a/src/librustc/middle/borrowck.rs +++ b/src/librustc/middle/borrowck.rs @@ -1,12 +1,14 @@ -use ich::StableHashingContext; use hir::HirId; +use ich::StableHashingContext; use util::nodemap::FxHashSet; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, - StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; #[derive(Copy, Clone, Debug, RustcEncodable, RustcDecodable)] -pub enum SignalledError { SawSomeError, NoErrorsSeen } +pub enum SignalledError { + SawSomeError, + NoErrorsSeen, +} impl Default for SignalledError { fn default() -> SignalledError { @@ -23,9 +25,11 @@ pub struct BorrowCheckResult { } impl<'a> HashStable> for BorrowCheckResult { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let BorrowCheckResult { ref used_mut_nodes, ref signalled_any_error, diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs index c431dc77f782f..01f5cfc70eb33 100644 --- a/src/librustc/middle/cstore.rs +++ b/src/librustc/middle/cstore.rs @@ -6,17 +6,17 @@ use hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use hir::map as hir_map; use hir::map::definitions::{DefKey, DefPathTable}; use rustc_data_structures::svh::Svh; -use ty::{self, TyCtxt}; -use session::{Session, CrateDisambiguator}; use session::search_paths::PathKind; +use session::{CrateDisambiguator, Session}; +use ty::{self, TyCtxt}; +use rustc_data_structures::sync::{self, Lrc, MetadataRef}; +use rustc_target::spec::Target; use std::any::Any; use std::path::{Path, PathBuf}; use syntax::ast; use syntax::symbol::Symbol; use syntax_pos::Span; -use rustc_target::spec::Target; -use rustc_data_structures::sync::{self, MetadataRef, Lrc}; pub use self::NativeLibraryKind::*; @@ -145,7 +145,7 @@ pub enum ExternCrateSource { } pub struct EncodedMetadata { - pub raw_data: Vec + pub raw_data: Vec, } impl EncodedMetadata { @@ -165,14 +165,8 @@ impl EncodedMetadata { /// metadata in library -- this trait just serves to decouple rustc_metadata from /// the archive reader, which depends on LLVM. pub trait MetadataLoader { - fn get_rlib_metadata(&self, - target: &Target, - filename: &Path) - -> Result; - fn get_dylib_metadata(&self, - target: &Target, - filename: &Path) - -> Result; + fn get_rlib_metadata(&self, target: &Target, filename: &Path) -> Result; + fn get_dylib_metadata(&self, target: &Target, filename: &Path) -> Result; } /// A store of Rust crates, through with their metadata @@ -207,9 +201,7 @@ pub trait CrateStore { fn crates_untracked(&self) -> Vec; // utility functions - fn encode_metadata<'a, 'tcx>(&self, - tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> EncodedMetadata; + fn encode_metadata<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> EncodedMetadata; fn metadata_encoding_version(&self) -> &[u8]; } @@ -224,15 +216,17 @@ pub type CrateStoreDyn = dyn CrateStore + sync::Sync; // In order to get this left-to-right dependency ordering, we perform a // topological sort of all crates putting the leaves at the right-most // positions. 
-pub fn used_crates(tcx: TyCtxt<'_, '_, '_>, prefer: LinkagePreference) - -> Vec<(CrateNum, LibSource)> -{ - let mut libs = tcx.crates() +pub fn used_crates( + tcx: TyCtxt<'_, '_, '_>, + prefer: LinkagePreference, +) -> Vec<(CrateNum, LibSource)> { + let mut libs = tcx + .crates() .iter() .cloned() .filter_map(|cnum| { if tcx.dep_kind(cnum).macros_only() { - return None + return None; } let source = tcx.used_crate_source(cnum); let path = match prefer { @@ -254,8 +248,6 @@ pub fn used_crates(tcx: TyCtxt<'_, '_, '_>, prefer: LinkagePreference) .collect::>(); let mut ordering = tcx.postorder_cnums(LOCAL_CRATE); Lrc::make_mut(&mut ordering).reverse(); - libs.sort_by_cached_key(|&(a, _)| { - ordering.iter().position(|x| *x == a) - }); + libs.sort_by_cached_key(|&(a, _)| ordering.iter().position(|x| *x == a)); libs } diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index 0c769c91801b8..f18c209249296 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -2,14 +2,14 @@ // closely. The idea is that all reachable symbols are live, codes called // from live codes are live, and everything else is dead. +use hir::intravisit::{self, NestedVisitorMap, Visitor}; +use hir::itemlikevisit::ItemLikeVisitor; use hir::Node; use hir::{self, PatKind}; -use hir::intravisit::{self, Visitor, NestedVisitorMap}; -use hir::itemlikevisit::ItemLikeVisitor; use hir::def::Def; -use hir::CodegenFnAttrFlags; use hir::def_id::{DefId, LOCAL_CRATE}; +use hir::CodegenFnAttrFlags; use lint; use middle::privacy; use ty::{self, TyCtxt}; @@ -17,24 +17,21 @@ use util::nodemap::FxHashSet; use rustc_data_structures::fx::FxHashMap; -use syntax::{ast, source_map}; use syntax::attr; +use syntax::{ast, source_map}; use syntax_pos; // Any local node that may call something in its body block should be // explored. For example, if it's a live Node::Item that is a // function, then we should explore its block to check for codes that // may need to be marked as live. -fn should_explore<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - node_id: ast::NodeId) -> bool { +fn should_explore<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeId) -> bool { match tcx.hir().find(node_id) { - Some(Node::Item(..)) | - Some(Node::ImplItem(..)) | - Some(Node::ForeignItem(..)) | - Some(Node::TraitItem(..)) => - true, - _ => - false + Some(Node::Item(..)) + | Some(Node::ImplItem(..)) + | Some(Node::ForeignItem(..)) + | Some(Node::TraitItem(..)) => true, + _ => false, } } @@ -54,8 +51,8 @@ struct MarkSymbolVisitor<'a, 'tcx: 'a> { impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { fn check_def_id(&mut self, def_id: DefId) { if let Some(node_id) = self.tcx.hir().as_local_node_id(def_id) { - if should_explore(self.tcx, node_id) || - self.struct_constructors.contains_key(&node_id) { + if should_explore(self.tcx, node_id) || self.struct_constructors.contains_key(&node_id) + { self.worklist.push(node_id); } self.live_symbols.insert(node_id); @@ -75,8 +72,11 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { self.check_def_id(def.def_id()); } _ if self.in_pat => (), - Def::PrimTy(..) | Def::SelfTy(..) | Def::SelfCtor(..) | - Def::Local(..) | Def::Upvar(..) => {} + Def::PrimTy(..) + | Def::SelfTy(..) + | Def::SelfCtor(..) + | Def::Local(..) + | Def::Upvar(..) => {} Def::Variant(variant_id) | Def::VariantCtor(variant_id, ..) 
=> { if let Some(enum_id) = self.tcx.parent_def_id(variant_id) { self.check_def_id(enum_id); @@ -110,11 +110,15 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { } } - fn handle_field_pattern_match(&mut self, lhs: &hir::Pat, def: Def, - pats: &[source_map::Spanned]) { + fn handle_field_pattern_match( + &mut self, + lhs: &hir::Pat, + def: Def, + pats: &[source_map::Spanned], + ) { let variant = match self.tables.node_id_to_type(lhs.hir_id).sty { ty::Adt(adt, _) => adt.variant_of_def(def), - _ => span_bug!(lhs.span, "non-ADT in struct pattern") + _ => span_bug!(lhs.span, "non-ADT in struct pattern"), }; for pat in pats { if let PatKind::Wild = pat.node.pat.node { @@ -129,7 +133,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { let mut scanned = FxHashSet::default(); while let Some(id) = self.worklist.pop() { if !scanned.insert(id) { - continue + continue; } // in the case of tuple struct constructors we want to check the item, not the generated @@ -149,29 +153,27 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { let had_inherited_pub_visibility = self.inherited_pub_visibility; self.inherited_pub_visibility = false; match node { - Node::Item(item) => { - match item.node { - hir::ItemKind::Struct(..) | hir::ItemKind::Union(..) => { - let def_id = self.tcx.hir().local_def_id(item.id); - let def = self.tcx.adt_def(def_id); - self.repr_has_repr_c = def.repr.c(); - - intravisit::walk_item(self, &item); - } - hir::ItemKind::Enum(..) => { - self.inherited_pub_visibility = item.vis.node.is_pub(); - intravisit::walk_item(self, &item); - } - hir::ItemKind::Fn(..) - | hir::ItemKind::Ty(..) - | hir::ItemKind::Static(..) - | hir::ItemKind::Existential(..) - | hir::ItemKind::Const(..) => { - intravisit::walk_item(self, &item); - } - _ => () + Node::Item(item) => match item.node { + hir::ItemKind::Struct(..) | hir::ItemKind::Union(..) => { + let def_id = self.tcx.hir().local_def_id(item.id); + let def = self.tcx.adt_def(def_id); + self.repr_has_repr_c = def.repr.c(); + + intravisit::walk_item(self, &item); } - } + hir::ItemKind::Enum(..) => { + self.inherited_pub_visibility = item.vis.node.is_pub(); + intravisit::walk_item(self, &item); + } + hir::ItemKind::Fn(..) + | hir::ItemKind::Ty(..) + | hir::ItemKind::Static(..) + | hir::ItemKind::Existential(..) + | hir::ItemKind::Const(..) 
=> { + intravisit::walk_item(self, &item); + } + _ => (), + }, Node::TraitItem(trait_item) => { intravisit::walk_trait_item(self, trait_item); } @@ -181,7 +183,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { Node::ForeignItem(foreign_item) => { intravisit::walk_foreign_item(self, &foreign_item); } - _ => () + _ => (), } self.repr_has_repr_c = had_repr_c; self.inherited_pub_visibility = had_inherited_pub_visibility; @@ -210,13 +212,20 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> { self.tables = old_tables; } - fn visit_variant_data(&mut self, def: &'tcx hir::VariantData, _: ast::Name, - _: &hir::Generics, _: ast::NodeId, _: syntax_pos::Span) { + fn visit_variant_data( + &mut self, + def: &'tcx hir::VariantData, + _: ast::Name, + _: &hir::Generics, + _: ast::NodeId, + _: syntax_pos::Span, + ) { let has_repr_c = self.repr_has_repr_c; let inherited_pub_visibility = self.inherited_pub_visibility; - let live_fields = def.fields().iter().filter(|f| { - has_repr_c || inherited_pub_visibility || f.vis.node.is_pub() - }); + let live_fields = def + .fields() + .iter() + .filter(|f| has_repr_c || inherited_pub_visibility || f.vis.node.is_pub()); self.live_symbols.extend(live_fields.map(|f| f.id)); intravisit::walk_struct_def(self, def); @@ -239,7 +248,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> { self.mark_as_used_if_union(adt, fields); } } - _ => () + _ => (), } intravisit::walk_expr(self, expr); @@ -270,7 +279,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> { let def = self.tables.qpath_def(qpath, pat.hir_id); self.handle_definition(def); } - _ => () + _ => (), } self.in_pat = true; @@ -284,9 +293,11 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> { } } -fn has_allow_dead_code_or_lang_attr(tcx: TyCtxt<'_, '_, '_>, - id: ast::NodeId, - attrs: &[ast::Attribute]) -> bool { +fn has_allow_dead_code_or_lang_attr( + tcx: TyCtxt<'_, '_, '_>, + id: ast::NodeId, + attrs: &[ast::Attribute], +) -> bool { if attr::contains_name(attrs, "lang") { return true; } @@ -311,8 +322,7 @@ fn has_allow_dead_code_or_lang_attr(tcx: TyCtxt<'_, '_, '_>, // #[used], #[no_mangle], #[export_name], etc also keeps the item alive // forcefully, e.g., for placing it in a specific section. 
- if cg_attrs.contains_extern_indicator() || - cg_attrs.flags.contains(CodegenFnAttrFlags::USED) { + if cg_attrs.contains_extern_indicator() || cg_attrs.flags.contains(CodegenFnAttrFlags::USED) { return true; } @@ -342,26 +352,30 @@ struct LifeSeeder<'k, 'tcx: 'k> { impl<'v, 'k, 'tcx> ItemLikeVisitor<'v> for LifeSeeder<'k, 'tcx> { fn visit_item(&mut self, item: &hir::Item) { - let allow_dead_code = has_allow_dead_code_or_lang_attr(self.tcx, - item.id, - &item.attrs); + let allow_dead_code = has_allow_dead_code_or_lang_attr(self.tcx, item.id, &item.attrs); if allow_dead_code { self.worklist.push(item.id); } match item.node { hir::ItemKind::Enum(ref enum_def, _) if allow_dead_code => { - self.worklist.extend(enum_def.variants.iter() - .map(|variant| variant.node.data.id())); + self.worklist.extend( + enum_def + .variants + .iter() + .map(|variant| variant.node.data.id()), + ); } hir::ItemKind::Trait(.., ref trait_item_refs) => { for trait_item_ref in trait_item_refs { let trait_item = self.krate.trait_item(trait_item_ref.id); match trait_item.node { - hir::TraitItemKind::Const(_, Some(_)) | - hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(_)) => { - if has_allow_dead_code_or_lang_attr(self.tcx, - trait_item.id, - &trait_item.attrs) { + hir::TraitItemKind::Const(_, Some(_)) + | hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(_)) => { + if has_allow_dead_code_or_lang_attr( + self.tcx, + trait_item.id, + &trait_item.attrs, + ) { self.worklist.push(trait_item.id); } } @@ -372,10 +386,13 @@ impl<'v, 'k, 'tcx> ItemLikeVisitor<'v> for LifeSeeder<'k, 'tcx> { hir::ItemKind::Impl(.., ref opt_trait, _, ref impl_item_refs) => { for impl_item_ref in impl_item_refs { let impl_item = self.krate.impl_item(impl_item_ref.id); - if opt_trait.is_some() || - has_allow_dead_code_or_lang_attr(self.tcx, - impl_item.id, - &impl_item.attrs) { + if opt_trait.is_some() + || has_allow_dead_code_or_lang_attr( + self.tcx, + impl_item.id, + &impl_item.attrs, + ) + { self.worklist.push(impl_item_ref.id.node_id); } } @@ -383,7 +400,7 @@ impl<'v, 'k, 'tcx> ItemLikeVisitor<'v> for LifeSeeder<'k, 'tcx> { hir::ItemKind::Struct(ref variant_data, _) => { self.struct_constructors.insert(variant_data.id(), item.id); } - _ => () + _ => (), } } @@ -401,16 +418,21 @@ fn create_and_seed_worklist<'a, 'tcx>( access_levels: &privacy::AccessLevels, krate: &hir::Crate, ) -> (Vec, FxHashMap) { - let worklist = access_levels.map.iter().filter_map(|(&id, level)| { - if level >= &privacy::AccessLevel::Reachable { - Some(id) - } else { - None - } - }).chain( - // Seed entry point - tcx.sess.entry_fn.borrow().map(|(id, _, _)| id) - ).collect::>(); + let worklist = access_levels + .map + .iter() + .filter_map(|(&id, level)| { + if level >= &privacy::AccessLevel::Reachable { + Some(id) + } else { + None + } + }) + .chain( + // Seed entry point + tcx.sess.entry_fn.borrow().map(|(id, _, _)| id), + ) + .collect::>(); // Seed implemented trait items let mut life_seeder = LifeSeeder { @@ -424,10 +446,11 @@ fn create_and_seed_worklist<'a, 'tcx>( (life_seeder.worklist, life_seeder.struct_constructors) } -fn find_live<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - access_levels: &privacy::AccessLevels, - krate: &hir::Crate) - -> FxHashSet { +fn find_live<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + access_levels: &privacy::AccessLevels, + krate: &hir::Crate, +) -> FxHashSet { let (worklist, struct_constructors) = create_and_seed_worklist(tcx, access_levels, krate); let mut symbol_visitor = MarkSymbolVisitor { worklist, @@ -459,7 +482,7 @@ 
impl<'a, 'tcx> DeadVisitor<'a, 'tcx> { | hir::ItemKind::Enum(..) | hir::ItemKind::Struct(..) | hir::ItemKind::Union(..) => true, - _ => false + _ => false, }; should_warn && !self.symbol_is_live(item.id) } @@ -474,21 +497,15 @@ impl<'a, 'tcx> DeadVisitor<'a, 'tcx> { fn should_warn_about_variant(&mut self, variant: &hir::VariantKind) -> bool { !self.symbol_is_live(variant.data.id()) - && !has_allow_dead_code_or_lang_attr(self.tcx, - variant.data.id(), - &variant.attrs) + && !has_allow_dead_code_or_lang_attr(self.tcx, variant.data.id(), &variant.attrs) } fn should_warn_about_foreign_item(&mut self, fi: &hir::ForeignItem) -> bool { - !self.symbol_is_live(fi.id) - && !has_allow_dead_code_or_lang_attr(self.tcx, fi.id, &fi.attrs) + !self.symbol_is_live(fi.id) && !has_allow_dead_code_or_lang_attr(self.tcx, fi.id, &fi.attrs) } // id := node id of an item's definition. - fn symbol_is_live( - &mut self, - id: ast::NodeId, - ) -> bool { + fn symbol_is_live(&mut self, id: ast::NodeId) -> bool { if self.live_symbols.contains(&id) { return true; } @@ -510,19 +527,21 @@ impl<'a, 'tcx> DeadVisitor<'a, 'tcx> { false } - fn warn_dead_code(&mut self, - id: ast::NodeId, - span: syntax_pos::Span, - name: ast::Name, - node_type: &str, - participle: &str) { + fn warn_dead_code( + &mut self, + id: ast::NodeId, + span: syntax_pos::Span, + name: ast::Name, + node_type: &str, + participle: &str, + ) { if !name.as_str().starts_with("_") { - self.tcx - .lint_node(lint::builtin::DEAD_CODE, - id, - span, - &format!("{} is never {}: `{}`", - node_type, participle, name)); + self.tcx.lint_node( + lint::builtin::DEAD_CODE, + id, + span, + &format!("{} is never {}: `{}`", node_type, participle, name), + ); } } } @@ -541,18 +560,18 @@ impl<'a, 'tcx> Visitor<'tcx> for DeadVisitor<'a, 'tcx> { // For items that have a definition with a signature followed by a // block, point only at the signature. let span = match item.node { - hir::ItemKind::Fn(..) | - hir::ItemKind::Mod(..) | - hir::ItemKind::Enum(..) | - hir::ItemKind::Struct(..) | - hir::ItemKind::Union(..) | - hir::ItemKind::Trait(..) | - hir::ItemKind::Impl(..) => self.tcx.sess.source_map().def_span(item.span), + hir::ItemKind::Fn(..) + | hir::ItemKind::Mod(..) + | hir::ItemKind::Enum(..) + | hir::ItemKind::Struct(..) + | hir::ItemKind::Union(..) + | hir::ItemKind::Trait(..) + | hir::ItemKind::Impl(..) => self.tcx.sess.source_map().def_span(item.span), _ => item.span, }; let participle = match item.node { hir::ItemKind::Struct(..) 
=> "constructed", // Issue #52325 - _ => "used" + _ => "used", }; self.warn_dead_code( item.id, @@ -567,13 +586,20 @@ impl<'a, 'tcx> Visitor<'tcx> for DeadVisitor<'a, 'tcx> { } } - fn visit_variant(&mut self, - variant: &'tcx hir::Variant, - g: &'tcx hir::Generics, - id: ast::NodeId) { + fn visit_variant( + &mut self, + variant: &'tcx hir::Variant, + g: &'tcx hir::Generics, + id: ast::NodeId, + ) { if self.should_warn_about_variant(&variant.node) { - self.warn_dead_code(variant.node.data.id(), variant.span, variant.node.ident.name, - "variant", "constructed"); + self.warn_dead_code( + variant.node.data.id(), + variant.span, + variant.node.ident.name, + "variant", + "constructed", + ); } else { intravisit::walk_variant(self, variant, g, id); } @@ -581,8 +607,13 @@ impl<'a, 'tcx> Visitor<'tcx> for DeadVisitor<'a, 'tcx> { fn visit_foreign_item(&mut self, fi: &'tcx hir::ForeignItem) { if self.should_warn_about_foreign_item(fi) { - self.warn_dead_code(fi.id, fi.span, fi.ident.name, - fi.node.descriptive_variant(), "used"); + self.warn_dead_code( + fi.id, + fi.span, + fi.ident.name, + fi.node.descriptive_variant(), + "used", + ); } intravisit::walk_foreign_item(self, fi); } @@ -598,11 +629,13 @@ impl<'a, 'tcx> Visitor<'tcx> for DeadVisitor<'a, 'tcx> { match impl_item.node { hir::ImplItemKind::Const(_, body_id) => { if !self.symbol_is_live(impl_item.id) { - self.warn_dead_code(impl_item.id, - impl_item.span, - impl_item.ident.name, - "associated const", - "used"); + self.warn_dead_code( + impl_item.id, + impl_item.span, + impl_item.ident.name, + "associated const", + "used", + ); } self.visit_nested_body(body_id) } @@ -613,21 +646,20 @@ impl<'a, 'tcx> Visitor<'tcx> for DeadVisitor<'a, 'tcx> { } self.visit_nested_body(body_id) } - hir::ImplItemKind::Existential(..) | - hir::ImplItemKind::Type(..) => {} + hir::ImplItemKind::Existential(..) | hir::ImplItemKind::Type(..) => {} } } // Overwrite so that we don't warn the trait item itself. fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) { match trait_item.node { - hir::TraitItemKind::Const(_, Some(body_id)) | - hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(body_id)) => { + hir::TraitItemKind::Const(_, Some(body_id)) + | hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(body_id)) => { self.visit_nested_body(body_id) } - hir::TraitItemKind::Const(_, None) | - hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_)) | - hir::TraitItemKind::Type(..) => {} + hir::TraitItemKind::Const(_, None) + | hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_)) + | hir::TraitItemKind::Type(..) 
=> {} } } } @@ -636,9 +668,6 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE); let krate = tcx.hir().krate(); let live_symbols = find_live(tcx, access_levels, krate); - let mut visitor = DeadVisitor { - tcx, - live_symbols, - }; + let mut visitor = DeadVisitor { tcx, live_symbols }; intravisit::walk_crate(&mut visitor, krate); } diff --git a/src/librustc/middle/dependency_format.rs b/src/librustc/middle/dependency_format.rs index 16c9344a03722..6fd191c2b73c2 100644 --- a/src/librustc/middle/dependency_format.rs +++ b/src/librustc/middle/dependency_format.rs @@ -53,12 +53,12 @@ use hir::def_id::CrateNum; +use middle::cstore::LinkagePreference::{self, RequireDynamic, RequireStatic}; +use middle::cstore::{self, DepKind}; +use rustc_target::spec::PanicStrategy; use session::config; use ty::TyCtxt; -use middle::cstore::{self, DepKind}; -use middle::cstore::LinkagePreference::{self, RequireStatic, RequireDynamic}; use util::nodemap::FxHashMap; -use rustc_target::spec::PanicStrategy; /// A list of dependencies for a certain crate type. /// @@ -83,18 +83,21 @@ pub enum Linkage { pub fn calculate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let sess = &tcx.sess; - let fmts = sess.crate_types.borrow().iter().map(|&ty| { - let linkage = calculate_type(tcx, ty); - verify_ok(tcx, &linkage); - (ty, linkage) - }).collect::>(); + let fmts = sess + .crate_types + .borrow() + .iter() + .map(|&ty| { + let linkage = calculate_type(tcx, ty); + verify_ok(tcx, &linkage); + (ty, linkage) + }) + .collect::>(); sess.abort_if_errors(); sess.dependency_formats.set(fmts); } -fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - ty: config::CrateType) -> DependencyList { - +fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: config::CrateType) -> DependencyList { let sess = &tcx.sess; if !sess.opts.output_types.should_codegen() { @@ -113,8 +116,9 @@ fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // If the global prefer_dynamic switch is turned off, or the final // executable will be statically linked, prefer static crate linkage. - config::CrateType::Executable if !sess.opts.cg.prefer_dynamic || - sess.crt_static() => Linkage::Static, + config::CrateType::Executable if !sess.opts.cg.prefer_dynamic || sess.crt_static() => { + Linkage::Static + } config::CrateType::Executable => Linkage::Dynamic, // proc-macro crates are mostly cdylibs, but we also need metadata. @@ -142,16 +146,25 @@ fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // Staticlibs, cdylibs, and static executables must have all static // dependencies. If any are not found, generate some nice pretty errors. 
- if ty == config::CrateType::Cdylib || ty == config::CrateType::Staticlib || - (ty == config::CrateType::Executable && sess.crt_static() && - !sess.target.target.options.crt_static_allows_dylibs) { + if ty == config::CrateType::Cdylib + || ty == config::CrateType::Staticlib + || (ty == config::CrateType::Executable + && sess.crt_static() + && !sess.target.target.options.crt_static_allows_dylibs) + { for &cnum in tcx.crates().iter() { - if tcx.dep_kind(cnum).macros_only() { continue } + if tcx.dep_kind(cnum).macros_only() { + continue; + } let src = tcx.used_crate_source(cnum); - if src.rlib.is_some() { continue } - sess.err(&format!("crate `{}` required to be available in rlib format, \ - but was not found in this form", - tcx.crate_name(cnum))); + if src.rlib.is_some() { + continue; + } + sess.err(&format!( + "crate `{}` required to be available in rlib format, \ + but was not found in this form", + tcx.crate_name(cnum) + )); } return Vec::new(); } @@ -163,7 +176,9 @@ fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // dependencies, ensuring there are no conflicts. The only valid case for a // dependency to be relied upon twice is for both cases to rely on a dylib. for &cnum in tcx.crates().iter() { - if tcx.dep_kind(cnum).macros_only() { continue } + if tcx.dep_kind(cnum).macros_only() { + continue; + } let name = tcx.crate_name(cnum); let src = tcx.used_crate_source(cnum); if src.dylib.is_some() { @@ -179,13 +194,13 @@ fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // Collect what we've got so far in the return vector. let last_crate = tcx.crates().len(); - let mut ret = (1..last_crate+1).map(|cnum| { - match formats.get(&CrateNum::new(cnum)) { + let mut ret = (1..last_crate + 1) + .map(|cnum| match formats.get(&CrateNum::new(cnum)) { Some(&RequireDynamic) => Linkage::Dynamic, Some(&RequireStatic) => Linkage::IncludedFromDylib, None => Linkage::NotLinked, - } - }).collect::>(); + }) + .collect::>(); // Run through the dependency list again, and add any missing libraries as // static libraries. @@ -194,9 +209,10 @@ fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // (e.g., it's an allocator) then we skip it here as well. for &cnum in tcx.crates().iter() { let src = tcx.used_crate_source(cnum); - if src.dylib.is_none() && - !formats.contains_key(&cnum) && - tcx.dep_kind(cnum) == DepKind::Explicit { + if src.dylib.is_none() + && !formats.contains_key(&cnum) + && tcx.dep_kind(cnum) == DepKind::Explicit + { assert!(src.rlib.is_some() || src.rmeta.is_some()); info!("adding staticlib: {}", tcx.crate_name(cnum)); add_library(tcx, cnum, RequireStatic, &mut formats); @@ -210,8 +226,9 @@ fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // // Things like allocators and panic runtimes may not have been activated // quite yet, so do so here. - activate_injected_dep(*sess.injected_panic_runtime.get(), &mut ret, - &|cnum| tcx.is_panic_runtime(cnum)); + activate_injected_dep(*sess.injected_panic_runtime.get(), &mut ret, &|cnum| { + tcx.is_panic_runtime(cnum) + }); // When dylib B links to dylib A, then when using B we must also link to A. 
// It could be the case, however, that the rlib for A is present (hence we @@ -223,8 +240,7 @@ fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let cnum = CrateNum::new(cnum + 1); let src = tcx.used_crate_source(cnum); match *kind { - Linkage::NotLinked | - Linkage::IncludedFromDylib => {} + Linkage::NotLinked | Linkage::IncludedFromDylib => {} Linkage::Static if src.rlib.is_some() => continue, Linkage::Dynamic if src.dylib.is_some() => continue, kind => { @@ -232,9 +248,12 @@ fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, Linkage::Static => "rlib", _ => "dylib", }; - sess.err(&format!("crate `{}` required to be available in {} format, \ - but was not found in this form", - tcx.crate_name(cnum), kind)); + sess.err(&format!( + "crate `{}` required to be available in {} format, \ + but was not found in this form", + tcx.crate_name(cnum), + kind + )); } } } @@ -242,10 +261,12 @@ fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ret } -fn add_library(tcx: TyCtxt<'_, '_, '_>, - cnum: CrateNum, - link: LinkagePreference, - m: &mut FxHashMap) { +fn add_library( + tcx: TyCtxt<'_, '_, '_>, + cnum: CrateNum, + link: LinkagePreference, + m: &mut FxHashMap, +) { match m.get(&cnum) { Some(&link2) => { // If the linkages differ, then we'd have two copies of the library @@ -256,14 +277,22 @@ fn add_library(tcx: TyCtxt<'_, '_, '_>, // This error is probably a little obscure, but I imagine that it // can be refined over time. if link2 != link || link == RequireStatic { - tcx.sess.struct_err(&format!("cannot satisfy dependencies so `{}` only \ - shows up once", tcx.crate_name(cnum))) - .help("having upstream crates all available in one format \ - will likely make this go away") + tcx.sess + .struct_err(&format!( + "cannot satisfy dependencies so `{}` only \ + shows up once", + tcx.crate_name(cnum) + )) + .help( + "having upstream crates all available in one format \ + will likely make this go away", + ) .emit(); } } - None => { m.insert(cnum, link); } + None => { + m.insert(cnum, link); + } } } @@ -271,25 +300,28 @@ fn attempt_static<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option>(); + let mut ret = (1..last_crate + 1) + .map(|cnum| { + if tcx.dep_kind(CrateNum::new(cnum)) == DepKind::Explicit { + Linkage::Static + } else { + Linkage::NotLinked + } + }) + .collect::>(); // Our allocator/panic runtime may not have been linked above if it wasn't // explicitly linked, which is the case for any injected dependency. Handle // that here and activate them. 
-    activate_injected_dep(*sess.injected_panic_runtime.get(), &mut ret,
-                          &|cnum| tcx.is_panic_runtime(cnum));
+    activate_injected_dep(*sess.injected_panic_runtime.get(), &mut ret, &|cnum| {
+        tcx.is_panic_runtime(cnum)
+    });
     Some(ret)
 }
@@ -303,16 +335,18 @@ fn attempt_static<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option
-fn activate_injected_dep(injected: Option,
-                         list: &mut DependencyList,
-                         replaces_injected: &dyn Fn(CrateNum) -> bool) {
+fn activate_injected_dep(
+    injected: Option,
+    list: &mut DependencyList,
+    replaces_injected: &dyn Fn(CrateNum) -> bool,
+) {
     for (i, slot) in list.iter().enumerate() {
         let cnum = CrateNum::new(i + 1);
         if !replaces_injected(cnum) {
-            continue
+            continue;
         }
         if *slot != Linkage::NotLinked {
-            return
+            return;
         }
     }
     if let Some(injected) = injected {
@@ -327,12 +361,12 @@ fn activate_injected_dep(injected: Option,
 fn verify_ok<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, list: &[Linkage]) {
     let sess = &tcx.sess;
     if list.len() == 0 {
-        return
+        return;
     }
     let mut panic_runtime = None;
     for (i, linkage) in list.iter().enumerate() {
         if let Linkage::NotLinked = *linkage {
-            continue
+            continue;
         }
         let cnum = CrateNum::new(i + 1);
@@ -340,9 +374,11 @@ fn verify_ok<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, list: &[Linkage]) {
             if let Some((prev, _)) = panic_runtime {
                 let prev_name = tcx.crate_name(prev);
                 let cur_name = tcx.crate_name(cnum);
-                sess.err(&format!("cannot link together two \
-                                   panic runtimes: {} and {}",
-                                  prev_name, cur_name));
+                sess.err(&format!(
+                    "cannot link together two \
+                     panic runtimes: {} and {}",
+                    prev_name, cur_name
+                ));
                 panic_runtime = Some((cnum, tcx.panic_strategy(cnum)));
             }
         }
@@ -357,11 +393,13 @@ fn verify_ok<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, list: &[Linkage]) {
     // First up, validate that our selected panic runtime is indeed exactly
     // our same strategy.
     if found_strategy != desired_strategy {
-        sess.err(&format!("the linked panic runtime `{}` is \
-                           not compiled with this crate's \
-                           panic strategy `{}`",
-                          tcx.crate_name(cnum),
-                          desired_strategy.desc()));
+        sess.err(&format!(
+            "the linked panic runtime `{}` is \
+             not compiled with this crate's \
+             panic strategy `{}`",
+            tcx.crate_name(cnum),
+            desired_strategy.desc()
+        ));
     }
     // Next up, verify that all other crates are compatible with this panic
@@ -370,25 +408,27 @@ fn verify_ok<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, list: &[Linkage]) {
     // panic strategy must match our own.
for (i, linkage) in list.iter().enumerate() { if let Linkage::NotLinked = *linkage { - continue + continue; } if desired_strategy == PanicStrategy::Abort { - continue + continue; } let cnum = CrateNum::new(i + 1); let found_strategy = tcx.panic_strategy(cnum); let is_compiler_builtins = tcx.is_compiler_builtins(cnum); if is_compiler_builtins || desired_strategy == found_strategy { - continue + continue; } - sess.err(&format!("the crate `{}` is compiled with the \ - panic strategy `{}` which is \ - incompatible with this crate's \ - strategy of `{}`", - tcx.crate_name(cnum), - found_strategy.desc(), - desired_strategy.desc())); + sess.err(&format!( + "the crate `{}` is compiled with the \ + panic strategy `{}` which is \ + incompatible with this crate's \ + strategy of `{}`", + tcx.crate_name(cnum), + found_strategy.desc(), + desired_strategy.desc() + )); } } } diff --git a/src/librustc/middle/entry.rs b/src/librustc/middle/entry.rs index 6b593a1a9f9b2..1e5a549687dec 100644 --- a/src/librustc/middle/entry.rs +++ b/src/librustc/middle/entry.rs @@ -1,13 +1,13 @@ +use hir::def_id::CRATE_DEF_INDEX; +use hir::itemlikevisit::ItemLikeVisitor; use hir::map as hir_map; -use hir::def_id::{CRATE_DEF_INDEX}; -use session::{config, Session}; +use hir::{ImplItem, Item, ItemKind, TraitItem}; use session::config::EntryFnType; +use session::{config, Session}; use syntax::ast::NodeId; use syntax::attr; use syntax::entry::EntryPointType; use syntax_pos::Span; -use hir::{Item, ItemKind, ImplItem, TraitItem}; -use hir::itemlikevisit::ItemLikeVisitor; struct EntryContext<'a, 'tcx: 'a> { session: &'a Session, @@ -25,7 +25,7 @@ struct EntryContext<'a, 'tcx: 'a> { // The functions that one might think are 'main' but aren't, e.g. // main functions not defined at the top level. For diagnostics. - non_main_fns: Vec<(NodeId, Span)> , + non_main_fns: Vec<(NodeId, Span)>, } impl<'a, 'tcx> ItemLikeVisitor<'tcx> for EntryContext<'a, 'tcx> { @@ -45,22 +45,22 @@ impl<'a, 'tcx> ItemLikeVisitor<'tcx> for EntryContext<'a, 'tcx> { } } -pub fn find_entry_point(session: &Session, - hir_map: &hir_map::Map<'_>, - crate_name: &str) { - let any_exe = session.crate_types.borrow().iter().any(|ty| { - *ty == config::CrateType::Executable - }); +pub fn find_entry_point(session: &Session, hir_map: &hir_map::Map<'_>, crate_name: &str) { + let any_exe = session + .crate_types + .borrow() + .iter() + .any(|ty| *ty == config::CrateType::Executable); if !any_exe { // No need to find a main function session.entry_fn.set(None); - return + return; } // If the user wants no main function at all, then stop here. 
if attr::contains_name(&hir_map.krate().attrs, "no_main") { session.entry_fn.set(None); - return + return; } let mut ctxt = EntryContext { @@ -101,31 +101,33 @@ fn entry_point_type(item: &Item, at_root: bool) -> EntryPointType { } } - fn find_item(item: &Item, ctxt: &mut EntryContext<'_, '_>, at_root: bool) { match entry_point_type(item, at_root) { EntryPointType::MainNamed => { if ctxt.main_fn.is_none() { ctxt.main_fn = Some((item.id, item.span)); } else { - span_err!(ctxt.session, item.span, E0136, - "multiple 'main' functions"); + span_err!(ctxt.session, item.span, E0136, "multiple 'main' functions"); } - }, + } EntryPointType::OtherMain => { ctxt.non_main_fns.push((item.id, item.span)); - }, + } EntryPointType::MainAttr => { if ctxt.attr_main_fn.is_none() { ctxt.attr_main_fn = Some((item.id, item.span)); } else { - struct_span_err!(ctxt.session, item.span, E0137, - "multiple functions with a #[main] attribute") + struct_span_err!( + ctxt.session, + item.span, + E0137, + "multiple functions with a #[main] attribute" + ) .span_label(item.span, "additional #[main] function") .span_label(ctxt.attr_main_fn.unwrap().1, "first #[main] function") .emit(); } - }, + } EntryPointType::Start => { if ctxt.start_fn.is_none() { ctxt.start_fn = Some((item.id, item.span)); @@ -135,29 +137,41 @@ fn find_item(item: &Item, ctxt: &mut EntryContext<'_, '_>, at_root: bool) { .span_label(item.span, "multiple `start` functions") .emit(); } - }, - EntryPointType::None => () + } + EntryPointType::None => (), } } fn configure_main(this: &mut EntryContext<'_, '_>, crate_name: &str) { if let Some((node_id, span)) = this.start_fn { - this.session.entry_fn.set(Some((node_id, span, EntryFnType::Start))); + this.session + .entry_fn + .set(Some((node_id, span, EntryFnType::Start))); } else if let Some((node_id, span)) = this.attr_main_fn { - this.session.entry_fn.set(Some((node_id, span, EntryFnType::Main))); + this.session + .entry_fn + .set(Some((node_id, span, EntryFnType::Main))); } else if let Some((node_id, span)) = this.main_fn { - this.session.entry_fn.set(Some((node_id, span, EntryFnType::Main))); + this.session + .entry_fn + .set(Some((node_id, span, EntryFnType::Main))); } else { // No main function this.session.entry_fn.set(None); - let mut err = struct_err!(this.session, E0601, - "`main` function not found in crate `{}`", crate_name); + let mut err = struct_err!( + this.session, + E0601, + "`main` function not found in crate `{}`", + crate_name + ); if !this.non_main_fns.is_empty() { // There were some functions named 'main' though. Try to give the user a hint. - err.note("the main function must be defined at the crate level \ - but you have one or more functions named 'main' that are not \ - defined at the crate level. Either move the definition or \ - attach the `#[main]` attribute to override this behavior."); + err.note( + "the main function must be defined at the crate level \ + but you have one or more functions named 'main' that are not \ + defined at the crate level. 
Either move the definition or \ + attach the `#[main]` attribute to override this behavior.", + ); for &(_, span) in &this.non_main_fns { err.span_note(span, "here is a function named 'main'"); } @@ -165,11 +179,16 @@ fn configure_main(this: &mut EntryContext<'_, '_>, crate_name: &str) { this.session.abort_if_errors(); } else { if let Some(ref filename) = this.session.local_crate_source_file { - err.note(&format!("consider adding a `main` function to `{}`", filename.display())); + err.note(&format!( + "consider adding a `main` function to `{}`", + filename.display() + )); } if this.session.teach(&err.get_code().unwrap()) { - err.note("If you don't know the basics of Rust, you can go look to the Rust Book \ - to get started: https://doc.rust-lang.org/book/"); + err.note( + "If you don't know the basics of Rust, you can go look to the Rust Book \ + to get started: https://doc.rust-lang.org/book/", + ); } err.emit(); } diff --git a/src/librustc/middle/exported_symbols.rs b/src/librustc/middle/exported_symbols.rs index a0a73ea0b81fb..9b6e4dca6209f 100644 --- a/src/librustc/middle/exported_symbols.rs +++ b/src/librustc/middle/exported_symbols.rs @@ -1,7 +1,6 @@ use hir::def_id::{DefId, LOCAL_CRATE}; use ich::StableHashingContext; -use rustc_data_structures::stable_hasher::{StableHasher, HashStable, - StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; use std::cmp; use std::mem; use ty; @@ -38,70 +37,61 @@ pub enum ExportedSymbol<'tcx> { } impl<'tcx> ExportedSymbol<'tcx> { - pub fn symbol_name(&self, - tcx: ty::TyCtxt<'_, 'tcx, '_>) - -> ty::SymbolName { + pub fn symbol_name(&self, tcx: ty::TyCtxt<'_, 'tcx, '_>) -> ty::SymbolName { match *self { - ExportedSymbol::NonGeneric(def_id) => { - tcx.symbol_name(ty::Instance::mono(tcx, def_id)) - } + ExportedSymbol::NonGeneric(def_id) => tcx.symbol_name(ty::Instance::mono(tcx, def_id)), ExportedSymbol::Generic(def_id, substs) => { tcx.symbol_name(ty::Instance::new(def_id, substs)) } - ExportedSymbol::NoDefId(symbol_name) => { - symbol_name - } + ExportedSymbol::NoDefId(symbol_name) => symbol_name, } } - pub fn compare_stable(&self, - tcx: ty::TyCtxt<'_, 'tcx, '_>, - other: &ExportedSymbol<'tcx>) - -> cmp::Ordering { + pub fn compare_stable( + &self, + tcx: ty::TyCtxt<'_, 'tcx, '_>, + other: &ExportedSymbol<'tcx>, + ) -> cmp::Ordering { match *self { ExportedSymbol::NonGeneric(self_def_id) => match *other { - ExportedSymbol::NonGeneric(other_def_id) => { - tcx.def_path_hash(self_def_id).cmp(&tcx.def_path_hash(other_def_id)) - } - ExportedSymbol::Generic(..) | - ExportedSymbol::NoDefId(_) => { - cmp::Ordering::Less - } - } + ExportedSymbol::NonGeneric(other_def_id) => tcx + .def_path_hash(self_def_id) + .cmp(&tcx.def_path_hash(other_def_id)), + ExportedSymbol::Generic(..) | ExportedSymbol::NoDefId(_) => cmp::Ordering::Less, + }, ExportedSymbol::Generic(..) => match *other { - ExportedSymbol::NonGeneric(_) => { - cmp::Ordering::Greater - } - ExportedSymbol::Generic(..) => { - self.symbol_name(tcx).cmp(&other.symbol_name(tcx)) - } - ExportedSymbol::NoDefId(_) => { - cmp::Ordering::Less - } - } + ExportedSymbol::NonGeneric(_) => cmp::Ordering::Greater, + ExportedSymbol::Generic(..) => self.symbol_name(tcx).cmp(&other.symbol_name(tcx)), + ExportedSymbol::NoDefId(_) => cmp::Ordering::Less, + }, ExportedSymbol::NoDefId(self_symbol_name) => match *other { - ExportedSymbol::NonGeneric(_) | - ExportedSymbol::Generic(..) => { + ExportedSymbol::NonGeneric(_) | ExportedSymbol::Generic(..) 
=> { cmp::Ordering::Greater } ExportedSymbol::NoDefId(ref other_symbol_name) => { self_symbol_name.cmp(other_symbol_name) } - } + }, } } } pub fn metadata_symbol_name(tcx: ty::TyCtxt<'_, '_, '_>) -> String { - format!("rust_metadata_{}_{}", - tcx.original_crate_name(LOCAL_CRATE), - tcx.crate_disambiguator(LOCAL_CRATE).to_fingerprint().to_hex()) + format!( + "rust_metadata_{}_{}", + tcx.original_crate_name(LOCAL_CRATE), + tcx.crate_disambiguator(LOCAL_CRATE) + .to_fingerprint() + .to_hex() + ) } impl<'a, 'gcx> HashStable> for ExportedSymbol<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { ExportedSymbol::NonGeneric(def_id) => { diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index c1aa25b6b75c2..f039a05700f25 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -2,19 +2,19 @@ //! normal visitor, which just walks the entire body in one shot, the //! `ExprUseVisitor` determines how expressions are being used. -pub use self::LoanCause::*; pub use self::ConsumeMode::*; -pub use self::MoveReason::*; +pub use self::LoanCause::*; pub use self::MatchMode::*; -use self::TrackMatchMode::*; +pub use self::MoveReason::*; use self::OverloadedCallType::*; +use self::TrackMatchMode::*; use hir::def::Def; use hir::def_id::DefId; use infer::InferCtxt; use middle::mem_categorization as mc; use middle::region; -use ty::{self, TyCtxt, adjustment}; +use ty::{self, adjustment, TyCtxt}; use hir::{self, PatKind}; use rustc_data_structures::sync::Lrc; @@ -32,11 +32,13 @@ use util::nodemap::ItemLocalSet; pub trait Delegate<'tcx> { // The value found at `cmt` is either copied or moved, depending // on mode. - fn consume(&mut self, - consume_id: ast::NodeId, - consume_span: Span, - cmt: &mc::cmt_<'tcx>, - mode: ConsumeMode); + fn consume( + &mut self, + consume_id: ast::NodeId, + consume_span: Span, + cmt: &mc::cmt_<'tcx>, + mode: ConsumeMode, + ); // The value found at `cmt` has been determined to match the // pattern binding `matched_pat`, and its subparts are being @@ -50,39 +52,35 @@ pub trait Delegate<'tcx> { // `matched_pat` and `consume_pat` are never both called on the // same input pattern structure (though of `consume_pat` can be // called on a subpart of an input passed to `matched_pat). - fn matched_pat(&mut self, - matched_pat: &hir::Pat, - cmt: &mc::cmt_<'tcx>, - mode: MatchMode); + fn matched_pat(&mut self, matched_pat: &hir::Pat, cmt: &mc::cmt_<'tcx>, mode: MatchMode); // The value found at `cmt` is either copied or moved via the // pattern binding `consume_pat`, depending on mode. - fn consume_pat(&mut self, - consume_pat: &hir::Pat, - cmt: &mc::cmt_<'tcx>, - mode: ConsumeMode); + fn consume_pat(&mut self, consume_pat: &hir::Pat, cmt: &mc::cmt_<'tcx>, mode: ConsumeMode); // The value found at `borrow` is being borrowed at the point // `borrow_id` for the region `loan_region` with kind `bk`. 
- fn borrow(&mut self, - borrow_id: ast::NodeId, - borrow_span: Span, - cmt: &mc::cmt_<'tcx>, - loan_region: ty::Region<'tcx>, - bk: ty::BorrowKind, - loan_cause: LoanCause); + fn borrow( + &mut self, + borrow_id: ast::NodeId, + borrow_span: Span, + cmt: &mc::cmt_<'tcx>, + loan_region: ty::Region<'tcx>, + bk: ty::BorrowKind, + loan_cause: LoanCause, + ); // The local variable `id` is declared but not initialized. - fn decl_without_init(&mut self, - id: ast::NodeId, - span: Span); + fn decl_without_init(&mut self, id: ast::NodeId, span: Span); // The path at `cmt` is being assigned to. - fn mutate(&mut self, - assignment_id: ast::NodeId, - assignment_span: Span, - assignee_cmt: &mc::cmt_<'tcx>, - mode: MutateMode); + fn mutate( + &mut self, + assignment_id: ast::NodeId, + assignment_span: Span, + assignee_cmt: &mc::cmt_<'tcx>, + mode: MutateMode, + ); } #[derive(Copy, Clone, PartialEq, Debug)] @@ -95,13 +93,13 @@ pub enum LoanCause { OverloadedOperator, ClosureInvocation, ForLoop, - MatchDiscriminant + MatchDiscriminant, } #[derive(Copy, Clone, PartialEq, Debug)] pub enum ConsumeMode { - Copy, // reference to x where x has a type that copies - Move(MoveReason), // reference to x where x has a type that moves + Copy, // reference to x where x has a type that copies + Move(MoveReason), // reference to x where x has a type that moves } #[derive(Copy, Clone, PartialEq, Debug)] @@ -205,12 +203,10 @@ impl OverloadedCallType { for &(maybe_function_trait, overloaded_call_type) in &[ (tcx.lang_items().fn_once_trait(), FnOnceOverloadedCall), (tcx.lang_items().fn_mut_trait(), FnMutOverloadedCall), - (tcx.lang_items().fn_trait(), FnOverloadedCall) + (tcx.lang_items().fn_trait(), FnOverloadedCall), ] { match maybe_function_trait { - Some(function_trait) if function_trait == trait_id => { - return overloaded_call_type - } + Some(function_trait) if function_trait == trait_id => return overloaded_call_type, _ => continue, } } @@ -228,7 +224,7 @@ impl OverloadedCallType { // The ExprUseVisitor type // // This is the code that actually walks the tree. -pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +pub struct ExprUseVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>, delegate: &'a mut dyn Delegate<'tcx>, param_env: ty::ParamEnv<'tcx>, @@ -242,15 +238,15 @@ pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { // Note that this macro appears similar to try!(), but, unlike try!(), // it does not propagate the error. macro_rules! return_if_err { - ($inp: expr) => ( + ($inp: expr) => { match $inp { Ok(v) => v, Err(()) => { debug!("mc reported err"); - return + return; } } - ) + }; } impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx, 'tcx> { @@ -265,19 +261,21 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx, 'tcx> { /// `None` means that rvalues will be given more conservative lifetimes. /// /// See also `with_infer`, which is used *during* typeck. 
- pub fn new(delegate: &'a mut (dyn Delegate<'tcx>+'a), - tcx: TyCtxt<'a, 'tcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - region_scope_tree: &'a region::ScopeTree, - tables: &'a ty::TypeckTables<'tcx>, - rvalue_promotable_map: Option>) - -> Self - { + pub fn new( + delegate: &'a mut (dyn Delegate<'tcx> + 'a), + tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + region_scope_tree: &'a region::ScopeTree, + tables: &'a ty::TypeckTables<'tcx>, + rvalue_promotable_map: Option>, + ) -> Self { ExprUseVisitor { - mc: mc::MemCategorizationContext::new(tcx, - region_scope_tree, - tables, - rvalue_promotable_map), + mc: mc::MemCategorizationContext::new( + tcx, + region_scope_tree, + tables, + rvalue_promotable_map, + ), delegate, param_env, } @@ -285,13 +283,13 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx, 'tcx> { } impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { - pub fn with_infer(delegate: &'a mut (dyn Delegate<'tcx>+'a), - infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - region_scope_tree: &'a region::ScopeTree, - tables: &'a ty::TypeckTables<'tcx>) - -> Self - { + pub fn with_infer( + delegate: &'a mut (dyn Delegate<'tcx> + 'a), + infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + region_scope_tree: &'a region::ScopeTree, + tables: &'a ty::TypeckTables<'tcx>, + ) -> Self { ExprUseVisitor { mc: mc::MemCategorizationContext::with_infer(infcx, region_scope_tree, tables), delegate, @@ -306,17 +304,16 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { let arg_ty = return_if_err!(self.mc.pat_ty_adjusted(&arg.pat)); debug!("consume_body: arg_ty = {:?}", arg_ty); - let fn_body_scope_r = - self.tcx().mk_region(ty::ReScope( - region::Scope { - id: body.value.hir_id.local_id, - data: region::ScopeData::Node - })); + let fn_body_scope_r = self.tcx().mk_region(ty::ReScope(region::Scope { + id: body.value.hir_id.local_id, + data: region::ScopeData::Node, + })); let arg_cmt = Rc::new(self.mc.cat_rvalue( arg.hir_id, arg.pat.span, fn_body_scope_r, // Args live only as long as the fn body. 
- arg_ty)); + arg_ty, + )); self.walk_irrefutable_pat(arg_cmt, &arg.pat); } @@ -328,12 +325,13 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { self.mc.tcx } - fn delegate_consume(&mut self, - consume_id: ast::NodeId, - consume_span: Span, - cmt: &mc::cmt_<'tcx>) { - debug!("delegate_consume(consume_id={}, cmt={:?})", - consume_id, cmt); + fn delegate_consume( + &mut self, + consume_id: ast::NodeId, + consume_span: Span, + cmt: &mc::cmt_<'tcx>, + ) { + debug!("delegate_consume(consume_id={}, cmt={:?})", consume_id, cmt); let mode = copy_or_move(&self.mc, self.param_env, cmt, DirectRefMove); self.delegate.consume(consume_id, consume_span, cmt, mode); @@ -353,23 +351,26 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { self.walk_expr(expr); } - fn mutate_expr(&mut self, - span: Span, - assignment_expr: &hir::Expr, - expr: &hir::Expr, - mode: MutateMode) { + fn mutate_expr( + &mut self, + span: Span, + assignment_expr: &hir::Expr, + expr: &hir::Expr, + mode: MutateMode, + ) { let cmt = return_if_err!(self.mc.cat_expr(expr)); self.delegate.mutate(assignment_expr.id, span, &cmt, mode); self.walk_expr(expr); } - fn borrow_expr(&mut self, - expr: &hir::Expr, - r: ty::Region<'tcx>, - bk: ty::BorrowKind, - cause: LoanCause) { - debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})", - expr, r, bk); + fn borrow_expr( + &mut self, + expr: &hir::Expr, + r: ty::Region<'tcx>, + bk: ty::BorrowKind, + cause: LoanCause, + ) { + debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})", expr, r, bk); let cmt = return_if_err!(self.mc.cat_expr(expr)); self.delegate.borrow(expr.id, expr.span, &cmt, r, bk, cause); @@ -387,31 +388,34 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { self.walk_adjustment(expr); match expr.node { - hir::ExprKind::Path(_) => { } + hir::ExprKind::Path(_) => {} - hir::ExprKind::Type(ref subexpr, _) => { - self.walk_expr(&subexpr) - } + hir::ExprKind::Type(ref subexpr, _) => self.walk_expr(&subexpr), - hir::ExprKind::Unary(hir::UnDeref, ref base) => { // *base + hir::ExprKind::Unary(hir::UnDeref, ref base) => { + // *base self.select_from_expr(&base); } - hir::ExprKind::Field(ref base, _) => { // base.f + hir::ExprKind::Field(ref base, _) => { + // base.f self.select_from_expr(&base); } - hir::ExprKind::Index(ref lhs, ref rhs) => { // lhs[rhs] + hir::ExprKind::Index(ref lhs, ref rhs) => { + // lhs[rhs] self.select_from_expr(&lhs); self.consume_expr(&rhs); } - hir::ExprKind::Call(ref callee, ref args) => { // callee(args) + hir::ExprKind::Call(ref callee, ref args) => { + // callee(args) self.walk_callee(expr, &callee); self.consume_exprs(args); } - hir::ExprKind::MethodCall(.., ref args) => { // callee.m(args) + hir::ExprKind::MethodCall(.., ref args) => { + // callee.m(args) self.consume_exprs(args); } @@ -448,7 +452,8 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { self.consume_exprs(exprs); } - hir::ExprKind::AddrOf(m, ref base) => { // &base + hir::ExprKind::AddrOf(m, ref base) => { + // &base // make sure that the thing we are pointing out stays valid // for the lifetime `scope_r` of the resulting ptr: let expr_ty = return_if_err!(self.mc.expr_ty(expr)); @@ -478,9 +483,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { self.consume_exprs(inputs); } - hir::ExprKind::Continue(..) | - hir::ExprKind::Lit(..) | - hir::ExprKind::Err => {} + hir::ExprKind::Continue(..) | hir::ExprKind::Lit(..) 
| hir::ExprKind::Err => {} hir::ExprKind::Loop(ref blk, _, _) => { self.walk_block(&blk); @@ -532,9 +535,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { self.consume_expr(&base); } - hir::ExprKind::Closure(.., fn_decl_span, _) => { - self.walk_captures(expr, fn_decl_span) - } + hir::ExprKind::Closure(.., fn_decl_span, _) => self.walk_captures(expr, fn_decl_span), hir::ExprKind::Box(ref base) => { self.consume_expr(&base); @@ -548,40 +549,44 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) { let callee_ty = return_if_err!(self.mc.expr_ty_adjusted(callee)); - debug!("walk_callee: callee={:?} callee_ty={:?}", - callee, callee_ty); + debug!("walk_callee: callee={:?} callee_ty={:?}", callee, callee_ty); match callee_ty.sty { ty::FnDef(..) | ty::FnPtr(_) => { self.consume_expr(callee); } - ty::Error => { } + ty::Error => {} _ => { if let Some(def) = self.mc.tables.type_dependent_defs().get(call.hir_id) { let def_id = def.def_id(); let call_scope = region::Scope { id: call.hir_id.local_id, - data: region::ScopeData::Node + data: region::ScopeData::Node, }; match OverloadedCallType::from_method_id(self.tcx(), def_id) { FnMutOverloadedCall => { let call_scope_r = self.tcx().mk_region(ty::ReScope(call_scope)); - self.borrow_expr(callee, - call_scope_r, - ty::MutBorrow, - ClosureInvocation); + self.borrow_expr( + callee, + call_scope_r, + ty::MutBorrow, + ClosureInvocation, + ); } FnOverloadedCall => { let call_scope_r = self.tcx().mk_region(ty::ReScope(call_scope)); - self.borrow_expr(callee, - call_scope_r, - ty::ImmBorrow, - ClosureInvocation); + self.borrow_expr( + callee, + call_scope_r, + ty::ImmBorrow, + ClosureInvocation, + ); } FnOnceOverloadedCall => self.consume_expr(callee), } } else { - self.tcx().sess.delay_span_bug(call.span, - "no type-dependent def for overloaded call"); + self.tcx() + .sess + .delay_span_bug(call.span, "no type-dependent def for overloaded call"); } } } @@ -602,8 +607,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { } } - hir::StmtKind::Expr(ref expr, _) | - hir::StmtKind::Semi(ref expr, _) => { + hir::StmtKind::Expr(ref expr, _) | hir::StmtKind::Semi(ref expr, _) => { self.consume_expr(&expr); } } @@ -611,12 +615,10 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { fn walk_local(&mut self, local: &hir::Local) { match local.init { - None => { - local.pat.each_binding(|_, hir_id, span, _| { - let node_id = self.mc.tcx.hir().hir_to_node_id(hir_id); - self.delegate.decl_without_init(node_id, span); - }) - } + None => local.pat.each_binding(|_, hir_id, span, _| { + let node_id = self.mc.tcx.hir().hir_to_node_id(hir_id); + self.delegate.decl_without_init(node_id, span); + }), Some(ref expr) => { // Variable declarations with @@ -644,9 +646,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { } } - fn walk_struct_expr(&mut self, - fields: &[hir::Field], - opt_with: &Option>) { + fn walk_struct_expr(&mut self, fields: &[hir::Field], opt_with: &Option>) { // Consume the expressions supplying values for each field. 
for field in fields { self.consume_expr(&field.expr); @@ -654,7 +654,9 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { let with_expr = match *opt_with { Some(ref w) => &**w, - None => { return; } + None => { + return; + } }; let with_cmt = Rc::new(return_if_err!(self.mc.cat_expr(&with_expr))); @@ -665,16 +667,16 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { ty::Adt(adt, substs) if adt.is_struct() => { // Consume those fields of the with expression that are needed. for (f_index, with_field) in adt.non_enum_variant().fields.iter().enumerate() { - let is_mentioned = fields.iter().any(|f| { - self.tcx().field_index(f.id, self.mc.tables) == f_index - }); + let is_mentioned = fields + .iter() + .any(|f| self.tcx().field_index(f.id, self.mc.tables) == f_index); if !is_mentioned { let cmt_field = self.mc.cat_field( &*with_expr, with_cmt.clone(), f_index, with_field.ident, - with_field.ty(self.tcx(), substs) + with_field.ty(self.tcx(), substs), ); self.delegate_consume(with_expr.id, with_expr.span, &cmt_field); } @@ -688,7 +690,8 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { if !self.tcx().sess.has_errors() { span_bug!( with_expr.span, - "with expression doesn't evaluate to a struct"); + "with expression doesn't evaluate to a struct" + ); } } } @@ -707,12 +710,12 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { for adjustment in adjustments { debug!("walk_adjustment expr={:?} adj={:?}", expr, adjustment); match adjustment.kind { - adjustment::Adjust::NeverToAny | - adjustment::Adjust::ReifyFnPointer | - adjustment::Adjust::UnsafeFnPointer | - adjustment::Adjust::ClosureFnPointer | - adjustment::Adjust::MutToConstPointer | - adjustment::Adjust::Unsize => { + adjustment::Adjust::NeverToAny + | adjustment::Adjust::ReifyFnPointer + | adjustment::Adjust::UnsafeFnPointer + | adjustment::Adjust::ClosureFnPointer + | adjustment::Adjust::MutToConstPointer + | adjustment::Adjust::Unsize => { // Creating a closure/fn-pointer or unsizing consumes // the input and stores it into the resulting rvalue. self.delegate_consume(expr.id, expr.span, &cmt); @@ -727,7 +730,8 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { // this is an autoref of `x`. adjustment::Adjust::Deref(Some(ref deref)) => { let bk = ty::BorrowKind::from_mutbl(deref.mutbl); - self.delegate.borrow(expr.id, expr.span, &cmt, deref.region, bk, AutoRef); + self.delegate + .borrow(expr.id, expr.span, &cmt, deref.region, bk, AutoRef); } adjustment::Adjust::Borrow(ref autoref) => { @@ -741,45 +745,48 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { /// Walks the autoref `autoref` applied to the autoderef'd /// `expr`. `cmt_base` is the mem-categorized form of `expr` /// after all relevant autoderefs have occurred. 
- fn walk_autoref(&mut self, - expr: &hir::Expr, - cmt_base: &mc::cmt_<'tcx>, - autoref: &adjustment::AutoBorrow<'tcx>) { - debug!("walk_autoref(expr.id={} cmt_base={:?} autoref={:?})", - expr.id, - cmt_base, - autoref); + fn walk_autoref( + &mut self, + expr: &hir::Expr, + cmt_base: &mc::cmt_<'tcx>, + autoref: &adjustment::AutoBorrow<'tcx>, + ) { + debug!( + "walk_autoref(expr.id={} cmt_base={:?} autoref={:?})", + expr.id, cmt_base, autoref + ); match *autoref { adjustment::AutoBorrow::Ref(r, m) => { - self.delegate.borrow(expr.id, - expr.span, - cmt_base, - r, - ty::BorrowKind::from_mutbl(m.into()), - AutoRef); + self.delegate.borrow( + expr.id, + expr.span, + cmt_base, + r, + ty::BorrowKind::from_mutbl(m.into()), + AutoRef, + ); } adjustment::AutoBorrow::RawPtr(m) => { - debug!("walk_autoref: expr.id={} cmt_base={:?}", - expr.id, - cmt_base); + debug!("walk_autoref: expr.id={} cmt_base={:?}", expr.id, cmt_base); // Converting from a &T to *T (or &mut T to *mut T) is // treated as borrowing it for the enclosing temporary // scope. - let r = self.tcx().mk_region(ty::ReScope( - region::Scope { - id: expr.hir_id.local_id, - data: region::ScopeData::Node - })); - - self.delegate.borrow(expr.id, - expr.span, - cmt_base, - r, - ty::BorrowKind::from_mutbl(m), - AutoUnsafe); + let r = self.tcx().mk_region(ty::ReScope(region::Scope { + id: expr.hir_id.local_id, + data: region::ScopeData::Node, + })); + + self.delegate.borrow( + expr.id, + expr.span, + cmt_base, + r, + ty::BorrowKind::from_mutbl(m), + AutoUnsafe, + ); } } } @@ -816,20 +823,27 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { /// Identifies any bindings within `pat` and accumulates within /// `mode` whether the overall pattern/match structure is a move, /// copy, or borrow. - fn determine_pat_move_mode(&mut self, - cmt_discr: mc::cmt<'tcx>, - pat: &hir::Pat, - mode: &mut TrackMatchMode) { - debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr, pat); + fn determine_pat_move_mode( + &mut self, + cmt_discr: mc::cmt<'tcx>, + pat: &hir::Pat, + mode: &mut TrackMatchMode, + ) { + debug!( + "determine_pat_move_mode cmt_discr={:?} pat={:?}", + cmt_discr, pat + ); return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |cmt_pat, pat| { if let PatKind::Binding(..) = pat.node { - let bm = *self.mc.tables.pat_binding_modes() - .get(pat.hir_id) - .expect("missing binding mode"); + let bm = *self + .mc + .tables + .pat_binding_modes() + .get(pat.hir_id) + .expect("missing binding mode"); match bm { - ty::BindByReference(..) => - mode.lub(BorrowingMatch), + ty::BindByReference(..) => mode.lub(BorrowingMatch), ty::BindByValue(..) => { match copy_or_move(&self.mc, self.param_env, &cmt_pat, PatBindingMove) { Copy => mode.lub(CopyingMatch), @@ -848,14 +862,16 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { debug!("walk_pat(cmt_discr={:?}, pat={:?})", cmt_discr, pat); let tcx = self.tcx(); - let ExprUseVisitor { ref mc, ref mut delegate, param_env } = *self; + let ExprUseVisitor { + ref mc, + ref mut delegate, + param_env, + } = *self; return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |cmt_pat, pat| { if let PatKind::Binding(_, canonical_id, ..) 
= pat.node { debug!( "walk_pat: binding cmt_pat={:?} pat={:?} match_mode={:?}", - cmt_pat, - pat, - match_mode, + cmt_pat, pat, match_mode, ); if let Some(&bm) = mc.tables.pat_binding_modes().get(pat.hir_id) { debug!("walk_pat: pat.hir_id={:?} bm={:?}", pat.hir_id, bm); @@ -897,22 +913,25 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { // the leaves of the pattern tree structure. return_if_err!(mc.cat_pattern(cmt_discr, pat, |cmt_pat, pat| { let qpath = match pat.node { - PatKind::Path(ref qpath) | - PatKind::TupleStruct(ref qpath, ..) | - PatKind::Struct(ref qpath, ..) => qpath, - _ => return + PatKind::Path(ref qpath) + | PatKind::TupleStruct(ref qpath, ..) + | PatKind::Struct(ref qpath, ..) => qpath, + _ => return, }; let def = mc.tables.qpath_def(qpath, pat.hir_id); match def { - Def::Variant(variant_did) | - Def::VariantCtor(variant_did, ..) => { + Def::Variant(variant_did) | Def::VariantCtor(variant_did, ..) => { let downcast_cmt = mc.cat_downcast_if_needed(pat, cmt_pat, variant_did); debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat); delegate.matched_pat(pat, &downcast_cmt, match_mode); } - Def::Struct(..) | Def::StructCtor(..) | Def::Union(..) | - Def::TyAlias(..) | Def::AssociatedTy(..) | Def::SelfTy(..) => { + Def::Struct(..) + | Def::StructCtor(..) + | Def::Union(..) + | Def::TyAlias(..) + | Def::AssociatedTy(..) + | Def::SelfTy(..) => { debug!("struct cmt_pat={:?} pat={:?}", cmt_pat, pat); delegate.matched_pat(pat, &cmt_pat, match_mode); } @@ -933,49 +952,53 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { closure_expr_id: closure_def_id.to_local(), }; let upvar_capture = self.mc.tables.upvar_capture(upvar_id); - let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.hir_id, - fn_decl_span, - freevar)); + let cmt_var = return_if_err!(self.cat_captured_var( + closure_expr.hir_id, + fn_decl_span, + freevar + )); match upvar_capture { ty::UpvarCapture::ByValue => { - let mode = copy_or_move(&self.mc, - self.param_env, - &cmt_var, - CaptureMove); - self.delegate.consume(closure_expr.id, freevar.span, &cmt_var, mode); + let mode = copy_or_move(&self.mc, self.param_env, &cmt_var, CaptureMove); + self.delegate + .consume(closure_expr.id, freevar.span, &cmt_var, mode); } ty::UpvarCapture::ByRef(upvar_borrow) => { - self.delegate.borrow(closure_expr.id, - fn_decl_span, - &cmt_var, - upvar_borrow.region, - upvar_borrow.kind, - ClosureCapture(freevar.span)); + self.delegate.borrow( + closure_expr.id, + fn_decl_span, + &cmt_var, + upvar_borrow.region, + upvar_borrow.kind, + ClosureCapture(freevar.span), + ); } } } }); } - fn cat_captured_var(&mut self, - closure_hir_id: hir::HirId, - closure_span: Span, - upvar: &hir::Freevar) - -> mc::McResult> { + fn cat_captured_var( + &mut self, + closure_hir_id: hir::HirId, + closure_span: Span, + upvar: &hir::Freevar, + ) -> mc::McResult> { // Create the cmt for the variable being borrowed, from the // caller's perspective let var_hir_id = self.tcx().hir().node_to_hir_id(upvar.var_id()); let var_ty = self.mc.node_ty(var_hir_id)?; - self.mc.cat_def(closure_hir_id, closure_span, var_ty, upvar.def) + self.mc + .cat_def(closure_hir_id, closure_span, var_ty, upvar.def) } } -fn copy_or_move<'a, 'gcx, 'tcx>(mc: &mc::MemCategorizationContext<'a, 'gcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - cmt: &mc::cmt_<'tcx>, - move_reason: MoveReason) - -> ConsumeMode -{ +fn copy_or_move<'a, 'gcx, 'tcx>( + mc: &mc::MemCategorizationContext<'a, 'gcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + cmt: &mc::cmt_<'tcx>, + move_reason: 
MoveReason, +) -> ConsumeMode { if !mc.type_is_copy_modulo_regions(param_env, cmt.ty, cmt.span) { Move(move_reason) } else { diff --git a/src/librustc/middle/free_region.rs b/src/librustc/middle/free_region.rs index 6e9eadca6a521..a8e77be45ea3e 100644 --- a/src/librustc/middle/free_region.rs +++ b/src/librustc/middle/free_region.rs @@ -5,10 +5,10 @@ //! `TransitiveRelation` type and use that to decide when one free //! region outlives another and so forth. -use infer::outlives::free_region_map::{FreeRegionMap, FreeRegionRelations}; use hir::def_id::DefId; +use infer::outlives::free_region_map::{FreeRegionMap, FreeRegionRelations}; use middle::region; -use ty::{self, TyCtxt, Region}; +use ty::{self, Region, TyCtxt}; /// Combines a `region::ScopeTree` (which governs relationships between /// scopes) and a `FreeRegionMap` (which governs relationships between @@ -47,18 +47,18 @@ impl<'a, 'gcx, 'tcx> RegionRelations<'a, 'gcx, 'tcx> { /// Determines whether one region is a subregion of another. This is intended to run *after /// inference* and sadly the logic is somewhat duplicated with the code in infer.rs. - pub fn is_subregion_of(&self, - sub_region: ty::Region<'tcx>, - super_region: ty::Region<'tcx>) - -> bool { + pub fn is_subregion_of( + &self, + sub_region: ty::Region<'tcx>, + super_region: ty::Region<'tcx>, + ) -> bool { let result = sub_region == super_region || { match (sub_region, super_region) { - (ty::ReEmpty, _) | - (_, ty::ReStatic) => - true, + (ty::ReEmpty, _) | (_, ty::ReStatic) => true, - (ty::ReScope(sub_scope), ty::ReScope(super_scope)) => - self.region_scope_tree.is_subscope_of(*sub_scope, *super_scope), + (ty::ReScope(sub_scope), ty::ReScope(super_scope)) => self + .region_scope_tree + .is_subscope_of(*sub_scope, *super_scope), (ty::ReScope(sub_scope), ty::ReEarlyBound(ref br)) => { let fr_scope = self.region_scope_tree.early_free_scope(self.tcx, br); @@ -70,19 +70,21 @@ impl<'a, 'gcx, 'tcx> RegionRelations<'a, 'gcx, 'tcx> { self.region_scope_tree.is_subscope_of(*sub_scope, fr_scope) } - (ty::ReEarlyBound(_), ty::ReEarlyBound(_)) | - (ty::ReFree(_), ty::ReEarlyBound(_)) | - (ty::ReEarlyBound(_), ty::ReFree(_)) | - (ty::ReFree(_), ty::ReFree(_)) => - self.free_regions.sub_free_regions(sub_region, super_region), + (ty::ReEarlyBound(_), ty::ReEarlyBound(_)) + | (ty::ReFree(_), ty::ReEarlyBound(_)) + | (ty::ReEarlyBound(_), ty::ReFree(_)) + | (ty::ReFree(_), ty::ReFree(_)) => { + self.free_regions.sub_free_regions(sub_region, super_region) + } - _ => - false, + _ => false, } }; let result = result || self.is_static(super_region); - debug!("is_subregion_of(sub_region={:?}, super_region={:?}) = {:?}", - sub_region, super_region, result); + debug!( + "is_subregion_of(sub_region={:?}, super_region={:?}) = {:?}", + sub_region, super_region, result + ); result } @@ -93,17 +95,14 @@ impl<'a, 'gcx, 'tcx> RegionRelations<'a, 'gcx, 'tcx> { ty::ReStatic => true, ty::ReEarlyBound(_) | ty::ReFree(_) => { let re_static = self.tcx.mk_region(ty::ReStatic); - self.free_regions.sub_free_regions(&re_static, &super_region) + self.free_regions + .sub_free_regions(&re_static, &super_region) } - _ => false + _ => false, } } - pub fn lub_free_regions(&self, - r_a: Region<'tcx>, - r_b: Region<'tcx>) - -> Region<'tcx> { + pub fn lub_free_regions(&self, r_a: Region<'tcx>, r_b: Region<'tcx>) -> Region<'tcx> { self.free_regions.lub_free_regions(self.tcx, r_a, r_b) } } - diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc/middle/intrinsicck.rs index 1716daaa107c4..b5cf5635b3ab8 100644 --- 
a/src/librustc/middle/intrinsicck.rs +++ b/src/librustc/middle/intrinsicck.rs @@ -1,23 +1,23 @@ use hir::def::Def; use hir::def_id::DefId; -use ty::{self, Ty, TyCtxt}; use ty::layout::{LayoutError, Pointer, SizeSkeleton, VariantIdx}; +use ty::{self, Ty, TyCtxt}; -use rustc_target::spec::abi::Abi::RustIntrinsic; +use hir; +use hir::intravisit::{self, NestedVisitorMap, Visitor}; use rustc_data_structures::indexed_vec::Idx; +use rustc_target::spec::abi::Abi::RustIntrinsic; use syntax_pos::Span; -use hir::intravisit::{self, Visitor, NestedVisitorMap}; -use hir; pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - let mut visitor = ItemVisitor { - tcx, - }; - tcx.hir().krate().visit_all_item_likes(&mut visitor.as_deep_visitor()); + let mut visitor = ItemVisitor { tcx }; + tcx.hir() + .krate() + .visit_all_item_likes(&mut visitor.as_deep_visitor()); } struct ItemVisitor<'a, 'tcx: 'a> { - tcx: TyCtxt<'a, 'tcx, 'tcx> + tcx: TyCtxt<'a, 'tcx, 'tcx>, } struct ExprVisitor<'a, 'tcx: 'a> { @@ -28,12 +28,10 @@ struct ExprVisitor<'a, 'tcx: 'a> { /// If the type is `Option`, it will return `T`, otherwise /// the type itself. Works on most `Option`-like types. -fn unpack_option_like<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - ty: Ty<'tcx>) - -> Ty<'tcx> { +fn unpack_option_like<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { let (def, substs) = match ty.sty { ty::Adt(def, substs) => (def, substs), - _ => return ty + _ => return ty, }; if def.variants.len() == 2 && !def.repr.c() && def.repr.int.is_none() { @@ -60,8 +58,7 @@ fn unpack_option_like<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl<'a, 'tcx> ExprVisitor<'a, 'tcx> { fn def_id_is_transmute(&self, def_id: DefId) -> bool { - self.tcx.fn_sig(def_id).abi() == RustIntrinsic && - self.tcx.item_name(def_id) == "transmute" + self.tcx.fn_sig(def_id).abi() == RustIntrinsic && self.tcx.item_name(def_id) == "transmute" } fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>) { @@ -79,45 +76,55 @@ impl<'a, 'tcx> ExprVisitor<'a, 'tcx> { let from = unpack_option_like(self.tcx.global_tcx(), from); if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (&from.sty, sk_to) { if size_to == Pointer.size(&self.tcx) { - struct_span_err!(self.tcx.sess, span, E0591, - "can't transmute zero-sized type") - .note(&format!("source type: {}", from)) - .note(&format!("target type: {}", to)) - .help("cast with `as` to a pointer instead") - .emit(); + struct_span_err!( + self.tcx.sess, + span, + E0591, + "can't transmute zero-sized type" + ) + .note(&format!("source type: {}", from)) + .note(&format!("target type: {}", to)) + .help("cast with `as` to a pointer instead") + .emit(); return; } } } // Try to display a sensible error with as much information as possible. - let skeleton_string = |ty: Ty<'tcx>, sk| { - match sk { - Ok(SizeSkeleton::Known(size)) => { - format!("{} bits", size.bits()) - } - Ok(SizeSkeleton::Pointer { tail, .. }) => { - format!("pointer to `{}`", tail) + let skeleton_string = |ty: Ty<'tcx>, sk| match sk { + Ok(SizeSkeleton::Known(size)) => format!("{} bits", size.bits()), + Ok(SizeSkeleton::Pointer { tail, .. 
}) => format!("pointer to `{}`", tail), + Err(LayoutError::Unknown(bad)) => { + if bad == ty { + "this type does not have a fixed size".to_owned() + } else { + format!("size can vary because of {}", bad) } - Err(LayoutError::Unknown(bad)) => { - if bad == ty { - "this type does not have a fixed size".to_owned() - } else { - format!("size can vary because of {}", bad) - } - } - Err(err) => err.to_string() } + Err(err) => err.to_string(), }; - let mut err = struct_span_err!(self.tcx.sess, span, E0512, - "cannot transmute between types of different sizes, \ - or dependently-sized types"); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0512, + "cannot transmute between types of different sizes, \ + or dependently-sized types" + ); if from == to { err.note(&format!("`{}` does not have a fixed size", from)); } else { - err.note(&format!("source type: `{}` ({})", from, skeleton_string(from, sk_from))) - .note(&format!("target type: `{}` ({})", to, skeleton_string(to, sk_to))); + err.note(&format!( + "source type: `{}` ({})", + from, + skeleton_string(from, sk_from) + )) + .note(&format!( + "target type: `{}` ({})", + to, + skeleton_string(to, sk_to) + )); } err.emit() } @@ -133,7 +140,12 @@ impl<'a, 'tcx> Visitor<'tcx> for ItemVisitor<'a, 'tcx> { let body = self.tcx.hir().body(body_id); let param_env = self.tcx.param_env(owner_def_id); let tables = self.tcx.typeck_tables_of(owner_def_id); - ExprVisitor { tcx: self.tcx, param_env, tables }.visit_body(body); + ExprVisitor { + tcx: self.tcx, + param_env, + tables, + } + .visit_body(body); self.visit_body(body); } } diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index c203ea96f3d64..d4f19b16db571 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -11,17 +11,17 @@ pub use self::LangItem::*; -use hir::def_id::DefId; use hir::check_attr::Target; -use ty::{self, TyCtxt}; +use hir::def_id::DefId; use middle::weak_lang_items; +use ty::{self, TyCtxt}; use util::nodemap::FxHashMap; +use hir; +use hir::itemlikevisit::ItemLikeVisitor; use syntax::ast; use syntax::symbol::Symbol; use syntax_pos::Span; -use hir::itemlikevisit::ItemLikeVisitor; -use hir; // The actual lang items defined come at the end of this file in one handy table. // So you probably just want to nip down to the end. @@ -379,8 +379,8 @@ language_item_table! { impl<'a, 'tcx, 'gcx> TyCtxt<'a, 'tcx, 'gcx> { pub fn require_lang_item(&self, lang_item: LangItem) -> DefId { - self.lang_items().require(lang_item).unwrap_or_else(|msg| { - self.sess.fatal(&msg) - }) + self.lang_items() + .require(lang_item) + .unwrap_or_else(|msg| self.sess.fatal(&msg)) } } diff --git a/src/librustc/middle/lib_features.rs b/src/librustc/middle/lib_features.rs index 8c23377324f1f..b215849d2f82d 100644 --- a/src/librustc/middle/lib_features.rs +++ b/src/librustc/middle/lib_features.rs @@ -4,13 +4,13 @@ // and `#[unstable (..)]`), but are not declared in one single location // (unlike lang features), which means we need to collect them instead. 
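// Illustrative sketch (assumed example, not taken from this patch): the attribute
// shapes that `LibFeatureCollector` scans for look roughly like the staged-API forms
// the standard library uses. The crate name, feature names, and issue number below
// are placeholders.
#![feature(staged_api)]
#![stable(feature = "example_lib", since = "1.0.0")]

#[stable(feature = "example_stable_feature", since = "1.2.0")]
pub fn stable_item() {}

#[unstable(feature = "example_unstable_feature", issue = "12345")]
pub fn unstable_item() {}

// The collector records `stable_item`'s feature together with its `since` version and
// `unstable_item`'s feature without one; declaring the same feature name with
// conflicting stability or versions is what triggers the E0711 diagnostics emitted in
// the code that follows.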
-use ty::TyCtxt; -use syntax::symbol::Symbol; +use errors::DiagnosticId; +use hir::intravisit::{self, NestedVisitorMap, Visitor}; +use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use syntax::ast::{Attribute, MetaItem, MetaItemKind}; +use syntax::symbol::Symbol; use syntax_pos::Span; -use hir::intravisit::{self, NestedVisitorMap, Visitor}; -use rustc_data_structures::fx::{FxHashSet, FxHashMap}; -use errors::DiagnosticId; +use ty::TyCtxt; pub struct LibFeatures { // A map from feature to stabilisation version. @@ -27,7 +27,10 @@ impl LibFeatures { } pub fn to_vec(&self) -> Vec<(Symbol, Option)> { - let mut all_features: Vec<_> = self.stable.iter().map(|(f, s)| (*f, Some(*s))) + let mut all_features: Vec<_> = self + .stable + .iter() + .map(|(f, s)| (*f, Some(*s))) .chain(self.unstable.iter().map(|f| (*f, None))) .collect(); all_features.sort_unstable_by_key(|f| f.0.as_str()); @@ -53,11 +56,16 @@ impl<'a, 'tcx> LibFeatureCollector<'a, 'tcx> { // Find a stability attribute (i.e., `#[stable (..)]`, `#[unstable (..)]`, // `#[rustc_const_unstable (..)]`). - if let Some(stab_attr) = stab_attrs.iter().find(|stab_attr| { - attr.check_name(stab_attr) - }) { + if let Some(stab_attr) = stab_attrs + .iter() + .find(|stab_attr| attr.check_name(stab_attr)) + { let meta_item = attr.meta(); - if let Some(MetaItem { node: MetaItemKind::List(ref metas), .. }) = meta_item { + if let Some(MetaItem { + node: MetaItemKind::List(ref metas), + .. + }) = meta_item + { let mut feature = None; let mut since = None; for meta in metas { @@ -99,12 +107,16 @@ impl<'a, 'tcx> LibFeatureCollector<'a, 'tcx> { let msg = format!( "feature `{}` is declared stable since {}, \ but was previously declared stable since {}", - feature, - since, - prev_since, + feature, since, prev_since, ); - self.tcx.sess.struct_span_err_with_code(span, &msg, - DiagnosticId::Error("E0711".into())).emit(); + self.tcx + .sess + .struct_span_err_with_code( + span, + &msg, + DiagnosticId::Error("E0711".into()), + ) + .emit(); return; } } @@ -118,11 +130,21 @@ impl<'a, 'tcx> LibFeatureCollector<'a, 'tcx> { let msg = format!( "feature `{}` is declared {}, but was previously declared {}", feature, - if since.is_some() { "stable" } else { "unstable" }, - if since.is_none() { "stable" } else { "unstable" }, + if since.is_some() { + "stable" + } else { + "unstable" + }, + if since.is_none() { + "stable" + } else { + "unstable" + }, ); - self.tcx.sess.struct_span_err_with_code(span, &msg, - DiagnosticId::Error("E0711".into())).emit(); + self.tcx + .sess + .struct_span_err_with_code(span, &msg, DiagnosticId::Error("E0711".into())) + .emit(); } } } diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index a78cf1a471b4b..c76a2a57b88c4 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -93,30 +93,30 @@ //! It is the responsibility of typeck to ensure that there are no //! `return` expressions in a function declared as diverging. 
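// Illustrative sketch (assumed example, not part of this patch) of the property the
// liveness pass below computes: a value is "live" if it may still be read later.
// Dead stores such as `x = 2` are what lints built on this analysis (e.g.
// `unused_assignments`) report.
fn dead_store_example() -> i32 {
    let mut x = 1; // this initial value is overwritten before it is ever read
    x = 2;         // dead store: never read before the next write
    x = 3;         // live: this value reaches the read on the next line
    x
}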
-use self::LoopKind::*; use self::LiveNodeKind::*; +use self::LoopKind::*; use self::VarKind::*; +use errors::Applicability; use hir::def::*; use hir::Node; -use ty::{self, TyCtxt}; use lint; -use errors::Applicability; -use util::nodemap::{NodeMap, HirIdMap, HirIdSet}; +use ty::{self, TyCtxt}; +use util::nodemap::{HirIdMap, HirIdSet, NodeMap}; use std::collections::VecDeque; -use std::{fmt, u32}; -use std::io::prelude::*; use std::io; +use std::io::prelude::*; use std::rc::Rc; +use std::{fmt, u32}; use syntax::ast::{self, NodeId}; use syntax::ptr::P; use syntax::symbol::keywords; use syntax_pos::Span; -use hir::{Expr, HirId}; use hir; -use hir::intravisit::{self, Visitor, FnKind, NestedVisitorMap}; +use hir::intravisit::{self, FnKind, NestedVisitorMap, Visitor}; +use hir::{Expr, HirId}; /// For use with `propagate_through_loop`. enum LoopKind<'a> { @@ -133,11 +133,15 @@ struct Variable(u32); struct LiveNode(u32); impl Variable { - fn get(&self) -> usize { self.0 as usize } + fn get(&self) -> usize { + self.0 as usize + } } impl LiveNode { - fn get(&self) -> usize { self.0 as usize } + fn get(&self) -> usize { + self.0 as usize + } } #[derive(Copy, Clone, PartialEq, Debug)] @@ -145,21 +149,15 @@ enum LiveNodeKind { FreeVarNode(Span), ExprNode(Span), VarDefNode(Span), - ExitNode + ExitNode, } fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_, '_, '_>) -> String { let cm = tcx.sess.source_map(); match lnk { - FreeVarNode(s) => { - format!("Free var node [{}]", cm.span_to_string(s)) - } - ExprNode(s) => { - format!("Expr node [{}]", cm.span_to_string(s)) - } - VarDefNode(s) => { - format!("Var def node [{}]", cm.span_to_string(s)) - } + FreeVarNode(s) => format!("Free var node [{}]", cm.span_to_string(s)), + ExprNode(s) => format!("Expr node [{}]", cm.span_to_string(s)), + VarDefNode(s) => format!("Var def node [{}]", cm.span_to_string(s)), ExitNode => "Exit node".to_owned(), } } @@ -169,18 +167,32 @@ impl<'a, 'tcx> Visitor<'tcx> for IrMaps<'a, 'tcx> { NestedVisitorMap::OnlyBodies(&self.tcx.hir()) } - fn visit_fn(&mut self, fk: FnKind<'tcx>, fd: &'tcx hir::FnDecl, - b: hir::BodyId, s: Span, id: NodeId) { + fn visit_fn( + &mut self, + fk: FnKind<'tcx>, + fd: &'tcx hir::FnDecl, + b: hir::BodyId, + s: Span, + id: NodeId, + ) { visit_fn(self, fk, fd, b, s, id); } - fn visit_local(&mut self, l: &'tcx hir::Local) { visit_local(self, l); } - fn visit_expr(&mut self, ex: &'tcx Expr) { visit_expr(self, ex); } - fn visit_arm(&mut self, a: &'tcx hir::Arm) { visit_arm(self, a); } + fn visit_local(&mut self, l: &'tcx hir::Local) { + visit_local(self, l); + } + fn visit_expr(&mut self, ex: &'tcx Expr) { + visit_expr(self, ex); + } + fn visit_arm(&mut self, a: &'tcx hir::Arm) { + visit_arm(self, a); + } } pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - tcx.hir().krate().visit_all_item_likes(&mut IrMaps::new(tcx).as_deep_visitor()); + tcx.hir() + .krate() + .visit_all_item_likes(&mut IrMaps::new(tcx).as_deep_visitor()); tcx.sess.abort_if_errors(); } @@ -224,11 +236,13 @@ impl LiveNode { } } -fn invalid_node() -> LiveNode { LiveNode(u32::MAX) } +fn invalid_node() -> LiveNode { + LiveNode(u32::MAX) +} struct CaptureInfo { ln: LiveNode, - var_hid: HirId + var_hid: HirId, } #[derive(Copy, Clone, Debug)] @@ -242,7 +256,7 @@ struct LocalInfo { enum VarKind { Arg(HirId, ast::Name), Local(LocalInfo), - CleanExit + CleanExit, } struct IrMaps<'a, 'tcx: 'a> { @@ -275,8 +289,11 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> { self.lnks.push(lnk); self.num_live_nodes += 1; - debug!("{:?} is of kind {}", ln, 
- live_node_kind_to_string(lnk, self.tcx)); + debug!( + "{:?} is of kind {}", + ln, + live_node_kind_to_string(lnk, self.tcx) + ); ln } @@ -296,7 +313,7 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> { match vk { Local(LocalInfo { id: node_id, .. }) | Arg(node_id, _) => { self.variable_map.insert(node_id, v); - }, + } CleanExit => {} } @@ -316,17 +333,15 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> { fn variable_name(&self, var: Variable) -> String { match self.var_kinds[var.get()] { - Local(LocalInfo { name, .. }) | Arg(_, name) => { - name.to_string() - }, - CleanExit => "".to_owned() + Local(LocalInfo { name, .. }) | Arg(_, name) => name.to_string(), + CleanExit => "".to_owned(), } } fn variable_is_shorthand(&self, var: Variable) -> bool { match self.var_kinds[var.get()] { Local(LocalInfo { is_shorthand, .. }) => is_shorthand, - Arg(..) | CleanExit => false + Arg(..) | CleanExit => false, } } @@ -339,12 +354,14 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> { } } -fn visit_fn<'a, 'tcx: 'a>(ir: &mut IrMaps<'a, 'tcx>, - fk: FnKind<'tcx>, - decl: &'tcx hir::FnDecl, - body_id: hir::BodyId, - sp: Span, - id: ast::NodeId) { +fn visit_fn<'a, 'tcx: 'a>( + ir: &mut IrMaps<'a, 'tcx>, + fk: FnKind<'tcx>, + decl: &'tcx hir::FnDecl, + body_id: hir::BodyId, + sp: Span, + id: ast::NodeId, +) { debug!("visit_fn"); // swap in a new set of IR maps for this function body: @@ -354,7 +371,10 @@ fn visit_fn<'a, 'tcx: 'a>(ir: &mut IrMaps<'a, 'tcx>, if let FnKind::Method(..) = fk { let parent = ir.tcx.hir().get_parent(id); if let Some(Node::Item(i)) = ir.tcx.hir().find(parent) { - if i.attrs.iter().any(|a| a.check_name("automatically_derived")) { + if i.attrs + .iter() + .any(|a| a.check_name("automatically_derived")) + { return; } } @@ -369,7 +389,7 @@ fn visit_fn<'a, 'tcx: 'a>(ir: &mut IrMaps<'a, 'tcx>, debug!("adding argument {:?}", hir_id); fn_maps.add_variable(Arg(hir_id, ident.name)); }) - }; + } // gather up the various local variables, significant expressions, // and so forth: @@ -403,12 +423,10 @@ fn add_from_pat<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, pat: &P) { } } } - Ref(ref inner_pat, _) | - Box(ref inner_pat) => { + Ref(ref inner_pat, _) | Box(ref inner_pat) => { pats.push_back(inner_pat); } - TupleStruct(_, ref inner_pats, _) | - Tuple(ref inner_pats, _) => { + TupleStruct(_, ref inner_pats, _) | Tuple(ref inner_pats, _) => { pats.extend(inner_pats.iter()); } Slice(ref pre_pats, ref inner_pat, ref post_pats) => { @@ -425,7 +443,7 @@ fn add_from_pat<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, pat: &P) { ir.add_variable(Local(LocalInfo { id: hir_id, name: ident.name, - is_shorthand: shorthand_field_ids.contains(&hir_id) + is_shorthand: shorthand_field_ids.contains(&hir_id), })); }); } @@ -444,81 +462,81 @@ fn visit_arm<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, arm: &'tcx hir::Arm) { fn visit_expr<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, expr: &'tcx Expr) { match expr.node { - // live nodes required for uses or definitions of variables: - hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => { - debug!("expr {}: path that leads to {:?}", expr.id, path.def); - if let Def::Local(..) = path.def { + // live nodes required for uses or definitions of variables: + hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => { + debug!("expr {}: path that leads to {:?}", expr.id, path.def); + if let Def::Local(..) = path.def { + ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span)); + } + intravisit::walk_expr(ir, expr); + } + hir::ExprKind::Closure(..) 
=> { + // Interesting control flow (for loops can contain labeled + // breaks or continues) ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span)); + + // Make a live_node for each captured variable, with the span + // being the location that the variable is used. This results + // in better error messages than just pointing at the closure + // construction site. + let mut call_caps = Vec::new(); + ir.tcx.with_freevars(expr.id, |freevars| { + call_caps.extend(freevars.iter().filter_map(|fv| { + if let Def::Local(rv) = fv.def { + let fv_ln = ir.add_live_node(FreeVarNode(fv.span)); + let var_hid = ir.tcx.hir().node_to_hir_id(rv); + Some(CaptureInfo { ln: fv_ln, var_hid }) + } else { + None + } + })); + }); + ir.set_captures(expr.id, call_caps); + + intravisit::walk_expr(ir, expr); + } + + // live nodes required for interesting control flow: + hir::ExprKind::If(..) + | hir::ExprKind::Match(..) + | hir::ExprKind::While(..) + | hir::ExprKind::Loop(..) => { + ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span)); + intravisit::walk_expr(ir, expr); + } + hir::ExprKind::Binary(op, ..) if op.node.is_lazy() => { + ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span)); + intravisit::walk_expr(ir, expr); + } + + // otherwise, live nodes are not required: + hir::ExprKind::Index(..) + | hir::ExprKind::Field(..) + | hir::ExprKind::Array(..) + | hir::ExprKind::Call(..) + | hir::ExprKind::MethodCall(..) + | hir::ExprKind::Tup(..) + | hir::ExprKind::Binary(..) + | hir::ExprKind::AddrOf(..) + | hir::ExprKind::Cast(..) + | hir::ExprKind::Unary(..) + | hir::ExprKind::Break(..) + | hir::ExprKind::Continue(_) + | hir::ExprKind::Lit(_) + | hir::ExprKind::Ret(..) + | hir::ExprKind::Block(..) + | hir::ExprKind::Assign(..) + | hir::ExprKind::AssignOp(..) + | hir::ExprKind::Struct(..) + | hir::ExprKind::Repeat(..) + | hir::ExprKind::InlineAsm(..) + | hir::ExprKind::Box(..) + | hir::ExprKind::Yield(..) + | hir::ExprKind::Type(..) + | hir::ExprKind::Err + | hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => { + intravisit::walk_expr(ir, expr); } - intravisit::walk_expr(ir, expr); - } - hir::ExprKind::Closure(..) => { - // Interesting control flow (for loops can contain labeled - // breaks or continues) - ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span)); - - // Make a live_node for each captured variable, with the span - // being the location that the variable is used. This results - // in better error messages than just pointing at the closure - // construction site. - let mut call_caps = Vec::new(); - ir.tcx.with_freevars(expr.id, |freevars| { - call_caps.extend(freevars.iter().filter_map(|fv| { - if let Def::Local(rv) = fv.def { - let fv_ln = ir.add_live_node(FreeVarNode(fv.span)); - let var_hid = ir.tcx.hir().node_to_hir_id(rv); - Some(CaptureInfo { ln: fv_ln, var_hid }) - } else { - None - } - })); - }); - ir.set_captures(expr.id, call_caps); - - intravisit::walk_expr(ir, expr); - } - - // live nodes required for interesting control flow: - hir::ExprKind::If(..) | - hir::ExprKind::Match(..) | - hir::ExprKind::While(..) | - hir::ExprKind::Loop(..) => { - ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span)); - intravisit::walk_expr(ir, expr); - } - hir::ExprKind::Binary(op, ..) if op.node.is_lazy() => { - ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span)); - intravisit::walk_expr(ir, expr); - } - - // otherwise, live nodes are not required: - hir::ExprKind::Index(..) | - hir::ExprKind::Field(..) | - hir::ExprKind::Array(..) | - hir::ExprKind::Call(..) 
| - hir::ExprKind::MethodCall(..) | - hir::ExprKind::Tup(..) | - hir::ExprKind::Binary(..) | - hir::ExprKind::AddrOf(..) | - hir::ExprKind::Cast(..) | - hir::ExprKind::Unary(..) | - hir::ExprKind::Break(..) | - hir::ExprKind::Continue(_) | - hir::ExprKind::Lit(_) | - hir::ExprKind::Ret(..) | - hir::ExprKind::Block(..) | - hir::ExprKind::Assign(..) | - hir::ExprKind::AssignOp(..) | - hir::ExprKind::Struct(..) | - hir::ExprKind::Repeat(..) | - hir::ExprKind::InlineAsm(..) | - hir::ExprKind::Box(..) | - hir::ExprKind::Yield(..) | - hir::ExprKind::Type(..) | - hir::ExprKind::Err | - hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => { - intravisit::walk_expr(ir, expr); - } } } @@ -532,7 +550,7 @@ fn visit_expr<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, expr: &'tcx Expr) { struct RWU { reader: LiveNode, writer: LiveNode, - used: bool + used: bool, } /// Conceptually, this is like a `Vec`. But the number of `RWU`s can get @@ -570,8 +588,16 @@ impl RWUTable { fn get(&self, idx: usize) -> RWU { let packed_rwu = self.packed_rwus[idx]; match packed_rwu { - INV_INV_FALSE => RWU { reader: invalid_node(), writer: invalid_node(), used: false }, - INV_INV_TRUE => RWU { reader: invalid_node(), writer: invalid_node(), used: true }, + INV_INV_FALSE => RWU { + reader: invalid_node(), + writer: invalid_node(), + used: false, + }, + INV_INV_TRUE => RWU { + reader: invalid_node(), + writer: invalid_node(), + used: true, + }, _ => self.unpacked_rwus[packed_rwu as usize], } } @@ -638,7 +664,7 @@ impl RWUTable { struct Specials { exit_ln: LiveNode, fallthrough_ln: LiveNode, - clean_exit_var: Variable + clean_exit_var: Variable, } const ACC_READ: u32 = 1; @@ -668,7 +694,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { let specials = Specials { exit_ln: ir.add_live_node(ExitNode), fallthrough_ln: ir.add_live_node(ExitNode), - clean_exit_var: ir.add_variable(CleanExit) + clean_exit_var: ir.add_variable(CleanExit), }; let tables = ir.tcx.body_tables(body); @@ -689,17 +715,14 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { fn live_node(&self, hir_id: HirId, span: Span) -> LiveNode { match self.ir.live_node_map.get(&hir_id) { - Some(&ln) => ln, - None => { - // This must be a mismatch between the ir_map construction - // above and the propagation code below; the two sets of - // code have to agree about which AST nodes are worth - // creating liveness nodes for. - span_bug!( - span, - "no live node registered for node {:?}", - hir_id); - } + Some(&ln) => ln, + None => { + // This must be a mismatch between the ir_map construction + // above and the propagation code below; the two sets of + // code have to agree about which AST nodes are worth + // creating liveness nodes for. 
+ span_bug!(span, "no live node registered for node {:?}", hir_id); + } } } @@ -707,7 +730,8 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.ir.variable(hir_id, span) } - fn pat_bindings(&mut self, pat: &hir::Pat, mut f: F) where + fn pat_bindings(&mut self, pat: &hir::Pat, mut f: F) + where F: FnMut(&mut Liveness<'a, 'tcx>, LiveNode, Variable, Span, HirId), { pat.each_binding(|_bm, hir_id, sp, n| { @@ -717,7 +741,8 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { }) } - fn arm_pats_bindings(&mut self, pat: Option<&hir::Pat>, f: F) where + fn arm_pats_bindings(&mut self, pat: Option<&hir::Pat>, f: F) + where F: FnMut(&mut Liveness<'a, 'tcx>, LiveNode, Variable, Span, HirId), { if let Some(pat) = pat { @@ -725,13 +750,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } } - fn define_bindings_in_pat(&mut self, pat: &hir::Pat, succ: LiveNode) - -> LiveNode { + fn define_bindings_in_pat(&mut self, pat: &hir::Pat, succ: LiveNode) -> LiveNode { self.define_bindings_in_arm_pats(Some(pat), succ) } - fn define_bindings_in_arm_pats(&mut self, pat: Option<&hir::Pat>, succ: LiveNode) - -> LiveNode { + fn define_bindings_in_arm_pats(&mut self, pat: Option<&hir::Pat>, succ: LiveNode) -> LiveNode { let mut succ = succ; self.arm_pats_bindings(pat, |this, ln, var, _sp, _id| { this.init_from_succ(ln, succ); @@ -748,12 +771,15 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { fn live_on_entry(&self, ln: LiveNode, var: Variable) -> Option { assert!(ln.is_valid()); let reader = self.rwu_table.get_reader(self.idx(ln, var)); - if reader.is_valid() { Some(self.ir.lnk(reader)) } else { None } + if reader.is_valid() { + Some(self.ir.lnk(reader)) + } else { + None + } } // Is this variable live on entry to any of its successor nodes? - fn live_on_exit(&self, ln: LiveNode, var: Variable) - -> Option { + fn live_on_exit(&self, ln: LiveNode, var: Variable) -> Option { let successor = self.successors[ln.get()]; self.live_on_entry(successor, var) } @@ -763,20 +789,23 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.rwu_table.get_used(self.idx(ln, var)) } - fn assigned_on_entry(&self, ln: LiveNode, var: Variable) - -> Option { + fn assigned_on_entry(&self, ln: LiveNode, var: Variable) -> Option { assert!(ln.is_valid()); let writer = self.rwu_table.get_writer(self.idx(ln, var)); - if writer.is_valid() { Some(self.ir.lnk(writer)) } else { None } + if writer.is_valid() { + Some(self.ir.lnk(writer)) + } else { + None + } } - fn assigned_on_exit(&self, ln: LiveNode, var: Variable) - -> Option { + fn assigned_on_exit(&self, ln: LiveNode, var: Variable) -> Option { let successor = self.successors[ln.get()]; self.assigned_on_entry(successor, var) } - fn indices2(&mut self, ln: LiveNode, succ_ln: LiveNode, mut op: F) where + fn indices2(&mut self, ln: LiveNode, succ_ln: LiveNode, mut op: F) + where F: FnMut(&mut Liveness<'a, 'tcx>, usize, usize), { let node_base_idx = self.idx(ln, Variable(0)); @@ -786,11 +815,8 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } } - fn write_vars(&self, - wr: &mut dyn Write, - ln: LiveNode, - mut test: F) - -> io::Result<()> where + fn write_vars(&self, wr: &mut dyn Write, ln: LiveNode, mut test: F) -> io::Result<()> + where F: FnMut(usize) -> LiveNode, { let node_base_idx = self.idx(ln, Variable(0)); @@ -803,13 +829,17 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { Ok(()) } - #[allow(unused_must_use)] fn ln_str(&self, ln: LiveNode) -> String { let mut wr = Vec::new(); { let wr = &mut wr as &mut dyn Write; - write!(wr, "[ln({:?}) of kind {:?} reads", ln.get(), self.ir.lnk(ln)); + write!( + wr, + "[ln({:?}) of kind {:?} reads", + ln.get(), + 
self.ir.lnk(ln) + ); self.write_vars(wr, ln, |idx| self.rwu_table.get_reader(idx)); write!(wr, " writes"); self.write_vars(wr, ln, |idx| self.rwu_table.get_writer(idx)); @@ -833,16 +863,17 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.indices2(ln, succ_ln, |this, idx, succ_idx| { this.rwu_table.copy_packed(idx, succ_idx); }); - debug!("init_from_succ(ln={}, succ={})", - self.ln_str(ln), self.ln_str(succ_ln)); + debug!( + "init_from_succ(ln={}, succ={})", + self.ln_str(ln), + self.ln_str(succ_ln) + ); } - fn merge_from_succ(&mut self, - ln: LiveNode, - succ_ln: LiveNode, - first_merge: bool) - -> bool { - if ln == succ_ln { return false; } + fn merge_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode, first_merge: bool) -> bool { + if ln == succ_ln { + return false; + } let mut changed = false; self.indices2(ln, succ_ln, |this, idx, succ_idx| { @@ -868,8 +899,13 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } }); - debug!("merge_from_succ(ln={:?}, succ={}, first_merge={}, changed={})", - ln, self.ln_str(succ_ln), first_merge, changed); + debug!( + "merge_from_succ(ln={:?}, succ={}, first_merge={}, changed={})", + ln, + self.ln_str(succ_ln), + first_merge, + changed + ); return changed; } @@ -880,14 +916,24 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { let idx = self.idx(writer, var); self.rwu_table.assign_inv_inv(idx); - debug!("{:?} defines {:?} (idx={}): {}", writer, var, - idx, self.ln_str(writer)); + debug!( + "{:?} defines {:?} (idx={}): {}", + writer, + var, + idx, + self.ln_str(writer) + ); } // Either read, write, or both depending on the acc bitset fn acc(&mut self, ln: LiveNode, var: Variable, acc: u32) { - debug!("{:?} accesses[{:x}] {:?}: {}", - ln, acc, var, self.ln_str(ln)); + debug!( + "{:?} accesses[{:x}] {:?}: {}", + ln, + acc, + var, + self.ln_str(ln) + ); let idx = self.idx(ln, var); let mut rwu = self.rwu_table.get(idx); @@ -915,7 +961,10 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { // effectively a return---this only occurs in `for` loops, // where the body is really a closure. - debug!("compute: using id for body, {}", self.ir.tcx.hir().node_to_pretty_string(body.id)); + debug!( + "compute: using id for body, {}", + self.ir.tcx.hir().node_to_pretty_string(body.id) + ); let exit_ln = self.s.exit_ln; @@ -931,34 +980,34 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { let entry_ln = self.propagate_through_expr(body, s.fallthrough_ln); // hack to skip the loop unless debug! 
is enabled: - debug!("^^ liveness computation results for body {} (entry={:?})", { - for ln_idx in 0..self.ir.num_live_nodes { - debug!("{:?}", self.ln_str(LiveNode(ln_idx as u32))); - } - body.id - }, - entry_ln); + debug!( + "^^ liveness computation results for body {} (entry={:?})", + { + for ln_idx in 0..self.ir.num_live_nodes { + debug!("{:?}", self.ln_str(LiveNode(ln_idx as u32))); + } + body.id + }, + entry_ln + ); entry_ln } - fn propagate_through_block(&mut self, blk: &hir::Block, succ: LiveNode) - -> LiveNode { + fn propagate_through_block(&mut self, blk: &hir::Block, succ: LiveNode) -> LiveNode { if blk.targeted_by_break { self.break_ln.insert(blk.id, succ); } let succ = self.propagate_through_opt_expr(blk.expr.as_ref().map(|e| &**e), succ); - blk.stmts.iter().rev().fold(succ, |succ, stmt| { - self.propagate_through_stmt(stmt, succ) - }) + blk.stmts + .iter() + .rev() + .fold(succ, |succ, stmt| self.propagate_through_stmt(stmt, succ)) } - fn propagate_through_stmt(&mut self, stmt: &hir::Stmt, succ: LiveNode) - -> LiveNode { + fn propagate_through_stmt(&mut self, stmt: &hir::Stmt, succ: LiveNode) -> LiveNode { match stmt.node { - hir::StmtKind::Decl(ref decl, _) => { - self.propagate_through_decl(&decl, succ) - } + hir::StmtKind::Decl(ref decl, _) => self.propagate_through_decl(&decl, succ), hir::StmtKind::Expr(ref expr, _) | hir::StmtKind::Semi(ref expr, _) => { self.propagate_through_expr(&expr, succ) @@ -966,18 +1015,14 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } } - fn propagate_through_decl(&mut self, decl: &hir::Decl, succ: LiveNode) - -> LiveNode { + fn propagate_through_decl(&mut self, decl: &hir::Decl, succ: LiveNode) -> LiveNode { match decl.node { - hir::DeclKind::Local(ref local) => { - self.propagate_through_local(&local, succ) - } + hir::DeclKind::Local(ref local) => self.propagate_through_local(&local, succ), hir::DeclKind::Item(_) => succ, } } - fn propagate_through_local(&mut self, local: &hir::Local, succ: LiveNode) - -> LiveNode { + fn propagate_through_local(&mut self, local: &hir::Local, succ: LiveNode) -> LiveNode { // Note: we mark the variable as defined regardless of whether // there is an initializer. 
Initially I had thought to only mark // the live variable as defined if it was initialized, and then we @@ -996,23 +1041,22 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.define_bindings_in_pat(&local.pat, succ) } - fn propagate_through_exprs(&mut self, exprs: &[Expr], succ: LiveNode) - -> LiveNode { - exprs.iter().rev().fold(succ, |succ, expr| { - self.propagate_through_expr(&expr, succ) - }) + fn propagate_through_exprs(&mut self, exprs: &[Expr], succ: LiveNode) -> LiveNode { + exprs + .iter() + .rev() + .fold(succ, |succ, expr| self.propagate_through_expr(&expr, succ)) } - fn propagate_through_opt_expr(&mut self, - opt_expr: Option<&Expr>, - succ: LiveNode) - -> LiveNode { + fn propagate_through_opt_expr(&mut self, opt_expr: Option<&Expr>, succ: LiveNode) -> LiveNode { opt_expr.map_or(succ, |expr| self.propagate_through_expr(expr, succ)) } - fn propagate_through_expr(&mut self, expr: &Expr, succ: LiveNode) - -> LiveNode { - debug!("propagate_through_expr: {}", self.ir.tcx.hir().node_to_pretty_string(expr.id)); + fn propagate_through_expr(&mut self, expr: &Expr, succ: LiveNode) -> LiveNode { + debug!( + "propagate_through_expr: {}", + self.ir.tcx.hir().node_to_pretty_string(expr.id) + ); match expr.node { // Interesting cases with control flow or which gen/kill @@ -1020,13 +1064,13 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.access_path(expr.hir_id, path, succ, ACC_READ | ACC_USE) } - hir::ExprKind::Field(ref e, _) => { - self.propagate_through_expr(&e, succ) - } + hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ), hir::ExprKind::Closure(.., blk_id, _, _) => { - debug!("{} is an ExprKind::Closure", - self.ir.tcx.hir().node_to_pretty_string(expr.id)); + debug!( + "{} is an ExprKind::Closure", + self.ir.tcx.hir().node_to_pretty_string(expr.id) + ); // The next-node for a break is the successor of the entire // loop. The next-node for a continue is the top of this loop. @@ -1039,8 +1083,12 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { // the construction of a closure itself is not important, // but we have to consider the closed over variables. 
- let caps = self.ir.capture_info_map.get(&expr.id).cloned().unwrap_or_else(|| - span_bug!(expr.span, "no registered caps")); + let caps = self + .ir + .capture_info_map + .get(&expr.id) + .cloned() + .unwrap_or_else(|| span_bug!(expr.span, "no registered caps")); caps.iter().rev().fold(succ, |succ, cap| { self.init_from_succ(cap.ln, succ); @@ -1105,17 +1153,16 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { let guard_succ = self.propagate_through_opt_expr( arm.guard.as_ref().map(|hir::Guard::If(e)| &**e), - body_succ + body_succ, ); // only consider the first pattern; any later patterns must have // the same bindings, and we also consider the first pattern to be // the "authoritative" set of ids - let arm_succ = - self.define_bindings_in_arm_pats(arm.pats.first().map(|p| &**p), - guard_succ); + let arm_succ = self + .define_bindings_in_arm_pats(arm.pats.first().map(|p| &**p), guard_succ); self.merge_from_succ(ln, arm_succ, first_merge); first_merge = false; - }; + } self.propagate_through_expr(&e, ln) } @@ -1130,26 +1177,30 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { let target = match label.target_id { Ok(node_id) => self.break_ln.get(&node_id), Err(err) => span_bug!(expr.span, "loop scope error: {}", err), - }.cloned(); + } + .cloned(); // Now that we know the label we're going to, // look it up in the break loop nodes table match target { Some(b) => self.propagate_through_opt_expr(opt_expr.as_ref().map(|e| &**e), b), - None => span_bug!(expr.span, "break to unknown label") + None => span_bug!(expr.span, "break to unknown label"), } } hir::ExprKind::Continue(label) => { // Find which label this expr continues to - let sc = label.target_id.unwrap_or_else(|err| - span_bug!(expr.span, "loop scope error: {}", err)); + let sc = label + .target_id + .unwrap_or_else(|err| span_bug!(expr.span, "loop scope error: {}", err)); // Now that we know the label we're going to, // look it up in the continue loop nodes table - self.cont_ln.get(&sc).cloned().unwrap_or_else(|| - span_bug!(expr.span, "continue to unknown label")) + self.cont_ln + .get(&sc) + .cloned() + .unwrap_or_else(|| span_bug!(expr.span, "continue to unknown label")) } hir::ExprKind::Assign(ref l, ref r) => { @@ -1168,17 +1219,14 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } else { // see comment on places in // propagate_through_place_components() - let succ = self.write_place(&l, succ, ACC_WRITE|ACC_READ); + let succ = self.write_place(&l, succ, ACC_WRITE | ACC_READ); let succ = self.propagate_through_expr(&r, succ); self.propagate_through_place_components(&l, succ) } } // Uninteresting cases: just propagate in rev exec order - - hir::ExprKind::Array(ref exprs) => { - self.propagate_through_exprs(exprs, succ) - } + hir::ExprKind::Array(ref exprs) => self.propagate_through_exprs(exprs, succ), hir::ExprKind::Struct(_, ref fields, ref with_expr) => { let succ = self.propagate_through_opt_expr(with_expr.as_ref().map(|e| &**e), succ); @@ -1189,7 +1237,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { hir::ExprKind::Call(ref f, ref args) => { let m = self.ir.tcx.hir().get_module_parent(expr.id); - let succ = if self.ir.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) { + let succ = if self + .ir + .tcx + .is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) + { self.s.exit_ln } else { succ @@ -1200,7 +1252,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { hir::ExprKind::MethodCall(.., ref args) => { let m = self.ir.tcx.hir().get_module_parent(expr.id); - let succ = if self.ir.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) { + let succ = if self + .ir + 
.tcx + .is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) + { self.s.exit_ln } else { succ @@ -1209,9 +1265,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.propagate_through_exprs(args, succ) } - hir::ExprKind::Tup(ref exprs) => { - self.propagate_through_exprs(exprs, succ) - } + hir::ExprKind::Tup(ref exprs) => self.propagate_through_exprs(exprs, succ), hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => { let r_succ = self.propagate_through_expr(&r, succ); @@ -1223,55 +1277,56 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.propagate_through_expr(&l, ln) } - hir::ExprKind::Index(ref l, ref r) | - hir::ExprKind::Binary(_, ref l, ref r) => { + hir::ExprKind::Index(ref l, ref r) | hir::ExprKind::Binary(_, ref l, ref r) => { let r_succ = self.propagate_through_expr(&r, succ); self.propagate_through_expr(&l, r_succ) } - hir::ExprKind::Box(ref e) | - hir::ExprKind::AddrOf(_, ref e) | - hir::ExprKind::Cast(ref e, _) | - hir::ExprKind::Type(ref e, _) | - hir::ExprKind::Unary(_, ref e) | - hir::ExprKind::Yield(ref e) | - hir::ExprKind::Repeat(ref e, _) => { - self.propagate_through_expr(&e, succ) - } + hir::ExprKind::Box(ref e) + | hir::ExprKind::AddrOf(_, ref e) + | hir::ExprKind::Cast(ref e, _) + | hir::ExprKind::Type(ref e, _) + | hir::ExprKind::Unary(_, ref e) + | hir::ExprKind::Yield(ref e) + | hir::ExprKind::Repeat(ref e, _) => self.propagate_through_expr(&e, succ), hir::ExprKind::InlineAsm(ref ia, ref outputs, ref inputs) => { - let succ = ia.outputs.iter().zip(outputs).rev().fold(succ, |succ, (o, output)| { - // see comment on places - // in propagate_through_place_components() - if o.is_indirect { - self.propagate_through_expr(output, succ) - } else { - let acc = if o.is_rw { ACC_WRITE|ACC_READ } else { ACC_WRITE }; - let succ = self.write_place(output, succ, acc); - self.propagate_through_place_components(output, succ) - }}); + let succ = ia + .outputs + .iter() + .zip(outputs) + .rev() + .fold(succ, |succ, (o, output)| { + // see comment on places + // in propagate_through_place_components() + if o.is_indirect { + self.propagate_through_expr(output, succ) + } else { + let acc = if o.is_rw { + ACC_WRITE | ACC_READ + } else { + ACC_WRITE + }; + let succ = self.write_place(output, succ, acc); + self.propagate_through_place_components(output, succ) + } + }); // Inputs are executed first. Propagate last because of rev order self.propagate_through_exprs(inputs, succ) } - hir::ExprKind::Lit(..) | hir::ExprKind::Err | - hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => { - succ - } + hir::ExprKind::Lit(..) + | hir::ExprKind::Err + | hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => succ, // Note that labels have been resolved, so we don't need to look // at the label ident - hir::ExprKind::Block(ref blk, _) => { - self.propagate_through_block(&blk, succ) - } + hir::ExprKind::Block(ref blk, _) => self.propagate_through_block(&blk, succ), } } - fn propagate_through_place_components(&mut self, - expr: &Expr, - succ: LiveNode) - -> LiveNode { + fn propagate_through_place_components(&mut self, expr: &Expr, succ: LiveNode) -> LiveNode { // # Places // // In general, the full flow graph structure for an @@ -1324,7 +1379,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { match expr.node { hir::ExprKind::Path(_) => succ, hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ), - _ => self.propagate_through_expr(expr, succ) + _ => self.propagate_through_expr(expr, succ), } } @@ -1339,12 +1394,18 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { // to their subcomponents. 
Also, it may happen that // non-places occur here, because those are detected in the // later pass borrowck. - _ => succ + _ => succ, } } - fn access_var(&mut self, hir_id: HirId, nid: NodeId, succ: LiveNode, acc: u32, span: Span) - -> LiveNode { + fn access_var( + &mut self, + hir_id: HirId, + nid: NodeId, + succ: LiveNode, + acc: u32, + span: Span, + ) -> LiveNode { let ln = self.live_node(hir_id, span); if acc != 0 { self.init_from_succ(ln, succ); @@ -1355,22 +1416,26 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { ln } - fn access_path(&mut self, hir_id: HirId, path: &hir::Path, succ: LiveNode, acc: u32) - -> LiveNode { + fn access_path( + &mut self, + hir_id: HirId, + path: &hir::Path, + succ: LiveNode, + acc: u32, + ) -> LiveNode { match path.def { - Def::Local(nid) => { - self.access_var(hir_id, nid, succ, acc, path.span) - } - _ => succ + Def::Local(nid) => self.access_var(hir_id, nid, succ, acc, path.span), + _ => succ, } } - fn propagate_through_loop(&mut self, - expr: &Expr, - kind: LoopKind<'_>, - body: &hir::Block, - succ: LiveNode) - -> LiveNode { + fn propagate_through_loop( + &mut self, + expr: &Expr, + kind: LoopKind<'_>, + body: &hir::Block, + succ: LiveNode, + ) -> LiveNode { /* We model control flow like this: @@ -1389,7 +1454,6 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { */ - // first iteration: let mut first_merge = true; let ln = self.live_node(expr.hir_id, expr.span); @@ -1404,8 +1468,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { first_merge = false; } } - debug!("propagate_through_loop: using id for loop body {} {}", - expr.id, self.ir.tcx.hir().node_to_pretty_string(body.id)); + debug!( + "propagate_through_loop: using id for loop body {} {}", + expr.id, + self.ir.tcx.hir().node_to_pretty_string(body.id) + ); let break_ln = succ; let cont_ln = ln; @@ -1424,9 +1491,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { let new_cond_ln = match kind { LoopLoop => ln, - WhileLoop(ref cond) => { - self.propagate_through_expr(&cond, ln) - } + WhileLoop(ref cond) => self.propagate_through_expr(&cond, ln), }; assert_eq!(cond_ln, new_cond_ln); assert_eq!(body_ln, self.propagate_through_block(body, cond_ln)); @@ -1459,13 +1524,11 @@ fn check_local<'a, 'tcx>(this: &mut Liveness<'a, 'tcx>, local: &'tcx hir::Local) match local.init { Some(_) => { this.warn_about_unused_or_dead_vars_in_pat(&local.pat); - }, - None => { - this.pat_bindings(&local.pat, |this, ln, var, sp, id| { - let span = local.pat.simple_ident().map_or(sp, |ident| ident.span); - this.warn_about_unused(span, id, ln, var); - }) } + None => this.pat_bindings(&local.pat, |this, ln, var, sp, id| { + let span = local.pat.simple_ident().map_or(sp, |ident| ident.span); + this.warn_about_unused(span, id, ln, var); + }), } intravisit::walk_local(this, local); @@ -1514,16 +1577,33 @@ fn check_expr<'a, 'tcx>(this: &mut Liveness<'a, 'tcx>, expr: &'tcx Expr) { } // no correctness conditions related to liveness - hir::ExprKind::Call(..) | hir::ExprKind::MethodCall(..) | hir::ExprKind::If(..) | - hir::ExprKind::Match(..) | hir::ExprKind::While(..) | hir::ExprKind::Loop(..) | - hir::ExprKind::Index(..) | hir::ExprKind::Field(..) | - hir::ExprKind::Array(..) | hir::ExprKind::Tup(..) | hir::ExprKind::Binary(..) | - hir::ExprKind::Cast(..) | hir::ExprKind::Unary(..) | hir::ExprKind::Ret(..) | - hir::ExprKind::Break(..) | hir::ExprKind::Continue(..) | hir::ExprKind::Lit(_) | - hir::ExprKind::Block(..) | hir::ExprKind::AddrOf(..) | - hir::ExprKind::Struct(..) | hir::ExprKind::Repeat(..) | - hir::ExprKind::Closure(..) 
| hir::ExprKind::Path(_) | hir::ExprKind::Yield(..) | - hir::ExprKind::Box(..) | hir::ExprKind::Type(..) | hir::ExprKind::Err => { + hir::ExprKind::Call(..) + | hir::ExprKind::MethodCall(..) + | hir::ExprKind::If(..) + | hir::ExprKind::Match(..) + | hir::ExprKind::While(..) + | hir::ExprKind::Loop(..) + | hir::ExprKind::Index(..) + | hir::ExprKind::Field(..) + | hir::ExprKind::Array(..) + | hir::ExprKind::Tup(..) + | hir::ExprKind::Binary(..) + | hir::ExprKind::Cast(..) + | hir::ExprKind::Unary(..) + | hir::ExprKind::Ret(..) + | hir::ExprKind::Break(..) + | hir::ExprKind::Continue(..) + | hir::ExprKind::Lit(_) + | hir::ExprKind::Block(..) + | hir::ExprKind::AddrOf(..) + | hir::ExprKind::Struct(..) + | hir::ExprKind::Repeat(..) + | hir::ExprKind::Closure(..) + | hir::ExprKind::Path(_) + | hir::ExprKind::Yield(..) + | hir::ExprKind::Box(..) + | hir::ExprKind::Type(..) + | hir::ExprKind::Err => { intravisit::walk_expr(this, expr); } } @@ -1586,12 +1666,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { }) } - fn warn_about_unused(&self, - sp: Span, - hir_id: HirId, - ln: LiveNode, - var: Variable) - -> bool { + fn warn_about_unused(&self, sp: Span, hir_id: HirId, ln: LiveNode, var: Variable) -> bool { if !self.used_on_entry(ln, var) { let r = self.should_warn(var); if let Some(name) = r { @@ -1607,22 +1682,32 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { let suggest_underscore_msg = format!("consider using `_{}` instead", name); if is_assigned { - self.ir.tcx - .lint_hir_note(lint::builtin::UNUSED_VARIABLES, hir_id, sp, - &format!("variable `{}` is assigned to, but never used", - name), - &suggest_underscore_msg); + self.ir.tcx.lint_hir_note( + lint::builtin::UNUSED_VARIABLES, + hir_id, + sp, + &format!("variable `{}` is assigned to, but never used", name), + &suggest_underscore_msg, + ); } else if name != "self" { let msg = format!("unused variable: `{}`", name); - let mut err = self.ir.tcx - .struct_span_lint_hir(lint::builtin::UNUSED_VARIABLES, hir_id, sp, &msg); + let mut err = self.ir.tcx.struct_span_lint_hir( + lint::builtin::UNUSED_VARIABLES, + hir_id, + sp, + &msg, + ); if self.ir.variable_is_shorthand(var) { - err.span_suggestion_with_applicability(sp, "try ignoring the field", - format!("{}: _", name), - Applicability::MachineApplicable); + err.span_suggestion_with_applicability( + sp, + "try ignoring the field", + format!("{}: _", name), + Applicability::MachineApplicable, + ); } else { err.span_suggestion_short_with_applicability( - sp, &suggest_underscore_msg, + sp, + &suggest_underscore_msg, format!("_{}", name), Applicability::MachineApplicable, ); @@ -1636,11 +1721,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } } - fn warn_about_dead_assign(&self, - sp: Span, - hir_id: HirId, - ln: LiveNode, - var: Variable) { + fn warn_about_dead_assign(&self, sp: Span, hir_id: HirId, ln: LiveNode, var: Variable) { if self.live_on_exit(ln, var).is_none() { self.report_dead_assign(hir_id, sp, var, false); } @@ -1649,15 +1730,27 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { fn report_dead_assign(&self, hir_id: HirId, sp: Span, var: Variable, is_argument: bool) { if let Some(name) = self.should_warn(var) { if is_argument { - self.ir.tcx.struct_span_lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, sp, - &format!("value passed to `{}` is never read", name)) - .help("maybe it is overwritten before being read?") - .emit(); + self.ir + .tcx + .struct_span_lint_hir( + lint::builtin::UNUSED_ASSIGNMENTS, + hir_id, + sp, + &format!("value passed to `{}` is never read", name), + ) + .help("maybe it is overwritten before being 
read?") + .emit(); } else { - self.ir.tcx.struct_span_lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, sp, - &format!("value assigned to `{}` is never read", name)) - .help("maybe it is overwritten before being read?") - .emit(); + self.ir + .tcx + .struct_span_lint_hir( + lint::builtin::UNUSED_ASSIGNMENTS, + hir_id, + sp, + &format!("value assigned to `{}` is never read", name), + ) + .help("maybe it is overwritten before being read?") + .emit(); } } } diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 370f0d1a6c6d7..8048215d63460 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -50,42 +50,42 @@ #![allow(non_camel_case_types)] -pub use self::PointerKind::*; +pub use self::AliasableReason::*; pub use self::InteriorKind::*; pub use self::MutabilityCategory::*; -pub use self::AliasableReason::*; pub use self::Note::*; +pub use self::PointerKind::*; use self::Aliasability::*; -use middle::region; +use hir::def::{CtorKind, Def}; use hir::def_id::{DefId, LocalDefId}; use hir::Node; use infer::InferCtxt; -use hir::def::{Def, CtorKind}; +use middle::region; use ty::adjustment; -use ty::{self, Ty, TyCtxt}; use ty::fold::TypeFoldable; use ty::layout::VariantIdx; +use ty::{self, Ty, TyCtxt}; -use hir::{MutImmutable, MutMutable, PatKind}; -use hir::pat_util::EnumerateAndAdjustIterator; use hir; +use hir::pat_util::EnumerateAndAdjustIterator; +use hir::{MutImmutable, MutMutable, PatKind}; use syntax::ast::{self, Name}; use syntax_pos::Span; +use rustc_data_structures::indexed_vec::Idx; +use rustc_data_structures::sync::Lrc; use std::borrow::Cow; use std::fmt; use std::hash::{Hash, Hasher}; -use rustc_data_structures::sync::Lrc; -use rustc_data_structures::indexed_vec::Idx; use std::rc::Rc; use util::nodemap::ItemLocalSet; #[derive(Clone, Debug, PartialEq)] pub enum Categorization<'tcx> { - Rvalue(ty::Region<'tcx>), // temporary val, argument is its scope - ThreadLocal(ty::Region<'tcx>), // value that cannot move, but still restricted in scope + Rvalue(ty::Region<'tcx>), // temporary val, argument is its scope + ThreadLocal(ty::Region<'tcx>), // value that cannot move, but still restricted in scope StaticItem, Upvar(Upvar), // upvar referenced by closure env Local(ast::NodeId), // local variable @@ -93,14 +93,14 @@ pub enum Categorization<'tcx> { Interior(cmt<'tcx>, InteriorKind), // something interior: field, tuple, etc Downcast(cmt<'tcx>, DefId), // selects a particular enum variant (*1) - // (*1) downcast is only required if the enum has more than one variant + // (*1) downcast is only required if the enum has more than one variant } // Represents any kind of upvar #[derive(Clone, Copy, PartialEq)] pub struct Upvar { pub id: ty::UpvarId, - pub kind: ty::ClosureKind + pub kind: ty::ClosureKind, } // different kinds of pointers: @@ -164,7 +164,7 @@ pub enum Note { NoteClosureEnv(ty::UpvarId), // Deref through closure env NoteUpvarRef(ty::UpvarId), // Deref through by-ref upvar NoteIndex, // Deref as part of desugaring `x[]` into its two components - NoteNone // Nothing special + NoteNone, // Nothing special } // `cmt`: "Category, Mutability, and Type". 
@@ -201,22 +201,19 @@ pub enum ImmutabilityBlame<'tcx> { ImmLocal(ast::NodeId), ClosureEnv(LocalDefId), LocalDeref(ast::NodeId), - AdtFieldDeref(&'tcx ty::AdtDef, &'tcx ty::FieldDef) + AdtFieldDeref(&'tcx ty::AdtDef, &'tcx ty::FieldDef), } impl<'tcx> cmt_<'tcx> { - fn resolve_field(&self, field_index: usize) -> Option<(&'tcx ty::AdtDef, &'tcx ty::FieldDef)> - { + fn resolve_field(&self, field_index: usize) -> Option<(&'tcx ty::AdtDef, &'tcx ty::FieldDef)> { let adt_def = match self.ty.sty { ty::Adt(def, _) => def, ty::Tuple(..) => return None, // closures get `Categorization::Upvar` rather than `Categorization::Interior` - _ => bug!("interior cmt {:?} is not an ADT", self) + _ => bug!("interior cmt {:?} is not an ADT", self), }; let variant_def = match self.cat { - Categorization::Downcast(_, variant_did) => { - adt_def.variant_with_id(variant_did) - } + Categorization::Downcast(_, variant_did) => adt_def.variant_with_id(variant_did), _ => { assert_eq!(adt_def.variants.len(), 1); &adt_def.variants[VariantIdx::new(0)] @@ -230,13 +227,12 @@ impl<'tcx> cmt_<'tcx> { Categorization::Deref(ref base_cmt, BorrowedPtr(ty::ImmBorrow, _)) => { // try to figure out where the immutable reference came from match base_cmt.cat { - Categorization::Local(node_id) => - Some(ImmutabilityBlame::LocalDeref(node_id)), - Categorization::Interior(ref base_cmt, InteriorField(field_index)) => { - base_cmt.resolve_field(field_index.0).map(|(adt_def, field_def)| { + Categorization::Local(node_id) => Some(ImmutabilityBlame::LocalDeref(node_id)), + Categorization::Interior(ref base_cmt, InteriorField(field_index)) => base_cmt + .resolve_field(field_index.0) + .map(|(adt_def, field_def)| { ImmutabilityBlame::AdtFieldDeref(adt_def, field_def) - }) - } + }), Categorization::Upvar(Upvar { id, .. }) => { if let NoteClosureEnv(..) = self.note { Some(ImmutabilityBlame::ClosureEnv(id.closure_expr_id)) @@ -244,25 +240,20 @@ impl<'tcx> cmt_<'tcx> { None } } - _ => None + _ => None, } } - Categorization::Local(node_id) => { - Some(ImmutabilityBlame::ImmLocal(node_id)) - } - Categorization::Rvalue(..) | - Categorization::Upvar(..) | - Categorization::Deref(_, UnsafePtr(..)) => { + Categorization::Local(node_id) => Some(ImmutabilityBlame::ImmLocal(node_id)), + Categorization::Rvalue(..) + | Categorization::Upvar(..) + | Categorization::Deref(_, UnsafePtr(..)) => { // This should not be reachable up to inference limitations. None } - Categorization::Interior(ref base_cmt, _) | - Categorization::Downcast(ref base_cmt, _) | - Categorization::Deref(ref base_cmt, _) => { - base_cmt.immutability_blame() - } - Categorization::ThreadLocal(..) | - Categorization::StaticItem => { + Categorization::Interior(ref base_cmt, _) + | Categorization::Downcast(ref base_cmt, _) + | Categorization::Deref(ref base_cmt, _) => base_cmt.immutability_blame(), + Categorization::ThreadLocal(..) | Categorization::StaticItem => { // Do we want to do something here? 
None } @@ -276,17 +267,25 @@ pub trait HirNode { } impl HirNode for hir::Expr { - fn hir_id(&self) -> hir::HirId { self.hir_id } - fn span(&self) -> Span { self.span } + fn hir_id(&self) -> hir::HirId { + self.hir_id + } + fn span(&self) -> Span { + self.span + } } impl HirNode for hir::Pat { - fn hir_id(&self) -> hir::HirId { self.hir_id } - fn span(&self) -> Span { self.span } + fn hir_id(&self) -> hir::HirId { + self.hir_id + } + fn span(&self) -> Span { + self.span + } } #[derive(Clone)] -pub struct MemCategorizationContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +pub struct MemCategorizationContext<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { pub tcx: TyCtxt<'a, 'gcx, 'tcx>, pub region_scope_tree: &'a region::ScopeTree, pub tables: &'a ty::TypeckTables<'tcx>, @@ -300,10 +299,9 @@ impl MutabilityCategory { pub fn from_mutbl(m: hir::Mutability) -> MutabilityCategory { let ret = match m { MutImmutable => McImmutable, - MutMutable => McDeclared + MutMutable => McDeclared, }; - debug!("MutabilityCategory::{}({:?}) => {:?}", - "from_mutbl", m, ret); + debug!("MutabilityCategory::{}({:?}) => {:?}", "from_mutbl", m, ret); ret } @@ -313,49 +311,55 @@ impl MutabilityCategory { ty::UniqueImmBorrow => McImmutable, ty::MutBorrow => McDeclared, }; - debug!("MutabilityCategory::{}({:?}) => {:?}", - "from_borrow_kind", borrow_kind, ret); + debug!( + "MutabilityCategory::{}({:?}) => {:?}", + "from_borrow_kind", borrow_kind, ret + ); ret } - fn from_pointer_kind(base_mutbl: MutabilityCategory, - ptr: PointerKind<'_>) -> MutabilityCategory { + fn from_pointer_kind( + base_mutbl: MutabilityCategory, + ptr: PointerKind<'_>, + ) -> MutabilityCategory { let ret = match ptr { - Unique => { - base_mutbl.inherit() - } - BorrowedPtr(borrow_kind, _) => { - MutabilityCategory::from_borrow_kind(borrow_kind) - } - UnsafePtr(m) => { - MutabilityCategory::from_mutbl(m) - } + Unique => base_mutbl.inherit(), + BorrowedPtr(borrow_kind, _) => MutabilityCategory::from_borrow_kind(borrow_kind), + UnsafePtr(m) => MutabilityCategory::from_mutbl(m), }; - debug!("MutabilityCategory::{}({:?}, {:?}) => {:?}", - "from_pointer_kind", base_mutbl, ptr, ret); + debug!( + "MutabilityCategory::{}({:?}, {:?}) => {:?}", + "from_pointer_kind", base_mutbl, ptr, ret + ); ret } - fn from_local(tcx: TyCtxt<'_, '_, '_>, tables: &ty::TypeckTables<'_>, - id: ast::NodeId) -> MutabilityCategory { + fn from_local( + tcx: TyCtxt<'_, '_, '_>, + tables: &ty::TypeckTables<'_>, + id: ast::NodeId, + ) -> MutabilityCategory { let ret = match tcx.hir().get(id) { Node::Binding(p) => match p.node { PatKind::Binding(..) 
=> { - let bm = *tables.pat_binding_modes() - .get(p.hir_id) - .expect("missing binding mode"); + let bm = *tables + .pat_binding_modes() + .get(p.hir_id) + .expect("missing binding mode"); if bm == ty::BindByValue(hir::MutMutable) { McDeclared } else { McImmutable } } - _ => span_bug!(p.span, "expected identifier pattern") + _ => span_bug!(p.span, "expected identifier pattern"), }, - _ => span_bug!(tcx.hir().span(id), "expected identifier pattern") + _ => span_bug!(tcx.hir().span(id), "expected identifier pattern"), }; - debug!("MutabilityCategory::{}(tcx, id={:?}) => {:?}", - "from_local", id, ret); + debug!( + "MutabilityCategory::{}(tcx, id={:?}) => {:?}", + "from_local", id, ret + ); ret } @@ -382,7 +386,7 @@ impl MutabilityCategory { pub fn is_immutable(&self) -> bool { let ret = match *self { McImmutable => true, - McDeclared | McInherited => false + McDeclared | McInherited => false, }; debug!("{:?}.is_immutable() => {:?}", self, ret); ret @@ -397,17 +401,18 @@ impl MutabilityCategory { } impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx, 'tcx> { - pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, - region_scope_tree: &'a region::ScopeTree, - tables: &'a ty::TypeckTables<'tcx>, - rvalue_promotable_map: Option>) - -> MemCategorizationContext<'a, 'tcx, 'tcx> { + pub fn new( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + region_scope_tree: &'a region::ScopeTree, + tables: &'a ty::TypeckTables<'tcx>, + rvalue_promotable_map: Option>, + ) -> MemCategorizationContext<'a, 'tcx, 'tcx> { MemCategorizationContext { tcx, region_scope_tree, tables, rvalue_promotable_map, - infcx: None + infcx: None, } } } @@ -422,10 +427,11 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { /// temporaries may be overly conservative; /// - similarly, as the results of upvar analysis are not yet /// known, the results around upvar accesses may be incorrect. 
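// Hedged aside, not part of the diff: `type_is_copy_modulo_regions`,
// `resolve_type_vars_if_possible`, and `is_tainted_by_errors` just below all share one
// shape -- consult the optional inference context first, fall back to a global
// computation, and default conservatively when neither answers. A minimal sketch with
// made-up function-pointer parameters (not rustc API):

fn query_with_fallback<T>(
    infcx_query: Option<fn() -> T>,
    global_query: Option<fn() -> T>,
    conservative_default: T,
) -> T {
    infcx_query
        .map(|q| q())
        .or_else(|| global_query.map(|q| q()))
        .unwrap_or(conservative_default)
}

fn main() {
    // No inference context available and no global fallback either:
    // err on the conservative side, e.g. "assume the type is Copy".
    assert!(query_with_fallback(None, None, true));
}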
- pub fn with_infer(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, - region_scope_tree: &'a region::ScopeTree, - tables: &'a ty::TypeckTables<'tcx>) - -> MemCategorizationContext<'a, 'gcx, 'tcx> { + pub fn with_infer( + infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, + region_scope_tree: &'a region::ScopeTree, + tables: &'a ty::TypeckTables<'tcx>, + ) -> MemCategorizationContext<'a, 'gcx, 'tcx> { let tcx = infcx.tcx; // Subtle: we can't do rvalue promotion analysis until the @@ -449,30 +455,37 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { ty: Ty<'tcx>, span: Span, ) -> bool { - self.infcx.map(|infcx| infcx.type_is_copy_modulo_regions(param_env, ty, span)) + self.infcx + .map(|infcx| infcx.type_is_copy_modulo_regions(param_env, ty, span)) .or_else(|| { - self.tcx.lift_to_global(&(param_env, ty)).map(|(param_env, ty)| { - ty.is_copy_modulo_regions(self.tcx.global_tcx(), param_env, span) - }) + self.tcx + .lift_to_global(&(param_env, ty)) + .map(|(param_env, ty)| { + ty.is_copy_modulo_regions(self.tcx.global_tcx(), param_env, span) + }) }) .unwrap_or(true) } fn resolve_type_vars_if_possible(&self, value: &T) -> T - where T: TypeFoldable<'tcx> + where + T: TypeFoldable<'tcx>, { - self.infcx.map(|infcx| infcx.resolve_type_vars_if_possible(value)) + self.infcx + .map(|infcx| infcx.resolve_type_vars_if_possible(value)) .unwrap_or_else(|| value.clone()) } fn is_tainted_by_errors(&self) -> bool { - self.infcx.map_or(false, |infcx| infcx.is_tainted_by_errors()) + self.infcx + .map_or(false, |infcx| infcx.is_tainted_by_errors()) } - fn resolve_type_vars_or_error(&self, - id: hir::HirId, - ty: Option>) - -> McResult> { + fn resolve_type_vars_or_error( + &self, + id: hir::HirId, + ty: Option>, + ) -> McResult> { match ty { Some(ty) => { let ty = self.resolve_type_vars_if_possible(&ty); @@ -487,17 +500,17 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { None if self.is_tainted_by_errors() => Err(()), None => { let id = self.tcx.hir().hir_to_node_id(id); - bug!("no type for node {}: {} in mem_categorization", - id, self.tcx.hir().node_to_string(id)); + bug!( + "no type for node {}: {} in mem_categorization", + id, + self.tcx.hir().node_to_string(id) + ); } } } - pub fn node_ty(&self, - hir_id: hir::HirId) - -> McResult> { - self.resolve_type_vars_or_error(hir_id, - self.tables.node_id_to_type_opt(hir_id)) + pub fn node_ty(&self, hir_id: hir::HirId) -> McResult> { + self.resolve_type_vars_or_error(hir_id, self.tables.node_id_to_type_opt(hir_id)) } pub fn expr_ty(&self, expr: &hir::Expr) -> McResult> { @@ -533,7 +546,6 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { self.pat_ty_unadjusted(pat) } - /// Like `pat_ty`, but ignores implicit `&` patterns. fn pat_ty_unadjusted(&self, pat: &hir::Pat) -> McResult> { let base_ty = self.node_ty(pat.hir_id)?; @@ -543,10 +555,11 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { // and if so, figures out what the type *being borrowed* is. let ret_ty = match pat.node { PatKind::Binding(..) 
=> { - let bm = *self.tables - .pat_binding_modes() - .get(pat.hir_id) - .expect("missing binding mode"); + let bm = *self + .tables + .pat_binding_modes() + .get(pat.hir_id) + .expect("missing binding mode"); if let ty::BindByReference(_) = bm { // a bind-by-ref means that the base_ty will be the type of the ident itself, @@ -573,10 +586,11 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { pub fn cat_expr(&self, expr: &hir::Expr) -> McResult> { // This recursion helper avoids going through *too many* // adjustments, since *only* non-overloaded deref recurses. - fn helper<'a, 'gcx, 'tcx>(mc: &MemCategorizationContext<'a, 'gcx, 'tcx>, - expr: &hir::Expr, - adjustments: &[adjustment::Adjustment<'tcx>]) - -> McResult> { + fn helper<'a, 'gcx, 'tcx>( + mc: &MemCategorizationContext<'a, 'gcx, 'tcx>, + expr: &hir::Expr, + adjustments: &[adjustment::Adjustment<'tcx>], + ) -> McResult> { match adjustments.split_last() { None => mc.cat_expr_unadjusted(expr), Some((adjustment, previous)) => { @@ -588,18 +602,23 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { helper(self, expr, self.tables.expr_adjustments(expr)) } - pub fn cat_expr_adjusted(&self, expr: &hir::Expr, - previous: cmt_<'tcx>, - adjustment: &adjustment::Adjustment<'tcx>) - -> McResult> { + pub fn cat_expr_adjusted( + &self, + expr: &hir::Expr, + previous: cmt_<'tcx>, + adjustment: &adjustment::Adjustment<'tcx>, + ) -> McResult> { self.cat_expr_adjusted_with(expr, || Ok(previous), adjustment) } - fn cat_expr_adjusted_with(&self, expr: &hir::Expr, - previous: F, - adjustment: &adjustment::Adjustment<'tcx>) - -> McResult> - where F: FnOnce() -> McResult> + fn cat_expr_adjusted_with( + &self, + expr: &hir::Expr, + previous: F, + adjustment: &adjustment::Adjustment<'tcx>, + ) -> McResult> + where + F: FnOnce() -> McResult>, { debug!("cat_expr_adjusted_with({:?}): {:?}", adjustment, expr); let target = self.resolve_type_vars_if_possible(&adjustment.target); @@ -607,10 +626,13 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { adjustment::Adjust::Deref(overloaded) => { // Equivalent to *expr or something similar. let base = Rc::new(if let Some(deref) = overloaded { - let ref_ty = self.tcx.mk_ref(deref.region, ty::TypeAndMut { - ty: target, - mutbl: deref.mutbl, - }); + let ref_ty = self.tcx.mk_ref( + deref.region, + ty::TypeAndMut { + ty: target, + mutbl: deref.mutbl, + }, + ); self.cat_rvalue_node(expr.hir_id, expr.span, ref_ty) } else { previous()? @@ -618,13 +640,13 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { self.cat_deref(expr, base, NoteNone) } - adjustment::Adjust::NeverToAny | - adjustment::Adjust::ReifyFnPointer | - adjustment::Adjust::UnsafeFnPointer | - adjustment::Adjust::ClosureFnPointer | - adjustment::Adjust::MutToConstPointer | - adjustment::Adjust::Borrow(_) | - adjustment::Adjust::Unsize => { + adjustment::Adjust::NeverToAny + | adjustment::Adjust::ReifyFnPointer + | adjustment::Adjust::UnsafeFnPointer + | adjustment::Adjust::ClosureFnPointer + | adjustment::Adjust::MutToConstPointer + | adjustment::Adjust::Borrow(_) + | adjustment::Adjust::Unsize => { // Result is an rvalue. 
Ok(self.cat_rvalue_node(expr.hir_id, expr.span, target)) } @@ -647,10 +669,10 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { hir::ExprKind::Field(ref base, f_ident) => { let base_cmt = Rc::new(self.cat_expr(&base)?); - debug!("cat_expr(cat_field): id={} expr={:?} base={:?}", - expr.id, - expr, - base_cmt); + debug!( + "cat_expr(cat_field): id={} expr={:?} base={:?}", + expr.id, expr, base_cmt + ); let f_index = self.tcx.field_index(expr.id, self.tables); Ok(self.cat_field(expr, base_cmt, f_index, f_ident, expr_ty)) } @@ -674,40 +696,54 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { self.cat_def(expr.hir_id, expr.span, expr_ty, def) } - hir::ExprKind::Type(ref e, _) => { - self.cat_expr(&e) - } - - hir::ExprKind::AddrOf(..) | hir::ExprKind::Call(..) | - hir::ExprKind::Assign(..) | hir::ExprKind::AssignOp(..) | - hir::ExprKind::Closure(..) | hir::ExprKind::Ret(..) | - hir::ExprKind::Unary(..) | hir::ExprKind::Yield(..) | - hir::ExprKind::MethodCall(..) | hir::ExprKind::Cast(..) | - hir::ExprKind::Array(..) | hir::ExprKind::Tup(..) | hir::ExprKind::If(..) | - hir::ExprKind::Binary(..) | hir::ExprKind::While(..) | - hir::ExprKind::Block(..) | hir::ExprKind::Loop(..) | hir::ExprKind::Match(..) | - hir::ExprKind::Lit(..) | hir::ExprKind::Break(..) | - hir::ExprKind::Continue(..) | hir::ExprKind::Struct(..) | hir::ExprKind::Repeat(..) | - hir::ExprKind::InlineAsm(..) | hir::ExprKind::Box(..) | hir::ExprKind::Err => { - Ok(self.cat_rvalue_node(expr.hir_id, expr.span, expr_ty)) - } + hir::ExprKind::Type(ref e, _) => self.cat_expr(&e), + + hir::ExprKind::AddrOf(..) + | hir::ExprKind::Call(..) + | hir::ExprKind::Assign(..) + | hir::ExprKind::AssignOp(..) + | hir::ExprKind::Closure(..) + | hir::ExprKind::Ret(..) + | hir::ExprKind::Unary(..) + | hir::ExprKind::Yield(..) + | hir::ExprKind::MethodCall(..) + | hir::ExprKind::Cast(..) + | hir::ExprKind::Array(..) + | hir::ExprKind::Tup(..) + | hir::ExprKind::If(..) + | hir::ExprKind::Binary(..) + | hir::ExprKind::While(..) + | hir::ExprKind::Block(..) + | hir::ExprKind::Loop(..) + | hir::ExprKind::Match(..) + | hir::ExprKind::Lit(..) + | hir::ExprKind::Break(..) + | hir::ExprKind::Continue(..) + | hir::ExprKind::Struct(..) + | hir::ExprKind::Repeat(..) + | hir::ExprKind::InlineAsm(..) + | hir::ExprKind::Box(..) + | hir::ExprKind::Err => Ok(self.cat_rvalue_node(expr.hir_id, expr.span, expr_ty)), } } - pub fn cat_def(&self, - hir_id: hir::HirId, - span: Span, - expr_ty: Ty<'tcx>, - def: Def) - -> McResult> { - debug!("cat_def: id={:?} expr={:?} def={:?}", - hir_id, expr_ty, def); + pub fn cat_def( + &self, + hir_id: hir::HirId, + span: Span, + expr_ty: Ty<'tcx>, + def: Def, + ) -> McResult> { + debug!("cat_def: id={:?} expr={:?} def={:?}", hir_id, expr_ty, def); match def { - Def::StructCtor(..) | Def::VariantCtor(..) | Def::Const(..) | - Def::AssociatedConst(..) | Def::Fn(..) | Def::Method(..) | Def::SelfCtor(..) => { - Ok(self.cat_rvalue_node(hir_id, span, expr_ty)) - } + Def::StructCtor(..) + | Def::VariantCtor(..) + | Def::Const(..) + | Def::AssociatedConst(..) + | Def::Fn(..) + | Def::Method(..) + | Def::SelfCtor(..) 
=> Ok(self.cat_rvalue_node(hir_id, span, expr_ty)), Def::Static(def_id, mutbl) => { // `#[thread_local]` statics may not outlive the current function, but @@ -727,40 +763,40 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { hir_id, span, cat, - mutbl: if mutbl { McDeclared } else { McImmutable}, - ty:expr_ty, - note: NoteNone - }) - } - - Def::Upvar(var_id, _, fn_node_id) => { - self.cat_upvar(hir_id, span, var_id, fn_node_id) - } - - Def::Local(vid) => { - Ok(cmt_ { - hir_id, - span, - cat: Categorization::Local(vid), - mutbl: MutabilityCategory::from_local(self.tcx, self.tables, vid), + mutbl: if mutbl { McDeclared } else { McImmutable }, ty: expr_ty, - note: NoteNone + note: NoteNone, }) } - def => span_bug!(span, "unexpected definition in memory categorization: {:?}", def) + Def::Upvar(var_id, _, fn_node_id) => self.cat_upvar(hir_id, span, var_id, fn_node_id), + + Def::Local(vid) => Ok(cmt_ { + hir_id, + span, + cat: Categorization::Local(vid), + mutbl: MutabilityCategory::from_local(self.tcx, self.tables, vid), + ty: expr_ty, + note: NoteNone, + }), + + def => span_bug!( + span, + "unexpected definition in memory categorization: {:?}", + def + ), } } // Categorize an upvar, complete with invisible derefs of closure // environment and upvar reference as appropriate. - fn cat_upvar(&self, - hir_id: hir::HirId, - span: Span, - var_id: ast::NodeId, - fn_node_id: ast::NodeId) - -> McResult> - { + fn cat_upvar( + &self, + hir_id: hir::HirId, + span: Span, + var_id: ast::NodeId, + fn_node_id: ast::NodeId, + ) -> McResult> { let fn_hir_id = self.tcx.hir().node_to_hir_id(fn_node_id); // An upvar can have up to 3 components. We translate first to a @@ -792,18 +828,23 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { match self.infcx { // During upvar inference we may not know the // closure kind, just use the LATTICE_BOTTOM value. - Some(infcx) => - infcx.closure_kind(closure_def_id, closure_substs) - .unwrap_or(ty::ClosureKind::LATTICE_BOTTOM), - - None => - self.tcx.global_tcx() - .lift(&closure_substs) - .expect("no inference cx, but inference variables in closure ty") - .closure_kind(closure_def_id, self.tcx.global_tcx()), + Some(infcx) => infcx + .closure_kind(closure_def_id, closure_substs) + .unwrap_or(ty::ClosureKind::LATTICE_BOTTOM), + + None => self + .tcx + .global_tcx() + .lift(&closure_substs) + .expect("no inference cx, but inference variables in closure ty") + .closure_kind(closure_def_id, self.tcx.global_tcx()), } } - ref t => span_bug!(span, "unexpected type for fn in mem_categorization: {:?}", t), + ref t => span_bug!( + span, + "unexpected type for fn in mem_categorization: {:?}", + t + ), }; let closure_expr_def_id = self.tcx.hir().local_def_id(fn_node_id); @@ -824,19 +865,20 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { let cmt_result = cmt_ { hir_id, span, - cat: Categorization::Upvar(Upvar {id: upvar_id, kind: kind}), + cat: Categorization::Upvar(Upvar { + id: upvar_id, + kind: kind, + }), mutbl: var_mutbl, ty: var_ty, - note: NoteNone + note: NoteNone, }; // If this is a `FnMut` or `Fn` closure, then the above is // conceptually a `&mut` or `&` reference, so we have to add a // deref. let cmt_result = match kind { - ty::ClosureKind::FnOnce => { - cmt_result - } + ty::ClosureKind::FnOnce => cmt_result, ty::ClosureKind::FnMut => { self.env_deref(hir_id, span, upvar_id, var_mutbl, ty::MutBorrow, cmt_result) } @@ -850,9 +892,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { // for that. 
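// Hedged aside, not part of the diff: a toy model (made-up `Cmt` / `ClosureKind` /
// `Capture` types, not the real cmt machinery) of the layering described in the comments
// above -- `cat_upvar` builds a cmt for the upvar itself, adds a deref through the
// closure environment for `Fn`/`FnMut` closures, and adds one more deref when the
// variable is captured by reference.

enum ClosureKind { Fn, FnMut, FnOnce }
enum Capture { ByValue, ByRef }

enum Cmt {
    Upvar(&'static str),
    EnvDeref(Box<Cmt>),
    UpvarRefDeref(Box<Cmt>),
}

fn categorize_upvar(name: &'static str, kind: ClosureKind, capture: Capture) -> Cmt {
    let mut cmt = Cmt::Upvar(name);
    if !matches!(kind, ClosureKind::FnOnce) {
        // The closure environment is conceptually a `&` / `&mut` reference.
        cmt = Cmt::EnvDeref(Box::new(cmt));
    }
    if matches!(capture, Capture::ByRef) {
        // By-ref captures add a deref through the captured reference itself.
        cmt = Cmt::UpvarRefDeref(Box::new(cmt));
    }
    cmt
}

fn main() {
    // A by-ref capture in an `Fn` closure ends up behind two derefs.
    let cmt = categorize_upvar("x", ClosureKind::Fn, Capture::ByRef);
    assert!(matches!(cmt, Cmt::UpvarRefDeref(_)));
}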
let upvar_capture = self.tables.upvar_capture(upvar_id); let cmt_result = match upvar_capture { - ty::UpvarCapture::ByValue => { - cmt_result - } + ty::UpvarCapture::ByValue => cmt_result, ty::UpvarCapture::ByRef(upvar_borrow) => { let ptr = BorrowedPtr(upvar_borrow.kind, upvar_borrow.region); cmt_ { @@ -861,7 +901,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { cat: Categorization::Deref(Rc::new(cmt_result), ptr), mutbl: MutabilityCategory::from_borrow_kind(upvar_borrow.kind), ty: var_ty, - note: NoteUpvarRef(upvar_id) + note: NoteUpvarRef(upvar_id), } } }; @@ -871,22 +911,22 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { Ok(ret) } - fn env_deref(&self, - hir_id: hir::HirId, - span: Span, - upvar_id: ty::UpvarId, - upvar_mutbl: MutabilityCategory, - env_borrow_kind: ty::BorrowKind, - cmt_result: cmt_<'tcx>) - -> cmt_<'tcx> - { + fn env_deref( + &self, + hir_id: hir::HirId, + span: Span, + upvar_id: ty::UpvarId, + upvar_mutbl: MutabilityCategory, + env_borrow_kind: ty::BorrowKind, + cmt_result: cmt_<'tcx>, + ) -> cmt_<'tcx> { // Region of environment pointer let env_region = self.tcx.mk_region(ty::ReFree(ty::FreeRegion { // The environment of a closure is guaranteed to // outlive any bindings introduced in the body of the // closure itself. scope: upvar_id.closure_expr_id.to_def_id(), - bound_region: ty::BrEnv + bound_region: ty::BrEnv, })); let env_ptr = BorrowedPtr(env_borrow_kind, env_region); @@ -911,8 +951,10 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { // Issue #18335. If variable is declared as immutable, override the // mutability from the environment and substitute an `&T` anyway. match upvar_mutbl { - McImmutable => { deref_mutbl = McImmutable; } - McDeclared | McInherited => { } + McImmutable => { + deref_mutbl = McImmutable; + } + McDeclared | McInherited => {} } let ret = cmt_ { @@ -921,7 +963,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { cat: Categorization::Deref(Rc::new(cmt_result), env_ptr), mutbl: deref_mutbl, ty: var_ty, - note: NoteClosureEnv(upvar_id) + note: NoteClosureEnv(upvar_id), }; debug!("env_deref ret {:?}", ret); @@ -935,20 +977,21 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { let scope = self.region_scope_tree.temporary_scope(id); self.tcx.mk_region(match scope { Some(scope) => ty::ReScope(scope), - None => ty::ReStatic + None => ty::ReStatic, }) } - pub fn cat_rvalue_node(&self, - hir_id: hir::HirId, - span: Span, - expr_ty: Ty<'tcx>) - -> cmt_<'tcx> { - debug!("cat_rvalue_node(id={:?}, span={:?}, expr_ty={:?})", - hir_id, span, expr_ty); + pub fn cat_rvalue_node(&self, hir_id: hir::HirId, span: Span, expr_ty: Ty<'tcx>) -> cmt_<'tcx> { + debug!( + "cat_rvalue_node(id={:?}, span={:?}, expr_ty={:?})", + hir_id, span, expr_ty + ); - let promotable = self.rvalue_promotable_map.as_ref().map(|m| m.contains(&hir_id.local_id)) - .unwrap_or(false); + let promotable = self + .rvalue_promotable_map + .as_ref() + .map(|m| m.contains(&hir_id.local_id)) + .unwrap_or(false); debug!("cat_rvalue_node: promotable = {:?}", promotable); @@ -973,38 +1016,43 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { ret } - pub fn cat_rvalue(&self, - cmt_hir_id: hir::HirId, - span: Span, - temp_scope: ty::Region<'tcx>, - expr_ty: Ty<'tcx>) -> cmt_<'tcx> { + pub fn cat_rvalue( + &self, + cmt_hir_id: hir::HirId, + span: Span, + temp_scope: ty::Region<'tcx>, + expr_ty: Ty<'tcx>, + ) -> cmt_<'tcx> { let ret = cmt_ { hir_id: cmt_hir_id, - span:span, - 
cat:Categorization::Rvalue(temp_scope), - mutbl:McDeclared, - ty:expr_ty, - note: NoteNone + span: span, + cat: Categorization::Rvalue(temp_scope), + mutbl: McDeclared, + ty: expr_ty, + note: NoteNone, }; debug!("cat_rvalue ret {:?}", ret); ret } - pub fn cat_field(&self, - node: &N, - base_cmt: cmt<'tcx>, - f_index: usize, - f_ident: ast::Ident, - f_ty: Ty<'tcx>) - -> cmt_<'tcx> { + pub fn cat_field( + &self, + node: &N, + base_cmt: cmt<'tcx>, + f_index: usize, + f_ident: ast::Ident, + f_ty: Ty<'tcx>, + ) -> cmt_<'tcx> { let ret = cmt_ { hir_id: node.hir_id(), span: node.span(), mutbl: base_cmt.mutbl.inherit(), - cat: Categorization::Interior(base_cmt, - InteriorField(FieldIndex(f_index, f_ident.name))), + cat: Categorization::Interior( + base_cmt, + InteriorField(FieldIndex(f_index, f_ident.name)), + ), ty: f_ty, - note: NoteNone + note: NoteNone, }; debug!("cat_field ret {:?}", ret); ret @@ -1016,10 +1064,10 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { base: &hir::Expr, note: Note, ) -> McResult> { - debug!("cat_overloaded_place(expr={:?}, base={:?}, note={:?})", - expr, - base, - note); + debug!( + "cat_overloaded_place(expr={:?}, base={:?}, note={:?})", + expr, base, note + ); // Reconstruct the output assuming it's a reference with the // same region and mutability as the receiver. This holds for @@ -1029,12 +1077,15 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { let (region, mutbl) = match base_ty.sty { ty::Ref(region, _, mutbl) => (region, mutbl), - _ => span_bug!(expr.span, "cat_overloaded_place: base is not a reference") + _ => span_bug!(expr.span, "cat_overloaded_place: base is not a reference"), }; - let ref_ty = self.tcx.mk_ref(region, ty::TypeAndMut { - ty: place_ty, - mutbl, - }); + let ref_ty = self.tcx.mk_ref( + region, + ty::TypeAndMut { + ty: place_ty, + mutbl, + }, + ); let base_cmt = Rc::new(self.cat_rvalue_node(expr.hir_id, expr.span, ref_ty)); self.cat_deref(expr, base_cmt, note) @@ -1064,7 +1115,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { let bk = ty::BorrowKind::from_mutbl(mutbl); BorrowedPtr(bk, r) } - ref ty => bug!("unexpected type in cat_deref: {:?}", ty) + ref ty => bug!("unexpected type in cat_deref: {:?}", ty), }; let ret = cmt_ { hir_id: node.hir_id(), @@ -1079,12 +1130,13 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { Ok(ret) } - fn cat_index(&self, - elt: &N, - base_cmt: cmt<'tcx>, - element_ty: Ty<'tcx>, - context: InteriorOffsetKind) - -> McResult> { + fn cat_index( + &self, + elt: &N, + base_cmt: cmt<'tcx>, + element_ty: Ty<'tcx>, + context: InteriorOffsetKind, + ) -> McResult> { //! Creates a cmt for an indexing operation (`[]`). //! //! 
One subtle aspect of indexing that may not be @@ -1108,29 +1160,31 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { return Ok(ret); } - pub fn cat_imm_interior(&self, - node: &N, - base_cmt: cmt<'tcx>, - interior_ty: Ty<'tcx>, - interior: InteriorKind) - -> cmt_<'tcx> { + pub fn cat_imm_interior( + &self, + node: &N, + base_cmt: cmt<'tcx>, + interior_ty: Ty<'tcx>, + interior: InteriorKind, + ) -> cmt_<'tcx> { let ret = cmt_ { hir_id: node.hir_id(), span: node.span(), mutbl: base_cmt.mutbl.inherit(), cat: Categorization::Interior(base_cmt, interior), ty: interior_ty, - note: NoteNone + note: NoteNone, }; debug!("cat_imm_interior ret={:?}", ret); ret } - pub fn cat_downcast_if_needed(&self, - node: &N, - base_cmt: cmt<'tcx>, - variant_did: DefId) - -> cmt<'tcx> { + pub fn cat_downcast_if_needed( + &self, + node: &N, + base_cmt: cmt<'tcx>, + variant_did: DefId, + ) -> cmt<'tcx> { // univariant enums do not need downcasts let base_did = self.tcx.parent_def_id(variant_did).unwrap(); if self.tcx.adt_def(base_did).variants.len() != 1 { @@ -1141,7 +1195,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { mutbl: base_cmt.mutbl.inherit(), cat: Categorization::Downcast(base_cmt, variant_did), ty: base_ty, - note: NoteNone + note: NoteNone, }); debug!("cat_downcast ret={:?}", ret); ret @@ -1152,14 +1206,16 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { } pub fn cat_pattern(&self, cmt: cmt<'tcx>, pat: &hir::Pat, mut op: F) -> McResult<()> - where F: FnMut(cmt<'tcx>, &hir::Pat), + where + F: FnMut(cmt<'tcx>, &hir::Pat), { self.cat_pattern_(cmt, pat, &mut op) } // FIXME(#19596) This is a workaround, but there should be a better way to do this fn cat_pattern_(&self, mut cmt: cmt<'tcx>, pat: &hir::Pat, op: &mut F) -> McResult<()> - where F : FnMut(cmt<'tcx>, &hir::Pat) + where + F: FnMut(cmt<'tcx>, &hir::Pat), { // Here, `cmt` is the categorization for the value being // matched and pat is the pattern it is being matched against. @@ -1242,17 +1298,21 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { // Then we see that to get the same result, we must start with `deref { deref { cmt_foo }}` // instead of `cmt_foo` since the pattern is now `Some(x,)` and not `&&Some(x,)`, even // though its assigned type is that of `&&Some(x,)`. - for _ in 0..self.tables - .pat_adjustments() - .get(pat.hir_id) - .map(|v| v.len()) - .unwrap_or(0) + for _ in 0..self + .tables + .pat_adjustments() + .get(pat.hir_id) + .map(|v| v.len()) + .unwrap_or(0) { debug!("cat_pattern: applying adjustment to cmt={:?}", cmt); cmt = Rc::new(self.cat_deref(pat, cmt, NoteNone)?); } let cmt = cmt; // lose mutability - debug!("cat_pattern: applied adjustment derefs to get cmt={:?}", cmt); + debug!( + "cat_pattern: applied adjustment derefs to get cmt={:?}", + cmt + ); // Invoke the callback, but only now, after the `cmt` has adjusted. // @@ -1271,35 +1331,46 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { let (cmt, expected_len) = match def { Def::Err => { debug!("access to unresolvable pattern {:?}", pat); - return Err(()) + return Err(()); } Def::VariantCtor(def_id, CtorKind::Fn) => { let enum_def = self.tcx.parent_def_id(def_id).unwrap(); - (self.cat_downcast_if_needed(pat, cmt, def_id), - self.tcx.adt_def(enum_def).variant_with_id(def_id).fields.len()) + ( + self.cat_downcast_if_needed(pat, cmt, def_id), + self.tcx + .adt_def(enum_def) + .variant_with_id(def_id) + .fields + .len(), + ) } Def::StructCtor(_, CtorKind::Fn) | Def::SelfCtor(..) 
=> { match self.pat_ty_unadjusted(&pat)?.sty { - ty::Adt(adt_def, _) => { - (cmt, adt_def.non_enum_variant().fields.len()) - } + ty::Adt(adt_def, _) => (cmt, adt_def.non_enum_variant().fields.len()), ref ty => { - span_bug!(pat.span, - "tuple struct pattern unexpected type {:?}", ty); + span_bug!( + pat.span, + "tuple struct pattern unexpected type {:?}", + ty + ); } } } def => { - span_bug!(pat.span, "tuple struct pattern didn't resolve \ - to variant or struct {:?}", def); + span_bug!( + pat.span, + "tuple struct pattern didn't resolve \ + to variant or struct {:?}", + def + ); } }; for (i, subpat) in subpats.iter().enumerate_and_adjust(expected_len, ddpos) { let subpat_ty = self.pat_ty_adjusted(&subpat)?; // see (*2) let interior = InteriorField(FieldIndex(i, Name::intern(&i.to_string()))); - let subcmt = Rc::new( - self.cat_imm_interior(pat, cmt.clone(), subpat_ty, interior)); + let subcmt = + Rc::new(self.cat_imm_interior(pat, cmt.clone(), subpat_ty, interior)); self.cat_pattern_(subcmt, &subpat, op)?; } } @@ -1310,20 +1381,19 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { let cmt = match def { Def::Err => { debug!("access to unresolvable pattern {:?}", pat); - return Err(()) - }, - Def::Variant(variant_did) | - Def::VariantCtor(variant_did, ..) => { + return Err(()); + } + Def::Variant(variant_did) | Def::VariantCtor(variant_did, ..) => { self.cat_downcast_if_needed(pat, cmt, variant_did) - }, - _ => cmt + } + _ => cmt, }; for fp in field_pats { let field_ty = self.pat_ty_adjusted(&fp.node.pat)?; // see (*2) let f_index = self.tcx.field_index(fp.node.id, self.tables); - let cmt_field = Rc::new(self.cat_field(pat, cmt.clone(), f_index, - fp.node.ident, field_ty)); + let cmt_field = + Rc::new(self.cat_field(pat, cmt.clone(), f_index, fp.node.ident, field_ty)); self.cat_pattern_(cmt_field, &fp.node.pat, op)?; } } @@ -1341,13 +1411,13 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { for (i, subpat) in subpats.iter().enumerate_and_adjust(expected_len, ddpos) { let subpat_ty = self.pat_ty_adjusted(&subpat)?; // see (*2) let interior = InteriorField(FieldIndex(i, Name::intern(&i.to_string()))); - let subcmt = Rc::new( - self.cat_imm_interior(pat, cmt.clone(), subpat_ty, interior)); + let subcmt = + Rc::new(self.cat_imm_interior(pat, cmt.clone(), subpat_ty, interior)); self.cat_pattern_(subcmt, &subpat, op)?; } } - PatKind::Box(ref subpat) | PatKind::Ref(ref subpat, _) => { + PatKind::Box(ref subpat) | PatKind::Ref(ref subpat, _) => { // box p1, &p1, &mut p1. we can ignore the mutability of // PatKind::Ref since that information is already contained // in the type. @@ -1376,8 +1446,11 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { } } - PatKind::Path(_) | PatKind::Binding(.., None) | - PatKind::Lit(..) | PatKind::Range(..) | PatKind::Wild => { + PatKind::Path(_) + | PatKind::Binding(.., None) + | PatKind::Lit(..) + | PatKind::Range(..) + | PatKind::Wild => { // always ok } } @@ -1407,20 +1480,16 @@ impl<'tcx> cmt_<'tcx> { //! determines how long the value in `self` remains live. match self.cat { - Categorization::Rvalue(..) | - Categorization::StaticItem | - Categorization::ThreadLocal(..) | - Categorization::Local(..) | - Categorization::Deref(_, UnsafePtr(..)) | - Categorization::Deref(_, BorrowedPtr(..)) | - Categorization::Upvar(..) => { - (*self).clone() - } - Categorization::Downcast(ref b, _) | - Categorization::Interior(ref b, _) | - Categorization::Deref(ref b, Unique) => { - b.guarantor() - } + Categorization::Rvalue(..) 
+ | Categorization::StaticItem + | Categorization::ThreadLocal(..) + | Categorization::Local(..) + | Categorization::Deref(_, UnsafePtr(..)) + | Categorization::Deref(_, BorrowedPtr(..)) + | Categorization::Upvar(..) => (*self).clone(), + Categorization::Downcast(ref b, _) + | Categorization::Interior(ref b, _) + | Categorization::Deref(ref b, Unique) => b.guarantor(), } } @@ -1431,20 +1500,21 @@ impl<'tcx> cmt_<'tcx> { // aliased and eventually recused. match self.cat { - Categorization::Deref(ref b, BorrowedPtr(ty::MutBorrow, _)) | - Categorization::Deref(ref b, BorrowedPtr(ty::UniqueImmBorrow, _)) | - Categorization::Deref(ref b, Unique) | - Categorization::Downcast(ref b, _) | - Categorization::Interior(ref b, _) => { + Categorization::Deref(ref b, BorrowedPtr(ty::MutBorrow, _)) + | Categorization::Deref(ref b, BorrowedPtr(ty::UniqueImmBorrow, _)) + | Categorization::Deref(ref b, Unique) + | Categorization::Downcast(ref b, _) + | Categorization::Interior(ref b, _) => { // Aliasability depends on base cmt b.freely_aliasable() } - Categorization::Rvalue(..) | - Categorization::ThreadLocal(..) | - Categorization::Local(..) | - Categorization::Upvar(..) | - Categorization::Deref(_, UnsafePtr(..)) => { // yes, it's aliasable, but... + Categorization::Rvalue(..) + | Categorization::ThreadLocal(..) + | Categorization::Local(..) + | Categorization::Upvar(..) + | Categorization::Deref(_, UnsafePtr(..)) => { + // yes, it's aliasable, but... NonAliasable } @@ -1467,79 +1537,51 @@ impl<'tcx> cmt_<'tcx> { // one. pub fn upvar_cat(&self) -> Option<&Categorization<'tcx>> { match self.note { - NoteClosureEnv(..) | NoteUpvarRef(..) => { - Some(match self.cat { - Categorization::Deref(ref inner, _) => { - match inner.cat { - Categorization::Deref(ref inner, _) => &inner.cat, - Categorization::Upvar(..) => &inner.cat, - _ => bug!() - } - } - _ => bug!() - }) - } - NoteIndex | NoteNone => None + NoteClosureEnv(..) | NoteUpvarRef(..) => Some(match self.cat { + Categorization::Deref(ref inner, _) => match inner.cat { + Categorization::Deref(ref inner, _) => &inner.cat, + Categorization::Upvar(..) => &inner.cat, + _ => bug!(), + }, + _ => bug!(), + }), + NoteIndex | NoteNone => None, } } pub fn descriptive_string(&self, tcx: TyCtxt<'_, '_, '_>) -> Cow<'static, str> { match self.cat { - Categorization::StaticItem => { - "static item".into() - } - Categorization::ThreadLocal(..) => { - "thread-local static item".into() - } - Categorization::Rvalue(..) => { - "non-place".into() - } - Categorization::Local(vid) => { - if tcx.hir().is_argument(vid) { - "argument" - } else { - "local variable" - }.into() + Categorization::StaticItem => "static item".into(), + Categorization::ThreadLocal(..) => "thread-local static item".into(), + Categorization::Rvalue(..) => "non-place".into(), + Categorization::Local(vid) => if tcx.hir().is_argument(vid) { + "argument" + } else { + "local variable" } - Categorization::Deref(_, pk) => { - match self.upvar_cat() { - Some(&Categorization::Upvar(ref var)) => { - var.to_string().into() - } - Some(_) => bug!(), - None => { - match pk { - Unique => { - "`Box` content" - } - UnsafePtr(..) => { - "dereference of raw pointer" - } - BorrowedPtr(..) => { - match self.note { - NoteIndex => "indexed content", - _ => "borrowed content" - } - } - }.into() - } + .into(), + Categorization::Deref(_, pk) => match self.upvar_cat() { + Some(&Categorization::Upvar(ref var)) => var.to_string().into(), + Some(_) => bug!(), + None => match pk { + Unique => "`Box` content", + UnsafePtr(..) 
=> "dereference of raw pointer", + BorrowedPtr(..) => match self.note { + NoteIndex => "indexed content", + _ => "borrowed content", + }, } - } - Categorization::Interior(_, InteriorField(..)) => { - "field".into() - } + .into(), + }, + Categorization::Interior(_, InteriorField(..)) => "field".into(), Categorization::Interior(_, InteriorElement(InteriorOffsetKind::Index)) => { "indexed content".into() } Categorization::Interior(_, InteriorElement(InteriorOffsetKind::Pattern)) => { "pattern-bound indexed content".into() } - Categorization::Upvar(ref var) => { - var.to_string().into() - } - Categorization::Downcast(ref cmt, _) => { - cmt.descriptive_string(tcx).into() - } + Categorization::Upvar(ref var) => var.to_string().into(), + Categorization::Downcast(ref cmt, _) => cmt.descriptive_string(tcx).into(), } } } diff --git a/src/librustc/middle/privacy.rs b/src/librustc/middle/privacy.rs index 7736d5e795ea0..5e8d369f90267 100644 --- a/src/librustc/middle/privacy.rs +++ b/src/librustc/middle/privacy.rs @@ -4,8 +4,8 @@ use util::nodemap::{DefIdSet, FxHashMap}; -use std::hash::Hash; use std::fmt; +use std::hash::Hash; use syntax::ast::NodeId; // Accessibility levels, sorted in ascending order @@ -27,7 +27,7 @@ pub enum AccessLevel { // Accessibility levels for reachable HIR nodes #[derive(Clone)] pub struct AccessLevels { - pub map: FxHashMap + pub map: FxHashMap, } impl AccessLevels { @@ -44,7 +44,9 @@ impl AccessLevels { impl Default for AccessLevels { fn default() -> Self { - AccessLevels { map: Default::default() } + AccessLevels { + map: Default::default(), + } } } diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs index 10deca836fff3..5770c611f5ab6 100644 --- a/src/librustc/middle/reachable.rs +++ b/src/librustc/middle/reachable.rs @@ -5,38 +5,39 @@ // makes all other generics or inline functions that it references // reachable as well. -use hir::{CodegenFnAttrs, CodegenFnAttrFlags}; -use hir::Node; use hir::def::Def; -use hir::def_id::{DefId, CrateNum}; -use rustc_data_structures::sync::Lrc; -use ty::{self, TyCtxt}; -use ty::query::Providers; +use hir::def_id::{CrateNum, DefId}; +use hir::Node; +use hir::{CodegenFnAttrFlags, CodegenFnAttrs}; use middle::privacy; +use rustc_data_structures::sync::Lrc; use session::config; -use util::nodemap::{NodeSet, FxHashSet}; +use ty::query::Providers; +use ty::{self, TyCtxt}; +use util::nodemap::{FxHashSet, NodeSet}; -use rustc_target::spec::abi::Abi; -use syntax::ast; use hir; use hir::def_id::LOCAL_CRATE; -use hir::intravisit::{Visitor, NestedVisitorMap}; -use hir::itemlikevisit::ItemLikeVisitor; use hir::intravisit; +use hir::intravisit::{NestedVisitorMap, Visitor}; +use hir::itemlikevisit::ItemLikeVisitor; +use rustc_target::spec::abi::Abi; +use syntax::ast; // Returns true if the given item must be inlined because it may be // monomorphized or it was marked with `#[inline]`. This will only return // true for functions. -fn item_might_be_inlined(tcx: TyCtxt<'a, 'tcx, 'tcx>, - item: &hir::Item, - attrs: CodegenFnAttrs) -> bool { +fn item_might_be_inlined( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + item: &hir::Item, + attrs: CodegenFnAttrs, +) -> bool { if attrs.requests_inline() { - return true + return true; } match item.node { - hir::ItemKind::Impl(..) | - hir::ItemKind::Fn(..) => { + hir::ItemKind::Impl(..) | hir::ItemKind::Fn(..) 
=> { let generics = tcx.generics_of(tcx.hir().local_def_id(item.id)); generics.requires_monomorphization(tcx) } @@ -44,23 +45,26 @@ fn item_might_be_inlined(tcx: TyCtxt<'a, 'tcx, 'tcx>, } } -fn method_might_be_inlined<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - impl_item: &hir::ImplItem, - impl_src: DefId) -> bool { +fn method_might_be_inlined<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + impl_item: &hir::ImplItem, + impl_src: DefId, +) -> bool { let codegen_fn_attrs = tcx.codegen_fn_attrs(impl_item.hir_id.owner_def_id()); let generics = tcx.generics_of(tcx.hir().local_def_id(impl_item.id)); if codegen_fn_attrs.requests_inline() || generics.requires_monomorphization(tcx) { - return true + return true; } if let Some(impl_node_id) = tcx.hir().as_local_node_id(impl_src) { match tcx.hir().find(impl_node_id) { - Some(Node::Item(item)) => - item_might_be_inlined(tcx, &item, codegen_fn_attrs), - Some(..) | None => - span_bug!(impl_item.span, "impl did is not an item") + Some(Node::Item(item)) => item_might_be_inlined(tcx, &item, codegen_fn_attrs), + Some(..) | None => span_bug!(impl_item.span, "impl did is not an item"), } } else { - span_bug!(impl_item.span, "found a foreign impl as a parent of a local method") + span_bug!( + impl_item.span, + "found a foreign impl as a parent of a local method" + ) } } @@ -93,13 +97,11 @@ impl<'a, 'tcx> Visitor<'tcx> for ReachableContext<'a, 'tcx> { fn visit_expr(&mut self, expr: &'tcx hir::Expr) { let def = match expr.node { - hir::ExprKind::Path(ref qpath) => { - Some(self.tables.qpath_def(qpath, expr.hir_id)) - } + hir::ExprKind::Path(ref qpath) => Some(self.tables.qpath_def(qpath, expr.hir_id)), hir::ExprKind::MethodCall(..) => { self.tables.type_dependent_defs().get(expr.hir_id).cloned() } - _ => None + _ => None, }; match def { @@ -108,7 +110,10 @@ impl<'a, 'tcx> Visitor<'tcx> for ReachableContext<'a, 'tcx> { } Some(def) => { if let Some((node_id, def_id)) = def.opt_def_id().and_then(|def_id| { - self.tcx.hir().as_local_node_id(def_id).map(|node_id| (node_id, def_id)) + self.tcx + .hir() + .as_local_node_id(def_id) + .map(|node_id| (node_id, def_id)) }) { if self.def_id_represents_local_inlined_item(def_id) { self.worklist.push(node_id); @@ -143,25 +148,24 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { fn def_id_represents_local_inlined_item(&self, def_id: DefId) -> bool { let node_id = match self.tcx.hir().as_local_node_id(def_id) { Some(node_id) => node_id, - None => { return false; } + None => { + return false; + } }; match self.tcx.hir().find(node_id) { - Some(Node::Item(item)) => { - match item.node { - hir::ItemKind::Fn(..) => - item_might_be_inlined(self.tcx, &item, self.tcx.codegen_fn_attrs(def_id)), - _ => false, - } - } - Some(Node::TraitItem(trait_method)) => { - match trait_method.node { - hir::TraitItemKind::Const(_, ref default) => default.is_some(), - hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(_)) => true, - hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_)) | - hir::TraitItemKind::Type(..) => false, + Some(Node::Item(item)) => match item.node { + hir::ItemKind::Fn(..) => { + item_might_be_inlined(self.tcx, &item, self.tcx.codegen_fn_attrs(def_id)) } - } + _ => false, + }, + Some(Node::TraitItem(trait_method)) => match trait_method.node { + hir::TraitItemKind::Const(_, ref default) => default.is_some(), + hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(_)) => true, + hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_)) + | hir::TraitItemKind::Type(..) 
=> false, + }, Some(Node::ImplItem(impl_item)) => { match impl_item.node { hir::ImplItemKind::Const(..) => true, @@ -171,9 +175,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { if generics.requires_monomorphization(self.tcx) || attrs.requests_inline() { true } else { - let impl_did = self.tcx - .hir() - .get_parent_did(node_id); + let impl_did = self.tcx.hir().get_parent_did(node_id); // Check the impl. If the generics on the self // type of the impl require inlining, this method // does too. @@ -183,16 +185,15 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { let generics = self.tcx.generics_of(impl_did); generics.requires_monomorphization(self.tcx) } - _ => false + _ => false, } } } - hir::ImplItemKind::Existential(..) | - hir::ImplItemKind::Type(_) => false, + hir::ImplItemKind::Existential(..) | hir::ImplItemKind::Type(_) => false, } } Some(_) => false, - None => false // This will happen for default methods. + None => false, // This will happen for default methods. } } @@ -201,7 +202,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { let mut scanned = FxHashSet::default(); while let Some(search_item) = self.worklist.pop() { if !scanned.insert(search_item) { - continue + continue; } if let Some(ref item) = self.tcx.hir().find(search_item) { @@ -210,8 +211,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { } } - fn propagate_node(&mut self, node: &Node<'tcx>, - search_item: ast::NodeId) { + fn propagate_node(&mut self, node: &Node<'tcx>, search_item: ast::NodeId) { if !self.any_library { // If we are building an executable, only explicitly extern // types need to be exported. @@ -224,8 +224,9 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { let def_id = self.tcx.hir().local_def_id(item.id); let codegen_attrs = self.tcx.codegen_fn_attrs(def_id); let is_extern = codegen_attrs.contains_extern_indicator(); - let std_internal = codegen_attrs.flags.contains( - CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL); + let std_internal = codegen_attrs + .flags + .contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL); if reachable || is_extern || std_internal { self.reachable_symbols.insert(search_item); } @@ -243,9 +244,8 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { match item.node { hir::ItemKind::Fn(.., body) => { let def_id = self.tcx.hir().local_def_id(item.id); - if item_might_be_inlined(self.tcx, - &item, - self.tcx.codegen_fn_attrs(def_id)) { + if item_might_be_inlined(self.tcx, &item, self.tcx.codegen_fn_attrs(def_id)) + { self.visit_nested_body(body); } } @@ -260,64 +260,64 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { // These are normal, nothing reachable about these // inherently and their children are already in the // worklist, as determined by the privacy pass - hir::ItemKind::ExternCrate(_) | - hir::ItemKind::Use(..) | - hir::ItemKind::Existential(..) | - hir::ItemKind::Ty(..) | - hir::ItemKind::Static(..) | - hir::ItemKind::Mod(..) | - hir::ItemKind::ForeignMod(..) | - hir::ItemKind::Impl(..) | - hir::ItemKind::Trait(..) | - hir::ItemKind::TraitAlias(..) | - hir::ItemKind::Struct(..) | - hir::ItemKind::Enum(..) | - hir::ItemKind::Union(..) | - hir::ItemKind::GlobalAsm(..) => {} + hir::ItemKind::ExternCrate(_) + | hir::ItemKind::Use(..) + | hir::ItemKind::Existential(..) + | hir::ItemKind::Ty(..) + | hir::ItemKind::Static(..) + | hir::ItemKind::Mod(..) + | hir::ItemKind::ForeignMod(..) + | hir::ItemKind::Impl(..) + | hir::ItemKind::Trait(..) + | hir::ItemKind::TraitAlias(..) + | hir::ItemKind::Struct(..) + | hir::ItemKind::Enum(..) + | hir::ItemKind::Union(..) 
+ | hir::ItemKind::GlobalAsm(..) => {} } } Node::TraitItem(trait_method) => { match trait_method.node { - hir::TraitItemKind::Const(_, None) | - hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_)) => { + hir::TraitItemKind::Const(_, None) + | hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_)) => { // Keep going, nothing to get exported } - hir::TraitItemKind::Const(_, Some(body_id)) | - hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(body_id)) => { + hir::TraitItemKind::Const(_, Some(body_id)) + | hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(body_id)) => { self.visit_nested_body(body_id); } hir::TraitItemKind::Type(..) => {} } } - Node::ImplItem(impl_item) => { - match impl_item.node { - hir::ImplItemKind::Const(_, body) => { - self.visit_nested_body(body); - } - hir::ImplItemKind::Method(_, body) => { - let did = self.tcx.hir().get_parent_did(search_item); - if method_might_be_inlined(self.tcx, impl_item, did) { - self.visit_nested_body(body) - } + Node::ImplItem(impl_item) => match impl_item.node { + hir::ImplItemKind::Const(_, body) => { + self.visit_nested_body(body); + } + hir::ImplItemKind::Method(_, body) => { + let did = self.tcx.hir().get_parent_did(search_item); + if method_might_be_inlined(self.tcx, impl_item, did) { + self.visit_nested_body(body) } - hir::ImplItemKind::Existential(..) | - hir::ImplItemKind::Type(_) => {} } - } - Node::Expr(&hir::Expr { node: hir::ExprKind::Closure(.., body, _, _), .. }) => { + hir::ImplItemKind::Existential(..) | hir::ImplItemKind::Type(_) => {} + }, + Node::Expr(&hir::Expr { + node: hir::ExprKind::Closure(.., body, _, _), + .. + }) => { self.visit_nested_body(body); } // Nothing to recurse on for these - Node::ForeignItem(_) | - Node::Variant(_) | - Node::StructCtor(_) | - Node::Field(_) | - Node::Ty(_) | - Node::MacroDef(_) => {} - _ => { - bug!("found unexpected thingy in worklist: {}", - self.tcx.hir().node_to_string(search_item)) - } + Node::ForeignItem(_) + | Node::Variant(_) + | Node::StructCtor(_) + | Node::Field(_) + | Node::Ty(_) + | Node::MacroDef(_) => {} + _ => bug!( + "found unexpected thingy in worklist: {}", + self.tcx.hir().node_to_string(search_item) + ), } } } @@ -343,32 +343,37 @@ impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a, // which are currently akin to allocator symbols. 
let def_id = self.tcx.hir().local_def_id(item.id); let codegen_attrs = self.tcx.codegen_fn_attrs(def_id); - if codegen_attrs.contains_extern_indicator() || - codegen_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) { + if codegen_attrs.contains_extern_indicator() + || codegen_attrs + .flags + .contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) + { self.worklist.push(item.id); } // We need only trait impls here, not inherent impls, and only non-exported ones if let hir::ItemKind::Impl(.., Some(ref trait_ref), _, ref impl_item_refs) = item.node { if !self.access_levels.is_reachable(item.id) { - self.worklist.extend(impl_item_refs.iter().map(|r| r.id.node_id)); + self.worklist + .extend(impl_item_refs.iter().map(|r| r.id.node_id)); let trait_def_id = match trait_ref.path.def { Def::Trait(def_id) => def_id, - _ => unreachable!() + _ => unreachable!(), }; if !trait_def_id.is_local() { - return + return; } let provided_trait_methods = self.tcx.provided_trait_methods(trait_def_id); self.worklist.reserve(provided_trait_methods.len()); for default_method in provided_trait_methods { - let node_id = self.tcx - .hir() - .as_local_node_id(default_method.def_id) - .unwrap(); + let node_id = self + .tcx + .hir() + .as_local_node_id(default_method.def_id) + .unwrap(); self.worklist.push(node_id); } } @@ -393,8 +398,9 @@ fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE); let any_library = tcx.sess.crate_types.borrow().iter().any(|ty| { - *ty == config::CrateType::Rlib || *ty == config::CrateType::Dylib || - *ty == config::CrateType::ProcMacro + *ty == config::CrateType::Rlib + || *ty == config::CrateType::Dylib + || *ty == config::CrateType::ProcMacro }); let mut reachable_context = ReachableContext { tcx, @@ -409,7 +415,9 @@ fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> // If other crates link to us, they're going to expect to be able to // use the lang items, so we need to be sure to mark them as // exported. - reachable_context.worklist.extend(access_levels.map.iter().map(|(id, _)| *id)); + reachable_context + .worklist + .extend(access_levels.map.iter().map(|(id, _)| *id)); for item in tcx.lang_items().items().iter() { if let Some(did) = *item { if let Some(node_id) = tcx.hir().as_local_node_id(did) { @@ -423,13 +431,18 @@ fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> access_levels, worklist: &mut reachable_context.worklist, }; - tcx.hir().krate().visit_all_item_likes(&mut collect_private_impl_items); + tcx.hir() + .krate() + .visit_all_item_likes(&mut collect_private_impl_items); } // Step 2: Mark all symbols that the symbols on the worklist touch. reachable_context.propagate(); - debug!("Inline reachability shows: {:?}", reachable_context.reachable_symbols); + debug!( + "Inline reachability shows: {:?}", + reachable_context.reachable_symbols + ); // Return the set of reachable symbols. 
ReachableSet(Lrc::new(reachable_context.reachable_symbols)) diff --git a/src/librustc/middle/recursion_limit.rs b/src/librustc/middle/recursion_limit.rs index ea83432a80184..f8ddf376e1aa8 100644 --- a/src/librustc/middle/recursion_limit.rs +++ b/src/librustc/middle/recursion_limit.rs @@ -11,14 +11,32 @@ use syntax::ast; use rustc_data_structures::sync::Once; pub fn update_limits(sess: &Session, krate: &ast::Crate) { - update_limit(sess, krate, &sess.recursion_limit, "recursion_limit", - "recursion limit", 64); - update_limit(sess, krate, &sess.type_length_limit, "type_length_limit", - "type length limit", 1048576); + update_limit( + sess, + krate, + &sess.recursion_limit, + "recursion_limit", + "recursion limit", + 64, + ); + update_limit( + sess, + krate, + &sess.type_length_limit, + "type_length_limit", + "type length limit", + 1048576, + ); } -fn update_limit(sess: &Session, krate: &ast::Crate, limit: &Once, - name: &str, description: &str, default: usize) { +fn update_limit( + sess: &Session, + krate: &ast::Crate, + limit: &Once, + name: &str, + description: &str, + default: usize, +) { for attr in &krate.attrs { if !attr.check_name(name) { continue; @@ -31,9 +49,14 @@ fn update_limit(sess: &Session, krate: &ast::Crate, limit: &Once, } } - span_err!(sess, attr.span, E0296, - "malformed {} attribute, expected #![{}=\"N\"]", - description, name); + span_err!( + sess, + attr.span, + E0296, + "malformed {} attribute, expected #![{}=\"N\"]", + description, + name + ); } limit.set(default); } diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index ce2a348950622..185e6434f6093 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -6,27 +6,26 @@ //! //! [rustc guide]: https://rust-lang.github.io/rustc-guide/mir/borrowck.html -use ich::{StableHashingContext, NodeIdHashingMode}; -use util::nodemap::{FxHashMap, FxHashSet}; +use ich::{NodeIdHashingMode, StableHashingContext}; use ty; +use util::nodemap::{FxHashMap, FxHashSet}; -use std::mem; -use std::fmt; use rustc_data_structures::sync::Lrc; -use syntax::source_map; +use std::fmt; +use std::mem; use syntax::ast; +use syntax::source_map; use syntax_pos::{Span, DUMMY_SP}; -use ty::TyCtxt; use ty::query::Providers; +use ty::TyCtxt; use hir; -use hir::Node; use hir::def_id::DefId; -use hir::intravisit::{self, Visitor, NestedVisitorMap}; -use hir::{Block, Arm, Pat, PatKind, Stmt, Expr, Local}; +use hir::intravisit::{self, NestedVisitorMap, Visitor}; +use hir::Node; +use hir::{Arm, Block, Expr, Local, Pat, PatKind, Stmt}; use rustc_data_structures::indexed_vec::Idx; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, - StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; /// Scope represents a statically-describable scope that can be /// used to bound the lifetime/region for values. @@ -113,7 +112,9 @@ impl fmt::Debug for Scope { } } -#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Debug, Copy, RustcEncodable, RustcDecodable)] +#[derive( + Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Debug, Copy, RustcEncodable, RustcDecodable, +)] pub enum ScopeData { Node, @@ -129,7 +130,7 @@ pub enum ScopeData { Destruction, // Scope following a `let id = expr;` binding in a block. 
- Remainder(FirstStatementIndex) + Remainder(FirstStatementIndex), } /// Represents a subscope of `block` for a binding that is introduced @@ -170,13 +171,11 @@ impl Scope { pub fn node_id(&self, tcx: TyCtxt<'_, '_, '_>, scope_tree: &ScopeTree) -> ast::NodeId { match scope_tree.root_body { - Some(hir_id) => { - tcx.hir().hir_to_node_id(hir::HirId { - owner: hir_id.owner, - local_id: self.item_local_id() - }) - } - None => ast::DUMMY_NODE_ID + Some(hir_id) => tcx.hir().hir_to_node_id(hir::HirId { + owner: hir_id.owner, + local_id: self.item_local_id(), + }), + None => ast::DUMMY_NODE_ID, } } @@ -207,8 +206,8 @@ impl Scope { return Span::new(stmt_span.lo(), span.hi(), span.ctxt()); } } - } - span + } + span } } @@ -415,17 +414,19 @@ impl<'tcx> Visitor<'tcx> for ExprLocatorVisitor { } fn visit_expr(&mut self, expr: &'tcx Expr) { - debug!("ExprLocatorVisitor - pre-increment {} expr = {:?}", - self.expr_and_pat_count, - expr); + debug!( + "ExprLocatorVisitor - pre-increment {} expr = {:?}", + self.expr_and_pat_count, expr + ); intravisit::walk_expr(self, expr); self.expr_and_pat_count += 1; - debug!("ExprLocatorVisitor - post-increment {} expr = {:?}", - self.expr_and_pat_count, - expr); + debug!( + "ExprLocatorVisitor - post-increment {} expr = {:?}", + self.expr_and_pat_count, expr + ); if expr.hir_id == self.hir_id { self.result = Some(self.expr_and_pat_count); @@ -448,13 +449,19 @@ impl<'tcx> ScopeTree { } } - pub fn each_encl_scope(&self, mut e: E) where E: FnMut(Scope, Scope) { + pub fn each_encl_scope(&self, mut e: E) + where + E: FnMut(Scope, Scope), + { for (&child, &parent) in &self.parent_map { e(child, parent.0) } } - pub fn each_var_scope(&self, mut e: E) where E: FnMut(&hir::ItemLocalId, Scope) { + pub fn each_var_scope(&self, mut e: E) + where + E: FnMut(&hir::ItemLocalId, Scope), + { for (child, &parent) in self.var_map.iter() { e(child, parent) } @@ -467,11 +474,15 @@ impl<'tcx> ScopeTree { /// Records that `sub_closure` is defined within `sup_closure`. These ids /// should be the id of the block that is the fn body, which is /// also the root of the region hierarchy for that fn. - fn record_closure_parent(&mut self, - sub_closure: hir::ItemLocalId, - sup_closure: hir::ItemLocalId) { - debug!("record_closure_parent(sub_closure={:?}, sup_closure={:?})", - sub_closure, sup_closure); + fn record_closure_parent( + &mut self, + sub_closure: hir::ItemLocalId, + sup_closure: hir::ItemLocalId, + ) { + debug!( + "record_closure_parent(sub_closure={:?}, sup_closure={:?})", + sub_closure, sup_closure + ); assert!(sub_closure != sup_closure); let previous = self.closure_tree.insert(sub_closure, sup_closure); assert!(previous.is_none()); @@ -504,8 +515,10 @@ impl<'tcx> ScopeTree { /// Returns the lifetime of the local variable `var_id` pub fn var_scope(&self, var_id: hir::ItemLocalId) -> Scope { - self.var_map.get(&var_id).cloned().unwrap_or_else(|| - bug!("no enclosing scope for id {:?}", var_id)) + self.var_map + .get(&var_id) + .cloned() + .unwrap_or_else(|| bug!("no enclosing scope for id {:?}", var_id)) } pub fn temporary_scope(&self, expr_id: hir::ItemLocalId) -> Option { @@ -521,16 +534,18 @@ impl<'tcx> ScopeTree { // if there's one. Static items, for instance, won't // have an enclosing scope, hence no scope will be // returned. 
- let mut id = Scope { id: expr_id, data: ScopeData::Node }; + let mut id = Scope { + id: expr_id, + data: ScopeData::Node, + }; while let Some(&(p, _)) = self.parent_map.get(&id) { match p.data { ScopeData::Destruction => { - debug!("temporary_scope({:?}) = {:?} [enclosing]", - expr_id, id); + debug!("temporary_scope({:?}) = {:?} [enclosing]", expr_id, id); return Some(id); } - _ => id = p + _ => id = p, } } @@ -547,26 +562,24 @@ impl<'tcx> ScopeTree { } pub fn scopes_intersect(&self, scope1: Scope, scope2: Scope) -> bool { - self.is_subscope_of(scope1, scope2) || - self.is_subscope_of(scope2, scope1) + self.is_subscope_of(scope1, scope2) || self.is_subscope_of(scope2, scope1) } /// Returns true if `subscope` is equal to or is lexically nested inside `superscope` and false /// otherwise. - pub fn is_subscope_of(&self, - subscope: Scope, - superscope: Scope) - -> bool { + pub fn is_subscope_of(&self, subscope: Scope, superscope: Scope) -> bool { let mut s = subscope; debug!("is_subscope_of({:?}, {:?})", subscope, superscope); while superscope != s { match self.opt_encl_scope(s) { None => { - debug!("is_subscope_of({:?}, {:?}, s={:?})=false", - subscope, superscope, s); + debug!( + "is_subscope_of({:?}, {:?}, s={:?})=false", + subscope, superscope, s + ); return false; } - Some(scope) => s = scope + Some(scope) => s = scope, } } @@ -590,7 +603,9 @@ impl<'tcx> ScopeTree { /// smallest scope which is greater than or equal to both `scope_a` and /// `scope_b`. pub fn nearest_common_ancestor(&self, scope_a: Scope, scope_b: Scope) -> Scope { - if scope_a == scope_b { return scope_a; } + if scope_a == scope_b { + return scope_a; + } let mut a = scope_a; let mut b = scope_b; @@ -638,48 +653,55 @@ impl<'tcx> ScopeTree { while a != b { a = self.parent_map.get(&a).unwrap().0; b = self.parent_map.get(&b).unwrap().0; - }; + } a } /// Assuming that the provided region was defined within this `ScopeTree`, /// returns the outermost `Scope` that the region outlives. - pub fn early_free_scope<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - br: &ty::EarlyBoundRegion) - -> Scope { + pub fn early_free_scope<'a, 'gcx>( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + br: &ty::EarlyBoundRegion, + ) -> Scope { let param_owner = tcx.parent_def_id(br.def_id).unwrap(); let param_owner_id = tcx.hir().as_local_node_id(param_owner).unwrap(); - let scope = tcx.hir().maybe_body_owned_by(param_owner_id).map(|body_id| { - tcx.hir().body(body_id).value.hir_id.local_id - }).unwrap_or_else(|| { - // The lifetime was defined on node that doesn't own a body, - // which in practice can only mean a trait or an impl, that - // is the parent of a method, and that is enforced below. - assert_eq!(Some(param_owner_id), self.root_parent, - "free_scope: {:?} not recognized by the \ - region scope tree for {:?} / {:?}", - param_owner, - self.root_parent.map(|id| tcx.hir().local_def_id(id)), - self.root_body.map(|hir_id| DefId::local(hir_id.owner))); - - // The trait/impl lifetime is in scope for the method's body. - self.root_body.unwrap().local_id - }); + let scope = tcx + .hir() + .maybe_body_owned_by(param_owner_id) + .map(|body_id| tcx.hir().body(body_id).value.hir_id.local_id) + .unwrap_or_else(|| { + // The lifetime was defined on node that doesn't own a body, + // which in practice can only mean a trait or an impl, that + // is the parent of a method, and that is enforced below. 
+ assert_eq!( + Some(param_owner_id), + self.root_parent, + "free_scope: {:?} not recognized by the \ + region scope tree for {:?} / {:?}", + param_owner, + self.root_parent.map(|id| tcx.hir().local_def_id(id)), + self.root_body.map(|hir_id| DefId::local(hir_id.owner)) + ); + + // The trait/impl lifetime is in scope for the method's body. + self.root_body.unwrap().local_id + }); - Scope { id: scope, data: ScopeData::CallSite } + Scope { + id: scope, + data: ScopeData::CallSite, + } } /// Assuming that the provided region was defined within this `ScopeTree`, /// returns the outermost `Scope` that the region outlives. - pub fn free_scope<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, fr: &ty::FreeRegion) - -> Scope { + pub fn free_scope<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, fr: &ty::FreeRegion) -> Scope { let param_owner = match fr.bound_region { - ty::BoundRegion::BrNamed(def_id, _) => { - tcx.parent_def_id(def_id).unwrap() - } - _ => fr.scope + ty::BoundRegion::BrNamed(def_id, _) => tcx.parent_def_id(def_id).unwrap(), + _ => fr.scope, }; // Ensure that the named late-bound lifetimes were defined @@ -688,7 +710,10 @@ impl<'tcx> ScopeTree { let param_owner_id = tcx.hir().as_local_node_id(param_owner).unwrap(); let body_id = tcx.hir().body_owned_by(param_owner_id); - Scope { id: tcx.hir().body(body_id).value.hir_id.local_id, data: ScopeData::CallSite } + Scope { + id: tcx.hir().body(body_id).value.hir_id.local_id, + data: ScopeData::CallSite, + } } /// Checks whether the given scope contains a `yield`. If so, @@ -702,10 +727,12 @@ impl<'tcx> ScopeTree { /// Checks whether the given scope contains a `yield` and if that yield could execute /// after `expr`. If so, it returns the span of that `yield`. /// `scope` must be inside the body. - pub fn yield_in_scope_for_expr(&self, - scope: Scope, - expr_hir_id: hir::HirId, - body: &'tcx hir::Body) -> Option { + pub fn yield_in_scope_for_expr( + &self, + scope: Scope, + expr_hir_id: hir::HirId, + body: &'tcx hir::Body, + ) -> Option { self.yield_in_scope(scope).and_then(|(span, count)| { let mut visitor = ExprLocatorVisitor { hir_id: expr_hir_id, @@ -730,17 +757,18 @@ impl<'tcx> ScopeTree { } /// Records the lifetime of a local variable as `cx.var_parent` -fn record_var_lifetime(visitor: &mut RegionResolutionVisitor<'_, '_>, - var_id: hir::ItemLocalId, - _sp: Span) { +fn record_var_lifetime( + visitor: &mut RegionResolutionVisitor<'_, '_>, + var_id: hir::ItemLocalId, + _sp: Span, +) { match visitor.cx.var_parent { None => { // this can happen in extern fn declarations like // // extern fn isalnum(c: c_int) -> c_int } - Some((parent_scope, _)) => - visitor.scope_tree.record_var_scope(var_id, parent_scope), + Some((parent_scope, _)) => visitor.scope_tree.record_var_scope(var_id, parent_scope), } } @@ -791,12 +819,10 @@ fn resolve_block<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, blk: // has the previous subscope in the block as a parent, // except for the first such subscope, which has the // block itself as a parent. 
- visitor.enter_scope( - Scope { - id: blk.hir_id.local_id, - data: ScopeData::Remainder(FirstStatementIndex::new(i)) - } - ); + visitor.enter_scope(Scope { + id: blk.hir_id.local_id, + data: ScopeData::Remainder(FirstStatementIndex::new(i)), + }); visitor.cx.var_parent = visitor.cx.parent; } visitor.visit_stmt(statement) @@ -818,20 +844,29 @@ fn resolve_arm<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, arm: & } fn resolve_pat<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, pat: &'tcx hir::Pat) { - visitor.record_child_scope(Scope { id: pat.hir_id.local_id, data: ScopeData::Node }); + visitor.record_child_scope(Scope { + id: pat.hir_id.local_id, + data: ScopeData::Node, + }); // If this is a binding then record the lifetime of that binding. if let PatKind::Binding(..) = pat.node { record_var_lifetime(visitor, pat.hir_id.local_id, pat.span); } - debug!("resolve_pat - pre-increment {} pat = {:?}", visitor.expr_and_pat_count, pat); + debug!( + "resolve_pat - pre-increment {} pat = {:?}", + visitor.expr_and_pat_count, pat + ); intravisit::walk_pat(visitor, pat); visitor.expr_and_pat_count += 1; - debug!("resolve_pat - post-increment {} pat = {:?}", visitor.expr_and_pat_count, pat); + debug!( + "resolve_pat - post-increment {} pat = {:?}", + visitor.expr_and_pat_count, pat + ); } fn resolve_stmt<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, stmt: &'tcx hir::Stmt) { @@ -854,7 +889,10 @@ fn resolve_stmt<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, stmt: } fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: &'tcx hir::Expr) { - debug!("resolve_expr - pre-increment {} expr = {:?}", visitor.expr_and_pat_count, expr); + debug!( + "resolve_expr - pre-increment {} expr = {:?}", + visitor.expr_and_pat_count, expr + ); let prev_cx = visitor.cx; visitor.enter_node_scope_with_dtor(expr.hir_id.local_id); @@ -868,14 +906,25 @@ fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: // Conditional or repeating scopes are always terminating // scopes, meaning that temporaries cannot outlive them. // This ensures fixed size stacks. - hir::ExprKind::Binary( - source_map::Spanned { node: hir::BinOpKind::And, .. }, _, ref r) | - hir::ExprKind::Binary( - source_map::Spanned { node: hir::BinOpKind::Or, .. }, _, ref r) => { - // For shortcircuiting operators, mark the RHS as a terminating - // scope since it only executes conditionally. - terminating(r.hir_id.local_id); + source_map::Spanned { + node: hir::BinOpKind::And, + .. + }, + _, + ref r, + ) + | hir::ExprKind::Binary( + source_map::Spanned { + node: hir::BinOpKind::Or, + .. + }, + _, + ref r, + ) => { + // For shortcircuiting operators, mark the RHS as a terminating + // scope since it only executes conditionally. + terminating(r.hir_id.local_id); } hir::ExprKind::If(ref expr, ref then, Some(ref otherwise)) => { @@ -902,8 +951,11 @@ fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: visitor.cx.var_parent = visitor.cx.parent; } - hir::ExprKind::AssignOp(..) | hir::ExprKind::Index(..) | - hir::ExprKind::Unary(..) | hir::ExprKind::Call(..) | hir::ExprKind::MethodCall(..) => { + hir::ExprKind::AssignOp(..) + | hir::ExprKind::Index(..) + | hir::ExprKind::Unary(..) + | hir::ExprKind::Call(..) + | hir::ExprKind::MethodCall(..) 
=> { // FIXME(https://github.com/rust-lang/rfcs/issues/811) Nested method calls // // The lifetimes for a call or method call look as follows: @@ -936,28 +988,36 @@ fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: visitor.visit_body(body); } - _ => intravisit::walk_expr(visitor, expr) + _ => intravisit::walk_expr(visitor, expr), } visitor.expr_and_pat_count += 1; - debug!("resolve_expr post-increment {}, expr = {:?}", visitor.expr_and_pat_count, expr); + debug!( + "resolve_expr post-increment {}, expr = {:?}", + visitor.expr_and_pat_count, expr + ); if let hir::ExprKind::Yield(..) = expr.node { // Mark this expr's scope and all parent scopes as containing `yield`. - let mut scope = Scope { id: expr.hir_id.local_id, data: ScopeData::Node }; + let mut scope = Scope { + id: expr.hir_id.local_id, + data: ScopeData::Node, + }; loop { - visitor.scope_tree.yield_in_scope.insert(scope, - (expr.span, visitor.expr_and_pat_count)); + visitor + .scope_tree + .yield_in_scope + .insert(scope, (expr.span, visitor.expr_and_pat_count)); // Keep traversing up while we can. match visitor.scope_tree.parent_map.get(&scope) { // Don't cross from closure bodies to their parent. Some(&(superscope, _)) => match superscope.data { ScopeData::CallSite => break, - _ => scope = superscope + _ => scope = superscope, }, - None => break + None => break, } } } @@ -965,9 +1025,11 @@ fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: visitor.cx = prev_cx; } -fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, - pat: Option<&'tcx hir::Pat>, - init: Option<&'tcx hir::Expr>) { +fn resolve_local<'a, 'tcx>( + visitor: &mut RegionResolutionVisitor<'a, 'tcx>, + pat: Option<&'tcx hir::Pat>, + init: Option<&'tcx hir::Expr>, +) { debug!("resolve_local(pat={:?}, init={:?})", pat, init); let blk_scope = visitor.cx.var_parent.map(|(p, _)| p); @@ -1079,27 +1141,24 @@ fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, // & expression, and its lifetime would be extended to the end of the block (due // to a different rule, not the below code). match pat.node { - PatKind::Binding(hir::BindingAnnotation::Ref, ..) | - PatKind::Binding(hir::BindingAnnotation::RefMut, ..) => true, + PatKind::Binding(hir::BindingAnnotation::Ref, ..) + | PatKind::Binding(hir::BindingAnnotation::RefMut, ..) 
=> true, PatKind::Struct(_, ref field_pats, _) => { field_pats.iter().any(|fp| is_binding_pat(&fp.node.pat)) } PatKind::Slice(ref pats1, ref pats2, ref pats3) => { - pats1.iter().any(|p| is_binding_pat(&p)) || - pats2.iter().any(|p| is_binding_pat(&p)) || - pats3.iter().any(|p| is_binding_pat(&p)) + pats1.iter().any(|p| is_binding_pat(&p)) + || pats2.iter().any(|p| is_binding_pat(&p)) + || pats3.iter().any(|p| is_binding_pat(&p)) } - PatKind::TupleStruct(_, ref subpats, _) | - PatKind::Tuple(ref subpats, _) => { + PatKind::TupleStruct(_, ref subpats, _) | PatKind::Tuple(ref subpats, _) => { subpats.iter().any(|p| is_binding_pat(&p)) } - PatKind::Box(ref subpat) => { - is_binding_pat(&subpat) - } + PatKind::Box(ref subpat) => is_binding_pat(&subpat), _ => false, } @@ -1118,8 +1177,8 @@ fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, fn record_rvalue_scope_if_borrow_expr<'a, 'tcx>( visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: &hir::Expr, - blk_id: Option) - { + blk_id: Option, + ) { match expr.node { hir::ExprKind::AddrOf(_, ref subexpr) => { record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id); @@ -1127,15 +1186,12 @@ fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, } hir::ExprKind::Struct(_, ref fields, _) => { for field in fields { - record_rvalue_scope_if_borrow_expr( - visitor, &field.expr, blk_id); + record_rvalue_scope_if_borrow_expr(visitor, &field.expr, blk_id); } } - hir::ExprKind::Array(ref subexprs) | - hir::ExprKind::Tup(ref subexprs) => { + hir::ExprKind::Array(ref subexprs) | hir::ExprKind::Tup(ref subexprs) => { for subexpr in subexprs { - record_rvalue_scope_if_borrow_expr( - visitor, &subexpr, blk_id); + record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id); } } hir::ExprKind::Cast(ref subexpr, _) => { @@ -1143,8 +1199,7 @@ fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, } hir::ExprKind::Block(ref block, _) => { if let Some(ref subexpr) = block.expr { - record_rvalue_scope_if_borrow_expr( - visitor, &subexpr, blk_id); + record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id); } } _ => {} @@ -1166,9 +1221,11 @@ fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, /// | /// /// Note: ET is intended to match "rvalues or places based on rvalues". - fn record_rvalue_scope<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, - expr: &hir::Expr, - blk_scope: Option) { + fn record_rvalue_scope<'a, 'tcx>( + visitor: &mut RegionResolutionVisitor<'a, 'tcx>, + expr: &hir::Expr, + blk_scope: Option, + ) { let mut expr = expr; loop { // Note: give all the expressions matching `ET` with the @@ -1176,13 +1233,15 @@ fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, // because in codegen if we must compile e.g., `*rvalue()` // into a temporary, we request the temporary scope of the // outer expression. 
- visitor.scope_tree.record_rvalue_scope(expr.hir_id.local_id, blk_scope); + visitor + .scope_tree + .record_rvalue_scope(expr.hir_id.local_id, blk_scope); match expr.node { - hir::ExprKind::AddrOf(_, ref subexpr) | - hir::ExprKind::Unary(hir::UnDeref, ref subexpr) | - hir::ExprKind::Field(ref subexpr, _) | - hir::ExprKind::Index(ref subexpr, _) => { + hir::ExprKind::AddrOf(_, ref subexpr) + | hir::ExprKind::Unary(hir::UnDeref, ref subexpr) + | hir::ExprKind::Field(ref subexpr, _) + | hir::ExprKind::Index(ref subexpr, _) => { expr = &subexpr; } _ => { @@ -1217,9 +1276,15 @@ impl<'a, 'tcx> RegionResolutionVisitor<'a, 'tcx> { // account for the destruction scope representing the scope of // the destructors that run immediately after it completes. if self.terminating_scopes.contains(&id) { - self.enter_scope(Scope { id, data: ScopeData::Destruction }); + self.enter_scope(Scope { + id, + data: ScopeData::Destruction, + }); } - self.enter_scope(Scope { id, data: ScopeData::Node }); + self.enter_scope(Scope { + id, + data: ScopeData::Node, + }); } } @@ -1236,11 +1301,13 @@ impl<'a, 'tcx> Visitor<'tcx> for RegionResolutionVisitor<'a, 'tcx> { let body_id = body.id(); let owner_id = self.tcx.hir().body_owner(body_id); - debug!("visit_body(id={:?}, span={:?}, body.id={:?}, cx.parent={:?})", - owner_id, - self.tcx.sess.source_map().span_to_string(body.value.span), - body_id, - self.cx.parent); + debug!( + "visit_body(id={:?}, span={:?}, body.id={:?}, cx.parent={:?})", + owner_id, + self.tcx.sess.source_map().span_to_string(body.value.span), + body_id, + self.cx.parent + ); let outer_ec = mem::replace(&mut self.expr_and_pat_count, 0); let outer_cx = self.cx; @@ -1248,12 +1315,19 @@ impl<'a, 'tcx> Visitor<'tcx> for RegionResolutionVisitor<'a, 'tcx> { self.terminating_scopes.insert(body.value.hir_id.local_id); if let Some(root_id) = self.cx.root_id { - self.scope_tree.record_closure_parent(body.value.hir_id.local_id, root_id); + self.scope_tree + .record_closure_parent(body.value.hir_id.local_id, root_id); } self.cx.root_id = Some(body.value.hir_id.local_id); - self.enter_scope(Scope { id: body.value.hir_id.local_id, data: ScopeData::CallSite }); - self.enter_scope(Scope { id: body.value.hir_id.local_id, data: ScopeData::Arguments }); + self.enter_scope(Scope { + id: body.value.hir_id.local_id, + data: ScopeData::CallSite, + }); + self.enter_scope(Scope { + id: body.value.hir_id.local_id, + data: ScopeData::Arguments, + }); // The arguments and `self` are parented to the fn. self.cx.var_parent = self.cx.parent.take(); @@ -1289,7 +1363,9 @@ impl<'a, 'tcx> Visitor<'tcx> for RegionResolutionVisitor<'a, 'tcx> { } if body.is_generator { - self.scope_tree.body_expr_count.insert(body_id, self.expr_and_pat_count); + self.scope_tree + .body_expr_count + .insert(body_id, self.expr_and_pat_count); } // Restore context we had at the start. @@ -1315,9 +1391,7 @@ impl<'a, 'tcx> Visitor<'tcx> for RegionResolutionVisitor<'a, 'tcx> { } } -fn region_scope_tree<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) - -> Lrc -{ +fn region_scope_tree<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Lrc { let closure_base_def_id = tcx.closure_base_def_id(def_id); if closure_base_def_id != def_id { return tcx.region_scope_tree(closure_base_def_id); @@ -1344,8 +1418,7 @@ fn region_scope_tree<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) // record its impl/trait parent, as it can also have // lifetime parameters free in this body. 
match tcx.hir().get(id) { - Node::ImplItem(_) | - Node::TraitItem(_) => { + Node::ImplItem(_) | Node::TraitItem(_) => { visitor.scope_tree.root_parent = Some(tcx.hir().get_parent(id)); } _ => {} @@ -1369,9 +1442,11 @@ pub fn provide(providers: &mut Providers<'_>) { } impl<'a> HashStable> for ScopeTree { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let ScopeTree { root_body, root_parent, diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 2d3653464d538..ffe26d4ffe058 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -394,14 +394,16 @@ fn resolve_lifetimes<'tcx>( } for k in named_region_map.late_bound { let hir_id = tcx.hir().node_to_hir_id(k); - let map = rl.late_bound + let map = rl + .late_bound .entry(hir_id.owner_local_def_id()) .or_default(); Lrc::get_mut(map).unwrap().insert(hir_id.local_id); } for (k, v) in named_region_map.object_lifetime_defaults { let hir_id = tcx.hir().node_to_hir_id(k); - let map = rl.object_lifetime_defaults + let map = rl + .object_lifetime_defaults .entry(hir_id.owner_local_def_id()) .or_default(); Lrc::get_mut(map) @@ -442,8 +444,7 @@ fn krate<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>) -> NamedRegionMap { /// This function returns whether there is such an implicit parameter defined on the given item. fn sub_items_have_self_param(node: &hir::ItemKind) -> bool { match *node { - hir::ItemKind::Trait(..) | - hir::ItemKind::TraitAlias(..) => true, + hir::ItemKind::Trait(..) | hir::ItemKind::TraitAlias(..) => true, _ => false, } } @@ -582,7 +583,8 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { let was_in_fn_syntax = self.is_in_fn_syntax; self.is_in_fn_syntax = true; let scope = Scope::Binder { - lifetimes: c.generic_params + lifetimes: c + .generic_params .iter() .filter_map(|param| match param.kind { GenericParamKind::Lifetime { .. } => { @@ -1020,12 +1022,15 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { ) { debug!("visit_poly_trait_ref trait_ref={:?}", trait_ref); - if !self.trait_ref_hack || trait_ref.bound_generic_params.iter().any(|param| { - match param.kind { - GenericParamKind::Lifetime { .. } => true, - _ => false, - } - }) { + if !self.trait_ref_hack + || trait_ref + .bound_generic_params + .iter() + .any(|param| match param.kind { + GenericParamKind::Lifetime { .. } => true, + _ => false, + }) + { if self.trait_ref_hack { span_err!( self.tcx.sess, @@ -1131,9 +1136,10 @@ fn check_mixed_explicit_and_in_band_defs(tcx: TyCtxt<'_, '_, '_>, params: &P<[hi *in_band_span, E0688, "cannot mix in-band and explicit lifetime definitions" - ).span_label(*in_band_span, "in-band lifetime definition here") - .span_label(*explicit_span, "explicit lifetime definition here") - .emit(); + ) + .span_label(*in_band_span, "in-band lifetime definition here") + .span_label(*explicit_span, "explicit lifetime definition here") + .emit(); } } @@ -1421,8 +1427,10 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { .. 
} = self; let labels_in_fn = replace(&mut self.labels_in_fn, vec![]); - let xcrate_object_lifetime_defaults = - replace(&mut self.xcrate_object_lifetime_defaults, DefIdMap::default()); + let xcrate_object_lifetime_defaults = replace( + &mut self.xcrate_object_lifetime_defaults, + DefIdMap::default(), + ); let mut this = LifetimeContext { tcx: *tcx, map: map, @@ -1476,7 +1484,10 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { // helper method to issue suggestions from `fn rah<'a>(&'a T)` to `fn rah(&T)` fn suggest_eliding_single_use_lifetime( - &self, err: &mut DiagnosticBuilder<'_>, def_id: DefId, lifetime: &hir::Lifetime + &self, + err: &mut DiagnosticBuilder<'_>, + def_id: DefId, + lifetime: &hir::Lifetime, ) { // FIXME: future work: also suggest `impl Foo<'_>` for `impl<'a> Foo<'a>` let name = lifetime.name.ident(); @@ -1495,8 +1506,10 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { // include the trailing whitespace between the ampersand and the type name let lt_through_ty_span = lifetime.span.to(input.span.shrink_to_hi()); remove_use = Some( - self.tcx.sess.source_map() - .span_until_non_whitespace(lt_through_ty_span) + self.tcx + .sess + .source_map() + .span_until_non_whitespace(lt_through_ty_span), ); break; } @@ -1504,13 +1517,17 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { } }; if let Node::Lifetime(hir_lifetime) = self.tcx.hir().get(lifetime.id) { - if let Some(parent) = self.tcx.hir().find(self.tcx.hir().get_parent(hir_lifetime.id)) { + if let Some(parent) = self + .tcx + .hir() + .find(self.tcx.hir().get_parent(hir_lifetime.id)) + { match parent { Node::Item(item) => { if let hir::ItemKind::Fn(decl, _, _, _) = &item.node { find_arg_use_span(&decl.inputs); } - }, + } Node::ImplItem(impl_item) => { if let hir::ImplItemKind::Method(sig, _) = &impl_item.node { find_arg_use_span(&sig.decl.inputs); @@ -1748,8 +1765,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { next_early_index, abstract_type_parent, .. - } if (!only_abstract_type_parent || abstract_type_parent) => - { + } if (!only_abstract_type_parent || abstract_type_parent) => { return next_early_index } @@ -1859,8 +1875,9 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { E0687, "lifetimes used in `fn` or `Fn` syntax must be \ explicitly declared using `<...>` binders" - ).span_label(lifetime_ref.span, "in-band lifetime definition") - .emit(); + ) + .span_label(lifetime_ref.span, "in-band lifetime definition") + .emit(); } Region::Static @@ -1881,8 +1898,9 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { E0261, "use of undeclared lifetime name `{}`", lifetime_ref - ).span_label(lifetime_ref.span, "undeclared lifetime") - .emit(); + ) + .span_label(lifetime_ref.span, "undeclared lifetime") + .emit(); } } @@ -1931,7 +1949,8 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { | Def::Union(def_id) | Def::Enum(def_id) | Def::TyAlias(def_id) - | Def::Trait(def_id) if depth == 0 => + | Def::Trait(def_id) + if depth == 0 => { Some(def_id) } @@ -1980,11 +1999,13 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { unsubst .iter() .map(|set| match *set { - Set1::Empty => if in_body { - None - } else { - Some(Region::Static) - }, + Set1::Empty => { + if in_body { + None + } else { + Some(Region::Static) + } + } Set1::One(r) => { let lifetimes = generic_args.args.iter().filter_map(|arg| match arg { GenericArg::Lifetime(lt) => Some(lt), @@ -2063,7 +2084,8 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { node: hir::TraitItemKind::Method(_, ref m), .. 
}) => { - if let hir::ItemKind::Trait(.., ref trait_items) = self.tcx + if let hir::ItemKind::Trait(.., ref trait_items) = self + .tcx .hir() .expect_item(self.tcx.hir().get_parent(parent)) .node @@ -2083,7 +2105,8 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { node: hir::ImplItemKind::Method(_, body), .. }) => { - if let hir::ItemKind::Impl(.., ref self_ty, ref impl_items) = self.tcx + if let hir::ItemKind::Impl(.., ref self_ty, ref impl_items) = self + .tcx .hir() .expect_item(self.tcx.hir().get_parent(parent)) .node @@ -2348,7 +2371,10 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { } else if snippet == "'_" { ("'static".to_owned(), Applicability::MachineApplicable) } else { - (format!("{} + 'static", snippet), Applicability::MaybeIncorrect) + ( + format!("{} + 'static", snippet), + Applicability::MaybeIncorrect, + ) }; db.span_suggestion_with_applicability(span, msg, sugg, applicability); false @@ -2513,9 +2539,10 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { E0263, "lifetime name `{}` declared twice in the same scope", lifetime_j.name.ident() - ).span_label(lifetime_j.span, "declared twice") - .span_label(lifetime_i.span, "previous declaration here") - .emit(); + ) + .span_label(lifetime_j.span, "declared twice") + .span_label(lifetime_i.span, "previous declaration here") + .emit(); } } diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 43cb89ccf6c0f..c4147dc1cbcdc 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -3,24 +3,24 @@ pub use self::StabilityLevel::*; -use lint::{self, Lint}; -use hir::{self, Item, Generics, StructField, Variant, HirId}; use hir::def::Def; -use hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, LOCAL_CRATE}; -use hir::intravisit::{self, Visitor, NestedVisitorMap}; +use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE}; +use hir::intravisit::{self, NestedVisitorMap, Visitor}; +use hir::{self, Generics, HirId, Item, StructField, Variant}; +use lint::{self, Lint}; use middle::privacy::AccessLevels; use session::{DiagnosticMessageId, Session}; -use syntax::symbol::Symbol; -use syntax_pos::{Span, MultiSpan}; use syntax::ast; -use syntax::ast::{NodeId, Attribute}; -use syntax::feature_gate::{GateIssue, emit_feature_err}; -use syntax::attr::{self, Stability, Deprecation}; +use syntax::ast::{Attribute, NodeId}; +use syntax::attr::{self, Deprecation, Stability}; +use syntax::feature_gate::{emit_feature_err, GateIssue}; +use syntax::symbol::Symbol; +use syntax_pos::{MultiSpan, Span}; use ty::{self, TyCtxt}; -use util::nodemap::{FxHashSet, FxHashMap}; +use util::nodemap::{FxHashMap, FxHashSet}; -use std::mem::replace; use std::cmp::Ordering; +use std::mem::replace; #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Copy, Debug, Eq, Hash)] pub enum StabilityLevel { @@ -30,7 +30,11 @@ pub enum StabilityLevel { impl StabilityLevel { pub fn from_attr_level(level: &attr::StabilityLevel) -> Self { - if level.is_stable() { Stable } else { Unstable } + if level.is_stable() { + Stable + } else { + Unstable + } } } @@ -68,16 +72,13 @@ impl DeprecationEntry { } pub fn external(attr: Deprecation) -> DeprecationEntry { - DeprecationEntry { - attr, - origin: None, - } + DeprecationEntry { attr, origin: None } } pub fn same_origin(&self, other: &DeprecationEntry) -> bool { match (self.origin, other.origin) { (Some(o1), Some(o2)) => o1 == o2, - _ => false + _ => false, } } } @@ -115,25 +116,37 @@ struct Annotator<'a, 'tcx: 'a> { impl<'a, 'tcx: 'a> Annotator<'a, 'tcx> { // Determine the 
stability for a node based on its attributes and inherited // stability. The stability is recorded in the index and used as the parent. - fn annotate(&mut self, id: NodeId, attrs: &[Attribute], - item_sp: Span, kind: AnnotationKind, visit_children: F) - where F: FnOnce(&mut Self) + fn annotate( + &mut self, + id: NodeId, + attrs: &[Attribute], + item_sp: Span, + kind: AnnotationKind, + visit_children: F, + ) where + F: FnOnce(&mut Self), { if self.tcx.features().staged_api { // This crate explicitly wants staged API. debug!("annotate(id = {:?}, attrs = {:?})", id, attrs); if let Some(..) = attr::find_deprecation(&self.tcx.sess.parse_sess, attrs, item_sp) { - self.tcx.sess.span_err(item_sp, "`#[deprecated]` cannot be used in staged api, \ - use `#[rustc_deprecated]` instead"); + self.tcx.sess.span_err( + item_sp, + "`#[deprecated]` cannot be used in staged api, \ + use `#[rustc_deprecated]` instead", + ); } - if let Some(mut stab) = attr::find_stability(&self.tcx.sess.parse_sess, - attrs, item_sp) { + if let Some(mut stab) = attr::find_stability(&self.tcx.sess.parse_sess, attrs, item_sp) + { // Error if prohibited, or can't inherit anything from a container - if kind == AnnotationKind::Prohibited || - (kind == AnnotationKind::Container && - stab.level.is_stable() && - stab.rustc_depr.is_none()) { - self.tcx.sess.span_err(item_sp, "This stability annotation is useless"); + if kind == AnnotationKind::Prohibited + || (kind == AnnotationKind::Container + && stab.level.is_stable() + && stab.rustc_depr.is_none()) + { + self.tcx + .sess + .span_err(item_sp, "This stability annotation is useless"); } debug!("annotate: found {:?}", stab); @@ -149,19 +162,28 @@ impl<'a, 'tcx: 'a> Annotator<'a, 'tcx> { // Check if deprecated_since < stable_since. If it is, // this is *almost surely* an accident. - if let (&Some(attr::RustcDeprecation {since: dep_since, ..}), - &attr::Stable {since: stab_since}) = (&stab.rustc_depr, &stab.level) { + if let ( + &Some(attr::RustcDeprecation { + since: dep_since, .. + }), + &attr::Stable { since: stab_since }, + ) = (&stab.rustc_depr, &stab.level) + { // Explicit version of iter::order::lt to handle parse errors properly - for (dep_v, stab_v) in dep_since.as_str() - .split('.') - .zip(stab_since.as_str().split('.')) + for (dep_v, stab_v) in dep_since + .as_str() + .split('.') + .zip(stab_since.as_str().split('.')) { if let (Ok(dep_v), Ok(stab_v)) = (dep_v.parse::(), stab_v.parse()) { match dep_v.cmp(&stab_v) { Ordering::Less => { - self.tcx.sess.span_err(item_sp, "An API can't be stabilized \ - after it is deprecated"); - break + self.tcx.sess.span_err( + item_sp, + "An API can't be stabilized \ + after it is deprecated", + ); + break; } Ordering::Equal => continue, Ordering::Greater => break, @@ -169,9 +191,12 @@ impl<'a, 'tcx: 'a> Annotator<'a, 'tcx> { } else { // Act like it isn't less because the question is now nonsensical, // and this makes us not do anything else interesting. 
- self.tcx.sess.span_err(item_sp, "Invalid stability or deprecation \ - version found"); - break + self.tcx.sess.span_err( + item_sp, + "Invalid stability or deprecation \ + version found", + ); + break; } } } @@ -198,8 +223,11 @@ impl<'a, 'tcx: 'a> Annotator<'a, 'tcx> { let tag = attr.name(); if tag == "unstable" || tag == "stable" || tag == "rustc_deprecated" { attr::mark_used(attr); - self.tcx.sess.span_err(attr.span(), "stability attributes may not be used \ - outside of the standard library"); + self.tcx.sess.span_err( + attr.span(), + "stability attributes may not be used \ + outside of the standard library", + ); } } @@ -214,7 +242,9 @@ impl<'a, 'tcx: 'a> Annotator<'a, 'tcx> { if let Some(depr) = attr::find_deprecation(&self.tcx.sess.parse_sess, attrs, item_sp) { if kind == AnnotationKind::Prohibited { - self.tcx.sess.span_err(item_sp, "This deprecation annotation is useless"); + self.tcx + .sess + .span_err(item_sp, "This deprecation annotation is useless"); } // `Deprecation` is just two pointers, no need to intern it @@ -222,8 +252,7 @@ impl<'a, 'tcx: 'a> Annotator<'a, 'tcx> { let depr_entry = DeprecationEntry::local(depr, hir_id); self.index.depr_map.insert(hir_id, depr_entry.clone()); - let orig_parent_depr = replace(&mut self.parent_depr, - Some(depr_entry)); + let orig_parent_depr = replace(&mut self.parent_depr, Some(depr_entry)); visit_children(self); self.parent_depr = orig_parent_depr; } else if let Some(parent_depr) = self.parent_depr.clone() { @@ -292,9 +321,15 @@ impl<'a, 'tcx> Visitor<'tcx> for Annotator<'a, 'tcx> { } fn visit_variant(&mut self, var: &'tcx Variant, g: &'tcx Generics, item_id: NodeId) { - self.annotate(var.node.data.id(), &var.node.attrs, var.span, AnnotationKind::Required, |v| { - intravisit::walk_variant(v, var, g, item_id); - }) + self.annotate( + var.node.data.id(), + &var.node.attrs, + var.span, + AnnotationKind::Required, + |v| { + intravisit::walk_variant(v, var, g, item_id); + }, + ) } fn visit_struct_field(&mut self, s: &'tcx StructField) { @@ -323,11 +358,12 @@ impl<'a, 'tcx: 'a> MissingStabilityAnnotations<'a, 'tcx> { fn check_missing_stability(&self, id: NodeId, span: Span) { let hir_id = self.tcx.hir().node_to_hir_id(id); let stab = self.tcx.stability().local_stability(hir_id); - let is_error = !self.tcx.sess.opts.test && - stab.is_none() && - self.access_levels.is_reachable(id); + let is_error = + !self.tcx.sess.opts.test && stab.is_none() && self.access_levels.is_reachable(id); if is_error { - self.tcx.sess.span_err(span, "This node does not have a stability attribute"); + self.tcx + .sess + .span_err(span, "This node does not have a stability attribute"); } } } @@ -345,7 +381,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MissingStabilityAnnotations<'a, 'tcx> { // optional. They inherit stability from their parents when unannotated. hir::ItemKind::Impl(.., None, _, _) | hir::ItemKind::ForeignMod(..) 
=> {} - _ => self.check_missing_stability(i.id, i.span) + _ => self.check_missing_stability(i.id, i.span), } intravisit::walk_item(self, i) @@ -357,7 +393,10 @@ impl<'a, 'tcx> Visitor<'tcx> for MissingStabilityAnnotations<'a, 'tcx> { } fn visit_impl_item(&mut self, ii: &'tcx hir::ImplItem) { - let impl_def_id = self.tcx.hir().local_def_id(self.tcx.hir().get_parent(ii.id)); + let impl_def_id = self + .tcx + .hir() + .local_def_id(self.tcx.hir().get_parent(ii.id)); if self.tcx.impl_trait_ref(impl_def_id).is_none() { self.check_missing_stability(ii.id, ii.span); } @@ -387,8 +426,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MissingStabilityAnnotations<'a, 'tcx> { impl<'a, 'tcx> Index<'tcx> { pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Index<'tcx> { let is_staged_api = - tcx.sess.opts.debugging_opts.force_unstable_if_unmarked || - tcx.features().staged_api; + tcx.sess.opts.debugging_opts.force_unstable_if_unmarked || tcx.features().staged_api; let mut staged_api = FxHashMap::default(); staged_api.insert(LOCAL_CRATE, is_staged_api); let mut index = Index { @@ -401,7 +439,10 @@ impl<'a, 'tcx> Index<'tcx> { let ref active_lib_features = tcx.features().declared_lib_features; // Put the active features into a map for quick lookup - index.active_features = active_lib_features.iter().map(|&(ref s, _)| s.clone()).collect(); + index.active_features = active_lib_features + .iter() + .map(|&(ref s, _)| s.clone()) + .collect(); { let krate = tcx.hir().krate(); @@ -436,13 +477,15 @@ impl<'a, 'tcx> Index<'tcx> { annotator.parent_stab = Some(stability); } - annotator.annotate(ast::CRATE_NODE_ID, - &krate.attrs, - krate.span, - AnnotationKind::Required, - |v| intravisit::walk_crate(v, krate)); + annotator.annotate( + ast::CRATE_NODE_ID, + &krate.attrs, + krate.span, + AnnotationKind::Required, + |v| intravisit::walk_crate(v, krate), + ); } - return index + return index; } pub fn local_stability(&self, id: HirId) -> Option<&'tcx Stability> { @@ -458,7 +501,9 @@ impl<'a, 'tcx> Index<'tcx> { /// features and possibly prints errors. pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut checker = Checker { tcx }; - tcx.hir().krate().visit_all_item_likes(&mut checker.as_deep_visitor()); + tcx.hir() + .krate() + .visit_all_item_likes(&mut checker.as_deep_visitor()); } /// Check whether an item marked with `deprecated(since="X")` is currently @@ -466,7 +511,9 @@ pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { pub fn deprecation_in_effect(since: &str) -> bool { fn parse_version(ver: &str) -> Vec { // We ignore non-integer components of the version (e.g., "nightly"). - ver.split(|c| c == '.' || c == '-').flat_map(|s| s.parse()).collect() + ver.split(|c| c == '.' || c == '-') + .flat_map(|s| s.parse()) + .collect() } if let Some(rustc) = option_env!("CFG_RELEASE") { @@ -510,9 +557,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { fn skip_stability_check_due_to_privacy(self, mut def_id: DefId) -> bool { // Check if `def_id` is a trait method. match self.describe_def(def_id) { - Some(Def::Method(_)) | - Some(Def::AssociatedTy(_)) | - Some(Def::AssociatedConst(_)) => { + Some(Def::Method(_)) | Some(Def::AssociatedTy(_)) | Some(Def::AssociatedConst(_)) => { if let ty::TraitContainer(trait_def_id) = self.associated_item(def_id).container { // Trait methods do not declare visibility (even // for visibility info in cstore). 
Use containing @@ -532,8 +577,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // These are not visible outside crate; therefore // stability markers are irrelevant, if even present. - ty::Visibility::Restricted(..) | - ty::Visibility::Invisible => true, + ty::Visibility::Restricted(..) | ty::Visibility::Invisible => true, } } @@ -565,7 +609,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.lint_node(lint, id, span, &msg); if id == ast::DUMMY_NODE_ID { - span_bug!(span, "emitted a {} lint with dummy node id: {:?}", lint.name, def_id); + span_bug!( + span, + "emitted a {} lint with dummy node id: {:?}", + lint.name, + def_id + ); } }; @@ -586,66 +635,85 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { }; let parent_def_id = self.hir().local_def_id(self.hir().get_parent(id)); - let skip = self.lookup_deprecation_entry(parent_def_id) - .map_or(false, |parent_depr| parent_depr.same_origin(&depr_entry)); + let skip = self + .lookup_deprecation_entry(parent_def_id) + .map_or(false, |parent_depr| parent_depr.same_origin(&depr_entry)); if let Some(since) = deprecated_in_future_version { let path = self.item_path_str(def_id); - let message = format!("use of item '{}' \ - that will be deprecated in future version {}", - path, - since); - - lint_deprecated(def_id, - id, - depr_entry.attr.note, - &message, - lint::builtin::DEPRECATED_IN_FUTURE); + let message = format!( + "use of item '{}' \ + that will be deprecated in future version {}", + path, since + ); + + lint_deprecated( + def_id, + id, + depr_entry.attr.note, + &message, + lint::builtin::DEPRECATED_IN_FUTURE, + ); } else if !skip { let path = self.item_path_str(def_id); let message = format!("use of deprecated item '{}'", path); - lint_deprecated(def_id, - id, - depr_entry.attr.note, - &message, - lint::builtin::DEPRECATED); + lint_deprecated( + def_id, + id, + depr_entry.attr.note, + &message, + lint::builtin::DEPRECATED, + ); } }; } - let is_staged_api = self.lookup_stability(DefId { - index: CRATE_DEF_INDEX, - ..def_id - }).is_some(); + let is_staged_api = self + .lookup_stability(DefId { + index: CRATE_DEF_INDEX, + ..def_id + }) + .is_some(); if !is_staged_api { return EvalResult::Allow; } let stability = self.lookup_stability(def_id); - debug!("stability: \ - inspecting def_id={:?} span={:?} of stability={:?}", def_id, span, stability); - - if let Some(&Stability{rustc_depr: Some(attr::RustcDeprecation { reason, since }), ..}) - = stability { + debug!( + "stability: \ + inspecting def_id={:?} span={:?} of stability={:?}", + def_id, span, stability + ); + + if let Some(&Stability { + rustc_depr: Some(attr::RustcDeprecation { reason, since }), + .. 
+ }) = stability + { if let Some(id) = id { let path = self.item_path_str(def_id); if deprecation_in_effect(&since.as_str()) { let message = format!("use of deprecated item '{}'", path); - lint_deprecated(def_id, - id, - Some(reason), - &message, - lint::builtin::DEPRECATED); + lint_deprecated( + def_id, + id, + Some(reason), + &message, + lint::builtin::DEPRECATED, + ); } else { - let message = format!("use of item '{}' \ - that will be deprecated in future version {}", - path, - since); - lint_deprecated(def_id, - id, - Some(reason), - &message, - lint::builtin::DEPRECATED_IN_FUTURE); + let message = format!( + "use of item '{}' \ + that will be deprecated in future version {}", + path, since + ); + lint_deprecated( + def_id, + id, + Some(reason), + &message, + lint::builtin::DEPRECATED_IN_FUTURE, + ); } } } @@ -662,7 +730,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } match stability { - Some(&Stability { level: attr::Unstable { reason, issue }, feature, .. }) => { + Some(&Stability { + level: attr::Unstable { reason, issue }, + feature, + .. + }) => { if self.stability().active_features.contains(&feature) { return EvalResult::Allow; } @@ -682,16 +754,18 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - EvalResult::Deny { feature, reason, issue } + EvalResult::Deny { + feature, + reason, + issue, + } } Some(_) => { // Stable APIs are always ok to call and deprecated APIs are // handled by the lint emitting logic above. EvalResult::Allow } - None => { - EvalResult::Unmarked - } + None => EvalResult::Unmarked, } } @@ -705,15 +779,19 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn check_stability(self, def_id: DefId, id: Option, span: Span) { match self.eval_stability(def_id, id, span) { EvalResult::Allow => {} - EvalResult::Deny { feature, reason, issue } => { + EvalResult::Deny { + feature, + reason, + issue, + } => { let msg = match reason { Some(r) => format!("use of unstable library feature '{}': {}", feature, r), - None => format!("use of unstable library feature '{}'", &feature) + None => format!("use of unstable library feature '{}'", &feature), }; let msp: MultiSpan = span.into(); let cm = &self.sess.parse_sess.source_map(); - let span_key = msp.primary_span().and_then(|sp: Span| + let span_key = msp.primary_span().and_then(|sp: Span| { if !sp.is_dummy() { let file = cm.lookup_char_pos(sp.lo()).file; if file.name.is_macros() { @@ -724,13 +802,22 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } else { None } - ); + }); - let error_id = (DiagnosticMessageId::StabilityId(issue), span_key, msg.clone()); + let error_id = ( + DiagnosticMessageId::StabilityId(issue), + span_key, + msg.clone(), + ); let fresh = self.sess.one_time_diagnostics.borrow_mut().insert(error_id); if fresh { - emit_feature_err(&self.sess.parse_sess, &feature.as_str(), span, - GateIssue::Library(Some(issue)), &msg); + emit_feature_err( + &self.sess.parse_sess, + &feature.as_str(), + span, + GateIssue::Library(Some(issue)), + &msg, + ); } } EvalResult::Unmarked => { @@ -752,14 +839,19 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { match item.node { hir::ItemKind::ExternCrate(_) => { // compiler-generated `extern crate` items have a dummy span. 
- if item.span.is_dummy() { return } + if item.span.is_dummy() { + return; + } let def_id = self.tcx.hir().local_def_id(item.id); let cnum = match self.tcx.extern_mod_stmt_cnum(def_id) { Some(cnum) => cnum, None => return, }; - let def_id = DefId { krate: cnum, index: CRATE_DEF_INDEX }; + let def_id = DefId { + krate: cnum, + index: CRATE_DEF_INDEX, + }; self.tcx.check_stability(def_id, Some(item.id), item.span); } @@ -770,7 +862,9 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { if let Def::Trait(trait_did) = t.path.def { for impl_item_ref in impl_item_refs { let impl_item = self.tcx.hir().impl_item(impl_item_ref.id); - let trait_item_def_id = self.tcx.associated_items(trait_did) + let trait_item_def_id = self + .tcx + .associated_items(trait_did) .find(|item| item.ident.name == impl_item.ident.name) .map(|item| item.def_id); if let Some(def_id) = trait_item_def_id { @@ -789,20 +883,28 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { let ty = self.tcx.type_of(def_id); if adt_def.has_dtor(self.tcx) { - emit_feature_err(&self.tcx.sess.parse_sess, - "untagged_unions", item.span, GateIssue::Language, - "unions with `Drop` implementations are unstable"); + emit_feature_err( + &self.tcx.sess.parse_sess, + "untagged_unions", + item.span, + GateIssue::Language, + "unions with `Drop` implementations are unstable", + ); } else { let param_env = self.tcx.param_env(def_id); if !param_env.can_type_implement_copy(self.tcx, ty).is_ok() { - emit_feature_err(&self.tcx.sess.parse_sess, - "untagged_unions", item.span, GateIssue::Language, - "unions with non-`Copy` fields are unstable"); + emit_feature_err( + &self.tcx.sess.parse_sess, + "untagged_unions", + item.span, + GateIssue::Language, + "unions with non-`Copy` fields are unstable", + ); } } } - _ => (/* pass */) + _ => (/* pass */), } intravisit::walk_item(self, item); } @@ -830,10 +932,7 @@ pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { if tcx.stability().staged_api[&LOCAL_CRATE] { let krate = tcx.hir().krate(); - let mut missing = MissingStabilityAnnotations { - tcx, - access_levels, - }; + let mut missing = MissingStabilityAnnotations { tcx, access_levels }; missing.check_missing_stability(ast::CRATE_NODE_ID, krate.span); intravisit::walk_crate(&mut missing, krate); krate.visit_all_item_likes(&mut missing.as_deep_visitor()); @@ -871,21 +970,21 @@ pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { remaining_lib_features.remove(&Symbol::intern("libc")); remaining_lib_features.remove(&Symbol::intern("test")); - let check_features = - |remaining_lib_features: &mut FxHashMap<_, _>, defined_features: &Vec<_>| { - for &(feature, since) in defined_features { - if let Some(since) = since { - if let Some(span) = remaining_lib_features.get(&feature) { - // Warn if the user has enabled an already-stable lib feature. - unnecessary_stable_feature_lint(tcx, *span, feature, since); - } - } - remaining_lib_features.remove(&feature); - if remaining_lib_features.is_empty() { - break; + let check_features = |remaining_lib_features: &mut FxHashMap<_, _>, + defined_features: &Vec<_>| { + for &(feature, since) in defined_features { + if let Some(since) = since { + if let Some(span) = remaining_lib_features.get(&feature) { + // Warn if the user has enabled an already-stable lib feature. 
+ unnecessary_stable_feature_lint(tcx, *span, feature, since); } } - }; + remaining_lib_features.remove(&feature); + if remaining_lib_features.is_empty() { + break; + } + } + }; // We always collect the lib features declared in the current crate, even if there are // no unknown features, because the collection also does feature attribute validation. @@ -913,16 +1012,27 @@ fn unnecessary_stable_feature_lint<'a, 'tcx>( tcx: TyCtxt<'a, 'tcx, 'tcx>, span: Span, feature: Symbol, - since: Symbol + since: Symbol, ) { - tcx.lint_node(lint::builtin::STABLE_FEATURES, + tcx.lint_node( + lint::builtin::STABLE_FEATURES, ast::CRATE_NODE_ID, span, - &format!("the feature `{}` has been stable since {} and no longer requires \ - an attribute to enable", feature, since)); + &format!( + "the feature `{}` has been stable since {} and no longer requires \ + an attribute to enable", + feature, since + ), + ); } fn duplicate_feature_err(sess: &Session, span: Span, feature: Symbol) { - struct_span_err!(sess, span, E0636, "the feature `{}` has already been declared", feature) - .emit(); + struct_span_err!( + sess, + span, + E0636, + "the feature `{}` has already been declared", + feature + ) + .emit(); } diff --git a/src/librustc/middle/weak_lang_items.rs b/src/librustc/middle/weak_lang_items.rs index 82f19cbb82a19..f2e96930589e3 100644 --- a/src/librustc/middle/weak_lang_items.rs +++ b/src/librustc/middle/weak_lang_items.rs @@ -1,17 +1,17 @@ //! Validity checking for weak lang items -use session::config; use middle::lang_items; +use session::config; +use hir; +use hir::def_id::DefId; +use hir::intravisit; +use hir::intravisit::{NestedVisitorMap, Visitor}; use rustc_data_structures::fx::FxHashSet; use rustc_target::spec::PanicStrategy; use syntax::ast; use syntax::symbol::Symbol; use syntax_pos::Span; -use hir::def_id::DefId; -use hir::intravisit::{Visitor, NestedVisitorMap}; -use hir::intravisit; -use hir; use ty::TyCtxt; macro_rules! 
weak_lang_items { diff --git a/src/librustc/mir/cache.rs b/src/librustc/mir/cache.rs index 56ab263c47740..141ec44e9255b 100644 --- a/src/librustc/mir/cache.rs +++ b/src/librustc/mir/cache.rs @@ -1,18 +1,16 @@ -use rustc_data_structures::indexed_vec::IndexVec; -use rustc_data_structures::sync::{RwLock, MappedReadGuard, ReadGuard}; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, - StableHasherResult}; use ich::StableHashingContext; -use mir::{Mir, BasicBlock}; +use mir::{BasicBlock, Mir}; +use rustc_data_structures::indexed_vec::IndexVec; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; +use rustc_data_structures::sync::{MappedReadGuard, ReadGuard, RwLock}; use rustc_serialize as serialize; #[derive(Clone, Debug)] pub struct Cache { - predecessors: RwLock>>> + predecessors: RwLock>>>, } - impl serialize::Encodable for Cache { fn encode(&self, s: &mut S) -> Result<(), S::Error> { serialize::Encodable::encode(&(), s) @@ -26,9 +24,11 @@ impl serialize::Decodable for Cache { } impl<'a> HashStable> for Cache { - fn hash_stable(&self, - _: &mut StableHashingContext<'a>, - _: &mut StableHasher) { + fn hash_stable( + &self, + _: &mut StableHashingContext<'a>, + _: &mut StableHasher, + ) { // do nothing } } @@ -36,7 +36,7 @@ impl<'a> HashStable> for Cache { impl Cache { pub fn new() -> Self { Cache { - predecessors: RwLock::new(None) + predecessors: RwLock::new(None), } } @@ -47,7 +47,7 @@ impl Cache { pub fn predecessors( &self, - mir: &Mir<'_> + mir: &Mir<'_>, ) -> MappedReadGuard<'_, IndexVec>> { if self.predecessors.borrow().is_none() { *self.predecessors.borrow_mut() = Some(calculate_predecessors(mir)); diff --git a/src/librustc/mir/interpret/allocation.rs b/src/librustc/mir/interpret/allocation.rs index 7ed29c5afd03f..1694575d5b443 100644 --- a/src/librustc/mir/interpret/allocation.rs +++ b/src/librustc/mir/interpret/allocation.rs @@ -1,17 +1,17 @@ //! The virtual memory representation of the MIR interpreter use super::{ - Pointer, EvalResult, AllocId, ScalarMaybeUndef, write_target_uint, read_target_uint, Scalar, - truncate, + read_target_uint, truncate, write_target_uint, AllocId, EvalResult, Pointer, Scalar, + ScalarMaybeUndef, }; -use ty::layout::{Size, Align}; -use syntax::ast::Mutability; -use std::iter; use mir; -use std::ops::{Deref, DerefMut}; use rustc_data_structures::sorted_map::SortedMap; use rustc_target::abi::HasDataLayout; +use std::iter; +use std::ops::{Deref, DerefMut}; +use syntax::ast::Mutability; +use ty::layout::{Align, Size}; /// Used by `check_bounds` to indicate whether the pointer needs to be just inbounds /// or also inbounds of a *live* allocation. @@ -22,7 +22,7 @@ pub enum InboundsCheck { } #[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)] -pub struct Allocation { +pub struct Allocation { /// The actual bytes of the allocation. /// Note that the bytes of a pointer represent the offset of the pointer pub bytes: Vec, @@ -43,13 +43,9 @@ pub struct Allocation { pub extra: Extra, } - pub trait AllocationExtra: ::std::fmt::Debug + Clone { /// Hook to initialize the extra data when an allocation gets created. - fn memory_allocated( - _size: Size, - _memory_extra: &MemoryExtra - ) -> Self; + fn memory_allocated(_size: Size, _memory_extra: &MemoryExtra) -> Self; /// Hook for performing extra checks on a memory read access. 
/// @@ -89,10 +85,7 @@ pub trait AllocationExtra: ::std::fmt::Debug + Clone { impl AllocationExtra<(), ()> for () { #[inline(always)] - fn memory_allocated( - _size: Size, - _memory_extra: &() - ) -> Self { + fn memory_allocated(_size: Size, _memory_extra: &()) -> Self { () } } @@ -137,10 +130,7 @@ impl<'tcx, Tag, Extra> Allocation { /// of an allocation (i.e., at the first *inaccessible* location) *is* considered /// in-bounds! This follows C's/LLVM's rules. /// If you want to check bounds before doing a memory access, better use `check_bounds`. - fn check_bounds_ptr( - &self, - ptr: Pointer, - ) -> EvalResult<'tcx> { + fn check_bounds_ptr(&self, ptr: Pointer) -> EvalResult<'tcx> { let allocation_size = self.bytes.len() as u64; ptr.check_in_alloc(Size::from_bytes(allocation_size), InboundsCheck::Live) } @@ -174,8 +164,9 @@ impl<'tcx, Tag: Copy, Extra> Allocation { size: Size, check_defined_and_ptr: bool, ) -> EvalResult<'tcx, &[u8]> - // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 - where Extra: AllocationExtra + // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 + where + Extra: AllocationExtra, { self.check_bounds(cx, ptr, size)?; @@ -202,8 +193,9 @@ impl<'tcx, Tag: Copy, Extra> Allocation { ptr: Pointer, size: Size, ) -> EvalResult<'tcx, &[u8]> - // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 - where Extra: AllocationExtra + // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 + where + Extra: AllocationExtra, { self.get_bytes_internal(cx, ptr, size, true) } @@ -217,8 +209,9 @@ impl<'tcx, Tag: Copy, Extra> Allocation { ptr: Pointer, size: Size, ) -> EvalResult<'tcx, &[u8]> - // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 - where Extra: AllocationExtra + // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 + where + Extra: AllocationExtra, { self.get_bytes_internal(cx, ptr, size, false) } @@ -231,10 +224,15 @@ impl<'tcx, Tag: Copy, Extra> Allocation { ptr: Pointer, size: Size, ) -> EvalResult<'tcx, &mut [u8]> - // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 - where Extra: AllocationExtra + // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 + where + Extra: AllocationExtra, { - assert_ne!(size.bytes(), 0, "0-sized accesses should never even get a `Pointer`"); + assert_ne!( + size.bytes(), + 0, + "0-sized accesses should never even get a `Pointer`" + ); self.check_bounds(cx, ptr, size)?; self.mark_definedness(ptr, size, true)?; @@ -258,8 +256,9 @@ impl<'tcx, Tag: Copy, Extra> Allocation { cx: &impl HasDataLayout, ptr: Pointer, ) -> EvalResult<'tcx, &[u8]> - // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 - where Extra: AllocationExtra + // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 + where + Extra: AllocationExtra, { assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes()); let offset = ptr.offset.bytes() as usize; @@ -285,8 +284,9 @@ impl<'tcx, Tag: Copy, Extra> Allocation { size: Size, allow_ptr_and_undef: bool, ) -> EvalResult<'tcx> - // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 - where Extra: AllocationExtra + // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 + where + Extra: AllocationExtra, { // Check bounds and relocations on the edges self.get_bytes_with_undef_and_ptr(cx, ptr, size)?; @@ -307,8 +307,9 @@ impl<'tcx, Tag: Copy, Extra> Allocation { ptr: Pointer, src: &[u8], ) -> EvalResult<'tcx> - // 
FIXME: Working around https://github.com/rust-lang/rust/issues/56209 - where Extra: AllocationExtra + // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 + where + Extra: AllocationExtra, { let bytes = self.get_bytes_mut(cx, ptr, Size::from_bytes(src.len() as u64))?; bytes.clone_from_slice(src); @@ -321,10 +322,11 @@ impl<'tcx, Tag: Copy, Extra> Allocation { cx: &impl HasDataLayout, ptr: Pointer, val: u8, - count: Size + count: Size, ) -> EvalResult<'tcx> - // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 - where Extra: AllocationExtra + // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 + where + Extra: AllocationExtra, { let bytes = self.get_bytes_mut(cx, ptr, count)?; for b in bytes { @@ -345,10 +347,11 @@ impl<'tcx, Tag: Copy, Extra> Allocation { &self, cx: &impl HasDataLayout, ptr: Pointer, - size: Size + size: Size, ) -> EvalResult<'tcx, ScalarMaybeUndef> - // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 - where Extra: AllocationExtra + // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 + where + Extra: AllocationExtra, { // get_bytes_unchecked tests relocation edges let bytes = self.get_bytes_with_undef_and_ptr(cx, ptr, size)?; @@ -369,9 +372,9 @@ impl<'tcx, Tag: Copy, Extra> Allocation { match self.relocations.get(&ptr.offset) { Some(&(tag, alloc_id)) => { let ptr = Pointer::new_with_tag(alloc_id, Size::from_bytes(bits as u64), tag); - return Ok(ScalarMaybeUndef::Scalar(ptr.into())) + return Ok(ScalarMaybeUndef::Scalar(ptr.into())); } - None => {}, + None => {} } } // We don't. Just return the bits. @@ -384,8 +387,9 @@ impl<'tcx, Tag: Copy, Extra> Allocation { cx: &impl HasDataLayout, ptr: Pointer, ) -> EvalResult<'tcx, ScalarMaybeUndef> - // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 - where Extra: AllocationExtra + // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 + where + Extra: AllocationExtra, { self.read_scalar(cx, ptr, cx.data_layout().pointer_size) } @@ -405,8 +409,9 @@ impl<'tcx, Tag: Copy, Extra> Allocation { val: ScalarMaybeUndef, type_size: Size, ) -> EvalResult<'tcx> - // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 - where Extra: AllocationExtra + // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 + where + Extra: AllocationExtra, { let val = match val { ScalarMaybeUndef::Scalar(scalar) => scalar, @@ -421,10 +426,14 @@ impl<'tcx, Tag: Copy, Extra> Allocation { Scalar::Bits { bits, size } => { assert_eq!(size as u64, type_size.bytes()); - debug_assert_eq!(truncate(bits, Size::from_bytes(size.into())), bits, - "Unexpected value of size {} when writing to memory", size); + debug_assert_eq!( + truncate(bits, Size::from_bytes(size.into())), + bits, + "Unexpected value of size {} when writing to memory", + size + ); bits - }, + } }; let endian = cx.data_layout().endian; @@ -434,10 +443,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation { // See if we have to also write a relocation match val { Scalar::Ptr(val) => { - self.relocations.insert( - ptr.offset, - (val.tag, val.alloc_id), - ); + self.relocations.insert(ptr.offset, (val.tag, val.alloc_id)); } _ => {} } @@ -450,10 +456,11 @@ impl<'tcx, Tag: Copy, Extra> Allocation { &mut self, cx: &impl HasDataLayout, ptr: Pointer, - val: ScalarMaybeUndef + val: ScalarMaybeUndef, ) -> EvalResult<'tcx> - // FIXME: Working around https://github.com/rust-lang/rust/issues/56209 - where Extra: AllocationExtra + // FIXME: Working around 
https://github.com/rust-lang/rust/issues/56209 + where + Extra: AllocationExtra, { let ptr_size = cx.data_layout().pointer_size; self.write_scalar(cx, ptr.into(), val, ptr_size) @@ -471,7 +478,10 @@ impl<'tcx, Tag: Copy, Extra> Allocation { ) -> &[(Size, (Tag, AllocId))] { // We have to go back `pointer_size - 1` bytes, as that one would still overlap with // the beginning of this range. - let start = ptr.offset.bytes().saturating_sub(cx.data_layout().pointer_size.bytes() - 1); + let start = ptr + .offset + .bytes() + .saturating_sub(cx.data_layout().pointer_size.bytes() - 1); let end = ptr.offset + size; // this does overflow checking self.relocations.range(Size::from_bytes(start)..end) } @@ -511,8 +521,10 @@ impl<'tcx, Tag: Copy, Extra> Allocation { return Ok(()); } - (relocations.first().unwrap().0, - relocations.last().unwrap().0 + cx.data_layout().pointer_size) + ( + relocations.first().unwrap().0, + relocations.last().unwrap().0 + cx.data_layout().pointer_size, + ) }; let start = ptr.offset; let end = start + size; @@ -547,17 +559,15 @@ impl<'tcx, Tag: Copy, Extra> Allocation { } } - /// Undefined bytes impl<'tcx, Tag, Extra> Allocation { /// Checks that a range of bytes is defined. If not, returns the `ReadUndefBytes` /// error which will report the first byte which is undefined. #[inline] fn check_defined(&self, ptr: Pointer, size: Size) -> EvalResult<'tcx> { - self.undef_mask.is_range_defined( - ptr.offset, - ptr.offset + size, - ).or_else(|idx| err!(ReadUndefBytes(idx))) + self.undef_mask + .is_range_defined(ptr.offset, ptr.offset + size) + .or_else(|idx| err!(ReadUndefBytes(idx))) } pub fn mark_definedness( @@ -569,18 +579,15 @@ impl<'tcx, Tag, Extra> Allocation { if size.bytes() == 0 { return Ok(()); } - self.undef_mask.set_range( - ptr.offset, - ptr.offset + size, - new_state, - ); + self.undef_mask + .set_range(ptr.offset, ptr.offset + size, new_state); Ok(()) } } /// Relocations #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)] -pub struct Relocations(SortedMap); +pub struct Relocations(SortedMap); impl Relocations { pub fn new() -> Self { @@ -649,7 +656,7 @@ impl UndefMask { match idx { Some(idx) => Err(idx), - None => Ok(()) + None => Ok(()), } } @@ -688,9 +695,8 @@ impl UndefMask { if amount.bytes() > unused_trailing_bits { let additional_blocks = amount.bytes() / BLOCK_SIZE + 1; assert_eq!(additional_blocks as usize as u64, additional_blocks); - self.blocks.extend( - iter::repeat(0).take(additional_blocks as usize), - ); + self.blocks + .extend(iter::repeat(0).take(additional_blocks as usize)); } let start = self.len; self.len += amount; diff --git a/src/librustc/mir/interpret/error.rs b/src/librustc/mir/interpret/error.rs index 19be0c08ef13b..2430bda069e24 100644 --- a/src/librustc/mir/interpret/error.rs +++ b/src/librustc/mir/interpret/error.rs @@ -1,21 +1,21 @@ -use std::{fmt, env}; +use std::{env, fmt}; use hir::map::definitions::DefPathData; use mir; -use ty::{self, Ty, layout}; -use ty::layout::{Size, Align, LayoutError}; use rustc_target::spec::abi::Abi; +use ty::layout::{Align, LayoutError, Size}; +use ty::{self, layout, Ty}; -use super::{RawConst, Pointer, InboundsCheck, ScalarMaybeUndef}; +use super::{InboundsCheck, Pointer, RawConst, ScalarMaybeUndef}; use backtrace::Backtrace; -use ty::query::TyCtxtAt; use errors::DiagnosticBuilder; +use ty::query::TyCtxtAt; -use syntax_pos::{Pos, Span}; use syntax::ast; use syntax::symbol::Symbol; +use syntax_pos::{Pos, Span}; #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum 
ErrorHandled { @@ -29,9 +29,11 @@ pub enum ErrorHandled { impl ErrorHandled { pub fn assert_reported(self) { match self { - ErrorHandled::Reported => {}, - ErrorHandled::TooGeneric => bug!("MIR interpretation failed without reporting an error \ - even though it was fully monomorphized"), + ErrorHandled::Reported => {} + ErrorHandled::TooGeneric => bug!( + "MIR interpretation failed without reporting an error \ + even though it was fully monomorphized" + ), } } } @@ -64,8 +66,17 @@ impl<'tcx> fmt::Display for FrameInfo<'tcx> { write!(f, "inside call to `{}`", self.instance)?; } if !self.call_site.is_dummy() { - let lo = tcx.sess.source_map().lookup_char_pos_adj(self.call_site.lo()); - write!(f, " at {}:{}:{}", lo.filename, lo.line, lo.col.to_usize() + 1)?; + let lo = tcx + .sess + .source_map() + .lookup_char_pos_adj(self.call_site.lo()); + write!( + f, + " at {}:{}:{}", + lo.filename, + lo.line, + lo.col.to_usize() + 1 + )?; } Ok(()) }) @@ -73,43 +84,37 @@ impl<'tcx> fmt::Display for FrameInfo<'tcx> { } impl<'a, 'gcx, 'tcx> ConstEvalErr<'tcx> { - pub fn struct_error(&self, + pub fn struct_error( + &self, tcx: TyCtxtAt<'a, 'gcx, 'tcx>, - message: &str) - -> Result, ErrorHandled> - { + message: &str, + ) -> Result, ErrorHandled> { self.struct_generic(tcx, message, None) } - pub fn report_as_error(&self, - tcx: TyCtxtAt<'a, 'gcx, 'tcx>, - message: &str - ) -> ErrorHandled { + pub fn report_as_error(&self, tcx: TyCtxtAt<'a, 'gcx, 'tcx>, message: &str) -> ErrorHandled { let err = self.struct_error(tcx, message); match err { Ok(mut err) => { err.emit(); ErrorHandled::Reported - }, + } Err(err) => err, } } - pub fn report_as_lint(&self, + pub fn report_as_lint( + &self, tcx: TyCtxtAt<'a, 'gcx, 'tcx>, message: &str, lint_root: ast::NodeId, ) -> ErrorHandled { - let lint = self.struct_generic( - tcx, - message, - Some(lint_root), - ); + let lint = self.struct_generic(tcx, message, Some(lint_root)); match lint { Ok(mut lint) => { lint.emit(); ErrorHandled::Reported - }, + } Err(err) => err, } } @@ -121,15 +126,18 @@ impl<'a, 'gcx, 'tcx> ConstEvalErr<'tcx> { lint_root: Option, ) -> Result, ErrorHandled> { match self.error { - EvalErrorKind::Layout(LayoutError::Unknown(_)) | - EvalErrorKind::TooGeneric => return Err(ErrorHandled::TooGeneric), - EvalErrorKind::Layout(LayoutError::SizeOverflow(_)) | - EvalErrorKind::TypeckError => return Err(ErrorHandled::Reported), - _ => {}, + EvalErrorKind::Layout(LayoutError::Unknown(_)) | EvalErrorKind::TooGeneric => { + return Err(ErrorHandled::TooGeneric) + } + EvalErrorKind::Layout(LayoutError::SizeOverflow(_)) | EvalErrorKind::TypeckError => { + return Err(ErrorHandled::Reported) + } + _ => {} } trace!("reporting const eval failure at {:?}", self.span); let mut err = if let Some(lint_root) = lint_root { - let node_id = self.stacktrace + let node_id = self + .stacktrace .iter() .rev() .filter_map(|frame| frame.lint_root) @@ -149,7 +157,7 @@ impl<'a, 'gcx, 'tcx> ConstEvalErr<'tcx> { // is sometimes empty because we create "fake" eval contexts in CTFE to do work // on constant values. 
if self.stacktrace.len() > 0 { - for frame_info in &self.stacktrace[..self.stacktrace.len()-1] { + for frame_info in &self.stacktrace[..self.stacktrace.len() - 1] { err.span_label(frame_info.call_site, frame_info.to_string()); } } @@ -197,13 +205,10 @@ impl<'tcx> From> for EvalError<'tcx> { } else { Some(Box::new(backtrace)) } - }, + } _ => None, }; - EvalError { - kind, - backtrace, - } + EvalError { kind, backtrace } } } @@ -243,7 +248,10 @@ pub enum EvalErrorKind<'tcx, O> { Unimplemented(String), DerefFunctionPointer, ExecuteMemory, - BoundsCheck { len: O, index: O }, + BoundsCheck { + len: O, + index: O, + }, Overflow(mir::BinOp), OverflowNeg, DivisionByZero, @@ -303,114 +311,87 @@ impl<'tcx, O> EvalErrorKind<'tcx, O> { use self::EvalErrorKind::*; match *self { MachineError(ref inner) => inner, - FunctionAbiMismatch(..) | FunctionArgMismatch(..) | FunctionRetMismatch(..) - | FunctionArgCountMismatch => - "tried to call a function through a function pointer of incompatible type", - InvalidMemoryAccess => - "tried to access memory through an invalid pointer", - DanglingPointerDeref => - "dangling pointer was dereferenced", - DoubleFree => - "tried to deallocate dangling pointer", - InvalidFunctionPointer => - "tried to use a function pointer after offsetting it", - InvalidBool => - "invalid boolean value read", - InvalidDiscriminant(..) => - "invalid enum discriminant value read", - PointerOutOfBounds { .. } => - "pointer offset outside bounds of allocation", - InvalidNullPointerUsage => - "invalid use of NULL pointer", - ValidationFailure(..) => - "type validation failed", - ReadPointerAsBytes => - "a raw memory access tried to access part of a pointer value as raw bytes", - ReadBytesAsPointer => - "a memory access tried to interpret some bytes as a pointer", - ReadForeignStatic => - "tried to read from foreign (extern) static", - InvalidPointerMath => + FunctionAbiMismatch(..) + | FunctionArgMismatch(..) + | FunctionRetMismatch(..) + | FunctionArgCountMismatch => { + "tried to call a function through a function pointer of incompatible type" + } + InvalidMemoryAccess => "tried to access memory through an invalid pointer", + DanglingPointerDeref => "dangling pointer was dereferenced", + DoubleFree => "tried to deallocate dangling pointer", + InvalidFunctionPointer => "tried to use a function pointer after offsetting it", + InvalidBool => "invalid boolean value read", + InvalidDiscriminant(..) => "invalid enum discriminant value read", + PointerOutOfBounds { .. } => "pointer offset outside bounds of allocation", + InvalidNullPointerUsage => "invalid use of NULL pointer", + ValidationFailure(..) 
=> "type validation failed", + ReadPointerAsBytes => { + "a raw memory access tried to access part of a pointer value as raw bytes" + } + ReadBytesAsPointer => "a memory access tried to interpret some bytes as a pointer", + ReadForeignStatic => "tried to read from foreign (extern) static", + InvalidPointerMath => { "attempted to do invalid arithmetic on pointers that would leak base addresses, \ - e.g., comparing pointers into different allocations", - ReadUndefBytes(_) => - "attempted to read undefined bytes", - DeadLocal => - "tried to access a dead local variable", - InvalidBoolOp(_) => - "invalid boolean operation", + e.g., comparing pointers into different allocations" + } + ReadUndefBytes(_) => "attempted to read undefined bytes", + DeadLocal => "tried to access a dead local variable", + InvalidBoolOp(_) => "invalid boolean operation", Unimplemented(ref msg) => msg, - DerefFunctionPointer => - "tried to dereference a function pointer", - ExecuteMemory => - "tried to treat a memory pointer as a function pointer", - BoundsCheck{..} => - "array index out of bounds", - Intrinsic(..) => - "intrinsic failed", - NoMirFor(..) => - "mir not found", - InvalidChar(..) => - "tried to interpret an invalid 32-bit value as a char", - StackFrameLimitReached => - "reached the configured maximum number of stack frames", - OutOfTls => - "reached the maximum number of representable TLS keys", - TlsOutOfBounds => - "accessed an invalid (unallocated) TLS key", + DerefFunctionPointer => "tried to dereference a function pointer", + ExecuteMemory => "tried to treat a memory pointer as a function pointer", + BoundsCheck { .. } => "array index out of bounds", + Intrinsic(..) => "intrinsic failed", + NoMirFor(..) => "mir not found", + InvalidChar(..) => "tried to interpret an invalid 32-bit value as a char", + StackFrameLimitReached => "reached the configured maximum number of stack frames", + OutOfTls => "reached the maximum number of representable TLS keys", + TlsOutOfBounds => "accessed an invalid (unallocated) TLS key", AbiViolation(ref msg) => msg, - AlignmentCheckFailed{..} => - "tried to execute a misaligned read or write", - CalledClosureAsFunction => - "tried to call a closure through a function pointer", - VtableForArgumentlessMethod => - "tried to call a vtable function without arguments", - ModifiedConstantMemory => - "tried to modify constant memory", - ModifiedStatic => - "tried to modify a static's initial value from another static's initializer", - AssumptionNotHeld => - "`assume` argument was false", - InlineAsm => - "miri does not support inline assembly", - TypeNotPrimitive(_) => - "expected primitive type, got nonprimitive", - ReallocatedWrongMemoryKind(_, _) => - "tried to reallocate memory from one kind to another", - DeallocatedWrongMemoryKind(_, _) => - "tried to deallocate memory of the wrong kind", - ReallocateNonBasePtr => - "tried to reallocate with a pointer not to the beginning of an existing object", - DeallocateNonBasePtr => - "tried to deallocate with a pointer not to the beginning of an existing object", - IncorrectAllocationInformation(..) => - "tried to deallocate or reallocate using incorrect alignment or size", - Layout(_) => - "rustc layout computation failed", - UnterminatedCString(_) => + AlignmentCheckFailed { .. 
} => "tried to execute a misaligned read or write", + CalledClosureAsFunction => "tried to call a closure through a function pointer", + VtableForArgumentlessMethod => "tried to call a vtable function without arguments", + ModifiedConstantMemory => "tried to modify constant memory", + ModifiedStatic => { + "tried to modify a static's initial value from another static's initializer" + } + AssumptionNotHeld => "`assume` argument was false", + InlineAsm => "miri does not support inline assembly", + TypeNotPrimitive(_) => "expected primitive type, got nonprimitive", + ReallocatedWrongMemoryKind(_, _) => { + "tried to reallocate memory from one kind to another" + } + DeallocatedWrongMemoryKind(_, _) => "tried to deallocate memory of the wrong kind", + ReallocateNonBasePtr => { + "tried to reallocate with a pointer not to the beginning of an existing object" + } + DeallocateNonBasePtr => { + "tried to deallocate with a pointer not to the beginning of an existing object" + } + IncorrectAllocationInformation(..) => { + "tried to deallocate or reallocate using incorrect alignment or size" + } + Layout(_) => "rustc layout computation failed", + UnterminatedCString(_) => { "attempted to get length of a null terminated string, but no null found before end \ - of allocation", - HeapAllocZeroBytes => - "tried to re-, de- or allocate zero bytes on the heap", - HeapAllocNonPowerOfTwoAlignment(_) => + of allocation" + } + HeapAllocZeroBytes => "tried to re-, de- or allocate zero bytes on the heap", + HeapAllocNonPowerOfTwoAlignment(_) => { "tried to re-, de-, or allocate heap memory with alignment that is not a power of \ - two", - Unreachable => - "entered unreachable code", - Panic { .. } => - "the evaluated program panicked", - ReadFromReturnPointer => - "tried to read from the return pointer", - PathNotFound(_) => - "a path could not be resolved, maybe the crate is not loaded", - UnimplementedTraitSelection => - "there were unresolved type arguments during trait selection", - TypeckError => - "encountered constants with type errors, stopping evaluation", - TooGeneric => - "encountered overly generic constant", - ReferencedConstant => - "referenced constant has errors", + two" + } + Unreachable => "entered unreachable code", + Panic { .. 
} => "the evaluated program panicked", + ReadFromReturnPointer => "tried to read from the return pointer", + PathNotFound(_) => "a path could not be resolved, maybe the crate is not loaded", + UnimplementedTraitSelection => { + "there were unresolved type arguments during trait selection" + } + TypeckError => "encountered constants with type errors, stopping evaluation", + TooGeneric => "encountered overly generic constant", + ReferencedConstant => "referenced constant has errors", Overflow(mir::BinOp::Add) => "attempt to add with overflow", Overflow(mir::BinOp::Sub) => "attempt to subtract with overflow", Overflow(mir::BinOp::Mul) => "attempt to multiply with overflow", @@ -424,8 +405,9 @@ impl<'tcx, O> EvalErrorKind<'tcx, O> { RemainderByZero => "attempt to calculate the remainder with a divisor of zero", GeneratorResumedAfterReturn => "generator resumed after completion", GeneratorResumedAfterPanic => "generator resumed after panicking", - InfiniteLoop => - "duplicate interpreter state observed here, const evaluation will never terminate", + InfiniteLoop => { + "duplicate interpreter state observed here, const evaluation will never terminate" + } } } } @@ -446,61 +428,94 @@ impl<'tcx, O: fmt::Debug> fmt::Debug for EvalErrorKind<'tcx, O> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use self::EvalErrorKind::*; match *self { - PointerOutOfBounds { ptr, check, allocation_size } => { - write!(f, "Pointer must be in-bounds{} at offset {}, but is outside bounds of \ - allocation {} which has size {}", - match check { - InboundsCheck::Live => " and live", - InboundsCheck::MaybeDead => "", - }, - ptr.offset.bytes(), ptr.alloc_id, allocation_size.bytes()) - }, - ValidationFailure(ref err) => { - write!(f, "type validation failed: {}", err) - } + PointerOutOfBounds { + ptr, + check, + allocation_size, + } => write!( + f, + "Pointer must be in-bounds{} at offset {}, but is outside bounds of \ + allocation {} which has size {}", + match check { + InboundsCheck::Live => " and live", + InboundsCheck::MaybeDead => "", + }, + ptr.offset.bytes(), + ptr.alloc_id, + allocation_size.bytes() + ), + ValidationFailure(ref err) => write!(f, "type validation failed: {}", err), NoMirFor(ref func) => write!(f, "no mir for `{}`", func), - FunctionAbiMismatch(caller_abi, callee_abi) => - write!(f, "tried to call a function with ABI {:?} using caller ABI {:?}", - callee_abi, caller_abi), - FunctionArgMismatch(caller_ty, callee_ty) => - write!(f, "tried to call a function with argument of type {:?} \ - passing data of type {:?}", - callee_ty, caller_ty), - FunctionRetMismatch(caller_ty, callee_ty) => - write!(f, "tried to call a function with return type {:?} \ - passing return place of type {:?}", - callee_ty, caller_ty), - FunctionArgCountMismatch => - write!(f, "tried to call a function with incorrect number of arguments"), - BoundsCheck { ref len, ref index } => - write!(f, "index out of bounds: the len is {:?} but the index is {:?}", len, index), - ReallocatedWrongMemoryKind(ref old, ref new) => - write!(f, "tried to reallocate memory from {} to {}", old, new), - DeallocatedWrongMemoryKind(ref old, ref new) => - write!(f, "tried to deallocate {} memory but gave {} as the kind", old, new), - Intrinsic(ref err) => - write!(f, "{}", err), - InvalidChar(c) => - write!(f, "tried to interpret an invalid 32-bit value as a char: {}", c), - AlignmentCheckFailed { required, has } => - write!(f, "tried to access memory with alignment {}, but alignment {} is required", - has.bytes(), required.bytes()), - 
TypeNotPrimitive(ty) => - write!(f, "expected primitive type, got {}", ty), - Layout(ref err) => - write!(f, "rustc layout computation failed: {:?}", err), - PathNotFound(ref path) => - write!(f, "Cannot find path {:?}", path), - MachineError(ref inner) => - write!(f, "{}", inner), - IncorrectAllocationInformation(size, size2, align, align2) => - write!(f, "incorrect alloc info: expected size {} and align {}, \ - got size {} and align {}", - size.bytes(), align.bytes(), size2.bytes(), align2.bytes()), - Panic { ref msg, line, col, ref file } => - write!(f, "the evaluated program panicked at '{}', {}:{}:{}", msg, file, line, col), - InvalidDiscriminant(val) => - write!(f, "encountered invalid enum discriminant {}", val), + FunctionAbiMismatch(caller_abi, callee_abi) => write!( + f, + "tried to call a function with ABI {:?} using caller ABI {:?}", + callee_abi, caller_abi + ), + FunctionArgMismatch(caller_ty, callee_ty) => write!( + f, + "tried to call a function with argument of type {:?} \ + passing data of type {:?}", + callee_ty, caller_ty + ), + FunctionRetMismatch(caller_ty, callee_ty) => write!( + f, + "tried to call a function with return type {:?} \ + passing return place of type {:?}", + callee_ty, caller_ty + ), + FunctionArgCountMismatch => write!( + f, + "tried to call a function with incorrect number of arguments" + ), + BoundsCheck { ref len, ref index } => write!( + f, + "index out of bounds: the len is {:?} but the index is {:?}", + len, index + ), + ReallocatedWrongMemoryKind(ref old, ref new) => { + write!(f, "tried to reallocate memory from {} to {}", old, new) + } + DeallocatedWrongMemoryKind(ref old, ref new) => write!( + f, + "tried to deallocate {} memory but gave {} as the kind", + old, new + ), + Intrinsic(ref err) => write!(f, "{}", err), + InvalidChar(c) => write!( + f, + "tried to interpret an invalid 32-bit value as a char: {}", + c + ), + AlignmentCheckFailed { required, has } => write!( + f, + "tried to access memory with alignment {}, but alignment {} is required", + has.bytes(), + required.bytes() + ), + TypeNotPrimitive(ty) => write!(f, "expected primitive type, got {}", ty), + Layout(ref err) => write!(f, "rustc layout computation failed: {:?}", err), + PathNotFound(ref path) => write!(f, "Cannot find path {:?}", path), + MachineError(ref inner) => write!(f, "{}", inner), + IncorrectAllocationInformation(size, size2, align, align2) => write!( + f, + "incorrect alloc info: expected size {} and align {}, \ + got size {} and align {}", + size.bytes(), + align.bytes(), + size2.bytes(), + align2.bytes() + ), + Panic { + ref msg, + line, + col, + ref file, + } => write!( + f, + "the evaluated program panicked at '{}', {}:{}:{}", + msg, file, line, col + ), + InvalidDiscriminant(val) => write!(f, "encountered invalid enum discriminant {}", val), _ => write!(f, "{}", self.description()), } } diff --git a/src/librustc/mir/interpret/mod.rs b/src/librustc/mir/interpret/mod.rs index 25a0f0ba489d1..9eecb03625482 100644 --- a/src/librustc/mir/interpret/mod.rs +++ b/src/librustc/mir/interpret/mod.rs @@ -5,40 +5,37 @@ macro_rules! 
err { ($($tt:tt)*) => { Err($crate::mir::interpret::EvalErrorKind::$($tt)*.into()) }; } -mod error; -mod value; mod allocation; +mod error; mod pointer; +mod value; pub use self::error::{ - EvalError, EvalResult, EvalErrorKind, AssertMessage, ConstEvalErr, struct_error, - FrameInfo, ConstEvalRawResult, ConstEvalResult, ErrorHandled, + struct_error, AssertMessage, ConstEvalErr, ConstEvalRawResult, ConstEvalResult, ErrorHandled, + EvalError, EvalErrorKind, EvalResult, FrameInfo, }; -pub use self::value::{Scalar, ScalarMaybeUndef, RawConst, ConstValue}; +pub use self::value::{ConstValue, RawConst, Scalar, ScalarMaybeUndef}; -pub use self::allocation::{ - InboundsCheck, Allocation, AllocationExtra, - Relocations, UndefMask, -}; +pub use self::allocation::{Allocation, AllocationExtra, InboundsCheck, Relocations, UndefMask}; pub use self::pointer::{Pointer, PointerArithmetic}; -use std::fmt; -use mir; +use byteorder::{BigEndian, LittleEndian, ReadBytesExt, WriteBytesExt}; use hir::def_id::DefId; -use ty::{self, TyCtxt, Instance}; -use ty::layout::{self, Size}; use middle::region; -use std::io; -use rustc_serialize::{Encoder, Decodable, Encodable}; +use mir; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::sync::{Lock as Mutex, HashMapExt}; +use rustc_data_structures::sync::{HashMapExt, Lock as Mutex}; use rustc_data_structures::tiny_list::TinyList; -use byteorder::{WriteBytesExt, ReadBytesExt, LittleEndian, BigEndian}; -use ty::codec::TyDecoder; -use std::sync::atomic::{AtomicU32, Ordering}; +use rustc_serialize::{Decodable, Encodable, Encoder}; +use std::fmt; +use std::io; use std::num::NonZeroU32; +use std::sync::atomic::{AtomicU32, Ordering}; +use ty::codec::TyDecoder; +use ty::layout::{self, Size}; +use ty::{self, Instance, TyCtxt}; #[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub enum Lock { @@ -85,16 +82,16 @@ enum AllocDiscriminant { Static, } -pub fn specialized_encode_alloc_id< - 'a, 'tcx, - E: Encoder, ->( +pub fn specialized_encode_alloc_id<'a, 'tcx, E: Encoder>( encoder: &mut E, tcx: TyCtxt<'a, 'tcx, 'tcx>, alloc_id: AllocId, ) -> Result<(), E::Error> { - let alloc_kind: AllocKind<'tcx> = - tcx.alloc_map.lock().get(alloc_id).expect("no value for AllocId"); + let alloc_kind: AllocKind<'tcx> = tcx + .alloc_map + .lock() + .get(alloc_id) + .expect("no value for AllocId"); match alloc_kind { AllocKind::Memory(alloc) => { trace!("encoding {:?} with {:#?}", alloc_id, alloc); @@ -135,7 +132,6 @@ pub struct AllocDecodingState { } impl AllocDecodingState { - pub fn new_decoding_session(&self) -> AllocDecodingSession<'_> { static DECODER_SESSION_ID: AtomicU32 = AtomicU32::new(0); let counter = DECODER_SESSION_ID.fetch_add(1, Ordering::SeqCst); @@ -166,13 +162,11 @@ pub struct AllocDecodingSession<'s> { } impl<'s> AllocDecodingSession<'s> { - // Decodes an AllocId in a thread-safe way. - pub fn decode_alloc_id<'a, 'tcx, D>(&self, - decoder: &mut D) - -> Result - where D: TyDecoder<'a, 'tcx>, - 'tcx: 'a, + pub fn decode_alloc_id<'a, 'tcx, D>(&self, decoder: &mut D) -> Result + where + D: TyDecoder<'a, 'tcx>, + 'tcx: 'a, { // Read the index of the allocation let idx = decoder.read_u32()? as usize; @@ -201,16 +195,15 @@ impl<'s> AllocDecodingSession<'s> { // If this is an allocation, we need to reserve an // AllocId so we can decode cyclic graphs. 
let alloc_id = decoder.tcx().alloc_map.lock().reserve(); - *entry = State::InProgress( - TinyList::new_single(self.session_id), - alloc_id); + *entry = + State::InProgress(TinyList::new_single(self.session_id), alloc_id); Some(alloc_id) - }, + } AllocDiscriminant::Fn | AllocDiscriminant::Static => { // Fns and statics cannot be cyclic and their AllocId // is determined later by interning - *entry = State::InProgressNonAlloc( - TinyList::new_single(self.session_id)); + *entry = + State::InProgressNonAlloc(TinyList::new_single(self.session_id)); None } } @@ -227,7 +220,7 @@ impl<'s> AllocDecodingSession<'s> { State::InProgress(ref mut sessions, alloc_id) => { if sessions.contains(&self.session_id) { // Don't recurse. - return Ok(alloc_id) + return Ok(alloc_id); } else { // Start decoding concurrently sessions.insert(self.session_id); @@ -245,9 +238,13 @@ impl<'s> AllocDecodingSession<'s> { // We already have a reserved AllocId. let alloc_id = alloc_id.unwrap(); trace!("decoded alloc {:?} {:#?}", alloc_id, allocation); - decoder.tcx().alloc_map.lock().set_alloc_id_same_memory(alloc_id, allocation); + decoder + .tcx() + .alloc_map + .lock() + .set_alloc_id_same_memory(alloc_id, allocation); Ok(alloc_id) - }, + } AllocDiscriminant::Fn => { assert!(alloc_id.is_none()); trace!("creating fn alloc id"); @@ -255,7 +252,7 @@ impl<'s> AllocDecodingSession<'s> { trace!("decoded fn alloc instance: {:?}", instance); let alloc_id = decoder.tcx().alloc_map.lock().create_fn_alloc(instance); Ok(alloc_id) - }, + } AllocDiscriminant::Static => { assert!(alloc_id.is_none()); trace!("creating extern static alloc id at"); @@ -317,15 +314,13 @@ impl<'tcx> AllocMap<'tcx> { /// /// Make sure to call `set_alloc_id_memory` or `set_alloc_id_same_memory` before returning such /// an `AllocId` from a query. - pub fn reserve( - &mut self, - ) -> AllocId { + pub fn reserve(&mut self) -> AllocId { let next = self.next_id; - self.next_id.0 = self.next_id.0 - .checked_add(1) - .expect("You overflowed a u64 by incrementing by 1... \ - You've just earned yourself a free drink if we ever meet. \ - Seriously, how did you do that?!"); + self.next_id.0 = self.next_id.0.checked_add(1).expect( + "You overflowed a u64 by incrementing by 1... \ + You've just earned yourself a free drink if we ever meet. \ + Seriously, how did you do that?!", + ); next } @@ -388,7 +383,11 @@ impl<'tcx> AllocMap<'tcx> { /// call this function twice, even with the same `Allocation` will ICE the compiler. 
pub fn set_alloc_id_memory(&mut self, id: AllocId, mem: &'tcx Allocation) { if let Some(old) = self.id_to_kind.insert(id, AllocKind::Memory(mem)) { - bug!("tried to set allocation id {}, but it was already existing as {:#?}", id, old); + bug!( + "tried to set allocation id {}, but it was already existing as {:#?}", + id, + old + ); } } diff --git a/src/librustc/mir/interpret/pointer.rs b/src/librustc/mir/interpret/pointer.rs index a046825f088bb..3c8975ed27b4c 100644 --- a/src/librustc/mir/interpret/pointer.rs +++ b/src/librustc/mir/interpret/pointer.rs @@ -1,9 +1,7 @@ use mir; use ty::layout::{self, HasDataLayout, Size}; -use super::{ - AllocId, EvalResult, InboundsCheck, -}; +use super::{AllocId, EvalResult, InboundsCheck}; //////////////////////////////////////////////////////////////////////////////// // Pointer arithmetic @@ -27,7 +25,11 @@ pub trait PointerArithmetic: layout::HasDataLayout { #[inline] fn offset<'tcx>(&self, val: u64, i: u64) -> EvalResult<'tcx, u64> { let (res, over) = self.overflowing_offset(val, i); - if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) } + if over { + err!(Overflow(mir::BinOp::Add)) + } else { + Ok(res) + } } #[inline] @@ -40,7 +42,11 @@ pub trait PointerArithmetic: layout::HasDataLayout { #[inline] fn signed_offset<'tcx>(&self, val: u64, i: i64) -> EvalResult<'tcx, u64> { let (res, over) = self.overflowing_signed_offset(val, i128::from(i)); - if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) } + if over { + err!(Overflow(mir::BinOp::Add)) + } else { + Ok(res) + } } // Overflow checking only works properly on the range from -u64 to +u64. @@ -60,7 +66,6 @@ pub trait PointerArithmetic: layout::HasDataLayout { impl PointerArithmetic for T {} - /// Pointer is generic over the type that represents a reference to Allocations, /// thus making it possible for the most convenient representation to be used in /// each context. @@ -69,8 +74,10 @@ impl PointerArithmetic for T {} /// /// Pointer is also generic over the `Tag` associated with each pointer, /// which is used to do provenance tracking during execution. 
-#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)] -pub struct Pointer { +#[derive( + Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash, +)] +pub struct Pointer { pub alloc_id: Id, pub offset: Size, pub tag: Tag, @@ -87,12 +94,17 @@ impl From for Pointer { impl<'tcx> Pointer<()> { #[inline(always)] pub fn new(alloc_id: AllocId, offset: Size) -> Self { - Pointer { alloc_id, offset, tag: () } + Pointer { + alloc_id, + offset, + tag: (), + } } #[inline(always)] pub fn with_default_tag(self) -> Pointer - where Tag: Default + where + Tag: Default, { Pointer::new_with_tag(self.alloc_id, self.offset, Default::default()) } @@ -101,7 +113,11 @@ impl<'tcx> Pointer<()> { impl<'tcx, Tag> Pointer { #[inline(always)] pub fn new_with_tag(alloc_id: AllocId, offset: Size, tag: Tag) -> Self { - Pointer { alloc_id, offset, tag } + Pointer { + alloc_id, + offset, + tag, + } } #[inline] @@ -109,14 +125,19 @@ impl<'tcx, Tag> Pointer { Ok(Pointer::new_with_tag( self.alloc_id, Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?), - self.tag + self.tag, )) } #[inline] pub fn overflowing_offset(self, i: Size, cx: &impl HasDataLayout) -> (Self, bool) { - let (res, over) = cx.data_layout().overflowing_offset(self.offset.bytes(), i.bytes()); - (Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over) + let (res, over) = cx + .data_layout() + .overflowing_offset(self.offset.bytes(), i.bytes()); + ( + Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), + over, + ) } #[inline(always)] @@ -135,8 +156,13 @@ impl<'tcx, Tag> Pointer { #[inline] pub fn overflowing_signed_offset(self, i: i128, cx: &impl HasDataLayout) -> (Self, bool) { - let (res, over) = cx.data_layout().overflowing_signed_offset(self.offset.bytes(), i); - (Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over) + let (res, over) = cx + .data_layout() + .overflowing_signed_offset(self.offset.bytes(), i); + ( + Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), + over, + ) } #[inline(always)] @@ -146,7 +172,11 @@ impl<'tcx, Tag> Pointer { #[inline(always)] pub fn erase_tag(self) -> Pointer { - Pointer { alloc_id: self.alloc_id, offset: self.offset, tag: () } + Pointer { + alloc_id: self.alloc_id, + offset: self.offset, + tag: (), + } } #[inline(always)] diff --git a/src/librustc/mir/interpret/value.rs b/src/librustc/mir/interpret/value.rs index 1e4cfedc2a6ae..2817c46721258 100644 --- a/src/librustc/mir/interpret/value.rs +++ b/src/librustc/mir/interpret/value.rs @@ -1,9 +1,13 @@ use std::fmt; -use crate::ty::{Ty, subst::Substs, layout::{HasDataLayout, Size}}; use crate::hir::def_id::DefId; +use crate::ty::{ + layout::{HasDataLayout, Size}, + subst::Substs, + Ty, +}; -use super::{EvalResult, Pointer, PointerArithmetic, Allocation, AllocId, sign_extend, truncate}; +use super::{sign_extend, truncate, AllocId, Allocation, EvalResult, Pointer, PointerArithmetic}; /// Represents the result of a raw const operation, pre-validation. #[derive(Copy, Clone, Debug, Eq, PartialEq, RustcEncodable, RustcDecodable, Hash)] @@ -16,7 +20,9 @@ pub struct RawConst<'tcx> { /// Represents a constant value in Rust. Scalar and ScalarPair are optimizations which /// matches the LocalValue optimizations for easy conversions between Value and ConstValue. 
-#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)] +#[derive( + Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash, +)] pub enum ConstValue<'tcx> { /// Never returned from the `const_eval` query, but the HIR contains these frequently in order /// to allow HIR creation to happen for everything before needing to be able to run constant @@ -43,9 +49,9 @@ impl<'tcx> ConstValue<'tcx> { #[inline] pub fn try_to_scalar(&self) -> Option { match *self { - ConstValue::Unevaluated(..) | - ConstValue::ByRef(..) | - ConstValue::ScalarPair(..) => None, + ConstValue::Unevaluated(..) | ConstValue::ByRef(..) | ConstValue::ScalarPair(..) => { + None + } ConstValue::Scalar(val) => Some(val), } } @@ -61,15 +67,14 @@ impl<'tcx> ConstValue<'tcx> { } #[inline] - pub fn new_slice( - val: Scalar, - len: u64, - cx: &impl HasDataLayout - ) -> Self { - ConstValue::ScalarPair(val, Scalar::Bits { - bits: len as u128, - size: cx.data_layout().pointer_size.bytes() as u8, - }) + pub fn new_slice(val: Scalar, len: u64, cx: &impl HasDataLayout) -> Self { + ConstValue::ScalarPair( + val, + Scalar::Bits { + bits: len as u128, + size: cx.data_layout().pointer_size.bytes() as u8, + }, + ) } #[inline] @@ -82,8 +87,10 @@ impl<'tcx> ConstValue<'tcx> { /// `memory::Allocation`. It is in many ways like a small chunk of a `Allocation`, up to 8 bytes in /// size. Like a range of bytes in an `Allocation`, a `Scalar` can either represent the raw bytes /// of a simple value or a pointer into another `Allocation` -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)] -pub enum Scalar { +#[derive( + Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash, +)] +pub enum Scalar { /// The raw bytes of a simple value. Bits { /// The first `size` bytes are the value. 
@@ -110,7 +117,8 @@ impl fmt::Display for Scalar { impl<'tcx> Scalar<()> { #[inline] pub fn with_default_tag(self) -> Scalar - where Tag: Default + where + Tag: Default, { match self { Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_default_tag()), @@ -131,7 +139,10 @@ impl<'tcx, Tag> Scalar { #[inline] pub fn with_tag(self, new_tag: Tag) -> Self { match self { - Scalar::Ptr(ptr) => Scalar::Ptr(Pointer { tag: new_tag, ..ptr }), + Scalar::Ptr(ptr) => Scalar::Ptr(Pointer { + tag: new_tag, + ..ptr + }), Scalar::Bits { bits, size } => Scalar::Bits { bits, size }, } } @@ -228,27 +239,41 @@ impl<'tcx, Tag> Scalar { Scalar::Bits { bits, size } => { assert_eq!(size as u64, cx.data_layout().pointer_size.bytes()); bits == 0 - }, + } Scalar::Ptr(_) => false, } } #[inline] pub fn from_bool(b: bool) -> Self { - Scalar::Bits { bits: b as u128, size: 1 } + Scalar::Bits { + bits: b as u128, + size: 1, + } } #[inline] pub fn from_char(c: char) -> Self { - Scalar::Bits { bits: c as u128, size: 4 } + Scalar::Bits { + bits: c as u128, + size: 4, + } } #[inline] pub fn from_uint(i: impl Into, size: Size) -> Self { let i = i.into(); - debug_assert_eq!(truncate(i, size), i, - "Unsigned value {} does not fit in {} bits", i, size.bits()); - Scalar::Bits { bits: i, size: size.bytes() as u8 } + debug_assert_eq!( + truncate(i, size), + i, + "Unsigned value {} does not fit in {} bits", + i, + size.bits() + ); + Scalar::Bits { + bits: i, + size: size.bytes() as u8, + } } #[inline] @@ -256,19 +281,33 @@ impl<'tcx, Tag> Scalar { let i = i.into(); // `into` performed sign extension, we have to truncate let truncated = truncate(i as u128, size); - debug_assert_eq!(sign_extend(truncated, size) as i128, i, - "Signed value {} does not fit in {} bits", i, size.bits()); - Scalar::Bits { bits: truncated, size: size.bytes() as u8 } + debug_assert_eq!( + sign_extend(truncated, size) as i128, + i, + "Signed value {} does not fit in {} bits", + i, + size.bits() + ); + Scalar::Bits { + bits: truncated, + size: size.bytes() as u8, + } } #[inline] pub fn from_f32(f: f32) -> Self { - Scalar::Bits { bits: f.to_bits() as u128, size: 4 } + Scalar::Bits { + bits: f.to_bits() as u128, + size: 4, + } } #[inline] pub fn from_f64(f: f64) -> Self { - Scalar::Bits { bits: f.to_bits() as u128, size: 8 } + Scalar::Bits { + bits: f.to_bits() as u128, + size: 8, + } } #[inline] @@ -400,8 +439,10 @@ impl From> for Scalar { } } -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)] -pub enum ScalarMaybeUndef { +#[derive( + Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash, +)] +pub enum ScalarMaybeUndef { Scalar(Scalar), Undef, } @@ -425,7 +466,8 @@ impl fmt::Display for ScalarMaybeUndef { impl<'tcx> ScalarMaybeUndef<()> { #[inline] pub fn with_default_tag(self) -> ScalarMaybeUndef - where Tag: Default + where + Tag: Default, { match self { ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.with_default_tag()), @@ -436,8 +478,7 @@ impl<'tcx> ScalarMaybeUndef<()> { impl<'tcx, Tag> ScalarMaybeUndef { #[inline] - pub fn erase_tag(self) -> ScalarMaybeUndef - { + pub fn erase_tag(self) -> ScalarMaybeUndef { match self { ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.erase_tag()), ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef, diff --git a/src/librustc/mir/mod.rs b/src/librustc/mir/mod.rs index 2936405ebd0b7..392adeb224ec9 100644 --- a/src/librustc/mir/mod.rs +++ b/src/librustc/mir/mod.rs @@ -15,7 +15,7 @@ use rustc_data_structures::graph::{self, 
GraphPredecessors, GraphSuccessors}; use rustc_data_structures::indexed_vec::{Idx, IndexVec}; use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::MappedReadGuard; -use rustc_serialize::{self as serialize}; +use rustc_serialize as serialize; use smallvec::SmallVec; use std::borrow::Cow; use std::fmt::{self, Debug, Formatter, Write}; @@ -27,11 +27,11 @@ use syntax::ast::{self, Name}; use syntax::symbol::InternedString; use syntax_pos::{Span, DUMMY_SP}; use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; -use ty::subst::{Subst, Substs}; use ty::layout::VariantIdx; +use ty::subst::{Subst, Substs}; use ty::{ self, AdtDef, CanonicalUserTypeAnnotations, ClosureSubsts, GeneratorSubsts, Region, Ty, TyCtxt, - UserTypeAnnotationIndex, UserTypeAnnotation, + UserTypeAnnotation, UserTypeAnnotationIndex, }; use util::ppaux; @@ -575,7 +575,9 @@ impl BorrowKind { pub fn allows_two_phase_borrow(&self) -> bool { match *self { BorrowKind::Shared | BorrowKind::Shallow | BorrowKind::Unique => false, - BorrowKind::Mut { allow_two_phase_borrow } => allow_two_phase_borrow, + BorrowKind::Mut { + allow_two_phase_borrow, + } => allow_two_phase_borrow, } } } @@ -648,7 +650,7 @@ pub enum ImplicitSelfKind { MutRef, /// Represents when a function does not have a self argument or /// when a function has a `self: X` argument. - None + None, } CloneTypeFoldableAndLiftImpls! { BindingForm<'tcx>, } @@ -879,8 +881,7 @@ impl<'tcx> LocalDecl<'tcx> { pat_span: _, }))) => true, - Some(ClearCrossCrate::Set(BindingForm::ImplicitSelf(ImplicitSelfKind::Imm))) - => true, + Some(ClearCrossCrate::Set(BindingForm::ImplicitSelf(ImplicitSelfKind::Imm))) => true, _ => false, } @@ -932,12 +933,7 @@ impl<'tcx> LocalDecl<'tcx> { } #[inline] - fn new_local( - ty: Ty<'tcx>, - mutability: Mutability, - internal: bool, - span: Span, - ) -> Self { + fn new_local(ty: Ty<'tcx>, mutability: Mutability, internal: bool, span: Span) -> Self { LocalDecl { mutability, ty, @@ -1662,13 +1658,15 @@ impl<'tcx> TerminatorKind<'tcx> { Scalar::Bits { bits: u, size: size.bytes() as u8, - }.into(), + } + .into(), ), ty: switch_ty, }; fmt_const_val(&mut s, &c).unwrap(); s.into() - }).chain(iter::once("otherwise".into())) + }) + .chain(iter::once("otherwise".into())) .collect() } Call { @@ -1855,16 +1853,17 @@ impl<'tcx> Debug for Statement<'tcx> { match self.kind { Assign(ref place, ref rv) => write!(fmt, "{:?} = {:?}", place, rv), FakeRead(ref cause, ref place) => write!(fmt, "FakeRead({:?}, {:?})", cause, place), - Retag(ref kind, ref place) => - write!(fmt, "Retag({}{:?})", - match kind { - RetagKind::FnEntry => "[fn entry] ", - RetagKind::TwoPhase => "[2phase] ", - RetagKind::Raw => "[raw] ", - RetagKind::Default => "", - }, - place, - ), + Retag(ref kind, ref place) => write!( + fmt, + "Retag({}{:?})", + match kind { + RetagKind::FnEntry => "[fn entry] ", + RetagKind::TwoPhase => "[2phase] ", + RetagKind::Raw => "[raw] ", + RetagKind::Default => "", + }, + place, + ), StorageLive(ref place) => write!(fmt, "StorageLive({:?})", place), StorageDead(ref place) => write!(fmt, "StorageDead({:?})", place), SetDiscriminant { @@ -1876,9 +1875,11 @@ impl<'tcx> Debug for Statement<'tcx> { ref outputs, ref inputs, } => write!(fmt, "asm!({:?} : {:?} : {:?})", asm, outputs, inputs), - AscribeUserType(ref place, ref variance, ref c_ty) => { - write!(fmt, "AscribeUserType({:?}, {:?}, {:?})", place, variance, c_ty) - } + AscribeUserType(ref place, ref variance, ref c_ty) => write!( + fmt, + "AscribeUserType({:?}, {:?}, {:?})", + place, variance, c_ty + ), 
Nop => write!(fmt, "nop"), } } @@ -2015,8 +2016,8 @@ impl<'tcx> Place<'tcx> { /// FIXME: can we safely swap the semantics of `fn base_local` below in here instead? pub fn local(&self) -> Option { match self { - Place::Local(local) | - Place::Projection(box Projection { + Place::Local(local) + | Place::Projection(box Projection { base: Place::Local(local), elem: ProjectionElem::Deref, }) => Some(*local), @@ -2048,9 +2049,11 @@ impl<'tcx> Debug for Place<'tcx> { ), Promoted(ref promoted) => write!(fmt, "({:?}: {:?})", promoted.0, promoted.1), Projection(ref data) => match data.elem { - ProjectionElem::Downcast(ref adt_def, index) => { - write!(fmt, "({:?} as {})", data.base, adt_def.variants[index].ident) - } + ProjectionElem::Downcast(ref adt_def, index) => write!( + fmt, + "({:?} as {})", + data.base, adt_def.variants[index].ident + ), ProjectionElem::Deref => write!(fmt, "(*{:?})", data.base), ProjectionElem::Field(field, ty) => { write!(fmt, "({:?}.{:?}: {:?})", data.base, field.index(), ty) @@ -2508,32 +2511,36 @@ impl<'tcx> UserTypeProjections<'tcx> { UserTypeProjections { contents: vec![] } } - pub fn from_projections(projs: impl Iterator, Span)>) -> Self { - UserTypeProjections { contents: projs.collect() } + pub fn from_projections(projs: impl Iterator, Span)>) -> Self { + UserTypeProjections { + contents: projs.collect(), + } } - pub fn projections_and_spans(&self) -> impl Iterator, Span)> { + pub fn projections_and_spans(&self) -> impl Iterator, Span)> { self.contents.iter() } - pub fn projections(&self) -> impl Iterator> { - self.contents.iter().map(|&(ref user_type, _span)| user_type) + pub fn projections(&self) -> impl Iterator> { + self.contents + .iter() + .map(|&(ref user_type, _span)| user_type) } - pub fn push_projection( - mut self, - user_ty: &UserTypeProjection<'tcx>, - span: Span, - ) -> Self { + pub fn push_projection(mut self, user_ty: &UserTypeProjection<'tcx>, span: Span) -> Self { self.contents.push((user_ty.clone(), span)); self } fn map_projections( mut self, - mut f: impl FnMut(UserTypeProjection<'tcx>) -> UserTypeProjection<'tcx> + mut f: impl FnMut(UserTypeProjection<'tcx>) -> UserTypeProjection<'tcx>, ) -> Self { - self.contents = self.contents.drain(..).map(|(proj, span)| (f(proj), span)).collect(); + self.contents = self + .contents + .drain(..) 
+ .map(|(proj, span)| (f(proj), span)) + .collect(); self } @@ -2553,12 +2560,7 @@ impl<'tcx> UserTypeProjections<'tcx> { self.map_projections(|pat_ty_proj| pat_ty_proj.leaf(field)) } - pub fn variant( - self, - adt_def: &'tcx AdtDef, - variant_index: VariantIdx, - field: Field, - ) -> Self { + pub fn variant(self, adt_def: &'tcx AdtDef, variant_index: VariantIdx, field: Field) -> Self { self.map_projections(|pat_ty_proj| pat_ty_proj.variant(adt_def, variant_index, field)) } } @@ -2584,7 +2586,7 @@ pub struct UserTypeProjection<'tcx> { pub projs: Vec>, } -impl<'tcx> Copy for ProjectionKind<'tcx> { } +impl<'tcx> Copy for ProjectionKind<'tcx> {} impl<'tcx> UserTypeProjection<'tcx> { pub(crate) fn index(mut self) -> Self { @@ -2613,7 +2615,8 @@ impl<'tcx> UserTypeProjection<'tcx> { variant_index: VariantIdx, field: Field, ) -> Self { - self.projs.push(ProjectionElem::Downcast(adt_def, variant_index)); + self.projs + .push(ProjectionElem::Downcast(adt_def, variant_index)); self.projs.push(ProjectionElem::Field(field, ())); self } @@ -2626,15 +2629,15 @@ impl<'tcx> TypeFoldable<'tcx> for UserTypeProjection<'tcx> { use mir::ProjectionElem::*; let base = self.base.fold_with(folder); - let projs: Vec<_> = self.projs + let projs: Vec<_> = self + .projs .iter() - .map(|elem| { - match elem { - Deref => Deref, - Field(f, ()) => Field(f.clone(), ()), - Index(()) => Index(()), - elem => elem.clone(), - }}) + .map(|elem| match elem { + Deref => Deref, + Field(f, ()) => Field(f.clone(), ()), + Index(()) => Index(()), + elem => elem.clone(), + }) .collect(); UserTypeProjection { base, projs } @@ -2960,7 +2963,9 @@ pub struct ClosureOutlivesRequirement<'tcx> { /// order of the category, thereby influencing diagnostic output. /// /// See also [rustc_mir::borrow_check::nll::constraints] -#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)] +#[derive( + Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, +)] pub enum ConstraintCategory { Return, Yield, @@ -3403,11 +3408,12 @@ where fn super_visit_with>(&self, visitor: &mut Vs) -> bool { use mir::ProjectionElem::*; - self.base.visit_with(visitor) || match self.elem { - Field(_, ref ty) => ty.visit_with(visitor), - Index(ref v) => v.visit_with(visitor), - _ => false, - } + self.base.visit_with(visitor) + || match self.elem { + Field(_, ref ty) => ty.visit_with(visitor), + Index(ref v) => v.visit_with(visitor), + _ => false, + } } } diff --git a/src/librustc/mir/mono.rs b/src/librustc/mir/mono.rs index 55f5c36cde66d..7966b8252278a 100644 --- a/src/librustc/mir/mono.rs +++ b/src/librustc/mir/mono.rs @@ -1,14 +1,13 @@ -use hir::def_id::{DefId, CrateNum, LOCAL_CRATE}; -use syntax::ast::NodeId; -use syntax::symbol::{Symbol, InternedString}; -use ty::{Instance, TyCtxt}; -use util::nodemap::FxHashMap; +use hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; +use ich::{Fingerprint, NodeIdHashingMode, StableHashingContext}; use rustc_data_structures::base_n; -use rustc_data_structures::stable_hasher::{HashStable, StableHasherResult, - StableHasher}; -use ich::{Fingerprint, StableHashingContext, NodeIdHashingMode}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; use std::fmt; use std::hash::Hash; +use syntax::ast::NodeId; +use syntax::symbol::{InternedString, Symbol}; +use ty::{Instance, TyCtxt}; +use util::nodemap::FxHashMap; #[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)] pub enum MonoItem<'tcx> { @@ -24,19 +23,20 @@ impl<'tcx> MonoItem<'tcx> { // 
Estimate the size of a function based on how many statements // it contains. tcx.instance_def_size_estimate(instance.def) - }, + } // Conservatively estimate the size of a static declaration // or assembly to be 1. - MonoItem::Static(_) | - MonoItem::GlobalAsm(_) => 1, + MonoItem::Static(_) | MonoItem::GlobalAsm(_) => 1, } } } impl<'a, 'tcx> HashStable> for MonoItem<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { ::std::mem::discriminant(self).hash_stable(hcx, hasher); match *self { @@ -128,9 +128,7 @@ impl<'tcx> CodegenUnit<'tcx> { &self.items } - pub fn items_mut(&mut self) - -> &mut FxHashMap, (Linkage, Visibility)> - { + pub fn items_mut(&mut self) -> &mut FxHashMap, (Linkage, Visibility)> { &mut self.items } @@ -152,7 +150,8 @@ impl<'tcx> CodegenUnit<'tcx> { pub fn size_estimate(&self) -> usize { // Should only be called if `estimate_size` has previously been called. - self.size_estimate.expect("estimate_size must be called before getting a size_estimate") + self.size_estimate + .expect("estimate_size must be called before getting a size_estimate") } pub fn modify_size_estimate(&mut self, delta: usize) { @@ -164,9 +163,11 @@ impl<'tcx> CodegenUnit<'tcx> { } impl<'a, 'tcx> HashStable> for CodegenUnit<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let CodegenUnit { ref items, name, @@ -176,12 +177,15 @@ impl<'a, 'tcx> HashStable> for CodegenUnit<'tcx> { name.hash_stable(hcx, hasher); - let mut items: Vec<(Fingerprint, _)> = items.iter().map(|(mono_item, &attrs)| { - let mut hasher = StableHasher::new(); - mono_item.hash_stable(hcx, &mut hasher); - let mono_item_fingerprint = hasher.finish(); - (mono_item_fingerprint, attrs) - }).collect(); + let mut items: Vec<(Fingerprint, _)> = items + .iter() + .map(|(mono_item, &attrs)| { + let mut hasher = StableHasher::new(); + mono_item.hash_stable(hcx, &mut hasher); + let mono_item_fingerprint = hasher.finish(); + (mono_item_fingerprint, attrs) + }) + .collect(); items.sort_unstable_by_key(|i| i.0); items.hash_stable(hcx, hasher); @@ -237,7 +241,6 @@ pub struct CodegenUnitNameBuilder<'a, 'gcx: 'tcx, 'tcx: 'a> { } impl<'a, 'gcx: 'tcx, 'tcx: 'a> CodegenUnitNameBuilder<'a, 'gcx, 'tcx> { - pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self { CodegenUnitNameBuilder { tcx, @@ -263,18 +266,18 @@ impl<'a, 'gcx: 'tcx, 'tcx: 'a> CodegenUnitNameBuilder<'a, 'gcx, 'tcx> { /// The '.' before `` makes sure that names with a special /// suffix can never collide with a name built out of regular Rust /// identifiers (e.g., module paths). 
- pub fn build_cgu_name(&mut self, - cnum: CrateNum, - components: I, - special_suffix: Option) - -> InternedString - where I: IntoIterator, - C: fmt::Display, - S: fmt::Display, + pub fn build_cgu_name( + &mut self, + cnum: CrateNum, + components: I, + special_suffix: Option, + ) -> InternedString + where + I: IntoIterator, + C: fmt::Display, + S: fmt::Display, { - let cgu_name = self.build_cgu_name_no_mangle(cnum, - components, - special_suffix); + let cgu_name = self.build_cgu_name_no_mangle(cnum, components, special_suffix); if self.tcx.sess.opts.debugging_opts.human_readable_cgu_names { cgu_name @@ -286,14 +289,16 @@ impl<'a, 'gcx: 'tcx, 'tcx: 'a> CodegenUnitNameBuilder<'a, 'gcx, 'tcx> { /// Same as `CodegenUnit::build_cgu_name()` but will never mangle the /// resulting name. - pub fn build_cgu_name_no_mangle(&mut self, - cnum: CrateNum, - components: I, - special_suffix: Option) - -> InternedString - where I: IntoIterator, - C: fmt::Display, - S: fmt::Display, + pub fn build_cgu_name_no_mangle( + &mut self, + cnum: CrateNum, + components: I, + special_suffix: Option, + ) -> InternedString + where + I: IntoIterator, + C: fmt::Display, + S: fmt::Display, { use std::fmt::Write; @@ -306,21 +311,24 @@ impl<'a, 'gcx: 'tcx, 'tcx: 'a> CodegenUnitNameBuilder<'a, 'gcx, 'tcx> { // local crate's ID. Otherwise there can be collisions between CGUs // instantiating stuff for upstream crates. let local_crate_id = if cnum != LOCAL_CRATE { - let local_crate_disambiguator = - format!("{}", tcx.crate_disambiguator(LOCAL_CRATE)); - format!("-in-{}.{}", - tcx.crate_name(LOCAL_CRATE), - &local_crate_disambiguator[0 .. 8]) + let local_crate_disambiguator = format!("{}", tcx.crate_disambiguator(LOCAL_CRATE)); + format!( + "-in-{}.{}", + tcx.crate_name(LOCAL_CRATE), + &local_crate_disambiguator[0..8] + ) } else { String::new() }; let crate_disambiguator = tcx.crate_disambiguator(cnum).to_string(); // Using a shortened disambiguator of about 40 bits - format!("{}.{}{}", + format!( + "{}.{}{}", tcx.crate_name(cnum), - &crate_disambiguator[0 .. 8], - local_crate_id) + &crate_disambiguator[0..8], + local_crate_id + ) }); write!(cgu_name, "{}", crate_prefix).unwrap(); diff --git a/src/librustc/mir/tcx.rs b/src/librustc/mir/tcx.rs index c5b884525da4b..59d5939446389 100644 --- a/src/librustc/mir/tcx.rs +++ b/src/librustc/mir/tcx.rs @@ -3,12 +3,12 @@ * building is complete. */ +use hir; use mir::*; -use ty::subst::{Subst, Substs}; -use ty::{self, AdtDef, Ty, TyCtxt}; use ty::layout::VariantIdx; -use hir; +use ty::subst::{Subst, Substs}; use ty::util::IntTypeExt; +use ty::{self, AdtDef, Ty, TyCtxt}; #[derive(Copy, Clone, Debug)] pub enum PlaceTy<'tcx> { @@ -16,9 +16,11 @@ pub enum PlaceTy<'tcx> { Ty { ty: Ty<'tcx> }, /// Downcast to a particular variant of an enum. - Downcast { adt_def: &'tcx AdtDef, - substs: &'tcx Substs<'tcx>, - variant_index: VariantIdx }, + Downcast { + adt_def: &'tcx AdtDef, + substs: &'tcx Substs<'tcx>, + variant_index: VariantIdx, + }, } static_assert!(PLACE_TY_IS_3_PTRS_LARGE: @@ -32,10 +34,12 @@ impl<'a, 'gcx, 'tcx> PlaceTy<'tcx> { pub fn to_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> { match *self { - PlaceTy::Ty { ty } => - ty, - PlaceTy::Downcast { adt_def, substs, variant_index: _ } => - tcx.mk_adt(adt_def, substs), + PlaceTy::Ty { ty } => ty, + PlaceTy::Downcast { + adt_def, + substs, + variant_index: _, + } => tcx.mk_adt(adt_def, substs), } } @@ -46,23 +50,35 @@ impl<'a, 'gcx, 'tcx> PlaceTy<'tcx> { /// not carry a `Ty` for `T`.) 
/// /// Note that the resulting type has not been normalized. - pub fn field_ty(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, f: &Field) -> Ty<'tcx> - { + pub fn field_ty(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, f: &Field) -> Ty<'tcx> { // Pass `0` here so it can be used as a "default" variant_index in first arm below let answer = match (self, VariantIdx::new(0)) { - (PlaceTy::Ty { - ty: &ty::TyS { sty: ty::TyKind::Adt(adt_def, substs), .. } }, variant_index) | - (PlaceTy::Downcast { adt_def, substs, variant_index }, _) => { + ( + PlaceTy::Ty { + ty: + &ty::TyS { + sty: ty::TyKind::Adt(adt_def, substs), + .. + }, + }, + variant_index, + ) + | ( + PlaceTy::Downcast { + adt_def, + substs, + variant_index, + }, + _, + ) => { let variant_def = &adt_def.variants[variant_index]; let field_def = &variant_def.fields[f.index()]; field_def.ty(tcx, substs) } - (PlaceTy::Ty { ty }, _) => { - match ty.sty { - ty::Tuple(ref tys) => tys[f.index()], - _ => bug!("extracting field of non-tuple non-adt: {:?}", self), - } - } + (PlaceTy::Ty { ty }, _) => match ty.sty { + ty::Tuple(ref tys) => tys[f.index()], + _ => bug!("extracting field of non-tuple non-adt: {:?}", self), + }, }; debug!("field_ty self: {:?} f: {:?} yields: {:?}", self, f, answer); answer @@ -71,10 +87,11 @@ impl<'a, 'gcx, 'tcx> PlaceTy<'tcx> { /// Convenience wrapper around `projection_ty_core` for /// `PlaceElem`, where we can just use the `Ty` that is already /// stored inline on field projection elems. - pub fn projection_ty(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - elem: &PlaceElem<'tcx>) - -> PlaceTy<'tcx> - { + pub fn projection_ty( + self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + elem: &PlaceElem<'tcx>, + ) -> PlaceTy<'tcx> { self.projection_ty_core(tcx, elem, |_, _, ty| -> Result, ()> { Ok(ty) }) .unwrap() } @@ -88,27 +105,24 @@ impl<'a, 'gcx, 'tcx> PlaceTy<'tcx> { self, tcx: TyCtxt<'a, 'gcx, 'tcx>, elem: &ProjectionElem<'tcx, V, T>, - mut handle_field: impl FnMut(&Self, &Field, &T) -> Result, E>) - -> Result, E> + mut handle_field: impl FnMut(&Self, &Field, &T) -> Result, E>, + ) -> Result, E> where - V: ::std::fmt::Debug, T: ::std::fmt::Debug + V: ::std::fmt::Debug, + T: ::std::fmt::Debug, { let answer = match *elem { ProjectionElem::Deref => { - let ty = self.to_ty(tcx) - .builtin_deref(true) - .unwrap_or_else(|| { - bug!("deref projection of non-dereferencable ty {:?}", self) - }) - .ty; - PlaceTy::Ty { - ty, - } + let ty = self + .to_ty(tcx) + .builtin_deref(true) + .unwrap_or_else(|| bug!("deref projection of non-dereferencable ty {:?}", self)) + .ty; + PlaceTy::Ty { ty } } - ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } => - PlaceTy::Ty { - ty: self.to_ty(tcx).builtin_index().unwrap() - }, + ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } => PlaceTy::Ty { + ty: self.to_ty(tcx).builtin_index().unwrap(), + }, ProjectionElem::Subslice { from, to } => { let ty = self.to_ty(tcx); PlaceTy::Ty { @@ -119,30 +133,31 @@ impl<'a, 'gcx, 'tcx> PlaceTy<'tcx> { tcx.mk_array(inner, len) } ty::Slice(..) 
=> ty, - _ => { - bug!("cannot subslice non-array type: `{:?}`", self) - } - } + _ => bug!("cannot subslice non-array type: `{:?}`", self), + }, } } - ProjectionElem::Downcast(adt_def1, index) => - match self.to_ty(tcx).sty { - ty::Adt(adt_def, substs) => { - assert!(adt_def.is_enum()); - assert!(index.as_usize() < adt_def.variants.len()); - assert_eq!(adt_def, adt_def1); - PlaceTy::Downcast { adt_def, - substs, - variant_index: index } - } - _ => { - bug!("cannot downcast non-ADT type: `{:?}`", self) + ProjectionElem::Downcast(adt_def1, index) => match self.to_ty(tcx).sty { + ty::Adt(adt_def, substs) => { + assert!(adt_def.is_enum()); + assert!(index.as_usize() < adt_def.variants.len()); + assert_eq!(adt_def, adt_def1); + PlaceTy::Downcast { + adt_def, + substs, + variant_index: index, } - }, - ProjectionElem::Field(ref f, ref fty) => - PlaceTy::Ty { ty: handle_field(&self, f, fty)? }, + } + _ => bug!("cannot downcast non-ADT type: `{:?}`", self), + }, + ProjectionElem::Field(ref f, ref fty) => PlaceTy::Ty { + ty: handle_field(&self, f, fty)?, + }, }; - debug!("projection_ty self: {:?} elem: {:?} yields: {:?}", self, elem, answer); + debug!( + "projection_ty self: {:?} elem: {:?} yields: {:?}", + self, elem, answer + ); Ok(answer) } } @@ -156,16 +171,19 @@ EnumTypeFoldableImpl! { impl<'tcx> Place<'tcx> { pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PlaceTy<'tcx> - where D: HasLocalDecls<'tcx> + where + D: HasLocalDecls<'tcx>, { match *self { - Place::Local(index) => - PlaceTy::Ty { ty: local_decls.local_decls()[index].ty }, + Place::Local(index) => PlaceTy::Ty { + ty: local_decls.local_decls()[index].ty, + }, Place::Promoted(ref data) => PlaceTy::Ty { ty: data.1 }, - Place::Static(ref data) => - PlaceTy::Ty { ty: data.ty }, - Place::Projection(ref proj) => - proj.base.ty(local_decls, tcx).projection_ty(tcx, &proj.elem), + Place::Static(ref data) => PlaceTy::Ty { ty: data.ty }, + Place::Projection(ref proj) => proj + .base + .ty(local_decls, tcx) + .projection_ty(tcx, &proj.elem), } } @@ -173,8 +191,11 @@ impl<'tcx> Place<'tcx> { /// then returns the index of the field being projected. Note that this closure will always /// be `self` in the current MIR, because that is the only time we directly access the fields /// of a closure type. 
- pub fn is_upvar_field_projection<'cx, 'gcx>(&self, mir: &'cx Mir<'tcx>, - tcx: &TyCtxt<'cx, 'gcx, 'tcx>) -> Option { + pub fn is_upvar_field_projection<'cx, 'gcx>( + &self, + mir: &'cx Mir<'tcx>, + tcx: &TyCtxt<'cx, 'gcx, 'tcx>, + ) -> Option { let (place, by_ref) = if let Place::Projection(ref proj) = self { if let ProjectionElem::Deref = proj.elem { (&proj.base, true) @@ -190,16 +211,16 @@ impl<'tcx> Place<'tcx> { ProjectionElem::Field(field, _ty) => { let base_ty = proj.base.ty(mir, *tcx).to_ty(*tcx); - if (base_ty.is_closure() || base_ty.is_generator()) && - (!by_ref || mir.upvar_decls[field.index()].by_ref) + if (base_ty.is_closure() || base_ty.is_generator()) + && (!by_ref || mir.upvar_decls[field.index()].by_ref) { Some(field) } else { None } - }, + } _ => None, - } + }, _ => None, } } @@ -207,25 +228,25 @@ impl<'tcx> Place<'tcx> { pub enum RvalueInitializationState { Shallow, - Deep + Deep, } impl<'tcx> Rvalue<'tcx> { pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> - where D: HasLocalDecls<'tcx> + where + D: HasLocalDecls<'tcx>, { match *self { Rvalue::Use(ref operand) => operand.ty(local_decls, tcx), - Rvalue::Repeat(ref operand, count) => { - tcx.mk_array(operand.ty(local_decls, tcx), count) - } + Rvalue::Repeat(ref operand, count) => tcx.mk_array(operand.ty(local_decls, tcx), count), Rvalue::Ref(reg, bk, ref place) => { let place_ty = place.ty(local_decls, tcx).to_ty(tcx); - tcx.mk_ref(reg, + tcx.mk_ref( + reg, ty::TypeAndMut { ty: place_ty, - mutbl: bk.to_mutbl_lossy() - } + mutbl: bk.to_mutbl_lossy(), + }, ) } Rvalue::Len(..) => tcx.types.usize, @@ -241,8 +262,7 @@ impl<'tcx> Rvalue<'tcx> { let ty = op.ty(tcx, lhs_ty, rhs_ty); tcx.intern_tup(&[ty, tcx.types.bool]) } - Rvalue::UnaryOp(UnOp::Not, ref operand) | - Rvalue::UnaryOp(UnOp::Neg, ref operand) => { + Rvalue::UnaryOp(UnOp::Not, ref operand) | Rvalue::UnaryOp(UnOp::Neg, ref operand) => { operand.ty(local_decls, tcx) } Rvalue::Discriminant(ref place) => { @@ -256,25 +276,15 @@ impl<'tcx> Rvalue<'tcx> { } Rvalue::NullaryOp(NullOp::Box, t) => tcx.mk_box(t), Rvalue::NullaryOp(NullOp::SizeOf, _) => tcx.types.usize, - Rvalue::Aggregate(ref ak, ref ops) => { - match **ak { - AggregateKind::Array(ty) => { - tcx.mk_array(ty, ops.len() as u64) - } - AggregateKind::Tuple => { - tcx.mk_tup(ops.iter().map(|op| op.ty(local_decls, tcx))) - } - AggregateKind::Adt(def, _, substs, _, _) => { - tcx.type_of(def.did).subst(tcx, substs) - } - AggregateKind::Closure(did, substs) => { - tcx.mk_closure(did, substs) - } - AggregateKind::Generator(did, substs, movability) => { - tcx.mk_generator(did, substs, movability) - } + Rvalue::Aggregate(ref ak, ref ops) => match **ak { + AggregateKind::Array(ty) => tcx.mk_array(ty, ops.len() as u64), + AggregateKind::Tuple => tcx.mk_tup(ops.iter().map(|op| op.ty(local_decls, tcx))), + AggregateKind::Adt(def, _, substs, _, _) => tcx.type_of(def.did).subst(tcx, substs), + AggregateKind::Closure(did, substs) => tcx.mk_closure(did, substs), + AggregateKind::Generator(did, substs, movability) => { + tcx.mk_generator(did, substs, movability) } - } + }, } } @@ -284,32 +294,40 @@ impl<'tcx> Rvalue<'tcx> { pub fn initialization_state(&self) -> RvalueInitializationState { match *self { Rvalue::NullaryOp(NullOp::Box, _) => RvalueInitializationState::Shallow, - _ => RvalueInitializationState::Deep + _ => RvalueInitializationState::Deep, } } } impl<'tcx> Operand<'tcx> { pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> - where D: 
HasLocalDecls<'tcx> + where + D: HasLocalDecls<'tcx>, { match self { - &Operand::Copy(ref l) | - &Operand::Move(ref l) => l.ty(local_decls, tcx).to_ty(tcx), + &Operand::Copy(ref l) | &Operand::Move(ref l) => l.ty(local_decls, tcx).to_ty(tcx), &Operand::Constant(ref c) => c.ty, } } } impl<'tcx> BinOp { - pub fn ty<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - lhs_ty: Ty<'tcx>, - rhs_ty: Ty<'tcx>) - -> Ty<'tcx> { + pub fn ty<'a, 'gcx>( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + lhs_ty: Ty<'tcx>, + rhs_ty: Ty<'tcx>, + ) -> Ty<'tcx> { // FIXME: handle SIMD correctly match self { - &BinOp::Add | &BinOp::Sub | &BinOp::Mul | &BinOp::Div | &BinOp::Rem | - &BinOp::BitXor | &BinOp::BitAnd | &BinOp::BitOr => { + &BinOp::Add + | &BinOp::Sub + | &BinOp::Mul + | &BinOp::Div + | &BinOp::Rem + | &BinOp::BitXor + | &BinOp::BitAnd + | &BinOp::BitOr => { // these should be integers or floats of the same size. assert_eq!(lhs_ty, rhs_ty); lhs_ty @@ -317,8 +335,7 @@ impl<'tcx> BinOp { &BinOp::Shl | &BinOp::Shr | &BinOp::Offset => { lhs_ty // lhs_ty can be != rhs_ty } - &BinOp::Eq | &BinOp::Lt | &BinOp::Le | - &BinOp::Ne | &BinOp::Ge | &BinOp::Gt => { + &BinOp::Eq | &BinOp::Lt | &BinOp::Le | &BinOp::Ne | &BinOp::Ge | &BinOp::Gt => { tcx.types.bool } } @@ -362,7 +379,7 @@ impl BinOp { BinOp::Gt => hir::BinOpKind::Gt, BinOp::Le => hir::BinOpKind::Le, BinOp::Ge => hir::BinOpKind::Ge, - BinOp::Offset => unreachable!() + BinOp::Offset => unreachable!(), } } } diff --git a/src/librustc/mir/traversal.rs b/src/librustc/mir/traversal.rs index f8398c27cc2da..362d48078e15c 100644 --- a/src/librustc/mir/traversal.rs +++ b/src/librustc/mir/traversal.rs @@ -114,7 +114,6 @@ impl<'a, 'tcx> Postorder<'a, 'tcx> { root_is_start_block: root == START_BLOCK, }; - let data = &po.mir[root]; if let Some(ref term) = data.terminator { @@ -254,19 +253,19 @@ impl<'a, 'tcx> Iterator for Postorder<'a, 'tcx> { pub struct ReversePostorder<'a, 'tcx: 'a> { mir: &'a Mir<'tcx>, blocks: Vec, - idx: usize + idx: usize, } impl<'a, 'tcx> ReversePostorder<'a, 'tcx> { pub fn new(mir: &'a Mir<'tcx>, root: BasicBlock) -> ReversePostorder<'a, 'tcx> { - let blocks : Vec<_> = Postorder::new(mir, root).map(|(bb, _)| bb).collect(); + let blocks: Vec<_> = Postorder::new(mir, root).map(|(bb, _)| bb).collect(); let len = blocks.len(); ReversePostorder { mir, blocks, - idx: len + idx: len, } } @@ -275,7 +274,6 @@ impl<'a, 'tcx> ReversePostorder<'a, 'tcx> { } } - pub fn reverse_postorder<'a, 'tcx>(mir: &'a Mir<'tcx>) -> ReversePostorder<'a, 'tcx> { ReversePostorder::new(mir, START_BLOCK) } @@ -284,7 +282,9 @@ impl<'a, 'tcx> Iterator for ReversePostorder<'a, 'tcx> { type Item = (BasicBlock, &'a BasicBlockData<'tcx>); fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> { - if self.idx == 0 { return None; } + if self.idx == 0 { + return None; + } self.idx -= 1; self.blocks.get(self.idx).map(|&bb| (bb, &self.mir[bb])) diff --git a/src/librustc/mir/visit.rs b/src/librustc/mir/visit.rs index 67f85fbc8679e..6402191b68b45 100644 --- a/src/librustc/mir/visit.rs +++ b/src/librustc/mir/visit.rs @@ -1,9 +1,9 @@ use hir::def_id::DefId; use infer::canonical::Canonical; -use ty::subst::Substs; -use ty::{ClosureSubsts, GeneratorSubsts, Region, Ty}; use mir::*; use syntax_pos::Span; +use ty::subst::Substs; +use ty::{ClosureSubsts, GeneratorSubsts, Region, Ty}; // # The MIR Visitor // @@ -932,22 +932,19 @@ pub trait MirVisitable<'tcx> { } impl<'tcx> MirVisitable<'tcx> for Statement<'tcx> { - fn apply(&self, location: Location, visitor: &mut dyn Visitor<'tcx>) - { + 
fn apply(&self, location: Location, visitor: &mut dyn Visitor<'tcx>) { visitor.visit_statement(location.block, self, location) } } impl<'tcx> MirVisitable<'tcx> for Terminator<'tcx> { - fn apply(&self, location: Location, visitor: &mut dyn Visitor<'tcx>) - { + fn apply(&self, location: Location, visitor: &mut dyn Visitor<'tcx>) { visitor.visit_terminator(location.block, self, location) } } impl<'tcx> MirVisitable<'tcx> for Option<Terminator<'tcx>> { - fn apply(&self, location: Location, visitor: &mut dyn Visitor<'tcx>) - { + fn apply(&self, location: Location, visitor: &mut dyn Visitor<'tcx>) { visitor.visit_terminator(location.block, self.as_ref().unwrap(), location) } } @@ -1050,10 +1047,10 @@ impl<'tcx> PlaceContext<'tcx> { /// Returns `true` if this place context represents a borrow. pub fn is_borrow(&self) -> bool { match *self { - PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow(..)) | - PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow(..)) | - PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow(..)) | - PlaceContext::MutatingUse(MutatingUseContext::Borrow(..)) => true, + PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow(..)) + | PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow(..)) + | PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow(..)) + | PlaceContext::MutatingUse(MutatingUseContext::Borrow(..)) => true, _ => false, } } @@ -1061,8 +1058,8 @@ impl<'tcx> PlaceContext<'tcx> { /// Returns `true` if this place context represents a storage live or storage dead marker. pub fn is_storage_marker(&self) -> bool { match *self { - PlaceContext::NonUse(NonUseContext::StorageLive) | - PlaceContext::NonUse(NonUseContext::StorageDead) => true, + PlaceContext::NonUse(NonUseContext::StorageLive) + | PlaceContext::NonUse(NonUseContext::StorageDead) => true, _ => false, } } @@ -1110,9 +1107,9 @@ impl<'tcx> PlaceContext<'tcx> { /// Returns `true` if this place context represents an assignment statement.
pub fn is_place_assignment(&self) -> bool { match *self { - PlaceContext::MutatingUse(MutatingUseContext::Store) | - PlaceContext::MutatingUse(MutatingUseContext::Call) | - PlaceContext::MutatingUse(MutatingUseContext::AsmOutput) => true, + PlaceContext::MutatingUse(MutatingUseContext::Store) + | PlaceContext::MutatingUse(MutatingUseContext::Call) + | PlaceContext::MutatingUse(MutatingUseContext::AsmOutput) => true, _ => false, } } diff --git a/src/librustc/session/code_stats.rs b/src/librustc/session/code_stats.rs index 730abacf6f691..b6a750423314b 100644 --- a/src/librustc/session/code_stats.rs +++ b/src/librustc/session/code_stats.rs @@ -1,5 +1,5 @@ +use rustc_data_structures::fx::FxHashSet; use rustc_target::abi::{Align, Size}; -use rustc_data_structures::fx::{FxHashSet}; use std::cmp::{self, Ordering}; #[derive(Clone, PartialEq, Eq, Hash, Debug)] @@ -50,14 +50,16 @@ pub struct CodeStats { } impl CodeStats { - pub fn record_type_size(&mut self, - kind: DataTypeKind, - type_desc: S, - align: Align, - overall_size: Size, - packed: bool, - opt_discr_size: Option, - variants: Vec) { + pub fn record_type_size( + &mut self, + kind: DataTypeKind, + type_desc: S, + align: Align, + overall_size: Size, + packed: bool, + opt_discr_size: Option, + variants: Vec, + ) { let info = TypeSizeInfo { kind, type_description: type_desc.to_string(), @@ -84,13 +86,17 @@ impl CodeStats { }); for info in &sorted { - println!("print-type-size type: `{}`: {} bytes, alignment: {} bytes", - info.type_description, info.overall_size, info.align); + println!( + "print-type-size type: `{}`: {} bytes, alignment: {} bytes", + info.type_description, info.overall_size, info.align + ); let indent = " "; let discr_size = if let Some(discr_size) = info.opt_discr_size { - println!("print-type-size {}discriminant: {} bytes", - indent, discr_size); + println!( + "print-type-size {}discriminant: {} bytes", + indent, discr_size + ); discr_size } else { 0 @@ -107,14 +113,24 @@ impl CodeStats { DataTypeKind::Enum | DataTypeKind::Union => false, }; for (i, variant_info) in info.variants.iter().enumerate() { - let VariantInfo { ref name, kind: _, align: _, size, ref fields } = *variant_info; + let VariantInfo { + ref name, + kind: _, + align: _, + size, + ref fields, + } = *variant_info; let indent = if !struct_like { let name = match name.as_ref() { Some(name) => name.to_owned(), None => i.to_string(), }; - println!("print-type-size {}variant `{}`: {} bytes", - indent, name, size - discr_size); + println!( + "print-type-size {}variant `{}`: {} bytes", + indent, + name, + size - discr_size + ); " " } else { assert!(i < 1); @@ -129,40 +145,56 @@ impl CodeStats { fields.sort_by_key(|f| f.offset); for field in fields.iter() { - let FieldInfo { ref name, offset, size, align } = *field; + let FieldInfo { + ref name, + offset, + size, + align, + } = *field; if offset > min_offset { let pad = offset - min_offset; - println!("print-type-size {}padding: {} bytes", - indent, pad); + println!("print-type-size {}padding: {} bytes", indent, pad); } if offset < min_offset { // if this happens something is very wrong - println!("print-type-size {}field `.{}`: {} bytes, \ - offset: {} bytes, \ - alignment: {} bytes", - indent, name, size, offset, align); + println!( + "print-type-size {}field `.{}`: {} bytes, \ + offset: {} bytes, \ + alignment: {} bytes", + indent, name, size, offset, align + ); } else if info.packed || offset == min_offset { - println!("print-type-size {}field `.{}`: {} bytes", - indent, name, size); + println!( + "print-type-size 
{}field `.{}`: {} bytes", + indent, name, size + ); } else { // Include field alignment in output only if it caused padding injection - println!("print-type-size {}field `.{}`: {} bytes, \ - alignment: {} bytes", - indent, name, size, align); + println!( + "print-type-size {}field `.{}`: {} bytes, \ + alignment: {} bytes", + indent, name, size, align + ); } min_offset = offset + size; } } - assert!(max_variant_size <= info.overall_size, - "max_variant_size {} !<= {} overall_size", - max_variant_size, info.overall_size); + assert!( + max_variant_size <= info.overall_size, + "max_variant_size {} !<= {} overall_size", + max_variant_size, + info.overall_size + ); if max_variant_size < info.overall_size { - println!("print-type-size {}end padding: {} bytes", - indent, info.overall_size - max_variant_size); + println!( + "print-type-size {}end padding: {} bytes", + indent, + info.overall_size - max_variant_size + ); } } } diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 33409f9b4a74f..60d044dddf62c 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -3,36 +3,36 @@ use std::str::FromStr; -use session::{early_error, early_warn, Session}; use session::search_paths::SearchPath; +use session::{early_error, early_warn, Session}; -use rustc_target::spec::{LinkerFlavor, PanicStrategy, RelroLevel}; -use rustc_target::spec::{Target, TargetTriple}; use lint; use middle::cstore; +use rustc_target::spec::{LinkerFlavor, PanicStrategy, RelroLevel}; +use rustc_target::spec::{Target, TargetTriple}; -use syntax::ast::{self, IntTy, UintTy, MetaItemKind}; -use syntax::source_map::{FileName, FilePathMapping}; -use syntax::edition::{Edition, EDITION_NAME_LIST, DEFAULT_EDITION}; -use syntax::parse::token; +use syntax::ast::{self, IntTy, MetaItemKind, UintTy}; +use syntax::edition::{Edition, DEFAULT_EDITION, EDITION_NAME_LIST}; +use syntax::feature_gate::UnstableFeatures; use syntax::parse; +use syntax::parse::token; +use syntax::source_map::{FileName, FilePathMapping}; use syntax::symbol::Symbol; -use syntax::feature_gate::UnstableFeatures; use errors::{ColorConfig, FatalError, Handler}; use getopts; -use std::collections::{BTreeMap, BTreeSet}; use std::collections::btree_map::Iter as BTreeMapIter; use std::collections::btree_map::Keys as BTreeMapKeysIter; use std::collections::btree_map::Values as BTreeMapValuesIter; +use std::collections::{BTreeMap, BTreeSet}; use rustc_data_structures::fx::FxHashSet; -use std::{fmt, str}; -use std::hash::Hasher; use std::collections::hash_map::DefaultHasher; +use std::hash::Hasher; use std::iter::FromIterator; use std::path::{Path, PathBuf}; +use std::{fmt, str}; pub struct Config { pub target: Target, @@ -97,14 +97,13 @@ pub enum LtoCli { pub enum CrossLangLto { LinkerPlugin(PathBuf), LinkerPluginAuto, - Disabled + Disabled, } impl CrossLangLto { pub fn enabled(&self) -> bool { match *self { - CrossLangLto::LinkerPlugin(_) | - CrossLangLto::LinkerPluginAuto => true, + CrossLangLto::LinkerPlugin(_) | CrossLangLto::LinkerPluginAuto => true, CrossLangLto::Disabled => false, } } @@ -117,7 +116,9 @@ pub enum DebugInfo { Full, } -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord, RustcEncodable, RustcDecodable)] +#[derive( + Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord, RustcEncodable, RustcDecodable, +)] pub enum OutputType { Bitcode, Assembly, @@ -282,21 +283,29 @@ impl Externs { } macro_rules! 
hash_option { - ($opt_name:ident, $opt_expr:expr, $sub_hashes:expr, [UNTRACKED]) => ({}); - ($opt_name:ident, $opt_expr:expr, $sub_hashes:expr, [TRACKED]) => ({ - if $sub_hashes.insert(stringify!($opt_name), - $opt_expr as &dyn dep_tracking::DepTrackingHash).is_some() { - bug!("Duplicate key in CLI DepTrackingHash: {}", stringify!($opt_name)) + ($opt_name:ident, $opt_expr:expr, $sub_hashes:expr, [UNTRACKED]) => {{}}; + ($opt_name:ident, $opt_expr:expr, $sub_hashes:expr, [TRACKED]) => {{ + if $sub_hashes + .insert( + stringify!($opt_name), + $opt_expr as &dyn dep_tracking::DepTrackingHash, + ) + .is_some() + { + bug!( + "Duplicate key in CLI DepTrackingHash: {}", + stringify!($opt_name) + ) } - }); + }}; ($opt_name:ident, $opt_expr:expr, $sub_hashes:expr, - [UNTRACKED_WITH_WARNING $warn_val:expr, $warn_text:expr, $error_format:expr]) => ({ + [UNTRACKED_WITH_WARNING $warn_val:expr, $warn_text:expr, $error_format:expr]) => {{ if *$opt_expr == $warn_val { early_warn($error_format, $warn_text) } - }); + }}; } macro_rules! top_level_options { @@ -611,7 +620,8 @@ impl Default for Options { impl Options { /// True if there is a reason to build the dep graph. pub fn build_dep_graph(&self) -> bool { - self.incremental.is_some() || self.debugging_opts.dump_dep_graph + self.incremental.is_some() + || self.debugging_opts.dump_dep_graph || self.debugging_opts.query_dep_graph } @@ -635,16 +645,10 @@ impl Options { pub fn share_generics(&self) -> bool { match self.debugging_opts.share_generics { Some(setting) => setting, - None => { - match self.optimize { - OptLevel::No | - OptLevel::Less | - OptLevel::Size | - OptLevel::SizeMin => true, - OptLevel::Default | - OptLevel::Aggressive => false, - } - } + None => match self.optimize { + OptLevel::No | OptLevel::Less | OptLevel::Size | OptLevel::SizeMin => true, + OptLevel::Default | OptLevel::Aggressive => false, + }, } } } @@ -1130,140 +1134,140 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options, } options! 
{DebuggingOptions, DebuggingSetter, basic_debugging_options, - build_debugging_options, "Z", "debugging", - DB_OPTIONS, db_type_desc, dbsetters, - codegen_backend: Option = (None, parse_opt_string, [TRACKED], - "the backend to use"), - verbose: bool = (false, parse_bool, [UNTRACKED], - "in general, enable more debug printouts"), - span_free_formats: bool = (false, parse_bool, [UNTRACKED], - "when debug-printing compiler state, do not include spans"), // o/w tests have closure@path - identify_regions: bool = (false, parse_bool, [UNTRACKED], - "make unnamed regions display as '# (where # is some non-ident unique id)"), - borrowck: Option = (None, parse_opt_string, [UNTRACKED], - "select which borrowck is used (`ast`, `mir`, `migrate`, or `compare`)"), - two_phase_borrows: bool = (false, parse_bool, [UNTRACKED], - "use two-phase reserved/active distinction for `&mut` borrows in MIR borrowck"), - two_phase_beyond_autoref: bool = (false, parse_bool, [UNTRACKED], - "when using two-phase-borrows, allow two phases even for non-autoref `&mut` borrows"), - time_passes: bool = (false, parse_bool, [UNTRACKED], - "measure time of each rustc pass"), - count_llvm_insns: bool = (false, parse_bool, - [UNTRACKED_WITH_WARNING(true, - "The output generated by `-Z count_llvm_insns` might not be reliable \ - when used with incremental compilation")], - "count where LLVM instrs originate"), - time_llvm_passes: bool = (false, parse_bool, [UNTRACKED_WITH_WARNING(true, - "The output of `-Z time-llvm-passes` will only reflect timings of \ - re-codegened modules when used with incremental compilation" )], - "measure time of each LLVM pass"), - input_stats: bool = (false, parse_bool, [UNTRACKED], - "gather statistics about the input"), - codegen_stats: bool = (false, parse_bool, [UNTRACKED_WITH_WARNING(true, - "The output of `-Z codegen-stats` might not be accurate when incremental \ - compilation is enabled")], - "gather codegen statistics"), - asm_comments: bool = (false, parse_bool, [TRACKED], - "generate comments into the assembly (may change behavior)"), - verify_llvm_ir: bool = (false, parse_bool, [TRACKED], - "verify LLVM IR"), - borrowck_stats: bool = (false, parse_bool, [UNTRACKED], - "gather borrowck statistics"), - no_landing_pads: bool = (false, parse_bool, [TRACKED], - "omit landing pads for unwinding"), - fewer_names: bool = (false, parse_bool, [TRACKED], - "reduce memory use by retaining fewer names within compilation artifacts (LLVM-IR)"), - meta_stats: bool = (false, parse_bool, [UNTRACKED], - "gather metadata statistics"), - print_link_args: bool = (false, parse_bool, [UNTRACKED], - "print the arguments passed to the linker"), - print_llvm_passes: bool = (false, parse_bool, [UNTRACKED], - "prints the llvm optimization passes being run"), - ast_json: bool = (false, parse_bool, [UNTRACKED], - "print the AST as JSON and halt"), - query_threads: Option = (None, parse_opt_uint, [UNTRACKED], - "execute queries on a thread pool with N threads"), - ast_json_noexpand: bool = (false, parse_bool, [UNTRACKED], - "print the pre-expansion AST as JSON and halt"), - ls: bool = (false, parse_bool, [UNTRACKED], - "list the symbols defined by a library crate"), - save_analysis: bool = (false, parse_bool, [UNTRACKED], - "write syntax and type analysis (in JSON format) information, in \ - addition to normal output"), - flowgraph_print_loans: bool = (false, parse_bool, [UNTRACKED], - "include loan analysis data in -Z unpretty flowgraph output"), - flowgraph_print_moves: bool = (false, parse_bool, [UNTRACKED], - "include 
move analysis data in -Z unpretty flowgraph output"), - flowgraph_print_assigns: bool = (false, parse_bool, [UNTRACKED], - "include assignment analysis data in -Z unpretty flowgraph output"), - flowgraph_print_all: bool = (false, parse_bool, [UNTRACKED], - "include all dataflow analysis data in -Z unpretty flowgraph output"), - print_region_graph: bool = (false, parse_bool, [UNTRACKED], - "prints region inference graph. \ - Use with RUST_REGION_GRAPH=help for more info"), - parse_only: bool = (false, parse_bool, [UNTRACKED], - "parse only; do not compile, assemble, or link"), - no_codegen: bool = (false, parse_bool, [TRACKED], - "run all passes except codegen; no output"), - treat_err_as_bug: bool = (false, parse_bool, [TRACKED], - "treat all errors that occur as bugs"), - report_delayed_bugs: bool = (false, parse_bool, [TRACKED], - "immediately print bugs registered with `delay_span_bug`"), - external_macro_backtrace: bool = (false, parse_bool, [UNTRACKED], - "show macro backtraces even for non-local macros"), - teach: bool = (false, parse_bool, [TRACKED], - "show extended diagnostic help"), - continue_parse_after_error: bool = (false, parse_bool, [TRACKED], - "attempt to recover from parse errors (experimental)"), - incremental: Option = (None, parse_opt_string, [UNTRACKED], - "enable incremental compilation (experimental)"), - incremental_queries: bool = (true, parse_bool, [UNTRACKED], - "enable incremental compilation support for queries (experimental)"), - incremental_info: bool = (false, parse_bool, [UNTRACKED], - "print high-level information about incremental reuse (or the lack thereof)"), - incremental_dump_hash: bool = (false, parse_bool, [UNTRACKED], - "dump hash information in textual format to stdout"), - incremental_verify_ich: bool = (false, parse_bool, [UNTRACKED], - "verify incr. comp. 
hashes of green query instances"), - incremental_ignore_spans: bool = (false, parse_bool, [UNTRACKED], - "ignore spans during ICH computation -- used for testing"), - instrument_mcount: bool = (false, parse_bool, [TRACKED], - "insert function instrument code for mcount-based tracing"), - dump_dep_graph: bool = (false, parse_bool, [UNTRACKED], - "dump the dependency graph to $RUST_DEP_GRAPH (default: /tmp/dep_graph.gv)"), - query_dep_graph: bool = (false, parse_bool, [UNTRACKED], - "enable queries of the dependency graph for regression testing"), - profile_queries: bool = (false, parse_bool, [UNTRACKED], - "trace and profile the queries of the incremental compilation framework"), - profile_queries_and_keys: bool = (false, parse_bool, [UNTRACKED], - "trace and profile the queries and keys of the incremental compilation framework"), - no_analysis: bool = (false, parse_bool, [UNTRACKED], - "parse and expand the source, but run no analysis"), - extra_plugins: Vec = (Vec::new(), parse_list, [TRACKED], - "load extra plugins"), - unstable_options: bool = (false, parse_bool, [UNTRACKED], - "adds unstable command line options to rustc interface"), - force_overflow_checks: Option = (None, parse_opt_bool, [TRACKED], - "force overflow checks on or off"), - trace_macros: bool = (false, parse_bool, [UNTRACKED], - "for every macro invocation, print its name and arguments"), - debug_macros: bool = (false, parse_bool, [TRACKED], - "emit line numbers debug info inside macros"), - keep_hygiene_data: bool = (false, parse_bool, [UNTRACKED], - "don't clear the hygiene data after analysis"), - keep_ast: bool = (false, parse_bool, [UNTRACKED], - "keep the AST after lowering it to HIR"), - show_span: Option = (None, parse_opt_string, [TRACKED], - "show spans for compiler debugging (expr|pat|ty)"), - print_type_sizes: bool = (false, parse_bool, [UNTRACKED], - "print layout information for each type encountered"), - print_mono_items: Option = (None, parse_opt_string, [UNTRACKED], - "print the result of the monomorphization collection pass"), - mir_opt_level: usize = (1, parse_uint, [TRACKED], - "set the MIR optimization level (0-3, default: 1)"), - mutable_noalias: Option = (None, parse_opt_bool, [TRACKED], - "emit noalias metadata for mutable references (default: yes on LLVM >= 6)"), - dump_mir: Option = (None, parse_opt_string, [UNTRACKED], + build_debugging_options, "Z", "debugging", + DB_OPTIONS, db_type_desc, dbsetters, +codegen_backend: Option = (None, parse_opt_string, [TRACKED], + "the backend to use"), +verbose: bool = (false, parse_bool, [UNTRACKED], + "in general, enable more debug printouts"), +span_free_formats: bool = (false, parse_bool, [UNTRACKED], + "when debug-printing compiler state, do not include spans"), // o/w tests have closure@path +identify_regions: bool = (false, parse_bool, [UNTRACKED], + "make unnamed regions display as '# (where # is some non-ident unique id)"), +borrowck: Option = (None, parse_opt_string, [UNTRACKED], + "select which borrowck is used (`ast`, `mir`, `migrate`, or `compare`)"), +two_phase_borrows: bool = (false, parse_bool, [UNTRACKED], + "use two-phase reserved/active distinction for `&mut` borrows in MIR borrowck"), +two_phase_beyond_autoref: bool = (false, parse_bool, [UNTRACKED], + "when using two-phase-borrows, allow two phases even for non-autoref `&mut` borrows"), +time_passes: bool = (false, parse_bool, [UNTRACKED], + "measure time of each rustc pass"), +count_llvm_insns: bool = (false, parse_bool, + [UNTRACKED_WITH_WARNING(true, + "The output generated by `-Z 
count_llvm_insns` might not be reliable \ + when used with incremental compilation")], + "count where LLVM instrs originate"), +time_llvm_passes: bool = (false, parse_bool, [UNTRACKED_WITH_WARNING(true, + "The output of `-Z time-llvm-passes` will only reflect timings of \ + re-codegened modules when used with incremental compilation" )], + "measure time of each LLVM pass"), +input_stats: bool = (false, parse_bool, [UNTRACKED], + "gather statistics about the input"), +codegen_stats: bool = (false, parse_bool, [UNTRACKED_WITH_WARNING(true, + "The output of `-Z codegen-stats` might not be accurate when incremental \ + compilation is enabled")], + "gather codegen statistics"), +asm_comments: bool = (false, parse_bool, [TRACKED], + "generate comments into the assembly (may change behavior)"), +verify_llvm_ir: bool = (false, parse_bool, [TRACKED], + "verify LLVM IR"), +borrowck_stats: bool = (false, parse_bool, [UNTRACKED], + "gather borrowck statistics"), +no_landing_pads: bool = (false, parse_bool, [TRACKED], + "omit landing pads for unwinding"), +fewer_names: bool = (false, parse_bool, [TRACKED], + "reduce memory use by retaining fewer names within compilation artifacts (LLVM-IR)"), +meta_stats: bool = (false, parse_bool, [UNTRACKED], + "gather metadata statistics"), +print_link_args: bool = (false, parse_bool, [UNTRACKED], + "print the arguments passed to the linker"), +print_llvm_passes: bool = (false, parse_bool, [UNTRACKED], + "prints the llvm optimization passes being run"), +ast_json: bool = (false, parse_bool, [UNTRACKED], + "print the AST as JSON and halt"), +query_threads: Option = (None, parse_opt_uint, [UNTRACKED], + "execute queries on a thread pool with N threads"), +ast_json_noexpand: bool = (false, parse_bool, [UNTRACKED], + "print the pre-expansion AST as JSON and halt"), +ls: bool = (false, parse_bool, [UNTRACKED], + "list the symbols defined by a library crate"), +save_analysis: bool = (false, parse_bool, [UNTRACKED], + "write syntax and type analysis (in JSON format) information, in \ + addition to normal output"), +flowgraph_print_loans: bool = (false, parse_bool, [UNTRACKED], + "include loan analysis data in -Z unpretty flowgraph output"), +flowgraph_print_moves: bool = (false, parse_bool, [UNTRACKED], + "include move analysis data in -Z unpretty flowgraph output"), +flowgraph_print_assigns: bool = (false, parse_bool, [UNTRACKED], + "include assignment analysis data in -Z unpretty flowgraph output"), +flowgraph_print_all: bool = (false, parse_bool, [UNTRACKED], + "include all dataflow analysis data in -Z unpretty flowgraph output"), +print_region_graph: bool = (false, parse_bool, [UNTRACKED], + "prints region inference graph. 
\ + Use with RUST_REGION_GRAPH=help for more info"), +parse_only: bool = (false, parse_bool, [UNTRACKED], + "parse only; do not compile, assemble, or link"), +no_codegen: bool = (false, parse_bool, [TRACKED], + "run all passes except codegen; no output"), +treat_err_as_bug: bool = (false, parse_bool, [TRACKED], + "treat all errors that occur as bugs"), +report_delayed_bugs: bool = (false, parse_bool, [TRACKED], + "immediately print bugs registered with `delay_span_bug`"), +external_macro_backtrace: bool = (false, parse_bool, [UNTRACKED], + "show macro backtraces even for non-local macros"), +teach: bool = (false, parse_bool, [TRACKED], + "show extended diagnostic help"), +continue_parse_after_error: bool = (false, parse_bool, [TRACKED], + "attempt to recover from parse errors (experimental)"), +incremental: Option = (None, parse_opt_string, [UNTRACKED], + "enable incremental compilation (experimental)"), +incremental_queries: bool = (true, parse_bool, [UNTRACKED], + "enable incremental compilation support for queries (experimental)"), +incremental_info: bool = (false, parse_bool, [UNTRACKED], + "print high-level information about incremental reuse (or the lack thereof)"), +incremental_dump_hash: bool = (false, parse_bool, [UNTRACKED], + "dump hash information in textual format to stdout"), +incremental_verify_ich: bool = (false, parse_bool, [UNTRACKED], + "verify incr. comp. hashes of green query instances"), +incremental_ignore_spans: bool = (false, parse_bool, [UNTRACKED], + "ignore spans during ICH computation -- used for testing"), +instrument_mcount: bool = (false, parse_bool, [TRACKED], + "insert function instrument code for mcount-based tracing"), +dump_dep_graph: bool = (false, parse_bool, [UNTRACKED], + "dump the dependency graph to $RUST_DEP_GRAPH (default: /tmp/dep_graph.gv)"), +query_dep_graph: bool = (false, parse_bool, [UNTRACKED], + "enable queries of the dependency graph for regression testing"), +profile_queries: bool = (false, parse_bool, [UNTRACKED], + "trace and profile the queries of the incremental compilation framework"), +profile_queries_and_keys: bool = (false, parse_bool, [UNTRACKED], + "trace and profile the queries and keys of the incremental compilation framework"), +no_analysis: bool = (false, parse_bool, [UNTRACKED], + "parse and expand the source, but run no analysis"), +extra_plugins: Vec = (Vec::new(), parse_list, [TRACKED], + "load extra plugins"), +unstable_options: bool = (false, parse_bool, [UNTRACKED], + "adds unstable command line options to rustc interface"), +force_overflow_checks: Option = (None, parse_opt_bool, [TRACKED], + "force overflow checks on or off"), +trace_macros: bool = (false, parse_bool, [UNTRACKED], + "for every macro invocation, print its name and arguments"), +debug_macros: bool = (false, parse_bool, [TRACKED], + "emit line numbers debug info inside macros"), +keep_hygiene_data: bool = (false, parse_bool, [UNTRACKED], + "don't clear the hygiene data after analysis"), +keep_ast: bool = (false, parse_bool, [UNTRACKED], + "keep the AST after lowering it to HIR"), +show_span: Option = (None, parse_opt_string, [TRACKED], + "show spans for compiler debugging (expr|pat|ty)"), +print_type_sizes: bool = (false, parse_bool, [UNTRACKED], + "print layout information for each type encountered"), +print_mono_items: Option = (None, parse_opt_string, [UNTRACKED], + "print the result of the monomorphization collection pass"), +mir_opt_level: usize = (1, parse_uint, [TRACKED], + "set the MIR optimization level (0-3, default: 1)"), +mutable_noalias: 
Option<bool> = (None, parse_opt_bool, [TRACKED],
+ "emit noalias metadata for mutable references (default: yes on LLVM >= 6)"),
+dump_mir: Option<String> = (None, parse_opt_string, [UNTRACKED],
"dump MIR state to file.
`val` is used to select which passes and functions to dump. For example:
`all` matches all passes and functions,
@@ -1271,112 +1275,112 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
`foo & ConstProp` only the 'ConstProp' pass for function names containing 'foo',
`foo | bar` all passes for function names containing 'foo' or 'bar'."),
- dump_mir_dir: String = (String::from("mir_dump"), parse_string, [UNTRACKED],
- "the directory the MIR is dumped into"),
- dump_mir_graphviz: bool = (false, parse_bool, [UNTRACKED],
- "in addition to `.mir` files, create graphviz `.dot` files"),
- dump_mir_exclude_pass_number: bool = (false, parse_bool, [UNTRACKED],
- "if set, exclude the pass number when dumping MIR (used in tests)"),
- mir_emit_retag: bool = (false, parse_bool, [TRACKED],
- "emit Retagging MIR statements, interpreted e.g., by miri; implies -Zmir-opt-level=0"),
- perf_stats: bool = (false, parse_bool, [UNTRACKED],
- "print some performance-related statistics"),
- query_stats: bool = (false, parse_bool, [UNTRACKED],
- "print some statistics about the query system"),
- hir_stats: bool = (false, parse_bool, [UNTRACKED],
- "print some statistics about AST and HIR"),
- always_encode_mir: bool = (false, parse_bool, [TRACKED],
- "encode MIR of all functions into the crate metadata"),
- osx_rpath_install_name: bool = (false, parse_bool, [TRACKED],
- "pass `-install_name @rpath/...` to the macOS linker"),
- sanitizer: Option<Sanitizer> = (None, parse_sanitizer, [TRACKED],
- "Use a sanitizer"),
- fuel: Option<(String, u64)> = (None, parse_optimization_fuel, [TRACKED],
- "set the optimization fuel quota for a crate"),
- print_fuel: Option<String> = (None, parse_opt_string, [TRACKED],
- "make Rustc print the total optimization fuel used by a crate"),
- force_unstable_if_unmarked: bool = (false, parse_bool, [TRACKED],
- "force all crates to be `rustc_private` unstable"),
- pre_link_arg: Vec<String> = (vec![], parse_string_push, [UNTRACKED],
- "a single extra argument to prepend the linker invocation (can be used several times)"),
- pre_link_args: Option<Vec<String>> = (None, parse_opt_list, [UNTRACKED],
- "extra arguments to prepend to the linker invocation (space separated)"),
- profile: bool = (false, parse_bool, [TRACKED],
- "insert profiling code"),
- pgo_gen: Option<String> = (None, parse_opt_string, [TRACKED],
- "Generate PGO profile data, to a given file, or to the default location if it's empty."),
- pgo_use: String = (String::new(), parse_string, [TRACKED],
- "Use PGO profile data from the given profile file."),
- disable_instrumentation_preinliner: bool = (false, parse_bool, [TRACKED],
- "Disable the instrumentation pre-inliner, useful for profiling / PGO."),
- relro_level: Option<RelroLevel> = (None, parse_relro_level, [TRACKED],
- "choose which RELRO level to use"),
- nll_subminimal_causes: bool = (false, parse_bool, [UNTRACKED],
- "when tracking region error causes, accept subminimal results for faster execution."),
- nll_facts: bool = (false, parse_bool, [UNTRACKED],
- "dump facts from NLL analysis into side files"),
- disable_nll_user_type_assert: bool = (false, parse_bool, [UNTRACKED],
- "disable user provided type assertion in NLL"),
- nll_dont_emit_read_for_match: bool = (false, parse_bool, [UNTRACKED],
- "in match codegen, do not include FakeRead statements (used by mir-borrowck)"),
- dont_buffer_diagnostics: bool = (false, parse_bool, [UNTRACKED],
- "emit diagnostics rather than buffering (breaks NLL error downgrading, sorting)."),
- polonius: bool = (false, parse_bool, [UNTRACKED],
- "enable polonius-based borrow-checker"),
- codegen_time_graph: bool = (false, parse_bool, [UNTRACKED],
- "generate a graphical HTML report of time spent in codegen and LLVM"),
- thinlto: Option<bool> = (None, parse_opt_bool, [TRACKED],
- "enable ThinLTO when possible"),
- inline_in_all_cgus: Option<bool> = (None, parse_opt_bool, [TRACKED],
- "control whether #[inline] functions are in all cgus"),
- tls_model: Option<String> = (None, parse_opt_string, [TRACKED],
- "choose the TLS model to use (rustc --print tls-models for details)"),
- saturating_float_casts: bool = (false, parse_bool, [TRACKED],
- "make float->int casts UB-free: numbers outside the integer type's range are clipped to \
- the max/min integer respectively, and NaN is mapped to 0"),
- lower_128bit_ops: Option<bool> = (None, parse_opt_bool, [TRACKED],
- "rewrite operators on i128 and u128 into lang item calls (typically provided \
+dump_mir_dir: String = (String::from("mir_dump"), parse_string, [UNTRACKED],
+ "the directory the MIR is dumped into"),
+dump_mir_graphviz: bool = (false, parse_bool, [UNTRACKED],
+ "in addition to `.mir` files, create graphviz `.dot` files"),
+dump_mir_exclude_pass_number: bool = (false, parse_bool, [UNTRACKED],
+ "if set, exclude the pass number when dumping MIR (used in tests)"),
+mir_emit_retag: bool = (false, parse_bool, [TRACKED],
+ "emit Retagging MIR statements, interpreted e.g., by miri; implies -Zmir-opt-level=0"),
+perf_stats: bool = (false, parse_bool, [UNTRACKED],
+ "print some performance-related statistics"),
+query_stats: bool = (false, parse_bool, [UNTRACKED],
+ "print some statistics about the query system"),
+hir_stats: bool = (false, parse_bool, [UNTRACKED],
+ "print some statistics about AST and HIR"),
+always_encode_mir: bool = (false, parse_bool, [TRACKED],
+ "encode MIR of all functions into the crate metadata"),
+osx_rpath_install_name: bool = (false, parse_bool, [TRACKED],
+ "pass `-install_name @rpath/...` to the macOS linker"),
+sanitizer: Option<Sanitizer> = (None, parse_sanitizer, [TRACKED],
+ "Use a sanitizer"),
+fuel: Option<(String, u64)> = (None, parse_optimization_fuel, [TRACKED],
+ "set the optimization fuel quota for a crate"),
+print_fuel: Option<String> = (None, parse_opt_string, [TRACKED],
+ "make Rustc print the total optimization fuel used by a crate"),
+force_unstable_if_unmarked: bool = (false, parse_bool, [TRACKED],
+ "force all crates to be `rustc_private` unstable"),
+pre_link_arg: Vec<String> = (vec![], parse_string_push, [UNTRACKED],
+ "a single extra argument to prepend the linker invocation (can be used several times)"),
+pre_link_args: Option<Vec<String>> = (None, parse_opt_list, [UNTRACKED],
+ "extra arguments to prepend to the linker invocation (space separated)"),
+profile: bool = (false, parse_bool, [TRACKED],
+ "insert profiling code"),
+pgo_gen: Option<String> = (None, parse_opt_string, [TRACKED],
+ "Generate PGO profile data, to a given file, or to the default location if it's empty."),
+pgo_use: String = (String::new(), parse_string, [TRACKED],
+ "Use PGO profile data from the given profile file."),
+disable_instrumentation_preinliner: bool = (false, parse_bool, [TRACKED],
+ "Disable the instrumentation pre-inliner, useful for profiling / PGO."),
+relro_level: Option<RelroLevel> = (None, parse_relro_level, [TRACKED],
+ "choose which RELRO level to use"),
+nll_subminimal_causes: bool = (false, parse_bool, [UNTRACKED],
+ "when tracking region error causes, accept subminimal results for faster execution."),
+nll_facts: bool = (false, parse_bool, [UNTRACKED],
+ "dump facts from NLL analysis into side files"),
+disable_nll_user_type_assert: bool = (false, parse_bool, [UNTRACKED],
+ "disable user provided type assertion in NLL"),
+nll_dont_emit_read_for_match: bool = (false, parse_bool, [UNTRACKED],
+ "in match codegen, do not include FakeRead statements (used by mir-borrowck)"),
+dont_buffer_diagnostics: bool = (false, parse_bool, [UNTRACKED],
+ "emit diagnostics rather than buffering (breaks NLL error downgrading, sorting)."),
+polonius: bool = (false, parse_bool, [UNTRACKED],
+ "enable polonius-based borrow-checker"),
+codegen_time_graph: bool = (false, parse_bool, [UNTRACKED],
+ "generate a graphical HTML report of time spent in codegen and LLVM"),
+thinlto: Option<bool> = (None, parse_opt_bool, [TRACKED],
+ "enable ThinLTO when possible"),
+inline_in_all_cgus: Option<bool> = (None, parse_opt_bool, [TRACKED],
+ "control whether #[inline] functions are in all cgus"),
+tls_model: Option<String> = (None, parse_opt_string, [TRACKED],
+ "choose the TLS model to use (rustc --print tls-models for details)"),
+saturating_float_casts: bool = (false, parse_bool, [TRACKED],
+ "make float->int casts UB-free: numbers outside the integer type's range are clipped to \
+ the max/min integer respectively, and NaN is mapped to 0"),
+lower_128bit_ops: Option<bool> = (None, parse_opt_bool, [TRACKED],
+ "rewrite operators on i128 and u128 into lang item calls (typically provided \
by compiler-builtins) so codegen doesn't need to support them,
overriding the default for the current target"),
- human_readable_cgu_names: bool = (false, parse_bool, [TRACKED],
- "generate human-readable, predictable names for codegen units"),
- dep_info_omit_d_target: bool = (false, parse_bool, [TRACKED],
- "in dep-info output, omit targets for tracking dependencies of the dep-info files \
- themselves"),
- unpretty: Option<String> = (None, parse_unpretty, [UNTRACKED],
+human_readable_cgu_names: bool = (false, parse_bool, [TRACKED],
+ "generate human-readable, predictable names for codegen units"),
+dep_info_omit_d_target: bool = (false, parse_bool, [TRACKED],
+ "in dep-info output, omit targets for tracking dependencies of the dep-info files \
+ themselves"),
+unpretty: Option<String> = (None, parse_unpretty, [UNTRACKED],
"Present the input source, unstable (and less-pretty) variants;
valid types are any of the types for `--pretty`, as well as:
`flowgraph=<nodeid>` (graphviz formatted flowgraph for node),
`everybody_loops` (all function bodies replaced with `loop {}`),
`hir` (the HIR), `hir,identified`,
or `hir,typed` (HIR with types for each node)."),
- run_dsymutil: Option<bool> = (None, parse_opt_bool, [TRACKED],
- "run `dsymutil` and delete intermediate object files"),
- ui_testing: bool = (false, parse_bool, [UNTRACKED],
- "format compiler diagnostics in a way that's better suitable for UI testing"),
- embed_bitcode: bool = (false, parse_bool, [TRACKED],
- "embed LLVM bitcode in object files"),
- strip_debuginfo_if_disabled: Option<bool> = (None, parse_opt_bool, [TRACKED],
- "tell the linker to strip debuginfo when building without debuginfo enabled."),
- share_generics: Option<bool> = (None, parse_opt_bool, [TRACKED],
- "make the current crate share its generic instantiations"),
- chalk: bool = (false, parse_bool, [TRACKED],
- "enable the experimental Chalk-based trait solving engine"),
- cross_lang_lto: CrossLangLto = (CrossLangLto::Disabled, parse_cross_lang_lto, [TRACKED],
- "generate build artifacts that are compatible with linker-based LTO."),
- no_parallel_llvm: bool = (false, parse_bool, [UNTRACKED],
- "don't run LLVM in parallel (while keeping codegen-units and ThinLTO)"),
- no_leak_check: bool = (false, parse_bool, [UNTRACKED],
- "disables the 'leak check' for subtyping; unsound, but useful for tests"),
- crate_attr: Vec<String> = (Vec::new(), parse_string_push, [TRACKED],
- "inject the given attribute in the crate"),
- self_profile: bool = (false, parse_bool, [UNTRACKED],
- "run the self profiler"),
- profile_json: bool = (false, parse_bool, [UNTRACKED],
- "output a json file with profiler results"),
- emit_stack_sizes: bool = (false, parse_bool, [UNTRACKED],
- "emits a section containing stack size metadata"),
- plt: Option<bool> = (None, parse_opt_bool, [TRACKED],
+run_dsymutil: Option<bool> = (None, parse_opt_bool, [TRACKED],
+ "run `dsymutil` and delete intermediate object files"),
+ui_testing: bool = (false, parse_bool, [UNTRACKED],
+ "format compiler diagnostics in a way that's better suitable for UI testing"),
+embed_bitcode: bool = (false, parse_bool, [TRACKED],
+ "embed LLVM bitcode in object files"),
+strip_debuginfo_if_disabled: Option<bool> = (None, parse_opt_bool, [TRACKED],
+ "tell the linker to strip debuginfo when building without debuginfo enabled."),
+share_generics: Option<bool> = (None, parse_opt_bool, [TRACKED],
+ "make the current crate share its generic instantiations"),
+chalk: bool = (false, parse_bool, [TRACKED],
+ "enable the experimental Chalk-based trait solving engine"),
+cross_lang_lto: CrossLangLto = (CrossLangLto::Disabled, parse_cross_lang_lto, [TRACKED],
+ "generate build artifacts that are compatible with linker-based LTO."),
+no_parallel_llvm: bool = (false, parse_bool, [UNTRACKED],
+ "don't run LLVM in parallel (while keeping codegen-units and ThinLTO)"),
+no_leak_check: bool = (false, parse_bool, [UNTRACKED],
+ "disables the 'leak check' for subtyping; unsound, but useful for tests"),
+crate_attr: Vec<String> = (Vec::new(), parse_string_push, [TRACKED],
+ "inject the given attribute in the crate"),
+self_profile: bool = (false, parse_bool, [UNTRACKED],
+ "run the self profiler"),
+profile_json: bool = (false, parse_bool, [UNTRACKED],
+ "output a json file with profiler results"),
+emit_stack_sizes: bool = (false, parse_bool, [UNTRACKED],
+ "emits a section containing stack size metadata"),
+plt: Option<bool> = (None, parse_opt_bool, [TRACKED],
"whether to use the PLT when calling into shared libraries;
only has effect for PIC code on systems with ELF binaries
(default: PLT is disabled if full relro is enabled)"),
@@ -1399,7 +1403,7 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
let mut ret = FxHashSet::default();
ret.reserve(6); // the minimum number of insertions
- // Target bindings.
+ // Target bindings.
ret.insert((Symbol::intern("target_os"), Some(Symbol::intern(os)))); if let Some(ref fam) = sess.target.target.options.target_family { ret.insert((Symbol::intern("target_family"), Some(Symbol::intern(fam)))); @@ -1437,7 +1441,10 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig { } } if atomic_cas { - ret.insert((Symbol::intern("target_has_atomic"), Some(Symbol::intern("cas")))); + ret.insert(( + Symbol::intern("target_has_atomic"), + Some(Symbol::intern("cas")), + )); } if sess.opts.debug_assertions { ret.insert((Symbol::intern("debug_assertions"), None)); @@ -1463,8 +1470,8 @@ pub fn build_configuration(sess: &Session, mut user_cfg: ast::CrateConfig) -> as pub fn build_target_config(opts: &Options, sp: &Handler) -> Config { let target = Target::search(&opts.target_triple).unwrap_or_else(|e| { sp.struct_fatal(&format!("Error loading target specification: {}", e)) - .help("Use `--print target-list` for a list of built-in targets") - .emit(); + .help("Use `--print target-list` for a list of built-in targets") + .emit(); FatalError.raise(); }); @@ -1472,11 +1479,13 @@ pub fn build_target_config(opts: &Options, sp: &Handler) -> Config { "16" => (ast::IntTy::I16, ast::UintTy::U16), "32" => (ast::IntTy::I32, ast::UintTy::U32), "64" => (ast::IntTy::I64, ast::UintTy::U64), - w => sp.fatal(&format!( - "target specification was invalid: \ - unrecognized target-pointer-width {}", - w - )).raise(), + w => sp + .fatal(&format!( + "target specification was invalid: \ + unrecognized target-pointer-width {}", + w + )) + .raise(), }; Config { @@ -1537,8 +1546,8 @@ mod opt { // in the future; do not warn about them not being used right now. #![allow(dead_code)] - use getopts; use super::RustcOptGroup; + use getopts; pub type R = RustcOptGroup; pub type S = &'static str; @@ -1756,10 +1765,14 @@ pub fn parse_cfgspecs(cfgspecs: Vec) -> ast::CrateConfig { let filename = FileName::cfg_spec_source_code(&s); let mut parser = parse::new_parser_from_source_str(&sess, filename, s.to_string()); - macro_rules! error {($reason: expr) => { - early_error(ErrorOutputType::default(), - &format!(concat!("invalid `--cfg` argument: `{}` (", $reason, ")"), s)); - }} + macro_rules! error { + ($reason: expr) => { + early_error( + ErrorOutputType::default(), + &format!(concat!("invalid `--cfg` argument: `{}` (", $reason, ")"), s), + ); + }; + } match &mut parser.parse_meta_item() { Ok(meta_item) if parser.token == token::Eof => { @@ -1787,9 +1800,10 @@ pub fn parse_cfgspecs(cfgspecs: Vec) -> ast::CrateConfig { .collect::() } -pub fn get_cmd_lint_options(matches: &getopts::Matches, - error_format: ErrorOutputType) - -> (Vec<(String, lint::Level)>, bool, Option) { +pub fn get_cmd_lint_options( + matches: &getopts::Matches, + error_format: ErrorOutputType, +) -> (Vec<(String, lint::Level)>, bool, Option) { let mut lint_opts = vec![]; let mut describe_lints = false; @@ -1831,32 +1845,30 @@ pub fn build_session_options_and_crate_config( }; let edition = match matches.opt_str("edition") { - Some(arg) => Edition::from_str(&arg).unwrap_or_else(|_| + Some(arg) => Edition::from_str(&arg).unwrap_or_else(|_| { early_error( ErrorOutputType::default(), &format!( "argument for --edition must be one of: \ {}. 
(instead was `{}`)", - EDITION_NAME_LIST, - arg + EDITION_NAME_LIST, arg ), - ), - ), + ) + }), None => DEFAULT_EDITION, }; if !edition.is_stable() && !nightly_options::is_nightly_build() { early_error( - ErrorOutputType::default(), - &format!( - "Edition {} is unstable and only \ - available for nightly builds of rustc.", - edition, - ) + ErrorOutputType::default(), + &format!( + "Edition {} is unstable and only \ + available for nightly builds of rustc.", + edition, + ), ) } - // We need the opts_present check because the driver will send us Matches // with only stable options if no unstable options are used. Since error-format // is unstable, it will not be present. We have to use opts_present not @@ -1910,7 +1922,7 @@ pub fn build_session_options_and_crate_config( for output_type in list.split(',') { let mut parts = output_type.splitn(2, '='); let shorthand = parts.next().unwrap(); - let output_type = OutputType::from_shorthand(shorthand).unwrap_or_else(|| + let output_type = OutputType::from_shorthand(shorthand).unwrap_or_else(|| { early_error( error_format, &format!( @@ -1918,8 +1930,8 @@ pub fn build_session_options_and_crate_config( shorthand, OutputType::shorthands_display(), ), - ), - ); + ) + }); let path = parts.next().map(PathBuf::from); output_types.insert(output_type, path); } @@ -2006,7 +2018,8 @@ pub fn build_session_options_and_crate_config( (&Some(ref path), &None) => Some(path), (&None, &Some(ref path)) => Some(path), (&None, &None) => None, - }.map(|m| PathBuf::from(m)); + } + .map(|m| PathBuf::from(m)); if debugging_opts.profile && incremental.is_some() { early_error( @@ -2047,8 +2060,12 @@ pub fn build_session_options_and_crate_config( let target_triple = if let Some(target) = matches.opt_str("target") { if target.ends_with(".json") { let path = Path::new(&target); - TargetTriple::from_path(&path).unwrap_or_else(|_| - early_error(error_format, &format!("target file {:?} does not exist", path))) + TargetTriple::from_path(&path).unwrap_or_else(|_| { + early_error( + error_format, + &format!("target file {:?} does not exist", path), + ) + }) } else { TargetTriple::TargetTriple(target) } @@ -2202,8 +2219,9 @@ pub fn build_session_options_and_crate_config( let mut externs: BTreeMap<_, BTreeSet<_>> = BTreeMap::new(); for arg in &matches.opt_strs("extern") { let mut parts = arg.splitn(2, '='); - let name = parts.next().unwrap_or_else(|| - early_error(error_format, "--extern value must not be empty")); + let name = parts + .next() + .unwrap_or_else(|| early_error(error_format, "--extern value must not be empty")); let location = parts.next().map(|s| s.to_string()); if location.is_none() && !is_unstable_enabled { early_error( @@ -2213,10 +2231,7 @@ pub fn build_session_options_and_crate_config( ); }; - externs - .entry(name.to_owned()) - .or_default() - .insert(location); + externs.entry(name.to_owned()).or_default().insert(location); } let crate_name = matches.opt_str("crate-name"); @@ -2285,7 +2300,7 @@ pub fn parse_crate_types_from_list(list_list: Vec) -> Result CrateType::Cdylib, "bin" => CrateType::Executable, "proc-macro" => CrateType::ProcMacro, - _ => return Err(format!("unknown crate type: `{}`", part)) + _ => return Err(format!("unknown crate type: `{}`", part)), }; if !crate_types.contains(&new_part) { crate_types.push(new_part) @@ -2297,10 +2312,10 @@ pub fn parse_crate_types_from_list(list_list: Vec) -> Result bool { is_nightly_build() @@ -2389,34 +2404,36 @@ impl fmt::Display for CrateType { /// we have an opt-in scheme here, so one is hopefully forced to think 
about /// how the hash should be calculated when adding a new command-line argument. mod dep_tracking { + use super::{ + CrateType, CrossLangLto, DebugInfo, ErrorOutputType, LtoCli, OptLevel, OutputTypes, Passes, + Sanitizer, + }; use lint; use middle::cstore; + use rustc_target::spec::{PanicStrategy, RelroLevel, TargetTriple}; + use std::collections::hash_map::DefaultHasher; use std::collections::BTreeMap; use std::hash::Hash; use std::path::PathBuf; - use std::collections::hash_map::DefaultHasher; - use super::{CrateType, DebugInfo, ErrorOutputType, OptLevel, OutputTypes, - Passes, Sanitizer, LtoCli, CrossLangLto}; - use syntax::feature_gate::UnstableFeatures; - use rustc_target::spec::{PanicStrategy, RelroLevel, TargetTriple}; use syntax::edition::Edition; + use syntax::feature_gate::UnstableFeatures; pub trait DepTrackingHash { fn hash(&self, hasher: &mut DefaultHasher, error_format: ErrorOutputType); } macro_rules! impl_dep_tracking_hash_via_hash { - ($t:ty) => ( + ($t:ty) => { impl DepTrackingHash for $t { fn hash(&self, hasher: &mut DefaultHasher, _: ErrorOutputType) { Hash::hash(self, hasher); } } - ) + }; } macro_rules! impl_dep_tracking_hash_for_sortable_vec_of { - ($t:ty) => ( + ($t:ty) => { impl DepTrackingHash for Vec<$t> { fn hash(&self, hasher: &mut DefaultHasher, error_format: ErrorOutputType) { let mut elems: Vec<&$t> = self.iter().collect(); @@ -2428,7 +2445,7 @@ mod dep_tracking { } } } - ); + }; } impl_dep_tracking_hash_via_hash!(bool); @@ -2520,23 +2537,23 @@ mod dep_tracking { #[cfg(test)] mod tests { + use super::Options; + use super::{Externs, OutputType, OutputTypes}; use errors; use getopts; use lint; use middle::cstore; - use session::config::{build_configuration, build_session_options_and_crate_config}; - use session::config::{LtoCli, CrossLangLto}; + use rustc_target::spec::{PanicStrategy, RelroLevel}; use session::build_session; + use session::config::{build_configuration, build_session_options_and_crate_config}; + use session::config::{CrossLangLto, LtoCli}; use session::search_paths::SearchPath; use std::collections::{BTreeMap, BTreeSet}; use std::iter::FromIterator; use std::path::PathBuf; - use super::{Externs, OutputType, OutputTypes}; - use rustc_target::spec::{PanicStrategy, RelroLevel}; - use syntax::symbol::Symbol; - use syntax::edition::{Edition, DEFAULT_EDITION}; use syntax; - use super::Options; + use syntax::edition::{Edition, DEFAULT_EDITION}; + use syntax::symbol::Symbol; fn optgroups() -> getopts::Options { let mut opts = getopts::Options::new(); @@ -2575,11 +2592,11 @@ mod tests { #[test] fn test_switch_implies_cfg_test_unless_cfg_test() { syntax::with_globals(|| { - let matches = &match optgroups().parse(&["--test".to_string(), - "--cfg=test".to_string()]) { - Ok(m) => m, - Err(f) => panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f), - }; + let matches = + &match optgroups().parse(&["--test".to_string(), "--cfg=test".to_string()]) { + Ok(m) => m, + Err(f) => panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f), + }; let registry = errors::registry::Registry::new(&[]); let (sessopts, cfg) = build_session_options_and_crate_config(matches); let sess = build_session(sessopts, None, registry); @@ -2782,49 +2799,89 @@ mod tests { let mut v4 = Options::default(); // Reference - v1.search_paths - .push(SearchPath::from_cli_opt("native=abc", super::ErrorOutputType::Json(false))); - v1.search_paths - .push(SearchPath::from_cli_opt("crate=def", super::ErrorOutputType::Json(false))); - v1.search_paths - 
.push(SearchPath::from_cli_opt("dependency=ghi", super::ErrorOutputType::Json(false))); - v1.search_paths - .push(SearchPath::from_cli_opt("framework=jkl", super::ErrorOutputType::Json(false))); - v1.search_paths - .push(SearchPath::from_cli_opt("all=mno", super::ErrorOutputType::Json(false))); - - v2.search_paths - .push(SearchPath::from_cli_opt("native=abc", super::ErrorOutputType::Json(false))); - v2.search_paths - .push(SearchPath::from_cli_opt("dependency=ghi", super::ErrorOutputType::Json(false))); - v2.search_paths - .push(SearchPath::from_cli_opt("crate=def", super::ErrorOutputType::Json(false))); - v2.search_paths - .push(SearchPath::from_cli_opt("framework=jkl", super::ErrorOutputType::Json(false))); - v2.search_paths - .push(SearchPath::from_cli_opt("all=mno", super::ErrorOutputType::Json(false))); - - v3.search_paths - .push(SearchPath::from_cli_opt("crate=def", super::ErrorOutputType::Json(false))); - v3.search_paths - .push(SearchPath::from_cli_opt("framework=jkl", super::ErrorOutputType::Json(false))); - v3.search_paths - .push(SearchPath::from_cli_opt("native=abc", super::ErrorOutputType::Json(false))); - v3.search_paths - .push(SearchPath::from_cli_opt("dependency=ghi", super::ErrorOutputType::Json(false))); - v3.search_paths - .push(SearchPath::from_cli_opt("all=mno", super::ErrorOutputType::Json(false))); - - v4.search_paths - .push(SearchPath::from_cli_opt("all=mno", super::ErrorOutputType::Json(false))); - v4.search_paths - .push(SearchPath::from_cli_opt("native=abc", super::ErrorOutputType::Json(false))); - v4.search_paths - .push(SearchPath::from_cli_opt("crate=def", super::ErrorOutputType::Json(false))); - v4.search_paths - .push(SearchPath::from_cli_opt("dependency=ghi", super::ErrorOutputType::Json(false))); - v4.search_paths - .push(SearchPath::from_cli_opt("framework=jkl", super::ErrorOutputType::Json(false))); + v1.search_paths.push(SearchPath::from_cli_opt( + "native=abc", + super::ErrorOutputType::Json(false), + )); + v1.search_paths.push(SearchPath::from_cli_opt( + "crate=def", + super::ErrorOutputType::Json(false), + )); + v1.search_paths.push(SearchPath::from_cli_opt( + "dependency=ghi", + super::ErrorOutputType::Json(false), + )); + v1.search_paths.push(SearchPath::from_cli_opt( + "framework=jkl", + super::ErrorOutputType::Json(false), + )); + v1.search_paths.push(SearchPath::from_cli_opt( + "all=mno", + super::ErrorOutputType::Json(false), + )); + + v2.search_paths.push(SearchPath::from_cli_opt( + "native=abc", + super::ErrorOutputType::Json(false), + )); + v2.search_paths.push(SearchPath::from_cli_opt( + "dependency=ghi", + super::ErrorOutputType::Json(false), + )); + v2.search_paths.push(SearchPath::from_cli_opt( + "crate=def", + super::ErrorOutputType::Json(false), + )); + v2.search_paths.push(SearchPath::from_cli_opt( + "framework=jkl", + super::ErrorOutputType::Json(false), + )); + v2.search_paths.push(SearchPath::from_cli_opt( + "all=mno", + super::ErrorOutputType::Json(false), + )); + + v3.search_paths.push(SearchPath::from_cli_opt( + "crate=def", + super::ErrorOutputType::Json(false), + )); + v3.search_paths.push(SearchPath::from_cli_opt( + "framework=jkl", + super::ErrorOutputType::Json(false), + )); + v3.search_paths.push(SearchPath::from_cli_opt( + "native=abc", + super::ErrorOutputType::Json(false), + )); + v3.search_paths.push(SearchPath::from_cli_opt( + "dependency=ghi", + super::ErrorOutputType::Json(false), + )); + v3.search_paths.push(SearchPath::from_cli_opt( + "all=mno", + super::ErrorOutputType::Json(false), + )); + + 
v4.search_paths.push(SearchPath::from_cli_opt(
+ "all=mno",
+ super::ErrorOutputType::Json(false),
+ ));
+ v4.search_paths.push(SearchPath::from_cli_opt(
+ "native=abc",
+ super::ErrorOutputType::Json(false),
+ ));
+ v4.search_paths.push(SearchPath::from_cli_opt(
+ "crate=def",
+ super::ErrorOutputType::Json(false),
+ ));
+ v4.search_paths.push(SearchPath::from_cli_opt(
+ "dependency=ghi",
+ super::ErrorOutputType::Json(false),
+ ));
+ v4.search_paths.push(SearchPath::from_cli_opt(
+ "framework=jkl",
+ super::ErrorOutputType::Json(false),
+ ));

assert!(v1.dep_tracking_hash() == v2.dep_tracking_hash());
assert!(v1.dep_tracking_hash() == v3.dep_tracking_hash());
@@ -3195,9 +3252,7 @@ mod tests {
let options = Options::default();
assert!(options.edition == DEFAULT_EDITION);

- let matches = optgroups()
- .parse(&["--edition=2018".to_string()])
- .unwrap();
+ let matches = optgroups().parse(&["--edition=2018".to_string()]).unwrap();
let (sessopts, _) = build_session_options_and_crate_config(&matches);
assert!(sessopts.edition == Edition::Edition2018)
}
diff --git a/src/librustc/session/filesearch.rs b/src/librustc/session/filesearch.rs
index 19f1c7a18fad1..c1216ad9b13eb 100644
--- a/src/librustc/session/filesearch.rs
+++ b/src/librustc/session/filesearch.rs
@@ -7,8 +7,8 @@ use std::env;
use std::fs;
use std::path::{Path, PathBuf};

-use session::search_paths::{SearchPath, PathKind};
use rustc_fs_util::fix_windows_verbatim_for_gcc;
+use session::search_paths::{PathKind, SearchPath};

#[derive(Copy, Clone)]
pub enum FileMatch {
@@ -29,7 +29,8 @@ pub struct FileSearch<'a> {
impl<'a> FileSearch<'a> {
pub fn search_paths(&self) -> impl Iterator<Item = &'a SearchPath> {
let kind = self.kind;
- self.search_paths.iter()
+ self.search_paths
+ .iter()
.filter(move |sp| sp.kind.matches(kind))
.chain(std::iter::once(self.tlib_path))
}
@@ -39,7 +40,8 @@ impl<'a> FileSearch<'a> {
}

pub fn search<F>(&self, mut pick: F)
- where F: FnMut(&Path, PathKind) -> FileMatch
+ where
+ F: FnMut(&Path, PathKind) -> FileMatch,
{
for search_path in self.search_paths() {
debug!("searching {}", search_path.dir.display());
@@ -67,12 +69,13 @@ impl<'a> FileSearch<'a> {
}
}

- pub fn new(sysroot: &'a Path,
- triple: &'a str,
- search_paths: &'a Vec<SearchPath>,
- tlib_path: &'a SearchPath,
- kind: PathKind)
- -> FileSearch<'a> {
+ pub fn new(
+ sysroot: &'a Path,
+ triple: &'a str,
+ search_paths: &'a Vec<SearchPath>,
+ tlib_path: &'a SearchPath,
+ kind: PathKind,
+ ) -> FileSearch<'a> {
debug!("using sysroot = {}, triple = {}", sysroot.display(), triple);
FileSearch {
sysroot,
@@ -85,9 +88,7 @@ impl<'a> FileSearch<'a> {

// Returns just the directories within the search paths.
pub fn search_path_dirs(&self) -> Vec<PathBuf> {
- self.search_paths()
- .map(|sp| sp.dir.to_path_buf())
- .collect()
+ self.search_paths().map(|sp| sp.dir.to_path_buf()).collect()
}

// Returns a list of directories where target-specific tool binaries are located.
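
The `filesearch` hunks above only reflow code, but they show the lookup order: user-supplied search paths are filtered by the requested kind, and the target's own lib directory is always consulted last. Below is a compact standalone model of that order; `PathKind`, the `matches` rule, and the example directories are simplified stand-ins rather than rustc's exact definitions.

use std::path::PathBuf;

// Simplified stand-ins for the kinds used by `-L` (assumption: the real
// matching rules in rustc are more involved).
#[derive(Clone, Copy, PartialEq, Debug)]
enum PathKind { Native, Crate, All }

#[derive(Debug)]
struct SearchPath { kind: PathKind, dir: PathBuf }

impl PathKind {
    // An "All" path satisfies any request, a request for "All" accepts any
    // path; otherwise the kinds must match exactly.
    fn matches(self, requested: PathKind) -> bool {
        self == PathKind::All || requested == PathKind::All || self == requested
    }
}

// Same shape as `FileSearch::search_paths` above: filter user paths by kind,
// then always append the target lib path.
fn search_paths<'a>(
    user_paths: &'a [SearchPath],
    tlib_path: &'a SearchPath,
    kind: PathKind,
) -> impl Iterator<Item = &'a SearchPath> {
    user_paths
        .iter()
        .filter(move |sp| sp.kind.matches(kind))
        .chain(std::iter::once(tlib_path))
}

fn main() {
    let user = vec![
        SearchPath { kind: PathKind::Native, dir: PathBuf::from("/opt/native-libs") },
        SearchPath { kind: PathKind::Crate, dir: PathBuf::from("/opt/crates") },
    ];
    let tlib = SearchPath { kind: PathKind::All, dir: PathBuf::from("/sysroot/lib") };
    for sp in search_paths(&user, &tlib, PathKind::Crate) {
        println!("would search {:?} ({:?})", sp.dir, sp.kind);
    }
}
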
@@ -129,13 +130,15 @@ pub fn get_or_default_sysroot() -> PathBuf { } match env::current_exe() { - Ok(exe) => { - match canonicalize(Some(exe)) { - Some(mut p) => { p.pop(); p.pop(); p }, - None => bug!("can't determine value for sysroot") + Ok(exe) => match canonicalize(Some(exe)) { + Some(mut p) => { + p.pop(); + p.pop(); + p } - } - Err(ref e) => panic!(format!("failed to get current_exe: {}", e)) + None => bug!("can't determine value for sysroot"), + }, + Err(ref e) => panic!(format!("failed to get current_exe: {}", e)), } } @@ -159,10 +162,12 @@ fn find_libdir(sysroot: &Path) -> Cow<'static, str> { match option_env!("CFG_LIBDIR_RELATIVE") { Some(libdir) if libdir != "lib" => libdir.into(), - _ => if sysroot.join(PRIMARY_LIB_DIR).join(RUST_LIB_DIR).exists() { - PRIMARY_LIB_DIR.into() - } else { - SECONDARY_LIB_DIR.into() + _ => { + if sysroot.join(PRIMARY_LIB_DIR).join(RUST_LIB_DIR).exists() { + PRIMARY_LIB_DIR.into() + } else { + SECONDARY_LIB_DIR.into() + } } } } diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 5c977d5969e16..54d0e7f682471 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -1,5 +1,5 @@ -pub use self::code_stats::{DataTypeKind, SizeKind, FieldInfo, VariantInfo}; use self::code_stats::CodeStats; +pub use self::code_stats::{DataTypeKind, FieldInfo, SizeKind, VariantInfo}; use dep_graph::cgu_reuse_tracker::CguReuseTracker; use hir::def_id::CrateNum; @@ -9,29 +9,29 @@ use lint; use lint::builtin::BuiltinLintDiagnostics; use middle::allocator::AllocatorKind; use middle::dependency_format; -use session::config::{OutputType, Lto}; +use session::config::{Lto, OutputType}; use session::search_paths::{PathKind, SearchPath}; -use util::nodemap::{FxHashMap, FxHashSet}; -use util::common::{duration_to_secs_str, ErrorReported}; use util::common::ProfileQueriesMsg; +use util::common::{duration_to_secs_str, ErrorReported}; +use util::nodemap::{FxHashMap, FxHashSet}; use rustc_data_structures::base_n; -use rustc_data_structures::sync::{self, Lrc, Lock, LockCell, OneThread, Once, RwLock}; +use rustc_data_structures::sync::{self, Lock, LockCell, Lrc, Once, OneThread, RwLock}; -use errors::{self, DiagnosticBuilder, DiagnosticId, Applicability}; use errors::emitter::{Emitter, EmitterWriter}; +use errors::{self, Applicability, DiagnosticBuilder, DiagnosticId}; use syntax::ast::{self, NodeId}; use syntax::edition::Edition; use syntax::feature_gate::{self, AttributeType}; use syntax::json::JsonEmitter; -use syntax::source_map; use syntax::parse::{self, ParseSess}; +use syntax::source_map; use syntax_pos::{MultiSpan, Span}; use util::profiling::SelfProfiler; -use rustc_target::spec::{PanicStrategy, RelroLevel, Target, TargetTriple}; -use rustc_data_structures::flock; use jobserver::Client; +use rustc_data_structures::flock; +use rustc_target::spec::{PanicStrategy, RelroLevel, Target, TargetTriple}; use std; use std::cell::{self, Cell, RefCell}; @@ -39,9 +39,9 @@ use std::env; use std::fmt; use std::io::Write; use std::path::PathBuf; -use std::time::Duration; -use std::sync::mpsc; use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::mpsc; +use std::time::Duration; mod code_stats; pub mod config; @@ -414,7 +414,8 @@ impl Session { span_maybe: Option, ) { let id_span_message = (msg_id, span_maybe, message.to_owned()); - let fresh = self.one_time_diagnostics + let fresh = self + .one_time_diagnostics .borrow_mut() .insert(id_span_message); if fresh { @@ -522,8 +523,7 @@ impl Session { self.opts.debugging_opts.asm_comments } pub fn 
verify_llvm_ir(&self) -> bool { - self.opts.debugging_opts.verify_llvm_ir - || cfg!(always_verify_llvm_ir) + self.opts.debugging_opts.verify_llvm_ir || cfg!(always_verify_llvm_ir) } pub fn borrowck_stats(&self) -> bool { self.opts.debugging_opts.borrowck_stats @@ -563,9 +563,7 @@ impl Session { // The user explicitly opted out of any kind of LTO return config::Lto::No; } - config::LtoCli::Yes | - config::LtoCli::Fat | - config::LtoCli::NoParam => { + config::LtoCli::Yes | config::LtoCli::Fat | config::LtoCli::NoParam => { // All of these mean fat LTO return config::Lto::Fat; } @@ -623,7 +621,8 @@ impl Session { .unwrap_or(self.target.target.options.panic_strategy) } pub fn fewer_names(&self) -> bool { - let more_names = self.opts + let more_names = self + .opts .output_types .contains_key(&OutputType::LlvmAssembly) || self.opts.output_types.contains_key(&OutputType::Bitcode); @@ -703,7 +702,9 @@ impl Session { self.opts.target_triple.triple(), &self.opts.search_paths, // target_tlib_path==None means it's the same as host_tlib_path. - self.target_tlib_path.as_ref().unwrap_or(&self.host_tlib_path), + self.target_tlib_path + .as_ref() + .unwrap_or(&self.host_tlib_path), kind, ) } @@ -720,7 +721,11 @@ impl Session { pub fn set_incr_session_load_dep_graph(&self, load: bool) { let mut incr_comp_session = self.incr_comp_session.borrow_mut(); - if let IncrCompSession::Active { ref mut load_dep_graph, .. } = *incr_comp_session { + if let IncrCompSession::Active { + ref mut load_dep_graph, + .. + } = *incr_comp_session + { *load_dep_graph = load; } } @@ -856,12 +861,24 @@ impl Session { "Total time spent decoding DefPath tables: {}", duration_to_secs_str(*self.perf_stats.decode_def_path_tables_time.lock()) ); - println!("Total queries canonicalized: {}", - self.perf_stats.queries_canonicalized.load(Ordering::Relaxed)); - println!("normalize_ty_after_erasing_regions: {}", - self.perf_stats.normalize_ty_after_erasing_regions.load(Ordering::Relaxed)); - println!("normalize_projection_ty: {}", - self.perf_stats.normalize_projection_ty.load(Ordering::Relaxed)); + println!( + "Total queries canonicalized: {}", + self.perf_stats + .queries_canonicalized + .load(Ordering::Relaxed) + ); + println!( + "normalize_ty_after_erasing_regions: {}", + self.perf_stats + .normalize_ty_after_erasing_regions + .load(Ordering::Relaxed) + ); + println!( + "normalize_projection_ty: {}", + self.perf_stats + .normalize_projection_ty + .load(Ordering::Relaxed) + ); } /// We want to know if we're allowed to do an optimization for crate foo from -z fuel=foo=n. @@ -990,7 +1007,8 @@ impl Session { let dbg_opts = &self.opts.debugging_opts; - let relro_level = dbg_opts.relro_level + let relro_level = dbg_opts + .relro_level .unwrap_or(self.target.target.options.relro_level); // Only enable this optimization by default if full relro is also enabled. 
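
The fuel-related methods referenced in the comments above implement a simple idea: every candidate optimization asks for permission, a per-crate counter set by `-Z fuel=crate=n` is decremented, and once it reaches zero further optimizations are refused, which makes it possible to bisect a problem down to a single optimization. The sketch below is a simplified model of that bookkeeping, not the `Session`'s actual fields or synchronization.

use std::cell::Cell;

// A minimal "optimization fuel" counter (illustrative; the compiler tracks
// this on the Session and also records the total fuel consumed).
struct Fuel {
    remaining: Cell<u64>,
    exhausted_reported: Cell<bool>,
}

impl Fuel {
    fn new(limit: u64) -> Self {
        Fuel { remaining: Cell::new(limit), exhausted_reported: Cell::new(false) }
    }

    // Returns true if the optimization described by `msg` may be applied.
    fn consider_optimizing(&self, msg: &str) -> bool {
        let left = self.remaining.get();
        if left == 0 {
            // Report only the first refusal, like an "out of fuel" note.
            if !self.exhausted_reported.get() {
                println!("optimization fuel exhausted before: {}", msg);
                self.exhausted_reported.set(true);
            }
            return false;
        }
        self.remaining.set(left - 1);
        true
    }
}

fn main() {
    let fuel = Fuel::new(2); // e.g. the `n` in `-Z fuel=my_crate=2`
    for i in 0..4 {
        let allowed = fuel.consider_optimizing(&format!("candidate #{}", i));
        println!("candidate #{}: allowed = {}", i, allowed);
    }
}
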
@@ -1055,33 +1073,30 @@ pub fn build_session_with_source_map( Some(source_map.clone()), false, sopts.debugging_opts.teach, - ).ui_testing(sopts.debugging_opts.ui_testing), + ) + .ui_testing(sopts.debugging_opts.ui_testing), ), (config::ErrorOutputType::HumanReadable(_), Some(dst)) => Box::new( EmitterWriter::new(dst, Some(source_map.clone()), false, false) .ui_testing(sopts.debugging_opts.ui_testing), ), (config::ErrorOutputType::Json(pretty), None) => Box::new( - JsonEmitter::stderr( - Some(registry), - source_map.clone(), - pretty, - ).ui_testing(sopts.debugging_opts.ui_testing), + JsonEmitter::stderr(Some(registry), source_map.clone(), pretty) + .ui_testing(sopts.debugging_opts.ui_testing), ), (config::ErrorOutputType::Json(pretty), Some(dst)) => Box::new( - JsonEmitter::new( - dst, - Some(registry), - source_map.clone(), - pretty, - ).ui_testing(sopts.debugging_opts.ui_testing), + JsonEmitter::new(dst, Some(registry), source_map.clone(), pretty) + .ui_testing(sopts.debugging_opts.ui_testing), ), (config::ErrorOutputType::Short(color_config), None) => Box::new( EmitterWriter::stderr(color_config, Some(source_map.clone()), true, false), ), - (config::ErrorOutputType::Short(_), Some(dst)) => { - Box::new(EmitterWriter::new(dst, Some(source_map.clone()), true, false)) - } + (config::ErrorOutputType::Short(_), Some(dst)) => Box::new(EmitterWriter::new( + dst, + Some(source_map.clone()), + true, + false, + )), }; let diagnostic_handler = errors::Handler::with_emitter_and_flags( @@ -1096,7 +1111,12 @@ pub fn build_session_with_source_map( }, ); - build_session_(sopts, local_crate_source_file, diagnostic_handler, source_map) + build_session_( + sopts, + local_crate_source_file, + diagnostic_handler, + source_map, + ) } pub fn build_session_( @@ -1106,11 +1126,11 @@ pub fn build_session_( source_map: Lrc, ) -> Session { let host_triple = TargetTriple::from_triple(config::host_triple()); - let host = Target::search(&host_triple).unwrap_or_else(|e| + let host = Target::search(&host_triple).unwrap_or_else(|e| { span_diagnostic .fatal(&format!("Error loading host specification: {}", e)) .raise() - ); + }); let target_cfg = config::build_target_config(&sopts, &span_diagnostic); let p_s = parse::ParseSess::with_span_handler(span_diagnostic, source_map); @@ -1139,11 +1159,11 @@ pub fn build_session_( let print_fuel_crate = sopts.debugging_opts.print_fuel.clone(); let print_fuel = LockCell::new(0); - let working_dir = env::current_dir().unwrap_or_else(|e| + let working_dir = env::current_dir().unwrap_or_else(|e| { p_s.span_diagnostic .fatal(&format!("Current directory is invalid: {}", e)) .raise() - ); + }); let working_dir = file_path_mapping.map_prefix(working_dir); let cgu_reuse_tracker = if sopts.debugging_opts.query_dep_graph { @@ -1152,8 +1172,8 @@ pub fn build_session_( CguReuseTracker::new_disabled() }; - let self_profiling_active = sopts.debugging_opts.self_profile || - sopts.debugging_opts.profile_json; + let self_profiling_active = + sopts.debugging_opts.self_profile || sopts.debugging_opts.profile_json; let sess = Session { target: target_cfg, @@ -1222,9 +1242,8 @@ pub fn build_session_( static mut GLOBAL_JOBSERVER: *mut Client = 0 as *mut _; static INIT: std::sync::Once = std::sync::ONCE_INIT; INIT.call_once(|| { - let client = Client::from_env().unwrap_or_else(|| { - Client::new(32).expect("failed to create jobserver") - }); + let client = Client::from_env() + .unwrap_or_else(|| Client::new(32).expect("failed to create jobserver")); GLOBAL_JOBSERVER = Box::into_raw(Box::new(client)); }); 
(*GLOBAL_JOBSERVER).clone() @@ -1242,15 +1261,12 @@ pub fn build_session_( // If it is useful to have a Session available already for validating a // commandline argument, you can do so here. fn validate_commandline_args_with_session_available(sess: &Session) { - if sess.opts.incremental.is_some() { match sess.lto() { - Lto::Thin | - Lto::Fat => { + Lto::Thin | Lto::Fat => { sess.err("can't perform LTO when compiling incrementally"); } - Lto::ThinLocal | - Lto::No => { + Lto::ThinLocal | Lto::No => { // This is fine } } @@ -1263,18 +1279,23 @@ fn validate_commandline_args_with_session_available(sess: &Session) { // bitcode during ThinLTO. Therefore we disallow dynamic linking on MSVC // when compiling for LLD ThinLTO. This way we can validly just not generate // the `dllimport` attributes and `__imp_` symbols in that case. - if sess.opts.debugging_opts.cross_lang_lto.enabled() && - sess.opts.cg.prefer_dynamic && - sess.target.target.options.is_like_msvc { - sess.err("Linker plugin based LTO is not supported together with \ - `-C prefer-dynamic` when targeting MSVC"); + if sess.opts.debugging_opts.cross_lang_lto.enabled() + && sess.opts.cg.prefer_dynamic + && sess.target.target.options.is_like_msvc + { + sess.err( + "Linker plugin based LTO is not supported together with \ + `-C prefer-dynamic` when targeting MSVC", + ); } } /// Hash value constructed out of all the `-C metadata` arguments passed to the /// compiler. Together with the crate-name forms a unique global identifier for /// the crate. -#[derive(Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Clone, Copy, RustcEncodable, RustcDecodable)] +#[derive( + Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Clone, Copy, RustcEncodable, RustcDecodable, +)] pub struct CrateDisambiguator(Fingerprint); impl CrateDisambiguator { diff --git a/src/librustc/session/search_paths.rs b/src/librustc/session/search_paths.rs index 85d64b1571266..29b8f66a0b9b8 100644 --- a/src/librustc/session/search_paths.rs +++ b/src/librustc/session/search_paths.rs @@ -1,6 +1,6 @@ -use std::path::{Path, PathBuf}; -use session::{early_error, config}; use session::filesearch::make_target_lib_path; +use session::{config, early_error}; +use std::path::{Path, PathBuf}; #[derive(Clone, Debug)] pub struct SearchPath { @@ -58,16 +58,12 @@ impl SearchPath { fn new(kind: PathKind, dir: PathBuf) -> Self { // Get the files within the directory. let files = match std::fs::read_dir(&dir) { - Ok(files) => { - files.filter_map(|p| { - p.ok().map(|s| s.path()) - }) - .collect::>() - } + Ok(files) => files + .filter_map(|p| p.ok().map(|s| s.path())) + .collect::>(), Err(..) 
=> vec![], }; SearchPath { kind, dir, files } } } - diff --git a/src/librustc/traits/auto_trait.rs b/src/librustc/traits/auto_trait.rs index 92004ece26d00..6e9f05c8dae01 100644 --- a/src/librustc/traits/auto_trait.rs +++ b/src/librustc/traits/auto_trait.rs @@ -111,7 +111,7 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { ); true } - _ => false + _ => false, } }); @@ -170,21 +170,23 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { None => return AutoTraitResult::NegativeImpl, }; - let (full_env, full_user_env) = self.evaluate_predicates( - &mut infcx, - did, - trait_did, - ty, - new_env.clone(), - user_env, - &mut fresh_preds, - true, - ).unwrap_or_else(|| { - panic!( - "Failed to fully process: {:?} {:?} {:?}", - ty, trait_did, orig_params + let (full_env, full_user_env) = self + .evaluate_predicates( + &mut infcx, + did, + trait_did, + ty, + new_env.clone(), + user_env, + &mut fresh_preds, + true, ) - }); + .unwrap_or_else(|| { + panic!( + "Failed to fully process: {:?} {:?} {:?}", + ty, trait_did, orig_params + ) + }); debug!( "find_auto_trait_generics(did={:?}, trait_did={:?}, generics={:?}): fulfilling \ @@ -326,9 +328,11 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { // Call infcx.resolve_type_vars_if_possible to see if we can // get rid of any inference variables. - let obligation = infcx.resolve_type_vars_if_possible( - &Obligation::new(dummy_cause.clone(), new_env, pred) - ); + let obligation = infcx.resolve_type_vars_if_possible(&Obligation::new( + dummy_cause.clone(), + new_env, + pred, + )); let result = select.select(&obligation); match &result { @@ -340,11 +344,14 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { // Blame tidy for the weird bracket placement if infcx.tcx.impl_polarity(*impl_def_id) == hir::ImplPolarity::Negative { - debug!("evaluate_nested_obligations: Found explicit negative impl\ - {:?}, bailing out", impl_def_id); + debug!( + "evaluate_nested_obligations: Found explicit negative impl\ + {:?}, bailing out", + impl_def_id + ); return None; } - }, + } _ => {} } @@ -388,17 +395,14 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { computed_preds.extend(user_computed_preds.iter().cloned()); let normalized_preds = elaborate_predicates(tcx, computed_preds.clone().into_iter().collect()); - new_env = ty::ParamEnv::new( - tcx.mk_predicates(normalized_preds), - param_env.reveal, - None - ); + new_env = + ty::ParamEnv::new(tcx.mk_predicates(normalized_preds), param_env.reveal, None); } let final_user_env = ty::ParamEnv::new( tcx.mk_predicates(user_computed_preds.into_iter()), user_env.reveal, - None + None, ); debug!( "evaluate_nested_obligations(ty_did={:?}, trait_did={:?}): succeeded with '{:?}' \ @@ -462,8 +466,8 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { ty::RegionKind::ReLateBound(_, _), ) => {} - (ty::RegionKind::ReLateBound(_, _), _) | - (_, ty::RegionKind::ReVar(_)) => { + (ty::RegionKind::ReLateBound(_, _), _) + | (_, ty::RegionKind::ReVar(_)) => { // One of these is true: // The new predicate has a HRTB in a spot where the old // predicate does not (if they both had a HRTB, the previous @@ -489,8 +493,8 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { // user_computed_preds return false; } - (_, ty::RegionKind::ReLateBound(_, _)) | - (ty::RegionKind::ReVar(_), _) => { + (_, ty::RegionKind::ReLateBound(_, _)) + | (ty::RegionKind::ReVar(_), _) => { // This is the opposite situation as the previous arm. 
// One of these is true: // @@ -506,7 +510,7 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { // predicate in user_computed_preds, and skip adding // new_pred to user_computed_params. should_add_new = false - }, + } _ => {} } } @@ -529,14 +533,17 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { } } - pub fn get_lifetime(&self, region: Region<'_>, - names_map: &FxHashMap) -> String { + pub fn get_lifetime( + &self, + region: Region<'_>, + names_map: &FxHashMap, + ) -> String { self.region_name(region) - .map(|name| - names_map.get(&name).unwrap_or_else(|| + .map(|name| { + names_map.get(&name).unwrap_or_else(|| { panic!("Missing lifetime with name {:?} for {:?}", name, region) - ) - ) + }) + }) .cloned() .unwrap_or_else(|| "'static".to_owned()) } @@ -632,8 +639,7 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { } fn is_param_no_infer(&self, substs: &Substs<'_>) -> bool { - return self.is_of_param(substs.type_at(0)) && - !substs.types().any(|t| t.has_infer_types()); + return self.is_of_param(substs.type_at(0)) && !substs.types().any(|t| t.has_infer_types()); } pub fn is_of_param(&self, ty: Ty<'_>) -> bool { @@ -646,10 +652,8 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { fn is_self_referential_projection(&self, p: ty::PolyProjectionPredicate<'_>) -> bool { match p.ty().skip_binder().sty { - ty::Projection(proj) if proj == p.skip_binder().projection_ty => { - true - }, - _ => false + ty::Projection(proj) if proj == p.skip_binder().projection_ty => true, + _ => false, } } @@ -671,9 +675,7 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { ) -> bool { let dummy_cause = ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID); - for (obligation, mut predicate) in nested - .map(|o| (o.clone(), o.predicate.clone())) - { + for (obligation, mut predicate) in nested.map(|o| (o.clone(), o.predicate.clone())) { let is_new_pred = fresh_preds.insert(self.clean_pred(select.infcx(), predicate.clone())); @@ -696,15 +698,17 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { &ty::Predicate::Trait(ref p) => { if self.is_param_no_infer(p.skip_binder().trait_ref.substs) && !only_projections - && is_new_pred { - + && is_new_pred + { self.add_user_pred(computed_preds, predicate); } predicates.push_back(p.clone()); } &ty::Predicate::Projection(p) => { - debug!("evaluate_nested_obligations: examining projection predicate {:?}", - predicate); + debug!( + "evaluate_nested_obligations: examining projection predicate {:?}", + predicate + ); // As described above, we only want to display // bounds which include a generic parameter but don't include @@ -713,35 +717,42 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { // to avoid rendering duplicate bounds to the user. if self.is_param_no_infer(p.skip_binder().projection_ty.substs) && !p.ty().skip_binder().is_ty_infer() - && is_new_pred { - debug!("evaluate_nested_obligations: adding projection predicate\ - to computed_preds: {:?}", predicate); - - // Under unusual circumstances, we can end up with a self-refeential - // projection predicate. For example: - // ::Value == ::Value - // Not only is displaying this to the user pointless, - // having it in the ParamEnv will cause an issue if we try to call - // poly_project_and_unify_type on the predicate, since this kind of - // predicate will normally never end up in a ParamEnv. 
- // - // For these reasons, we ignore these weird predicates, - // ensuring that we're able to properly synthesize an auto trait impl - if self.is_self_referential_projection(p) { - debug!("evaluate_nested_obligations: encountered a projection - predicate equating a type with itself! Skipping"); - - } else { - self.add_user_pred(computed_preds, predicate); - } + && is_new_pred + { + debug!( + "evaluate_nested_obligations: adding projection predicate\ + to computed_preds: {:?}", + predicate + ); + + // Under unusual circumstances, we can end up with a self-refeential + // projection predicate. For example: + // ::Value == ::Value + // Not only is displaying this to the user pointless, + // having it in the ParamEnv will cause an issue if we try to call + // poly_project_and_unify_type on the predicate, since this kind of + // predicate will normally never end up in a ParamEnv. + // + // For these reasons, we ignore these weird predicates, + // ensuring that we're able to properly synthesize an auto trait impl + if self.is_self_referential_projection(p) { + debug!( + "evaluate_nested_obligations: encountered a projection + predicate equating a type with itself! Skipping" + ); + } else { + self.add_user_pred(computed_preds, predicate); + } } // We can only call poly_project_and_unify_type when our predicate's // Ty contains an inference variable - otherwise, there won't be anything to // unify if p.ty().skip_binder().has_infer_types() { - debug!("Projecting and unifying projection predicate {:?}", - predicate); + debug!( + "Projecting and unifying projection predicate {:?}", + predicate + ); match poly_project_and_unify_type(select, &obligation.with(p.clone())) { Err(e) => { debug!( @@ -771,7 +782,9 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { } } &ty::Predicate::RegionOutlives(ref binder) => { - let () = select.infcx().region_outlives_predicate(&dummy_cause, binder); + let () = select + .infcx() + .region_outlives_predicate(&dummy_cause, binder); } &ty::Predicate::TypeOutlives(ref binder) => { match ( @@ -825,6 +838,7 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionReplacer<'a, 'gcx, 'tcx> { (match r { &ty::ReVar(vid) => self.vid_to_region.get(&vid).cloned(), _ => None, - }).unwrap_or_else(|| r.super_fold_with(self)) + }) + .unwrap_or_else(|| r.super_fold_with(self)) } } diff --git a/src/librustc/traits/chalk_fulfill.rs b/src/librustc/traits/chalk_fulfill.rs index df4e08e0eb5f3..6406c48c223fb 100644 --- a/src/librustc/traits/chalk_fulfill.rs +++ b/src/librustc/traits/chalk_fulfill.rs @@ -1,18 +1,12 @@ +use infer::canonical::{Canonical, OriginalQueryValues}; +use infer::InferCtxt; +use rustc_data_structures::fx::FxHashSet; +use traits::query::NoSolution; use traits::{ - Environment, - InEnvironment, - TraitEngine, - ObligationCause, - PredicateObligation, - FulfillmentError, - FulfillmentErrorCode, - SelectionError, + Environment, FulfillmentError, FulfillmentErrorCode, InEnvironment, ObligationCause, + PredicateObligation, SelectionError, TraitEngine, }; -use traits::query::NoSolution; -use infer::InferCtxt; -use infer::canonical::{Canonical, OriginalQueryValues}; use ty::{self, Ty}; -use rustc_data_structures::fx::FxHashSet; pub type CanonicalGoal<'tcx> = Canonical<'tcx, InEnvironment<'tcx, ty::Predicate<'tcx>>>; @@ -30,7 +24,7 @@ impl FulfillmentContext<'tcx> { fn in_environment( infcx: &InferCtxt<'_, 'gcx, 'tcx>, - obligation: PredicateObligation<'tcx> + obligation: PredicateObligation<'tcx>, ) -> InEnvironment<'tcx, PredicateObligation<'tcx>> { 
assert!(!infcx.is_in_snapshot()); let obligation = infcx.resolve_type_vars_if_possible(&obligation); @@ -77,7 +71,9 @@ impl TraitEngine<'tcx> for FulfillmentContext<'tcx> { if self.obligations.is_empty() { Ok(()) } else { - let errors = self.obligations.iter() + let errors = self + .obligations + .iter() .map(|obligation| FulfillmentError { obligation: obligation.goal.clone(), code: FulfillmentErrorCode::CodeAmbiguity, @@ -102,10 +98,13 @@ impl TraitEngine<'tcx> for FulfillmentContext<'tcx> { // to unambiguously prove at least one obligation. for obligation in self.obligations.drain() { let mut orig_values = OriginalQueryValues::default(); - let canonical_goal = infcx.canonicalize_query(&InEnvironment { - environment: obligation.environment, - goal: obligation.goal.predicate, - }, &mut orig_values); + let canonical_goal = infcx.canonicalize_query( + &InEnvironment { + environment: obligation.environment, + goal: obligation.goal.predicate, + }, + &mut orig_values, + ); match infcx.tcx.global_tcx().evaluate_goal(canonical_goal) { Ok(response) => { @@ -116,18 +115,19 @@ impl TraitEngine<'tcx> for FulfillmentContext<'tcx> { &obligation.goal.cause, obligation.goal.param_env, &orig_values, - &response + &response, ) { Ok(infer_ok) => next_round.extend( - infer_ok.obligations + infer_ok + .obligations .into_iter() - .map(|obligation| in_environment(infcx, obligation)) + .map(|obligation| in_environment(infcx, obligation)), ), Err(_err) => errors.push(FulfillmentError { obligation: obligation.goal, code: FulfillmentErrorCode::CodeSelectionError( - SelectionError::Unimplemented + SelectionError::Unimplemented, ), }), } @@ -140,9 +140,9 @@ impl TraitEngine<'tcx> for FulfillmentContext<'tcx> { Err(NoSolution) => errors.push(FulfillmentError { obligation: obligation.goal, code: FulfillmentErrorCode::CodeSelectionError( - SelectionError::Unimplemented + SelectionError::Unimplemented, ), - }) + }), } } next_round = std::mem::replace(&mut self.obligations, next_round); @@ -160,6 +160,9 @@ impl TraitEngine<'tcx> for FulfillmentContext<'tcx> { } fn pending_obligations(&self) -> Vec> { - self.obligations.iter().map(|obligation| obligation.goal.clone()).collect() + self.obligations + .iter() + .map(|obligation| obligation.goal.clone()) + .collect() } } diff --git a/src/librustc/traits/codegen/mod.rs b/src/librustc/traits/codegen/mod.rs index 94d56c2cbfc88..9244a867709b5 100644 --- a/src/librustc/traits/codegen/mod.rs +++ b/src/librustc/traits/codegen/mod.rs @@ -4,15 +4,16 @@ // general routines. use dep_graph::{DepKind, DepTrackingMapConfig}; -use std::marker::PhantomData; -use syntax_pos::DUMMY_SP; use infer::InferCtxt; +use std::marker::PhantomData; use syntax_pos::Span; -use traits::{FulfillmentContext, Obligation, ObligationCause, SelectionContext, - TraitEngine, Vtable}; -use ty::{self, Ty, TyCtxt}; -use ty::subst::{Subst, Substs}; +use syntax_pos::DUMMY_SP; +use traits::{ + FulfillmentContext, Obligation, ObligationCause, SelectionContext, TraitEngine, Vtable, +}; use ty::fold::TypeFoldable; +use ty::subst::{Subst, Substs}; +use ty::{self, Ty, TyCtxt}; /// Attempts to resolve an obligation to a vtable.. The result is /// a shallow vtable resolution -- meaning that we do not @@ -20,16 +21,18 @@ use ty::fold::TypeFoldable; /// that type check should guarantee to us that all nested /// obligations *could be* resolved if we wanted to. /// Assumes that this is run after the entire crate has been successfully type-checked. 
-pub fn codegen_fulfill_obligation<'a, 'tcx>(ty: TyCtxt<'a, 'tcx, 'tcx>, - (param_env, trait_ref): - (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>)) - -> Vtable<'tcx, ()> -{ +pub fn codegen_fulfill_obligation<'a, 'tcx>( + ty: TyCtxt<'a, 'tcx, 'tcx>, + (param_env, trait_ref): (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>), +) -> Vtable<'tcx, ()> { // Remove any references to regions; this helps improve caching. let trait_ref = ty.erase_regions(&trait_ref); - debug!("codegen_fulfill_obligation(trait_ref={:?}, def_id={:?})", - (param_env, trait_ref), trait_ref.def_id()); + debug!( + "codegen_fulfill_obligation(trait_ref={:?}, def_id={:?})", + (param_env, trait_ref), + trait_ref.def_id() + ); // Do the initial selection for the obligation. This yields the // shallow result we are looking for -- that is, what specific impl. @@ -37,9 +40,11 @@ pub fn codegen_fulfill_obligation<'a, 'tcx>(ty: TyCtxt<'a, 'tcx, 'tcx>, let mut selcx = SelectionContext::new(&infcx); let obligation_cause = ObligationCause::dummy(); - let obligation = Obligation::new(obligation_cause, - param_env, - trait_ref.to_poly_trait_predicate()); + let obligation = Obligation::new( + obligation_cause, + param_env, + trait_ref.to_poly_trait_predicate(), + ); let selection = match selcx.select(&obligation) { Ok(Some(selection)) => selection, @@ -50,13 +55,17 @@ pub fn codegen_fulfill_obligation<'a, 'tcx>(ty: TyCtxt<'a, 'tcx, 'tcx>, // leading to an ambiguous result. So report this as an // overflow bug, since I believe this is the only case // where ambiguity can result. - bug!("Encountered ambiguity selecting `{:?}` during codegen, \ - presuming due to overflow", - trait_ref) - } - Err(e) => { - bug!("Encountered error `{:?}` selecting `{:?}` during codegen", e, trait_ref) + bug!( + "Encountered ambiguity selecting `{:?}` during codegen, \ + presuming due to overflow", + trait_ref + ) } + Err(e) => bug!( + "Encountered error `{:?}` selecting `{:?}` during codegen", + e, + trait_ref + ), }; debug!("fulfill_obligation: selection={:?}", selection); @@ -66,7 +75,10 @@ pub fn codegen_fulfill_obligation<'a, 'tcx>(ty: TyCtxt<'a, 'tcx, 'tcx>, // inference of the impl's type parameters. 
let mut fulfill_cx = FulfillmentContext::new(); let vtable = selection.map(|predicate| { - debug!("fulfill_obligation: register_predicate_obligation {:?}", predicate); + debug!( + "fulfill_obligation: register_predicate_obligation {:?}", + predicate + ); fulfill_cx.register_predicate_obligation(&infcx, predicate); }); let vtable = infcx.drain_fulfillment_cx_or_panic(DUMMY_SP, &mut fulfill_cx, &vtable); @@ -84,7 +96,7 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { self, param_substs: &Substs<'tcx>, param_env: ty::ParamEnv<'tcx>, - value: &T + value: &T, ) -> T where T: TypeFoldable<'tcx>, @@ -94,9 +106,7 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { param_substs={:?}, \ value={:?}, \ param_env={:?})", - param_substs, - value, - param_env, + param_substs, value, param_env, ); let substituted = value.subst(self, param_substs); self.normalize_erasing_regions(param_env, substituted) @@ -105,7 +115,7 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { // Implement DepTrackingMapConfig for `trait_cache` pub struct TraitSelectionCache<'tcx> { - data: PhantomData<&'tcx ()> + data: PhantomData<&'tcx ()>, } impl<'tcx> DepTrackingMapConfig for TraitSelectionCache<'tcx> { @@ -119,7 +129,7 @@ impl<'tcx> DepTrackingMapConfig for TraitSelectionCache<'tcx> { // # Global Cache pub struct ProjectionCache<'gcx> { - data: PhantomData<&'gcx ()> + data: PhantomData<&'gcx ()>, } impl<'gcx> DepTrackingMapConfig for ProjectionCache<'gcx> { @@ -140,12 +150,14 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { /// type inference variables that appear in `result` to be /// unified, and hence we need to process those obligations to get /// the complete picture of the type. - fn drain_fulfillment_cx_or_panic(&self, - span: Span, - fulfill_cx: &mut FulfillmentContext<'tcx>, - result: &T) - -> T::Lifted - where T: TypeFoldable<'tcx> + ty::Lift<'gcx> + fn drain_fulfillment_cx_or_panic( + &self, + span: Span, + fulfill_cx: &mut FulfillmentContext<'tcx>, + result: &T, + ) -> T::Lifted + where + T: TypeFoldable<'tcx> + ty::Lift<'gcx>, { debug!("drain_fulfillment_cx_or_panic()"); @@ -153,15 +165,18 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // contains unbound type parameters. It could be a slight // optimization to stop iterating early. if let Err(errors) = fulfill_cx.select_all_or_error(self) { - span_bug!(span, "Encountered errors `{:?}` resolving bounds after type-checking", - errors); + span_bug!( + span, + "Encountered errors `{:?}` resolving bounds after type-checking", + errors + ); } let result = self.resolve_type_vars_if_possible(result); let result = self.tcx.erase_regions(&result); - self.tcx.lift_to_global(&result).unwrap_or_else(|| - span_bug!(span, "Uninferred types/regions in `{:?}`", result) - ) + self.tcx + .lift_to_global(&result) + .unwrap_or_else(|| span_bug!(span, "Uninferred types/regions in `{:?}`", result)) } } diff --git a/src/librustc/traits/coherence.rs b/src/librustc/traits/coherence.rs index b3d732ebcd7dc..8f4268e9dd601 100644 --- a/src/librustc/traits/coherence.rs +++ b/src/librustc/traits/coherence.rs @@ -4,31 +4,31 @@ //! [trait-resolution]: https://rust-lang.github.io/rustc-guide/traits/resolution.html //! 
[trait-specialization]: https://rust-lang.github.io/rustc-guide/traits/specialization.html -use infer::CombinedSnapshot; use hir::def_id::{DefId, LOCAL_CRATE}; +use infer::CombinedSnapshot; use syntax_pos::DUMMY_SP; -use traits::{self, Normalized, SelectionContext, Obligation, ObligationCause}; -use traits::IntercrateMode; use traits::select::IntercrateAmbiguityCause; -use ty::{self, Ty, TyCtxt}; -use ty::relate::TraitObjectMode; +use traits::IntercrateMode; +use traits::{self, Normalized, Obligation, ObligationCause, SelectionContext}; use ty::fold::TypeFoldable; +use ty::relate::TraitObjectMode; use ty::subst::Subst; +use ty::{self, Ty, TyCtxt}; -use infer::{InferOk}; +use infer::InferOk; /// Whether we do the orphan check relative to this crate or /// to some remote crate. #[derive(Copy, Clone, Debug)] enum InCrate { Local, - Remote + Remote, } #[derive(Debug, Copy, Clone)] pub enum Conflict { Upstream, - Downstream { used_to_be_broken: bool } + Downstream { used_to_be_broken: bool }, } pub struct OverlapResult<'tcx> { @@ -63,20 +63,22 @@ where F1: FnOnce(OverlapResult<'_>) -> R, F2: FnOnce() -> R, { - debug!("overlapping_impls(\ + debug!( + "overlapping_impls(\ impl1_def_id={:?}, \ impl2_def_id={:?}, intercrate_mode={:?}, trait_object_mode={:?})", - impl1_def_id, - impl2_def_id, - intercrate_mode, - trait_object_mode); + impl1_def_id, impl2_def_id, intercrate_mode, trait_object_mode + ); - let overlaps = tcx.infer_ctxt().with_trait_object_mode(trait_object_mode).enter(|infcx| { - let selcx = &mut SelectionContext::intercrate(&infcx, intercrate_mode); - overlap(selcx, impl1_def_id, impl2_def_id).is_some() - }); + let overlaps = tcx + .infer_ctxt() + .with_trait_object_mode(trait_object_mode) + .enter(|infcx| { + let selcx = &mut SelectionContext::intercrate(&infcx, intercrate_mode); + overlap(selcx, impl1_def_id, impl2_def_id).is_some() + }); if !overlaps { return no_overlap(); @@ -85,18 +87,20 @@ where // In the case where we detect an error, run the check again, but // this time tracking intercrate ambuiguity causes for better // diagnostics. (These take time and can lead to false errors.) 
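`overlapping_impls` is the entry point that decides whether two impls can apply to the same type. A minimal sketch, outside the compiler, of the kind of overlap it rejects (trait and type names are invented here):

trait Describe {
    fn describe(&self) -> String;
}

struct Local;

impl Describe for Local {
    fn describe(&self) -> String {
        "the specific impl".to_string()
    }
}

// A blanket impl such as the following would overlap with the impl above for
// `Local`, and coherence reports it as conflicting (E0119):
//
// impl<T> Describe for T {
//     fn describe(&self) -> String {
//         "the blanket impl".to_string()
//     }
// }

fn main() {
    println!("{}", Local.describe());
}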
- tcx.infer_ctxt().with_trait_object_mode(trait_object_mode).enter(|infcx| { - let selcx = &mut SelectionContext::intercrate(&infcx, intercrate_mode); - selcx.enable_tracking_intercrate_ambiguity_causes(); - on_overlap(overlap(selcx, impl1_def_id, impl2_def_id).unwrap()) - }) + tcx.infer_ctxt() + .with_trait_object_mode(trait_object_mode) + .enter(|infcx| { + let selcx = &mut SelectionContext::intercrate(&infcx, intercrate_mode); + selcx.enable_tracking_intercrate_ambiguity_causes(); + on_overlap(overlap(selcx, impl1_def_id, impl2_def_id).unwrap()) + }) } -fn with_fresh_ty_vars<'cx, 'gcx, 'tcx>(selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - impl_def_id: DefId) - -> ty::ImplHeader<'tcx> -{ +fn with_fresh_ty_vars<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + impl_def_id: DefId, +) -> ty::ImplHeader<'tcx> { let tcx = selcx.tcx(); let impl_substs = selcx.infcx().fresh_substs_for_item(DUMMY_SP, impl_def_id); @@ -104,13 +108,20 @@ fn with_fresh_ty_vars<'cx, 'gcx, 'tcx>(selcx: &mut SelectionContext<'cx, 'gcx, ' impl_def_id, self_ty: tcx.type_of(impl_def_id).subst(tcx, impl_substs), trait_ref: tcx.impl_trait_ref(impl_def_id).subst(tcx, impl_substs), - predicates: tcx.predicates_of(impl_def_id).instantiate(tcx, impl_substs).predicates, + predicates: tcx + .predicates_of(impl_def_id) + .instantiate(tcx, impl_substs) + .predicates, }; - let Normalized { value: mut header, obligations } = - traits::normalize(selcx, param_env, ObligationCause::dummy(), &header); + let Normalized { + value: mut header, + obligations, + } = traits::normalize(selcx, param_env, ObligationCause::dummy(), &header); - header.predicates.extend(obligations.into_iter().map(|o| o.predicate)); + header + .predicates + .extend(obligations.into_iter().map(|o| o.predicate)); header } @@ -123,7 +134,9 @@ fn overlap<'cx, 'gcx, 'tcx>( ) -> Option> { debug!("overlap(a_def_id={:?}, b_def_id={:?})", a_def_id, b_def_id); - selcx.infcx().probe(|snapshot| overlap_within_probe(selcx, a_def_id, b_def_id, snapshot)) + selcx + .infcx() + .probe(|snapshot| overlap_within_probe(selcx, a_def_id, b_def_id, snapshot)) } fn overlap_within_probe( @@ -145,53 +158,67 @@ fn overlap_within_probe( debug!("overlap: b_impl_header={:?}", b_impl_header); // Do `a` and `b` unify? If not, no overlap. - let obligations = match selcx.infcx().at(&ObligationCause::dummy(), param_env) - .eq_impl_headers(&a_impl_header, &b_impl_header) + let obligations = match selcx + .infcx() + .at(&ObligationCause::dummy(), param_env) + .eq_impl_headers(&a_impl_header, &b_impl_header) { - Ok(InferOk { obligations, value: () }) => obligations, - Err(_) => return None + Ok(InferOk { + obligations, + value: (), + }) => obligations, + Err(_) => return None, }; debug!("overlap: unification check succeeded"); // Are any of the obligations unsatisfiable? If so, no overlap. 
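The check that follows is where impl where-clauses keep otherwise-overlapping impls apart: if the intersection of the two impl headers carries an obligation that cannot hold, there is no overlap. A small sketch under that assumption (names invented; it relies on `NotCopy` being a local type with no `Copy` impl):

trait Marker {
    fn tag(&self) -> &'static str;
}

struct NotCopy(String);

// Overlap between these two impls would require `NotCopy: Copy`, an obligation
// the check below can see is unsatisfiable, so both impls are accepted.
impl<T: Copy> Marker for T {
    fn tag(&self) -> &'static str {
        "copyable"
    }
}

impl Marker for NotCopy {
    fn tag(&self) -> &'static str {
        "not copyable"
    }
}

fn main() {
    assert_eq!(1u8.tag(), "copyable");
    assert_eq!(NotCopy(String::new()).tag(), "not copyable");
}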
let infcx = selcx.infcx(); - let opt_failing_obligation = - a_impl_header.predicates - .iter() - .chain(&b_impl_header.predicates) - .map(|p| infcx.resolve_type_vars_if_possible(p)) - .map(|p| Obligation { cause: ObligationCause::dummy(), - param_env, - recursion_depth: 0, - predicate: p }) - .chain(obligations) - .find(|o| !selcx.predicate_may_hold_fatal(o)); + let opt_failing_obligation = a_impl_header + .predicates + .iter() + .chain(&b_impl_header.predicates) + .map(|p| infcx.resolve_type_vars_if_possible(p)) + .map(|p| Obligation { + cause: ObligationCause::dummy(), + param_env, + recursion_depth: 0, + predicate: p, + }) + .chain(obligations) + .find(|o| !selcx.predicate_may_hold_fatal(o)); // FIXME: the call to `selcx.predicate_may_hold_fatal` above should be ported // to the canonical trait query form, `infcx.predicate_may_hold`, once // the new system supports intercrate mode (which coherence needs). if let Some(failing_obligation) = opt_failing_obligation { debug!("overlap: obligation unsatisfiable {:?}", failing_obligation); - return None + return None; } let impl_header = selcx.infcx().resolve_type_vars_if_possible(&a_impl_header); let intercrate_ambiguity_causes = selcx.take_intercrate_ambiguity_causes(); - debug!("overlap: intercrate_ambiguity_causes={:#?}", intercrate_ambiguity_causes); + debug!( + "overlap: intercrate_ambiguity_causes={:#?}", + intercrate_ambiguity_causes + ); let involves_placeholder = match selcx.infcx().region_constraints_added_in_snapshot(snapshot) { Some(true) => true, _ => false, }; - Some(OverlapResult { impl_header, intercrate_ambiguity_causes, involves_placeholder }) + Some(OverlapResult { + impl_header, + intercrate_ambiguity_causes, + involves_placeholder, + }) } -pub fn trait_ref_is_knowable<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, - trait_ref: ty::TraitRef<'tcx>) - -> Option -{ +pub fn trait_ref_is_knowable<'a, 'gcx, 'tcx>( + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_ref: ty::TraitRef<'tcx>, +) -> Option { debug!("trait_ref_is_knowable(trait_ref={:?})", trait_ref); if orphan_check_trait_ref(tcx, trait_ref, InCrate::Remote).is_ok() { // A downstream or cousin crate is allowed to implement some @@ -200,8 +227,7 @@ pub fn trait_ref_is_knowable<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, // A trait can be implementable for a trait ref by both the current // crate and crates downstream of it. Older versions of rustc // were not aware of this, causing incoherence (issue #43355). - let used_to_be_broken = - orphan_check_trait_ref(tcx, trait_ref, InCrate::Local).is_ok(); + let used_to_be_broken = orphan_check_trait_ref(tcx, trait_ref, InCrate::Local).is_ok(); if used_to_be_broken { debug!("trait_ref_is_knowable({:?}) - USED TO BE BROKEN", trait_ref); } @@ -234,9 +260,10 @@ pub fn trait_ref_is_knowable<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, } } -pub fn trait_ref_is_local_or_fundamental<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, - trait_ref: ty::TraitRef<'tcx>) - -> bool { +pub fn trait_ref_is_local_or_fundamental<'a, 'gcx, 'tcx>( + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_ref: ty::TraitRef<'tcx>, +) -> bool { trait_ref.def_id.krate == LOCAL_CRATE || tcx.has_attr(trait_ref.def_id, "fundamental") } @@ -251,10 +278,10 @@ pub enum OrphanCheckErr<'tcx> { /// /// 1. All type parameters in `Self` must be "covered" by some local type constructor. /// 2. Some local type must appear in `Self`. 
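As a compiler-external sketch of the two conditions in the doc comment above (the `Wrapper` type is invented for the example):

// Both conditions hold for the impl below: the local constructor `Wrapper`
// covers the type parameter `T`, and a local type appears in `Self`.
struct Wrapper<T>(T);

impl<T> From<T> for Wrapper<T> {
    fn from(value: T) -> Self {
        Wrapper(value)
    }
}

// By contrast, `impl<T> From<Wrapper<T>> for Vec<T>` would be rejected by this
// check: `Self` is the foreign type `Vec<T>`, so the parameter `T` is not
// covered by any local type constructor (reported as E0210).

fn main() {
    let w: Wrapper<u32> = Wrapper::from(7u32);
    assert_eq!(w.0, 7);
}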
-pub fn orphan_check<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, - impl_def_id: DefId) - -> Result<(), OrphanCheckErr<'tcx>> -{ +pub fn orphan_check<'a, 'gcx, 'tcx>( + tcx: TyCtxt<'a, 'gcx, 'tcx>, + impl_def_id: DefId, +) -> Result<(), OrphanCheckErr<'tcx>> { debug!("orphan_check({:?})", impl_def_id); // We only except this routine to be invoked on implementations @@ -264,8 +291,7 @@ pub fn orphan_check<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, // If the *trait* is local to the crate, ok. if trait_ref.def_id.is_local() { - debug!("trait {:?} is local to current crate", - trait_ref.def_id); + debug!("trait {:?} is local to current crate", trait_ref.def_id); return Ok(()); } @@ -358,17 +384,21 @@ pub fn orphan_check<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, /// /// Note that this function is never called for types that have both type /// parameters and inference variables. -fn orphan_check_trait_ref<'tcx>(tcx: TyCtxt<'_, '_, '_>, - trait_ref: ty::TraitRef<'tcx>, - in_crate: InCrate) - -> Result<(), OrphanCheckErr<'tcx>> -{ - debug!("orphan_check_trait_ref(trait_ref={:?}, in_crate={:?})", - trait_ref, in_crate); +fn orphan_check_trait_ref<'tcx>( + tcx: TyCtxt<'_, '_, '_>, + trait_ref: ty::TraitRef<'tcx>, + in_crate: InCrate, +) -> Result<(), OrphanCheckErr<'tcx>> { + debug!( + "orphan_check_trait_ref(trait_ref={:?}, in_crate={:?})", + trait_ref, in_crate + ); if trait_ref.needs_infer() && trait_ref.needs_subst() { - bug!("can't orphan check a trait ref with both params and inference variables {:?}", - trait_ref); + bug!( + "can't orphan check a trait ref with both params and inference variables {:?}", + trait_ref + ); } // First, create an ordered iterator over all the type parameters to the trait, with the self @@ -383,7 +413,8 @@ fn orphan_check_trait_ref<'tcx>(tcx: TyCtxt<'_, '_, '_>, // uncovered type parameters. let uncovered_tys = uncovered_tys(tcx, input_ty, in_crate); for uncovered_ty in uncovered_tys { - if let Some(param) = uncovered_ty.walk() + if let Some(param) = uncovered_ty + .walk() .find(|t| is_possibly_remote_type(t, in_crate)) { debug!("orphan_check_trait_ref: uncovered type `{:?}`", param); @@ -397,7 +428,8 @@ fn orphan_check_trait_ref<'tcx>(tcx: TyCtxt<'_, '_, '_>, // Otherwise, enforce invariant that there are no type // parameters reachable. - if let Some(param) = input_ty.walk() + if let Some(param) = input_ty + .walk() .find(|t| is_possibly_remote_type(t, in_crate)) { debug!("orphan_check_trait_ref: uncovered type `{:?}`", param); @@ -410,14 +442,13 @@ fn orphan_check_trait_ref<'tcx>(tcx: TyCtxt<'_, '_, '_>, return Err(OrphanCheckErr::NoLocalInputType); } -fn uncovered_tys<'tcx>(tcx: TyCtxt<'_, '_, '_>, ty: Ty<'tcx>, in_crate: InCrate) - -> Vec> { +fn uncovered_tys<'tcx>(tcx: TyCtxt<'_, '_, '_>, ty: Ty<'tcx>, in_crate: InCrate) -> Vec> { if ty_is_local_constructor(ty, in_crate) { vec![] } else if fundamental_ty(ty) { ty.walk_shallow() - .flat_map(|t| uncovered_tys(tcx, t, in_crate)) - .collect() + .flat_map(|t| uncovered_tys(tcx, t, in_crate)) + .collect() } else { vec![ty] } @@ -431,15 +462,15 @@ fn is_possibly_remote_type(ty: Ty<'_>, _in_crate: InCrate) -> bool { } fn ty_is_local(tcx: TyCtxt<'_, '_, '_>, ty: Ty<'_>, in_crate: InCrate) -> bool { - ty_is_local_constructor(ty, in_crate) || - fundamental_ty(ty) && ty.walk_shallow().any(|t| ty_is_local(tcx, t, in_crate)) + ty_is_local_constructor(ty, in_crate) + || fundamental_ty(ty) && ty.walk_shallow().any(|t| ty_is_local(tcx, t, in_crate)) } fn fundamental_ty(ty: Ty<'_>) -> bool { match ty.sty { ty::Ref(..) 
=> true, ty::Adt(def, _) => def.is_fundamental(), - _ => false + _ => false, } } @@ -448,7 +479,7 @@ fn def_id_is_local(def_id: DefId, in_crate: InCrate) -> bool { // The type is local to *this* crate - it will not be // local in any other crate. InCrate::Remote => false, - InCrate::Local => def_id.is_local() + InCrate::Local => def_id.is_local(), } } @@ -456,24 +487,22 @@ fn ty_is_local_constructor(ty: Ty<'_>, in_crate: InCrate) -> bool { debug!("ty_is_local_constructor({:?})", ty); match ty.sty { - ty::Bool | - ty::Char | - ty::Int(..) | - ty::Uint(..) | - ty::Float(..) | - ty::Str | - ty::FnDef(..) | - ty::FnPtr(_) | - ty::Array(..) | - ty::Slice(..) | - ty::RawPtr(..) | - ty::Ref(..) | - ty::Never | - ty::Tuple(..) | - ty::Param(..) | - ty::Projection(..) => { - false - } + ty::Bool + | ty::Char + | ty::Int(..) + | ty::Uint(..) + | ty::Float(..) + | ty::Str + | ty::FnDef(..) + | ty::FnPtr(_) + | ty::Array(..) + | ty::Slice(..) + | ty::RawPtr(..) + | ty::Ref(..) + | ty::Never + | ty::Tuple(..) + | ty::Param(..) + | ty::Projection(..) => false, ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) => match in_crate { InCrate::Local => false, @@ -489,12 +518,10 @@ fn ty_is_local_constructor(ty: Ty<'_>, in_crate: InCrate) -> bool { ty::Error => true, - ty::UnnormalizedProjection(..) | - ty::Closure(..) | - ty::Generator(..) | - ty::GeneratorWitness(..) | - ty::Opaque(..) => { - bug!("ty_is_local invoked on unexpected type: {:?}", ty) - } + ty::UnnormalizedProjection(..) + | ty::Closure(..) + | ty::Generator(..) + | ty::GeneratorWitness(..) + | ty::Opaque(..) => bug!("ty_is_local invoked on unexpected type: {:?}", ty), } } diff --git a/src/librustc/traits/engine.rs b/src/librustc/traits/engine.rs index c759a9ddf2ce6..1cc966469f983 100644 --- a/src/librustc/traits/engine.rs +++ b/src/librustc/traits/engine.rs @@ -1,7 +1,7 @@ +use hir::def_id::DefId; use infer::InferCtxt; -use ty::{self, Ty, TyCtxt, ToPredicate}; use traits::Obligation; -use hir::def_id::DefId; +use ty::{self, ToPredicate, Ty, TyCtxt}; use super::{ChalkFulfillmentContext, FulfillmentContext, FulfillmentError}; use super::{ObligationCause, PredicateObligation}; @@ -30,12 +30,15 @@ pub trait TraitEngine<'tcx>: 'tcx { def_id, substs: infcx.tcx.mk_substs_trait(ty, &[]), }; - self.register_predicate_obligation(infcx, Obligation { - cause, - recursion_depth: 0, - param_env, - predicate: trait_ref.to_predicate() - }); + self.register_predicate_obligation( + infcx, + Obligation { + cause, + recursion_depth: 0, + param_env, + predicate: trait_ref.to_predicate(), + }, + ); } fn register_predicate_obligation( diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 21352ac1053a8..532b62a50adc3 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -1,60 +1,60 @@ use super::{ - FulfillmentError, - FulfillmentErrorCode, - MismatchedProjectionTypes, - Obligation, - ObligationCause, - ObligationCauseCode, - OnUnimplementedDirective, - OnUnimplementedNote, - OutputTypeParameterMismatch, - TraitNotObjectSafe, - ConstEvalFailure, - PredicateObligation, - SelectionContext, - SelectionError, - ObjectSafetyViolation, - Overflow, + ConstEvalFailure, FulfillmentError, FulfillmentErrorCode, MismatchedProjectionTypes, + ObjectSafetyViolation, Obligation, ObligationCause, ObligationCauseCode, + OnUnimplementedDirective, OnUnimplementedNote, OutputTypeParameterMismatch, Overflow, + PredicateObligation, SelectionContext, SelectionError, TraitNotObjectSafe, }; use 
errors::{Applicability, DiagnosticBuilder}; use hir; -use hir::Node; use hir::def_id::DefId; -use infer::{self, InferCtxt}; +use hir::Node; use infer::type_variable::TypeVariableOrigin; +use infer::{self, InferCtxt}; +use session::DiagnosticMessageId; use std::fmt; use syntax::ast; -use session::DiagnosticMessageId; -use ty::{self, AdtKind, ToPredicate, ToPolyTraitRef, Ty, TyCtxt, TypeFoldable}; -use ty::GenericParamDefKind; use ty::error::ExpectedFound; use ty::fast_reject; use ty::fold::TypeFolder; use ty::subst::Subst; +use ty::GenericParamDefKind; use ty::SubtypePredicate; +use ty::{self, AdtKind, ToPolyTraitRef, ToPredicate, Ty, TyCtxt, TypeFoldable}; use util::nodemap::{FxHashMap, FxHashSet}; -use syntax_pos::{DUMMY_SP, Span, ExpnInfo, ExpnFormat}; +use syntax_pos::{ExpnFormat, ExpnInfo, Span, DUMMY_SP}; impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { - pub fn report_fulfillment_errors(&self, - errors: &[FulfillmentError<'tcx>], - body_id: Option, - fallback_has_occurred: bool) { + pub fn report_fulfillment_errors( + &self, + errors: &[FulfillmentError<'tcx>], + body_id: Option, + fallback_has_occurred: bool, + ) { #[derive(Debug)] struct ErrorDescriptor<'tcx> { predicate: ty::Predicate<'tcx>, index: Option, // None if this is an old error } - let mut error_map: FxHashMap<_, Vec<_>> = - self.reported_trait_errors.borrow().iter().map(|(&span, predicates)| { - (span, predicates.iter().map(|predicate| ErrorDescriptor { - predicate: predicate.clone(), - index: None - }).collect()) - }).collect(); + let mut error_map: FxHashMap<_, Vec<_>> = self + .reported_trait_errors + .borrow() + .iter() + .map(|(&span, predicates)| { + ( + span, + predicates + .iter() + .map(|predicate| ErrorDescriptor { + predicate: predicate.clone(), + index: None, + }) + .collect(), + ) + }) + .collect(); for (index, error) in errors.iter().enumerate() { // We want to ignore desugarings here: spans are equivalent even @@ -64,19 +64,20 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { format: ExpnFormat::CompilerDesugaring(_), def_site: Some(def_span), .. - }) = span.ctxt().outer().expn_info() { + }) = span.ctxt().outer().expn_info() + { span = def_span; } - error_map.entry(span).or_default().push( - ErrorDescriptor { - predicate: error.obligation.predicate.clone(), - index: Some(index) - } - ); + error_map.entry(span).or_default().push(ErrorDescriptor { + predicate: error.obligation.predicate.clone(), + index: Some(index), + }); - self.reported_trait_errors.borrow_mut() - .entry(span).or_default() + self.reported_trait_errors + .borrow_mut() + .entry(span) + .or_default() .push(error.obligation.predicate.clone()); } @@ -95,16 +96,16 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // Avoid errors being suppressed by already-suppressed // errors, to prevent all errors from being suppressed // at once. - continue + continue; } - if self.error_implies(&error2.predicate, &error.predicate) && - !(error2.index >= error.index && - self.error_implies(&error.predicate, &error2.predicate)) + if self.error_implies(&error2.predicate, &error.predicate) + && !(error2.index >= error.index + && self.error_implies(&error.predicate, &error2.predicate)) { info!("skipping {:?} (implied by {:?})", error, error2); is_suppressed[index] = true; - break + break; } } } @@ -120,21 +121,16 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // returns if `cond` not occurring implies that `error` does not occur - i.e., that // `error` occurring implies that `cond` occurs. 
- fn error_implies(&self, - cond: &ty::Predicate<'tcx>, - error: &ty::Predicate<'tcx>) - -> bool - { + fn error_implies(&self, cond: &ty::Predicate<'tcx>, error: &ty::Predicate<'tcx>) -> bool { if cond == error { - return true + return true; } let (cond, error) = match (cond, error) { - (&ty::Predicate::Trait(..), &ty::Predicate::Trait(ref error)) - => (cond, error), + (&ty::Predicate::Trait(..), &ty::Predicate::Trait(ref error)) => (cond, error), _ => { // FIXME: make this work in other cases too. - return false + return false; } }; @@ -147,8 +143,11 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // `Γ₁ ⊦ φ₁ => Γ₂ ⊦ φ₂` logic. let param_env = ty::ParamEnv::empty(); if self.can_sub(param_env, error, implication).is_ok() { - debug!("error_implies: {:?} -> {:?} -> {:?}", cond, error, implication); - return true + debug!( + "error_implies: {:?} -> {:?} -> {:?}", + cond, error, implication + ); + return true; } } } @@ -156,9 +155,12 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { false } - fn report_fulfillment_error(&self, error: &FulfillmentError<'tcx>, - body_id: Option, - fallback_has_occurred: bool) { + fn report_fulfillment_error( + &self, + error: &FulfillmentError<'tcx>, + body_id: Option, + fallback_has_occurred: bool, + ) { debug!("report_fulfillment_errors({:?})", error); match error.code { FulfillmentErrorCode::CodeSelectionError(ref e) => { @@ -171,24 +173,26 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { self.maybe_report_ambiguity(&error.obligation, body_id); } FulfillmentErrorCode::CodeSubtypeError(ref expected_found, ref err) => { - self.report_mismatched_types(&error.obligation.cause, - expected_found.expected, - expected_found.found, - err.clone()) - .emit(); + self.report_mismatched_types( + &error.obligation.cause, + expected_found.expected, + expected_found.found, + err.clone(), + ) + .emit(); } } } - fn report_projection_error(&self, - obligation: &PredicateObligation<'tcx>, - error: &MismatchedProjectionTypes<'tcx>) - { - let predicate = - self.resolve_type_vars_if_possible(&obligation.predicate); + fn report_projection_error( + &self, + obligation: &PredicateObligation<'tcx>, + error: &MismatchedProjectionTypes<'tcx>, + ) { + let predicate = self.resolve_type_vars_if_possible(&obligation.predicate); if predicate.references_error() { - return + return; } self.probe(|_| { @@ -205,7 +209,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let (data, _) = self.replace_bound_vars_with_fresh_vars( obligation.cause.span, infer::LateBoundRegionConversionTime::HigherRankedType, - data + data, ); let mut obligations = vec![]; let normalized_ty = super::normalize_projection_type( @@ -214,10 +218,12 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { data.projection_ty, obligation.cause.clone(), 0, - &mut obligations + &mut obligations, ); - if let Err(error) = self.at(&obligation.cause, obligation.param_env) - .eq(normalized_ty, data.ty) { + if let Err(error) = self + .at(&obligation.cause, obligation.param_env) + .eq(normalized_ty, data.ty) + { values = Some(infer::ValuePairs::Types(ExpectedFound { expected: normalized_ty, found: data.ty, @@ -228,13 +234,24 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } let msg = format!("type mismatch resolving `{}`", predicate); - let error_id = (DiagnosticMessageId::ErrorId(271), - Some(obligation.cause.span), msg); - let fresh = self.tcx.sess.one_time_diagnostics.borrow_mut().insert(error_id); + let error_id = ( + DiagnosticMessageId::ErrorId(271), + Some(obligation.cause.span), + msg, + ); + let fresh = self + .tcx + 
.sess + .one_time_diagnostics + .borrow_mut() + .insert(error_id); if fresh { let mut diag = struct_span_err!( - self.tcx.sess, obligation.cause.span, E0271, - "type mismatch resolving `{}`", predicate + self.tcx.sess, + obligation.cause.span, + E0271, + "type mismatch resolving `{}`", + predicate ); self.note_type_err(&mut diag, &obligation.cause, None, values, err); self.note_obligation_cause(&mut diag, obligation); @@ -279,18 +296,18 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { match (type_category(a), type_category(b)) { (Some(cat_a), Some(cat_b)) => match (&a.sty, &b.sty) { (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => def_a == def_b, - _ => cat_a == cat_b + _ => cat_a == cat_b, }, // infer and error can be equated to all types - _ => true + _ => true, } } - fn impl_similar_to(&self, - trait_ref: ty::PolyTraitRef<'tcx>, - obligation: &PredicateObligation<'tcx>) - -> Option - { + fn impl_similar_to( + &self, + trait_ref: ty::PolyTraitRef<'tcx>, + obligation: &PredicateObligation<'tcx>, + ) -> Option { let tcx = self.tcx; let param_env = obligation.param_env; let trait_ref = tcx.erase_late_bound_regions(&trait_ref); @@ -299,22 +316,22 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let mut self_match_impls = vec![]; let mut fuzzy_match_impls = vec![]; - self.tcx.for_each_relevant_impl( - trait_ref.def_id, trait_self_ty, |def_id| { + self.tcx + .for_each_relevant_impl(trait_ref.def_id, trait_self_ty, |def_id| { let impl_substs = self.fresh_substs_for_item(obligation.cause.span, def_id); - let impl_trait_ref = tcx - .impl_trait_ref(def_id) - .unwrap() - .subst(tcx, impl_substs); + let impl_trait_ref = tcx.impl_trait_ref(def_id).unwrap().subst(tcx, impl_substs); let impl_self_ty = impl_trait_ref.self_ty(); if let Ok(..) = self.can_eq(param_env, trait_self_ty, impl_self_ty) { self_match_impls.push(def_id); - if trait_ref.substs.types().skip(1) + if trait_ref + .substs + .types() + .skip(1) .zip(impl_trait_ref.substs.types().skip(1)) - .all(|(u,v)| self.fuzzy_match_tys(u, v)) + .all(|(u, v)| self.fuzzy_match_tys(u, v)) { fuzzy_match_impls.push(def_id); } @@ -326,7 +343,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } else if fuzzy_match_impls.len() == 1 { fuzzy_match_impls[0] } else { - return None + return None; }; if tcx.has_attr(impl_def_id, "rustc_on_unimplemented") { @@ -341,14 +358,15 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { trait_ref: ty::PolyTraitRef<'tcx>, obligation: &PredicateObligation<'tcx>, ) -> OnUnimplementedNote { - let def_id = self.impl_similar_to(trait_ref, obligation) + let def_id = self + .impl_similar_to(trait_ref, obligation) .unwrap_or_else(|| trait_ref.def_id()); let trait_ref = *trait_ref.skip_binder(); let mut flags = vec![]; match obligation.cause.code { - ObligationCauseCode::BuiltinDerivedObligation(..) | - ObligationCauseCode::ImplDerivedObligation(..) => {} + ObligationCauseCode::BuiltinDerivedObligation(..) + | ObligationCauseCode::ImplDerivedObligation(..) => {} _ => { // this is a "direct", user-specified, rather than derived, // obligation. 
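The code that follows assembles the flags (`_Self`, the values of the trait's generic parameters, and so on) that feed the filters and message interpolation of the `#[rustc_on_unimplemented]` attribute. A rough sketch of how such an attribute looks on a trait; note that this is a nightly-only internal attribute, and the exact keys shown here are recalled from how the standard library uses it rather than taken from this patch:

#![feature(rustc_attrs)]

#[rustc_on_unimplemented(
    on(_Self = "&str", note = "string slices cannot be frobnicated directly"),
    message = "`{Self}` cannot be frobnicated",
    label = "`{Self}` cannot be frobnicated here"
)]
trait Frobnicate {
    fn frobnicate(&self);
}

fn main() {}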
@@ -384,14 +402,17 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { if let Some(def) = self_ty.ty_adt_def() { // We also want to be able to select self's original // signature with no type arguments resolved - flags.push(("_Self".to_owned(), Some(self.tcx.type_of(def.did).to_string()))); + flags.push(( + "_Self".to_owned(), + Some(self.tcx.type_of(def.did).to_string()), + )); } for param in generics.params.iter() { let value = match param.kind { - GenericParamDefKind::Type {..} => { + GenericParamDefKind::Type { .. } => { trait_ref.substs[param.index as usize].to_string() - }, + } GenericParamDefKind::Lifetime => continue, }; let name = param.name.to_string(); @@ -418,12 +439,18 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { Some(format!("[{}]", self.tcx.type_of(def.did).to_string())), )); let tcx = self.tcx; - if let Some(len) = len.val.try_to_scalar().and_then(|scalar| { - scalar.to_usize(&tcx).ok() - }) { + if let Some(len) = len + .val + .try_to_scalar() + .and_then(|scalar| scalar.to_usize(&tcx).ok()) + { flags.push(( "_Self".to_owned(), - Some(format!("[{}; {}]", self.tcx.type_of(def.did).to_string(), len)), + Some(format!( + "[{}; {}]", + self.tcx.type_of(def.did).to_string(), + len + )), )); } else { flags.push(( @@ -434,48 +461,49 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } } - if let Ok(Some(command)) = OnUnimplementedDirective::of_item( - self.tcx, trait_ref.def_id, def_id - ) { + if let Ok(Some(command)) = + OnUnimplementedDirective::of_item(self.tcx, trait_ref.def_id, def_id) + { command.evaluate(self.tcx, trait_ref, &flags[..]) } else { OnUnimplementedNote::empty() } } - fn find_similar_impl_candidates(&self, - trait_ref: ty::PolyTraitRef<'tcx>) - -> Vec> - { - let simp = fast_reject::simplify_type(self.tcx, - trait_ref.skip_binder().self_ty(), - true,); + fn find_similar_impl_candidates( + &self, + trait_ref: ty::PolyTraitRef<'tcx>, + ) -> Vec> { + let simp = fast_reject::simplify_type(self.tcx, trait_ref.skip_binder().self_ty(), true); let all_impls = self.tcx.all_impls(trait_ref.def_id()); match simp { - Some(simp) => all_impls.iter().filter_map(|&def_id| { - let imp = self.tcx.impl_trait_ref(def_id).unwrap(); - let imp_simp = fast_reject::simplify_type(self.tcx, - imp.self_ty(), - true); - if let Some(imp_simp) = imp_simp { - if simp != imp_simp { - return None + Some(simp) => all_impls + .iter() + .filter_map(|&def_id| { + let imp = self.tcx.impl_trait_ref(def_id).unwrap(); + let imp_simp = fast_reject::simplify_type(self.tcx, imp.self_ty(), true); + if let Some(imp_simp) = imp_simp { + if simp != imp_simp { + return None; + } } - } - Some(imp) - }).collect(), - None => all_impls.iter().map(|&def_id| - self.tcx.impl_trait_ref(def_id).unwrap() - ).collect() + Some(imp) + }) + .collect(), + None => all_impls + .iter() + .map(|&def_id| self.tcx.impl_trait_ref(def_id).unwrap()) + .collect(), } } - fn report_similar_impl_candidates(&self, - mut impl_candidates: Vec>, - err: &mut DiagnosticBuilder<'_>) - { + fn report_similar_impl_candidates( + &self, + mut impl_candidates: Vec>, + err: &mut DiagnosticBuilder<'_>, + ) { if impl_candidates.is_empty() { return; } @@ -487,16 +515,18 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { 4 }; - let normalize = |candidate| self.tcx.global_tcx().infer_ctxt().enter(|ref infcx| { - let normalized = infcx - .at(&ObligationCause::dummy(), ty::ParamEnv::empty()) - .normalize(candidate) - .ok(); - match normalized { - Some(normalized) => format!("\n {:?}", normalized.value), - None => format!("\n {:?}", candidate), - } - 
}); + let normalize = |candidate| { + self.tcx.global_tcx().infer_ctxt().enter(|ref infcx| { + let normalized = infcx + .at(&ObligationCause::dummy(), ty::ParamEnv::empty()) + .normalize(candidate) + .ok(); + match normalized { + Some(normalized) => format!("\n {:?}", normalized.value), + None => format!("\n {:?}", candidate), + } + }) + }; // Sort impl candidates so that ordering is consistent for UI tests. let normalized_impl_candidates = &mut impl_candidates[0..end] @@ -505,14 +535,15 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { .collect::>(); normalized_impl_candidates.sort(); - err.help(&format!("the following implementations were found:{}{}", - normalized_impl_candidates.join(""), - if len > 5 { - format!("\nand {} others", len - 4) - } else { - String::new() - } - )); + err.help(&format!( + "the following implementations were found:{}{}", + normalized_impl_candidates.join(""), + if len > 5 { + format!("\nand {} others", len - 4) + } else { + String::new() + } + )); } /// Reports that an overflow has occurred and halts compilation. We @@ -521,16 +552,22 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { /// whose result could not be truly determined and thus we can't say /// if the program type checks or not -- and they are unusual /// occurrences in any case. - pub fn report_overflow_error(&self, - obligation: &Obligation<'tcx, T>, - suggest_increasing_limit: bool) -> ! - where T: fmt::Display + TypeFoldable<'tcx> + pub fn report_overflow_error( + &self, + obligation: &Obligation<'tcx, T>, + suggest_increasing_limit: bool, + ) -> ! + where + T: fmt::Display + TypeFoldable<'tcx>, { - let predicate = - self.resolve_type_vars_if_possible(&obligation.predicate); - let mut err = struct_span_err!(self.tcx.sess, obligation.cause.span, E0275, - "overflow evaluating the requirement `{}`", - predicate); + let predicate = self.resolve_type_vars_if_possible(&obligation.predicate); + let mut err = struct_span_err!( + self.tcx.sess, + obligation.cause.span, + E0275, + "overflow evaluating the requirement `{}`", + predicate + ); if suggest_increasing_limit { self.suggest_new_overflow_limit(&mut err); @@ -557,14 +594,14 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { self.report_overflow_error(&cycle[0], false); } - pub fn report_extra_impl_obligation(&self, - error_span: Span, - item_name: ast::Name, - _impl_item_def_id: DefId, - trait_item_def_id: DefId, - requirement: &dyn fmt::Display) - -> DiagnosticBuilder<'tcx> - { + pub fn report_extra_impl_obligation( + &self, + error_span: Span, + item_name: ast::Name, + _impl_item_def_id: DefId, + trait_item_def_id: DefId, + requirement: &dyn fmt::Display, + ) -> DiagnosticBuilder<'tcx> { let msg = "impl has stricter requirements than trait"; let sp = self.tcx.sess.source_map().def_span(error_span); @@ -580,13 +617,11 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { err } - /// Get the parent trait chain start fn get_parent_trait_ref(&self, code: &ObligationCauseCode<'tcx>) -> Option { match code { &ObligationCauseCode::BuiltinDerivedObligation(ref data) => { - let parent_trait_ref = self.resolve_type_vars_if_possible( - &data.parent_trait_ref); + let parent_trait_ref = self.resolve_type_vars_if_possible(&data.parent_trait_ref); match self.get_parent_trait_ref(&data.parent_code) { Some(t) => Some(t), None => Some(parent_trait_ref.skip_binder().self_ty().to_string()), @@ -596,43 +631,50 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } } - pub fn report_selection_error(&self, - obligation: &PredicateObligation<'tcx>, - error: 
&SelectionError<'tcx>, - fallback_has_occurred: bool) - { + pub fn report_selection_error( + &self, + obligation: &PredicateObligation<'tcx>, + error: &SelectionError<'tcx>, + fallback_has_occurred: bool, + ) { let span = obligation.cause.span; let mut err = match *error { SelectionError::Unimplemented => { if let ObligationCauseCode::CompareImplMethodObligation { - item_name, impl_item_def_id, trait_item_def_id, - } = obligation.cause.code { + item_name, + impl_item_def_id, + trait_item_def_id, + } = obligation.cause.code + { self.report_extra_impl_obligation( span, item_name, impl_item_def_id, trait_item_def_id, - &format!("`{}`", obligation.predicate)) - .emit(); + &format!("`{}`", obligation.predicate), + ) + .emit(); return; } match obligation.predicate { ty::Predicate::Trait(ref trait_predicate) => { - let trait_predicate = - self.resolve_type_vars_if_possible(trait_predicate); + let trait_predicate = self.resolve_type_vars_if_possible(trait_predicate); if self.tcx.sess.has_errors() && trait_predicate.references_error() { return; } let trait_ref = trait_predicate.to_poly_trait_ref(); - let (post_message, pre_message) = - self.get_parent_trait_ref(&obligation.cause.code) - .map(|t| (format!(" in `{}`", t), format!("within `{}`, ", t))) + let (post_message, pre_message) = self + .get_parent_trait_ref(&obligation.cause.code) + .map(|t| (format!(" in `{}`", t), format!("within `{}`, ", t))) .unwrap_or_default(); - let OnUnimplementedNote { message, label, note } - = self.on_unimplemented_note(trait_ref, obligation); + let OnUnimplementedNote { + message, + label, + note, + } = self.on_unimplemented_note(trait_ref, obligation); let have_alt_message = message.is_some() || label.is_some(); let mut err = struct_span_err!( @@ -640,19 +682,23 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { span, E0277, "{}", - message.unwrap_or_else(|| - format!("the trait bound `{}` is not satisfied{}", - trait_ref.to_predicate(), post_message) - )); + message.unwrap_or_else(|| format!( + "the trait bound `{}` is not satisfied{}", + trait_ref.to_predicate(), + post_message + )) + ); let explanation = if obligation.cause.code == ObligationCauseCode::MainFunctionType { "consider using `()`, or a `Result`".to_owned() } else { - format!("{}the trait `{}` is not implemented for `{}`", - pre_message, - trait_ref, - trait_ref.self_ty()) + format!( + "{}the trait `{}` is not implemented for `{}`", + pre_message, + trait_ref, + trait_ref.self_ty() + ) }; if let Some(ref s) = label { @@ -672,8 +718,9 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { self.suggest_remove_reference(&obligation, &mut err, &trait_ref); // Try to report a help message - if !trait_ref.has_infer_types() && - self.predicate_can_apply(obligation.param_env, trait_ref) { + if !trait_ref.has_infer_types() + && self.predicate_can_apply(obligation.param_env, trait_ref) + { // If a where-clause may be useful, remind the // user that they can add it. // @@ -681,8 +728,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // these notes will often be of the form // "the type `T` can't be frobnicated" // which is somewhat confusing. - err.help(&format!("consider adding a `where {}` bound", - trait_ref.to_predicate())); + err.help(&format!( + "consider adding a `where {}` bound", + trait_ref.to_predicate() + )); } else if !have_alt_message { // Can't show anything else useful, try to find similar impls. 
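A small example of the case the "consider adding a `where ...` bound" help above is aimed at (the function and bound are invented for the example); without the bound, the `println!` call fails with E0277 and the diagnostic suggests adding exactly this clause:

use std::fmt::Display;

fn show<T>(value: T)
where
    T: Display, // removing this bound reproduces the E0277 + help scenario
{
    println!("{}", value);
}

fn main() {
    show(42);
}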
let impl_candidates = self.find_similar_impl_candidates(trait_ref); @@ -705,15 +754,17 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { }); let unit_obligation = Obligation { predicate: ty::Predicate::Trait(predicate), - .. obligation.clone() + ..obligation.clone() }; if self.predicate_may_hold(&unit_obligation) { - err.note("the trait is implemented for `()`. \ - Possibly this error has been caused by changes to \ - Rust's type-inference algorithm \ - (see: https://github.com/rust-lang/rust/issues/48950 \ - for more info). Consider whether you meant to use the \ - type `()` here instead."); + err.note( + "the trait is implemented for `()`. \ + Possibly this error has been caused by changes to \ + Rust's type-inference algorithm \ + (see: https://github.com/rust-lang/rust/issues/48950 \ + for more info). Consider whether you meant to use the \ + type `()` here instead.", + ); } } @@ -724,7 +775,11 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // Errors for Subtype predicates show up as // `FulfillmentErrorCode::CodeSubtypeError`, // not selection error. - span_bug!(span, "subtype requirement gave wrong error: `{:?}`", predicate) + span_bug!( + span, + "subtype requirement gave wrong error: `{:?}`", + predicate + ) } ty::Predicate::RegionOutlives(ref predicate) => { @@ -734,56 +789,79 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } ty::Predicate::Projection(..) | ty::Predicate::TypeOutlives(..) => { - let predicate = - self.resolve_type_vars_if_possible(&obligation.predicate); - struct_span_err!(self.tcx.sess, span, E0280, + let predicate = self.resolve_type_vars_if_possible(&obligation.predicate); + struct_span_err!( + self.tcx.sess, + span, + E0280, "the requirement `{}` is not satisfied", - predicate) + predicate + ) } ty::Predicate::ObjectSafe(trait_def_id) => { - let violations = self.tcx.global_tcx() - .object_safety_violations(trait_def_id); - self.tcx.report_object_safety_error(span, - trait_def_id, - violations) + let violations = + self.tcx.global_tcx().object_safety_violations(trait_def_id); + self.tcx + .report_object_safety_error(span, trait_def_id, violations) } ty::Predicate::ClosureKind(closure_def_id, closure_substs, kind) => { let found_kind = self.closure_kind(closure_def_id, closure_substs).unwrap(); - let closure_span = self.tcx.sess.source_map() + let closure_span = self + .tcx + .sess + .source_map() .def_span(self.tcx.hir().span_if_local(closure_def_id).unwrap()); let node_id = self.tcx.hir().as_local_node_id(closure_def_id).unwrap(); let mut err = struct_span_err!( - self.tcx.sess, closure_span, E0525, + self.tcx.sess, + closure_span, + E0525, "expected a closure that implements the `{}` trait, \ but this closure only implements `{}`", kind, - found_kind); + found_kind + ); err.span_label( closure_span, - format!("this closure implements `{}`, not `{}`", found_kind, kind)); + format!("this closure implements `{}`, not `{}`", found_kind, kind), + ); err.span_label( obligation.cause.span, - format!("the requirement to implement `{}` derives from here", kind)); + format!("the requirement to implement `{}` derives from here", kind), + ); // Additional context information explaining why the closure only implements // a particular trait. 
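The extra labels added below explain why a closure is only `FnOnce` or `FnMut`. A minimal sketch of the underlying situation (names invented): a closure that moves a captured value out of its environment can only be called once, so passing it where `Fn` is required triggers the E0525-style error handled here.

fn call_once<F: FnOnce() -> String>(f: F) -> String {
    f()
}

// fn call_twice<F: Fn() -> String>(f: F) { f(); f(); }
// Passing `consume` to `call_twice` would be rejected: it moves `s` out of its
// environment, so it implements `FnOnce` but not `Fn`.

fn main() {
    let s = String::from("moved out");
    let consume = move || s;
    println!("{}", call_once(consume));
}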
if let Some(tables) = self.in_progress_tables { let tables = tables.borrow(); let closure_hir_id = self.tcx.hir().node_to_hir_id(node_id); - match (found_kind, tables.closure_kind_origins().get(closure_hir_id)) { + match ( + found_kind, + tables.closure_kind_origins().get(closure_hir_id), + ) { (ty::ClosureKind::FnOnce, Some((span, name))) => { - err.span_label(*span, format!( - "closure is `FnOnce` because it moves the \ - variable `{}` out of its environment", name)); - }, + err.span_label( + *span, + format!( + "closure is `FnOnce` because it moves the \ + variable `{}` out of its environment", + name + ), + ); + } (ty::ClosureKind::FnMut, Some((span, name))) => { - err.span_label(*span, format!( - "closure is `FnMut` because it mutates the \ - variable `{}` here", name)); - }, + err.span_label( + *span, + format!( + "closure is `FnMut` because it mutates the \ + variable `{}` here", + name + ), + ); + } _ => {} } } @@ -805,7 +883,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // which bounds actually failed to hold. self.tcx.sess.struct_span_err( span, - &format!("the type `{}` is not well-formed (chalk)", ty) + &format!("the type `{}` is not well-formed (chalk)", ty), ) } } @@ -814,8 +892,11 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // Errors for `ConstEvaluatable` predicates show up as // `SelectionError::ConstEvalFailure`, // not `Unimplemented`. - span_bug!(span, - "const-evaluatable requirement gave wrong error: `{:?}`", obligation) + span_bug!( + span, + "const-evaluatable requirement gave wrong error: `{:?}`", + obligation + ) } } } @@ -836,9 +917,9 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { _ => None, }; - let found_span = found_did.and_then(|did| - self.tcx.hir().span_if_local(did) - ).map(|sp| self.tcx.sess.source_map().def_span(sp)); // the sp could be an fn def + let found_span = found_did + .and_then(|did| self.tcx.hir().span_if_local(did)) + .map(|sp| self.tcx.sess.source_map().def_span(sp)); // the sp could be an fn def let found = match found_trait_ref.skip_binder().substs.type_at(1).sty { ty::Tuple(ref tys) => vec![ArgKind::empty(); tys.len()], @@ -846,29 +927,36 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { }; let expected = match expected_trait_ref.skip_binder().substs.type_at(1).sty { - ty::Tuple(ref tys) => tys.iter() - .map(|t| ArgKind::from_expected_ty(t, Some(span))).collect(), + ty::Tuple(ref tys) => tys + .iter() + .map(|t| ArgKind::from_expected_ty(t, Some(span))) + .collect(), ref sty => vec![ArgKind::Arg("_".to_owned(), sty.to_string())], }; if found.len() == expected.len() { - self.report_closure_arg_mismatch(span, - found_span, - found_trait_ref, - expected_trait_ref) + self.report_closure_arg_mismatch( + span, + found_span, + found_trait_ref, + expected_trait_ref, + ) } else { let (closure_span, found) = found_did .and_then(|did| self.tcx.hir().get_if_local(did)) .map(|node| { let (found_span, found) = self.get_fn_like_arguments(node); (Some(found_span), found) - }).unwrap_or((found_span, found)); + }) + .unwrap_or((found_span, found)); - self.report_arg_count_mismatch(span, - closure_span, - expected, - found, - found_trait_ty.is_closure()) + self.report_arg_count_mismatch( + span, + closure_span, + expected, + found, + found_trait_ty.is_closure(), + ) } } @@ -879,7 +967,9 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // already reported in the query ConstEvalFailure(_) => { - self.tcx.sess.delay_span_bug(span, "constant in type had an ignored error"); + self.tcx + .sess + .delay_span_bug(span, "constant in type had 
an ignored error"); return; } @@ -893,9 +983,11 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { /// When encountering an assignment of an unsized trait, like `let x = ""[..];`, provide a /// suggestion to borrow the initializer in order to use have a slice instead. - fn suggest_borrow_on_unsized_slice(&self, - code: &ObligationCauseCode<'tcx>, - err: &mut DiagnosticBuilder<'tcx>) { + fn suggest_borrow_on_unsized_slice( + &self, + code: &ObligationCauseCode<'tcx>, + err: &mut DiagnosticBuilder<'tcx>, + ) { if let &ObligationCauseCode::VariableType(node_id) = code { let parent_node = self.tcx.hir().get_parent_node(node_id); if let Some(Node::Local(ref local)) = self.tcx.hir().find(parent_node) { @@ -906,7 +998,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { expr.span, "consider borrowing here", format!("&{}", snippet), - Applicability::MachineApplicable + Applicability::MachineApplicable, ); } } @@ -917,15 +1009,18 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { /// Whenever references are used by mistake, like `for (i, e) in &vec.iter().enumerate()`, /// suggest removing these references until we reach a type that implements the trait. - fn suggest_remove_reference(&self, - obligation: &PredicateObligation<'tcx>, - err: &mut DiagnosticBuilder<'tcx>, - trait_ref: &ty::Binder>) { + fn suggest_remove_reference( + &self, + obligation: &PredicateObligation<'tcx>, + err: &mut DiagnosticBuilder<'tcx>, + trait_ref: &ty::Binder>, + ) { let trait_ref = trait_ref.skip_binder(); let span = obligation.cause.span; if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) { - let refs_number = snippet.chars() + let refs_number = snippet + .chars() .filter(|c| !c.is_whitespace()) .take_while(|c| *c == '&') .count(); @@ -938,20 +1033,28 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let substs = self.tcx.mk_substs_trait(trait_type, &[]); let new_trait_ref = ty::TraitRef::new(trait_ref.def_id, substs); - let new_obligation = Obligation::new(ObligationCause::dummy(), - obligation.param_env, - new_trait_ref.to_predicate()); + let new_obligation = Obligation::new( + ObligationCause::dummy(), + obligation.param_env, + new_trait_ref.to_predicate(), + ); if self.predicate_may_hold(&new_obligation) { - let sp = self.tcx.sess.source_map() + let sp = self + .tcx + .sess + .source_map() .span_take_while(span, |c| c.is_whitespace() || *c == '&'); let remove_refs = refs_remaining + 1; - let format_str = format!("consider removing {} leading `&`-references", - remove_refs); + let format_str = + format!("consider removing {} leading `&`-references", remove_refs); err.span_suggestion_short_with_applicability( - sp, &format_str, String::new(), Applicability::MachineApplicable + sp, + &format_str, + String::new(), + Applicability::MachineApplicable, ); break; } @@ -971,71 +1074,95 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { Node::Expr(&hir::Expr { node: hir::ExprKind::Closure(_, ref _decl, id, span, _), .. - }) => { - (self.tcx.sess.source_map().def_span(span), self.tcx.hir().body(id).arguments.iter() + }) => ( + self.tcx.sess.source_map().def_span(span), + self.tcx + .hir() + .body(id) + .arguments + .iter() .map(|arg| { if let hir::Pat { node: hir::PatKind::Tuple(args, _), span, .. 
- } = arg.pat.clone().into_inner() { + } = arg.pat.clone().into_inner() + { ArgKind::Tuple( Some(span), - args.iter().map(|pat| { - let snippet = self.tcx.sess.source_map() - .span_to_snippet(pat.span).unwrap(); - (snippet, "_".to_owned()) - }).collect::>(), + args.iter() + .map(|pat| { + let snippet = self + .tcx + .sess + .source_map() + .span_to_snippet(pat.span) + .unwrap(); + (snippet, "_".to_owned()) + }) + .collect::>(), ) } else { - let name = self.tcx.sess.source_map() - .span_to_snippet(arg.pat.span).unwrap(); + let name = self + .tcx + .sess + .source_map() + .span_to_snippet(arg.pat.span) + .unwrap(); ArgKind::Arg(name, "_".to_owned()) } }) - .collect::>()) - } + .collect::>(), + ), Node::Item(&hir::Item { span, node: hir::ItemKind::Fn(ref decl, ..), .. - }) | - Node::ImplItem(&hir::ImplItem { + }) + | Node::ImplItem(&hir::ImplItem { span, node: hir::ImplItemKind::Method(hir::MethodSig { ref decl, .. }, _), .. - }) | - Node::TraitItem(&hir::TraitItem { + }) + | Node::TraitItem(&hir::TraitItem { span, node: hir::TraitItemKind::Method(hir::MethodSig { ref decl, .. }, _), .. - }) => { - (self.tcx.sess.source_map().def_span(span), decl.inputs.iter() - .map(|arg| match arg.clone().node { - hir::TyKind::Tup(ref tys) => ArgKind::Tuple( - Some(arg.span), - vec![("_".to_owned(), "_".to_owned()); tys.len()] - ), - _ => ArgKind::empty() - }).collect::>()) - } + }) => ( + self.tcx.sess.source_map().def_span(span), + decl.inputs + .iter() + .map(|arg| match arg.clone().node { + hir::TyKind::Tup(ref tys) => ArgKind::Tuple( + Some(arg.span), + vec![("_".to_owned(), "_".to_owned()); tys.len()], + ), + _ => ArgKind::empty(), + }) + .collect::>(), + ), Node::Variant(&hir::Variant { span, - node: hir::VariantKind { - data: hir::VariantData::Tuple(ref fields, _), - .. - }, + node: + hir::VariantKind { + data: hir::VariantData::Tuple(ref fields, _), + .. + }, .. 
- }) => { - (self.tcx.sess.source_map().def_span(span), - fields.iter().map(|field| - ArgKind::Arg(field.ident.to_string(), "_".to_string()) - ).collect::>()) - } - Node::StructCtor(ref variant_data) => { - (self.tcx.sess.source_map().def_span(self.tcx.hir().span(variant_data.id())), - vec![ArgKind::empty(); variant_data.fields().len()]) - } + }) => ( + self.tcx.sess.source_map().def_span(span), + fields + .iter() + .map(|field| ArgKind::Arg(field.ident.to_string(), "_".to_string())) + .collect::>(), + ), + Node::StructCtor(ref variant_data) => ( + self.tcx + .sess + .source_map() + .def_span(self.tcx.hir().span(variant_data.id())), + vec![ArgKind::empty(); variant_data.fields().len()], + ), _ => panic!("non-FnLike node found: {:?}", node), } } @@ -1063,10 +1190,16 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { (1, Some(&ArgKind::Tuple(_, ref fields))) => { format!("a single {}-tuple as argument", fields.len()) } - _ => format!("{} {}argument{}", - arg_length, - if distinct && arg_length > 1 { "distinct " } else { "" }, - if arg_length == 1 { "" } else { "s" }), + _ => format!( + "{} {}argument{}", + arg_length, + if distinct && arg_length > 1 { + "distinct " + } else { + "" + }, + if arg_length == 1 { "" } else { "s" } + ), } }; @@ -1083,7 +1216,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { found_str, ); - err.span_label(span, format!("expected {} that takes {}", kind, expected_str)); + err.span_label( + span, + format!("expected {} that takes {}", kind, expected_str), + ); if let Some(found_span) = found_span { err.span_label(found_span, format!("takes {}", found_str)); @@ -1093,7 +1229,11 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // // move |_| { ... } // ^^^^^-- prefix - let prefix_span = self.tcx.sess.source_map().span_until_non_whitespace(found_span); + let prefix_span = self + .tcx + .sess + .source_map() + .span_until_non_whitespace(found_span); // move |_| { ... } // ^^^-- pipe_span let pipe_span = if let Some(span) = found_span.trim_start(prefix_span) { @@ -1111,11 +1251,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { pipe_span, &format!( "consider changing the closure to take and ignore the expected argument{}", - if expected_args.len() < 2 { - "" - } else { - "s" - } + if expected_args.len() < 2 { "" } else { "s" } ), format!("|{}|", underscores), Applicability::MachineApplicable, @@ -1124,22 +1260,26 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { if let &[ArgKind::Tuple(_, ref fields)] = &found_args[..] { if fields.len() == expected_args.len() { - let sugg = fields.iter() + let sugg = fields + .iter() .map(|(name, _)| name.to_owned()) .collect::>() .join(", "); - err.span_suggestion_with_applicability(found_span, - "change the closure to take multiple \ - arguments instead of a single tuple", - format!("|{}|", sugg), - Applicability::MachineApplicable); + err.span_suggestion_with_applicability( + found_span, + "change the closure to take multiple \ + arguments instead of a single tuple", + format!("|{}|", sugg), + Applicability::MachineApplicable, + ); } } if let &[ArgKind::Tuple(_, ref fields)] = &expected_args[..] 
{ if fields.len() == found_args.len() && is_closure { let sugg = format!( "|({}){}|", - found_args.iter() + found_args + .iter() .map(|arg| match arg { ArgKind::Arg(name, _) => name.to_owned(), _ => "_".to_owned(), @@ -1151,11 +1291,14 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { ArgKind::Arg(_, ty) => ty != "_", _ => false, }) { - format!(": ({})", - fields.iter() - .map(|(_, ty)| ty.to_owned()) - .collect::>() - .join(", ")) + format!( + ": ({})", + fields + .iter() + .map(|(_, ty)| ty.to_owned()) + .collect::>() + .join(", ") + ) } else { String::new() }, @@ -1165,7 +1308,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { "change the closure to accept a tuple instead of \ individual arguments", sugg, - Applicability::MachineApplicable + Applicability::MachineApplicable, ); } } @@ -1174,15 +1317,17 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { err } - fn report_closure_arg_mismatch(&self, - span: Span, - found_span: Option, - expected_ref: ty::PolyTraitRef<'tcx>, - found: ty::PolyTraitRef<'tcx>) - -> DiagnosticBuilder<'tcx> - { - fn build_fn_sig_string<'a, 'gcx, 'tcx>(tcx: ty::TyCtxt<'a, 'gcx, 'tcx>, - trait_ref: &ty::TraitRef<'tcx>) -> String { + fn report_closure_arg_mismatch( + &self, + span: Span, + found_span: Option, + expected_ref: ty::PolyTraitRef<'tcx>, + found: ty::PolyTraitRef<'tcx>, + ) -> DiagnosticBuilder<'tcx> { + fn build_fn_sig_string<'a, 'gcx, 'tcx>( + tcx: ty::TyCtxt<'a, 'gcx, 'tcx>, + trait_ref: &ty::TraitRef<'tcx>, + ) -> String { let inputs = trait_ref.substs.type_at(1); let sig = if let ty::Tuple(inputs) = inputs.sty { tcx.mk_fn_sig( @@ -1190,7 +1335,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { tcx.mk_infer(ty::TyVar(ty::TyVid { index: 0 })), false, hir::Unsafety::Normal, - ::rustc_target::spec::abi::Abi::Rust + ::rustc_target::spec::abi::Abi::Rust, ) } else { tcx.mk_fn_sig( @@ -1198,16 +1343,24 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { tcx.mk_infer(ty::TyVar(ty::TyVid { index: 0 })), false, hir::Unsafety::Normal, - ::rustc_target::spec::abi::Abi::Rust + ::rustc_target::spec::abi::Abi::Rust, ) }; ty::Binder::bind(sig).to_string() } let argument_is_closure = expected_ref.skip_binder().substs.type_at(0).is_closure(); - let mut err = struct_span_err!(self.tcx.sess, span, E0631, - "type mismatch in {} arguments", - if argument_is_closure { "closure" } else { "function" }); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0631, + "type mismatch in {} arguments", + if argument_is_closure { + "closure" + } else { + "function" + } + ); let found_str = format!( "expected signature of `{}`", @@ -1227,36 +1380,48 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - pub fn recursive_type_with_infinite_size_error(self, - type_def_id: DefId) - -> DiagnosticBuilder<'tcx> - { + pub fn recursive_type_with_infinite_size_error( + self, + type_def_id: DefId, + ) -> DiagnosticBuilder<'tcx> { assert!(type_def_id.is_local()); let span = self.hir().span_if_local(type_def_id).unwrap(); let span = self.sess.source_map().def_span(span); - let mut err = struct_span_err!(self.sess, span, E0072, - "recursive type `{}` has infinite size", - self.item_path_str(type_def_id)); + let mut err = struct_span_err!( + self.sess, + span, + E0072, + "recursive type `{}` has infinite size", + self.item_path_str(type_def_id) + ); err.span_label(span, "recursive type has infinite size"); - err.help(&format!("insert indirection (e.g., a `Box`, `Rc`, or `&`) \ - at some point to make `{}` representable", - 
self.item_path_str(type_def_id))); + err.help(&format!( + "insert indirection (e.g., a `Box`, `Rc`, or `&`) \ + at some point to make `{}` representable", + self.item_path_str(type_def_id) + )); err } - pub fn report_object_safety_error(self, - span: Span, - trait_def_id: DefId, - violations: Vec) - -> DiagnosticBuilder<'tcx> - { + pub fn report_object_safety_error( + self, + span: Span, + trait_def_id: DefId, + violations: Vec, + ) -> DiagnosticBuilder<'tcx> { let trait_str = self.item_path_str(trait_def_id); let span = self.sess.source_map().def_span(span); let mut err = struct_span_err!( - self.sess, span, E0038, + self.sess, + span, + E0038, "the trait `{}` cannot be made into an object", - trait_str); - err.span_label(span, format!("the trait `{}` cannot be made into an object", trait_str)); + trait_str + ); + err.span_label( + span, + format!("the trait `{}` cannot be made into an object", trait_str), + ); let mut reported_violations = FxHashSet::default(); for violation in violations { @@ -1269,8 +1434,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { - fn maybe_report_ambiguity(&self, obligation: &PredicateObligation<'tcx>, - body_id: Option) { + fn maybe_report_ambiguity( + &self, + obligation: &PredicateObligation<'tcx>, + body_id: Option, + ) { // Unable to successfully determine, probably means // insufficient type information, but could mean // ambiguous impls. The latter *ought* to be a @@ -1279,9 +1447,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let predicate = self.resolve_type_vars_if_possible(&obligation.predicate); let span = obligation.cause.span; - debug!("maybe_report_ambiguity(predicate={:?}, obligation={:?})", - predicate, - obligation); + debug!( + "maybe_report_ambiguity(predicate={:?}, obligation={:?})", + predicate, obligation + ); // Ambiguity errors are often caused as fallout from earlier // errors. So just ignore them if this infcx is tainted. @@ -1321,17 +1490,22 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // has_errors() to be sure that compilation isn't happening // anyway. In that case, why inundate the user. 
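The E0283 branch below covers obligations that stay ambiguous because more than one impl could apply. A compiler-external sketch of that situation (trait and types invented for the example):

trait Maker {
    fn make() -> u32;
}

struct A;
struct B;

impl Maker for A {
    fn make() -> u32 {
        1
    }
}

impl Maker for B {
    fn make() -> u32 {
        2
    }
}

fn main() {
    // `let n: u32 = Maker::make();` cannot decide between the two impls and is
    // reported as "type annotations required" (E0283); naming the impl resolves it.
    let n: u32 = <A as Maker>::make();
    println!("{}", n);
}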
if !self.tcx.sess.has_errors() { - if - self.tcx.lang_items().sized_trait() + if self + .tcx + .lang_items() + .sized_trait() .map_or(false, |sized_id| sized_id == trait_ref.def_id()) { self.need_type_info_err(body_id, span, self_ty).emit(); } else { - let mut err = struct_span_err!(self.tcx.sess, - span, E0283, - "type annotations required: \ - cannot resolve `{}`", - predicate); + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0283, + "type annotations required: \ + cannot resolve `{}`", + predicate + ); self.note_obligation_cause(&mut err, obligation); err.emit(); } @@ -1350,22 +1524,28 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { if data.references_error() || self.tcx.sess.has_errors() { // no need to overload user in such cases } else { - let &SubtypePredicate { a_is_expected: _, a, b } = data.skip_binder(); + let &SubtypePredicate { + a_is_expected: _, + a, + b, + } = data.skip_binder(); // both must be type variables, or the other would've been instantiated assert!(a.is_ty_var() && b.is_ty_var()); - self.need_type_info_err(body_id, - obligation.cause.span, - a).emit(); + self.need_type_info_err(body_id, obligation.cause.span, a) + .emit(); } } _ => { if !self.tcx.sess.has_errors() { - let mut err = struct_span_err!(self.tcx.sess, - obligation.cause.span, E0284, - "type annotations required: \ - cannot resolve `{}`", - predicate); + let mut err = struct_span_err!( + self.tcx.sess, + obligation.cause.span, + E0284, + "type annotations required: \ + cannot resolve `{}`", + predicate + ); self.note_obligation_cause(&mut err, obligation); err.emit(); } @@ -1375,24 +1555,29 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { /// Returns whether the trait predicate may apply for *some* assignment /// to the type parameters. - fn predicate_can_apply(&self, - param_env: ty::ParamEnv<'tcx>, - pred: ty::PolyTraitRef<'tcx>) - -> bool { - struct ParamToVarFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + fn predicate_can_apply( + &self, + param_env: ty::ParamEnv<'tcx>, + pred: ty::PolyTraitRef<'tcx>, + ) -> bool { + struct ParamToVarFolder<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, - var_map: FxHashMap, Ty<'tcx>> + var_map: FxHashMap, Ty<'tcx>>, } impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for ParamToVarFolder<'a, 'gcx, 'tcx> { - fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.infcx.tcx } + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { + self.infcx.tcx + } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { - if let ty::Param(ty::ParamTy {name, ..}) = ty.sty { + if let ty::Param(ty::ParamTy { name, .. 
}) = ty.sty { let infcx = self.infcx; - self.var_map.entry(ty).or_insert_with(|| - infcx.next_ty_var( - TypeVariableOrigin::TypeParameterDefinition(DUMMY_SP, name))) + self.var_map.entry(ty).or_insert_with(|| { + infcx.next_ty_var(TypeVariableOrigin::TypeParameterDefinition( + DUMMY_SP, name, + )) + }) } else { ty.super_fold_with(self) } @@ -1404,57 +1589,63 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let cleaned_pred = pred.fold_with(&mut ParamToVarFolder { infcx: self, - var_map: Default::default() + var_map: Default::default(), }); let cleaned_pred = super::project::normalize( &mut selcx, param_env, ObligationCause::dummy(), - &cleaned_pred - ).value; + &cleaned_pred, + ) + .value; let obligation = Obligation::new( ObligationCause::dummy(), param_env, - cleaned_pred.to_predicate() + cleaned_pred.to_predicate(), ); self.predicate_may_hold(&obligation) }) } - fn note_obligation_cause(&self, - err: &mut DiagnosticBuilder<'_>, - obligation: &Obligation<'tcx, T>) - where T: fmt::Display + fn note_obligation_cause( + &self, + err: &mut DiagnosticBuilder<'_>, + obligation: &Obligation<'tcx, T>, + ) where + T: fmt::Display, { - self.note_obligation_cause_code(err, - &obligation.predicate, - &obligation.cause.code, - &mut vec![]); + self.note_obligation_cause_code( + err, + &obligation.predicate, + &obligation.cause.code, + &mut vec![], + ); } - fn note_obligation_cause_code(&self, - err: &mut DiagnosticBuilder<'_>, - predicate: &T, - cause_code: &ObligationCauseCode<'tcx>, - obligated_types: &mut Vec<&ty::TyS<'tcx>>) - where T: fmt::Display + fn note_obligation_cause_code( + &self, + err: &mut DiagnosticBuilder<'_>, + predicate: &T, + cause_code: &ObligationCauseCode<'tcx>, + obligated_types: &mut Vec<&ty::TyS<'tcx>>, + ) where + T: fmt::Display, { let tcx = self.tcx; match *cause_code { - ObligationCauseCode::ExprAssignable | - ObligationCauseCode::MatchExpressionArm { .. } | - ObligationCauseCode::IfExpression | - ObligationCauseCode::IfExpressionWithNoElse | - ObligationCauseCode::MainFunctionType | - ObligationCauseCode::StartFunctionType | - ObligationCauseCode::IntrinsicType | - ObligationCauseCode::MethodReceiver | - ObligationCauseCode::ReturnNoExpression | - ObligationCauseCode::MiscObligation => { - } + ObligationCauseCode::ExprAssignable + | ObligationCauseCode::MatchExpressionArm { .. 
} + | ObligationCauseCode::IfExpression + | ObligationCauseCode::IfExpressionWithNoElse + | ObligationCauseCode::MainFunctionType + | ObligationCauseCode::StartFunctionType + | ObligationCauseCode::IntrinsicType + | ObligationCauseCode::MethodReceiver + | ObligationCauseCode::ReturnNoExpression + | ObligationCauseCode::MiscObligation => {} ObligationCauseCode::SliceOrArrayElem => { err.note("slice and array elements must have `Sized` type"); } @@ -1462,17 +1653,23 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { err.note("only the last element of a tuple may have a dynamically sized type"); } ObligationCauseCode::ProjectionWf(data) => { - err.note(&format!("required so that the projection `{}` is well-formed", - data)); + err.note(&format!( + "required so that the projection `{}` is well-formed", + data + )); } ObligationCauseCode::ReferenceOutlivesReferent(ref_ty) => { - err.note(&format!("required so that reference `{}` does not outlive its referent", - ref_ty)); + err.note(&format!( + "required so that reference `{}` does not outlive its referent", + ref_ty + )); } ObligationCauseCode::ObjectTypeBound(object_ty, region) => { - err.note(&format!("required so that the lifetime bound of `{}` for `{}` \ - is satisfied", - region, object_ty)); + err.note(&format!( + "required so that the lifetime bound of `{}` for `{}` \ + is satisfied", + region, object_ty + )); } ObligationCauseCode::ItemObligation(item_def_id) => { let item_name = tcx.item_path_str(item_def_id); @@ -1486,12 +1683,16 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } } ObligationCauseCode::ObjectCastObligation(object_ty) => { - err.note(&format!("required for the cast to the object type `{}`", - self.ty_to_string(object_ty))); + err.note(&format!( + "required for the cast to the object type `{}`", + self.ty_to_string(object_ty) + )); } ObligationCauseCode::RepeatVec => { - err.note("the `Copy` trait is required because the \ - repeated element will be copied"); + err.note( + "the `Copy` trait is required because the \ + repeated element will be copied", + ); } ObligationCauseCode::VariableType(_) => { err.note("all local variables must have a statically known size"); @@ -1506,12 +1707,16 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } } ObligationCauseCode::SizedReturnType => { - err.note("the return type of a function must have a \ - statically known size"); + err.note( + "the return type of a function must have a \ + statically known size", + ); } ObligationCauseCode::SizedYieldType => { - err.note("the yield type of a generator must have a \ - statically known size"); + err.note( + "the yield type of a generator must have a \ + statically known size", + ); } ObligationCauseCode::AssignmentLhsSized => { err.note("the left-hand-side of an assignment must have a statically known size"); @@ -1522,25 +1727,30 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { ObligationCauseCode::StructInitializerSized => { err.note("structs must have a statically known size to be initialized"); } - ObligationCauseCode::FieldSized { adt_kind: ref item, last } => { - match *item { - AdtKind::Struct => { - if last { - err.note("the last field of a packed struct may only have a \ - dynamically sized type if it does not need drop to be run"); - } else { - err.note("only the last field of a struct may have a dynamically \ - sized type"); - } - } - AdtKind::Union => { - err.note("no field of a union may have a dynamically sized type"); - } - AdtKind::Enum => { - err.note("no field of an enum variant may have a dynamically sized 
type"); + ObligationCauseCode::FieldSized { + adt_kind: ref item, + last, + } => match *item { + AdtKind::Struct => { + if last { + err.note( + "the last field of a packed struct may only have a \ + dynamically sized type if it does not need drop to be run", + ); + } else { + err.note( + "only the last field of a struct may have a dynamically \ + sized type", + ); } } - } + AdtKind::Union => { + err.note("no field of a union may have a dynamically sized type"); + } + AdtKind::Enum => { + err.note("no field of an enum variant may have a dynamically sized type"); + } + }, ObligationCauseCode::ConstSized => { err.note("constant expressions must have a statically known size"); } @@ -1550,42 +1760,51 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { ObligationCauseCode::BuiltinDerivedObligation(ref data) => { let parent_trait_ref = self.resolve_type_vars_if_possible(&data.parent_trait_ref); let ty = parent_trait_ref.skip_binder().self_ty(); - err.note(&format!("required because it appears within the type `{}`", ty)); + err.note(&format!( + "required because it appears within the type `{}`", + ty + )); obligated_types.push(ty); let parent_predicate = parent_trait_ref.to_predicate(); if !self.is_recursive_obligation(obligated_types, &data.parent_code) { - self.note_obligation_cause_code(err, - &parent_predicate, - &data.parent_code, - obligated_types); + self.note_obligation_cause_code( + err, + &parent_predicate, + &data.parent_code, + obligated_types, + ); } } ObligationCauseCode::ImplDerivedObligation(ref data) => { let parent_trait_ref = self.resolve_type_vars_if_possible(&data.parent_trait_ref); - err.note( - &format!("required because of the requirements on the impl of `{}` for `{}`", - parent_trait_ref, - parent_trait_ref.skip_binder().self_ty())); + err.note(&format!( + "required because of the requirements on the impl of `{}` for `{}`", + parent_trait_ref, + parent_trait_ref.skip_binder().self_ty() + )); let parent_predicate = parent_trait_ref.to_predicate(); - self.note_obligation_cause_code(err, - &parent_predicate, - &data.parent_code, - obligated_types); + self.note_obligation_cause_code( + err, + &parent_predicate, + &data.parent_code, + obligated_types, + ); } ObligationCauseCode::CompareImplMethodObligation { .. 
} => { - err.note( - &format!("the requirement `{}` appears on the impl method \ - but not on the corresponding trait method", - predicate)); + err.note(&format!( + "the requirement `{}` appears on the impl method \ + but not on the corresponding trait method", + predicate + )); } - ObligationCauseCode::ReturnType(_) | - ObligationCauseCode::BlockTailExpression(_) => (), + ObligationCauseCode::ReturnType(_) | ObligationCauseCode::BlockTailExpression(_) => (), ObligationCauseCode::TrivialBound => { err.help("see issue #48214"); if tcx.sess.opts.unstable_features.is_nightly_build() { - err.help("add #![feature(trivial_bounds)] to the \ - crate attributes to enable", + err.help( + "add #![feature(trivial_bounds)] to the \ + crate attributes to enable", ); } } @@ -1595,17 +1814,24 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { fn suggest_new_overflow_limit(&self, err: &mut DiagnosticBuilder<'_>) { let current_limit = self.tcx.sess.recursion_limit.get(); let suggested_limit = current_limit * 2; - err.help(&format!("consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", - suggested_limit)); + err.help(&format!( + "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", + suggested_limit + )); } - fn is_recursive_obligation(&self, - obligated_types: &mut Vec<&ty::TyS<'tcx>>, - cause_code: &ObligationCauseCode<'tcx>) -> bool { + fn is_recursive_obligation( + &self, + obligated_types: &mut Vec<&ty::TyS<'tcx>>, + cause_code: &ObligationCauseCode<'tcx>, + ) -> bool { if let ObligationCauseCode::BuiltinDerivedObligation(ref data) = cause_code { let parent_trait_ref = self.resolve_type_vars_if_possible(&data.parent_trait_ref); - if obligated_types.iter().any(|ot| ot == &parent_trait_ref.skip_binder().self_ty()) { + if obligated_types + .iter() + .any(|ot| ot == &parent_trait_ref.skip_binder().self_ty()) + { return true; } } @@ -1638,8 +1864,8 @@ impl ArgKind { ty::Tuple(ref tys) => ArgKind::Tuple( span, tys.iter() - .map(|ty| ("_".to_owned(), ty.sty.to_string())) - .collect::>() + .map(|ty| ("_".to_owned(), ty.sty.to_string())) + .collect::>(), ), _ => ArgKind::Arg("_".to_owned(), t.sty.to_string()), } diff --git a/src/librustc/traits/fulfill.rs b/src/librustc/traits/fulfill.rs index 2e00d4d4b7c3b..4506ea5ff8e4a 100644 --- a/src/librustc/traits/fulfill.rs +++ b/src/librustc/traits/fulfill.rs @@ -1,26 +1,28 @@ use infer::InferCtxt; -use mir::interpret::{GlobalId, ErrorHandled}; -use ty::{self, Ty, TypeFoldable, ToPolyTraitRef}; -use ty::error::ExpectedFound; +use mir::interpret::{ErrorHandled, GlobalId}; +use rustc_data_structures::obligation_forest::ProcessResult; use rustc_data_structures::obligation_forest::{DoCompleted, Error, ForestObligation}; use rustc_data_structures::obligation_forest::{ObligationForest, ObligationProcessor}; -use rustc_data_structures::obligation_forest::{ProcessResult}; use std::marker::PhantomData; +use ty::error::ExpectedFound; +use ty::{self, ToPolyTraitRef, Ty, TypeFoldable}; +use super::engine::{TraitEngine, TraitEngineExt}; +use super::project; +use super::select::SelectionContext; use super::CodeAmbiguity; use super::CodeProjectionError; use super::CodeSelectionError; -use super::engine::{TraitEngine, TraitEngineExt}; +use super::{ConstEvalFailure, Unimplemented}; use super::{FulfillmentError, FulfillmentErrorCode}; use super::{ObligationCause, PredicateObligation}; -use super::project; -use super::select::SelectionContext; -use super::{Unimplemented, ConstEvalFailure}; impl<'tcx> ForestObligation for 
PendingPredicateObligation<'tcx> { type Predicate = ty::Predicate<'tcx>; - fn as_predicate(&self) -> &Self::Predicate { &self.obligation.predicate } + fn as_predicate(&self) -> &Self::Predicate { + &self.obligation.predicate + } } /// The fulfillment context is used to drive trait resolution. It @@ -59,7 +61,7 @@ pub struct FulfillmentContext<'tcx> { // other fulfillment contexts sometimes do live inside of // a snapshot (they don't *straddle* a snapshot, so there // is no trouble there). - usable_in_snapshot: bool + usable_in_snapshot: bool, } #[derive(Clone, Debug)] @@ -90,13 +92,15 @@ impl<'a, 'gcx, 'tcx> FulfillmentContext<'tcx> { FulfillmentContext { predicates: ObligationForest::new(), register_region_obligations: false, - usable_in_snapshot: false + usable_in_snapshot: false, } } /// Attempts to select obligations using `selcx`. - fn select(&mut self, selcx: &mut SelectionContext<'a, 'gcx, 'tcx>) - -> Result<(), Vec>> { + fn select( + &mut self, + selcx: &mut SelectionContext<'a, 'gcx, 'tcx>, + ) -> Result<(), Vec>> { debug!("select(obligation-forest-size={})", self.predicates.len()); let mut errors = Vec::new(); @@ -105,18 +109,19 @@ impl<'a, 'gcx, 'tcx> FulfillmentContext<'tcx> { debug!("select: starting another iteration"); // Process pending obligations. - let outcome = self.predicates.process_obligations(&mut FulfillProcessor { - selcx, - register_region_obligations: self.register_region_obligations - }, DoCompleted::No); + let outcome = self.predicates.process_obligations( + &mut FulfillProcessor { + selcx, + register_region_obligations: self.register_region_obligations, + }, + DoCompleted::No, + ); debug!("select: outcome={:#?}", outcome); // FIXME: if we kept the original cache key, we could mark projection // obligations as complete for the projection cache here. - errors.extend( - outcome.errors.into_iter() - .map(|e| to_fulfillment_error(e))); + errors.extend(outcome.errors.into_iter().map(|e| to_fulfillment_error(e))); // If nothing new was added, no need to keep looping. if outcome.stalled { @@ -124,8 +129,11 @@ impl<'a, 'gcx, 'tcx> FulfillmentContext<'tcx> { } } - debug!("select({} predicates remaining, {} errors) done", - self.predicates.len(), errors.len()); + debug!( + "select({} predicates remaining, {} errors) done", + self.predicates.len(), + errors.len() + ); if errors.is_empty() { Ok(()) @@ -143,15 +151,17 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> { /// `SomeTrait` or a where clause that lets us unify `$0` with /// something concrete. If this fails, we'll unify `$0` with /// `projection_ty` again. 
- fn normalize_projection_type<'a, 'gcx>(&mut self, - infcx: &InferCtxt<'a, 'gcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - projection_ty: ty::ProjectionTy<'tcx>, - cause: ObligationCause<'tcx>) - -> Ty<'tcx> - { - debug!("normalize_projection_type(projection_ty={:?})", - projection_ty); + fn normalize_projection_type<'a, 'gcx>( + &mut self, + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + projection_ty: ty::ProjectionTy<'tcx>, + cause: ObligationCause<'tcx>, + ) -> Ty<'tcx> { + debug!( + "normalize_projection_type(projection_ty={:?})", + projection_ty + ); debug_assert!(!projection_ty.has_escaping_bound_vars()); @@ -159,12 +169,14 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> { let mut selcx = SelectionContext::new(infcx); let mut obligations = vec![]; - let normalized_ty = project::normalize_projection_type(&mut selcx, - param_env, - projection_ty, - cause, - 0, - &mut obligations); + let normalized_ty = project::normalize_projection_type( + &mut selcx, + param_env, + projection_ty, + cause, + 0, + &mut obligations, + ); self.register_predicate_obligations(infcx, obligations); debug!("normalize_projection_type: result={:?}", normalized_ty); @@ -172,10 +184,11 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> { normalized_ty } - fn register_predicate_obligation<'a, 'gcx>(&mut self, - infcx: &InferCtxt<'a, 'gcx, 'tcx>, - obligation: PredicateObligation<'tcx>) - { + fn register_predicate_obligation<'a, 'gcx>( + &mut self, + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + obligation: PredicateObligation<'tcx>, + ) { // this helps to reduce duplicate errors, as well as making // debug output much nicer to read and so on. let obligation = infcx.resolve_type_vars_if_possible(&obligation); @@ -184,24 +197,25 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> { assert!(!infcx.is_in_snapshot() || self.usable_in_snapshot); - self.predicates.register_obligation(PendingPredicateObligation { - obligation, - stalled_on: vec![] - }); + self.predicates + .register_obligation(PendingPredicateObligation { + obligation, + stalled_on: vec![], + }); } fn select_all_or_error<'a, 'gcx>( &mut self, - infcx: &InferCtxt<'a, 'gcx, 'tcx> - ) -> Result<(),Vec>> - { + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + ) -> Result<(), Vec>> { self.select_where_possible(infcx)?; - let errors: Vec<_> = - self.predicates.to_errors(CodeAmbiguity) - .into_iter() - .map(|e| to_fulfillment_error(e)) - .collect(); + let errors: Vec<_> = self + .predicates + .to_errors(CodeAmbiguity) + .into_iter() + .map(|e| to_fulfillment_error(e)) + .collect(); if errors.is_empty() { Ok(()) } else { @@ -209,29 +223,32 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> { } } - fn select_where_possible<'a, 'gcx>(&mut self, - infcx: &InferCtxt<'a, 'gcx, 'tcx>) - -> Result<(),Vec>> - { + fn select_where_possible<'a, 'gcx>( + &mut self, + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + ) -> Result<(), Vec>> { let mut selcx = SelectionContext::new(infcx); self.select(&mut selcx) } fn pending_obligations(&self) -> Vec> { - self.predicates.map_pending_obligations(|o| o.obligation.clone()) + self.predicates + .map_pending_obligations(|o| o.obligation.clone()) } } struct FulfillProcessor<'a, 'b: 'a, 'gcx: 'tcx, 'tcx: 'b> { selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>, - register_region_obligations: bool + register_region_obligations: bool, } fn mk_pending(os: Vec>) -> Vec> { - os.into_iter().map(|o| PendingPredicateObligation { - obligation: o, - stalled_on: vec![] - }).collect() + os.into_iter() + .map(|o| 
PendingPredicateObligation { + obligation: o, + stalled_on: vec![], + }) + .collect() } impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, 'tcx> { @@ -246,10 +263,10 @@ impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, /// This is always inlined, despite its size, because it has a single /// callsite and it is called *very* frequently. #[inline(always)] - fn process_obligation(&mut self, - pending_obligation: &mut Self::Obligation) - -> ProcessResult - { + fn process_obligation( + &mut self, + pending_obligation: &mut Self::Obligation, + ) -> ProcessResult { // if we were stalled on some unresolved variables, first check // whether any of them have been resolved; if not, don't bother // doing more work yet @@ -259,10 +276,13 @@ impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, let resolved_ty = self.selcx.infcx().inlined_shallow_resolve(&ty); resolved_ty == ty // nothing changed here }) { - debug!("process_predicate: pending obligation {:?} still stalled on {:?}", - self.selcx.infcx() - .resolve_type_vars_if_possible(&pending_obligation.obligation), - pending_obligation.stalled_on); + debug!( + "process_predicate: pending obligation {:?} still stalled on {:?}", + self.selcx + .infcx() + .resolve_type_vars_if_possible(&pending_obligation.obligation), + pending_obligation.stalled_on + ); return ProcessResult::Unchanged; } pending_obligation.stalled_on = vec![]; @@ -271,8 +291,10 @@ impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, let obligation = &mut pending_obligation.obligation; if obligation.predicate.has_infer_types() { - obligation.predicate = - self.selcx.infcx().resolve_type_vars_if_possible(&obligation.predicate); + obligation.predicate = self + .selcx + .infcx() + .resolve_type_vars_if_possible(&obligation.predicate); } match obligation.predicate { @@ -282,22 +304,32 @@ impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, if data.is_global() { // no type variables present, can use evaluation for better caching. // FIXME: consider caching errors too. 
- if self.selcx.infcx().predicate_must_hold_considering_regions(&obligation) { - debug!("selecting trait `{:?}` at depth {} evaluated to holds", - data, obligation.recursion_depth); - return ProcessResult::Changed(vec![]) + if self + .selcx + .infcx() + .predicate_must_hold_considering_regions(&obligation) + { + debug!( + "selecting trait `{:?}` at depth {} evaluated to holds", + data, obligation.recursion_depth + ); + return ProcessResult::Changed(vec![]); } } match self.selcx.select(&trait_obligation) { Ok(Some(vtable)) => { - debug!("selecting trait `{:?}` at depth {} yielded Ok(Some)", - data, obligation.recursion_depth); + debug!( + "selecting trait `{:?}` at depth {} yielded Ok(Some)", + data, obligation.recursion_depth + ); ProcessResult::Changed(mk_pending(vtable.nested_obligations())) } Ok(None) => { - debug!("selecting trait `{:?}` at depth {} yielded Ok(None)", - data, obligation.recursion_depth); + debug!( + "selecting trait `{:?}` at depth {} yielded Ok(None)", + data, obligation.recursion_depth + ); // This is a bit subtle: for the most part, the // only reason we can fail to make progress on @@ -315,15 +347,19 @@ impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, pending_obligation.stalled_on = trait_ref_type_vars(self.selcx, data.to_poly_trait_ref()); - debug!("process_predicate: pending obligation {:?} now stalled on {:?}", - self.selcx.infcx().resolve_type_vars_if_possible(obligation), - pending_obligation.stalled_on); + debug!( + "process_predicate: pending obligation {:?} now stalled on {:?}", + self.selcx.infcx().resolve_type_vars_if_possible(obligation), + pending_obligation.stalled_on + ); ProcessResult::Unchanged } Err(selection_err) => { - info!("selecting trait `{:?}` at depth {} yielded Err", - data, obligation.recursion_depth); + info!( + "selecting trait `{:?}` at depth {} yielded Err", + data, obligation.recursion_depth + ); ProcessResult::Error(CodeSelectionError(selection_err)) } @@ -331,7 +367,10 @@ impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, } ty::Predicate::RegionOutlives(ref binder) => { - let () = self.selcx.infcx().region_outlives_predicate(&obligation.cause, binder); + let () = self + .selcx + .infcx() + .region_outlives_predicate(&obligation.cause, binder); ProcessResult::Changed(vec![]) } @@ -348,9 +387,7 @@ impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, // If so, this obligation is an error (for now). Eventually we should be // able to support additional cases here, like `for<'a> &'a str: 'a`. // NOTE: this is duplicate-implemented between here and fulfillment. - None => { - ProcessResult::Error(CodeSelectionError(Unimplemented)) - } + None => ProcessResult::Error(CodeSelectionError(Unimplemented)), // Otherwise, we have something of the form // `for<'a> T: 'a where 'a not in T`, which we can treat as // `T: 'static`. 
@@ -391,7 +428,7 @@ impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, ProcessResult::Unchanged } Ok(Some(os)) => ProcessResult::Changed(mk_pending(os)), - Err(e) => ProcessResult::Error(CodeProjectionError(e)) + Err(e) => ProcessResult::Error(CodeProjectionError(e)), } } @@ -404,7 +441,11 @@ impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, } ty::Predicate::ClosureKind(closure_def_id, closure_substs, kind) => { - match self.selcx.infcx().closure_kind(closure_def_id, closure_substs) { + match self + .selcx + .infcx() + .closure_kind(closure_def_id, closure_substs) + { Some(closure_kind) => { if closure_kind.extends(kind) { ProcessResult::Changed(vec![]) @@ -412,95 +453,107 @@ impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, ProcessResult::Error(CodeSelectionError(Unimplemented)) } } - None => { - ProcessResult::Unchanged - } + None => ProcessResult::Unchanged, } } ty::Predicate::WellFormed(ty) => { - match ty::wf::obligations(self.selcx.infcx(), - obligation.param_env, - obligation.cause.body_id, - ty, obligation.cause.span) { + match ty::wf::obligations( + self.selcx.infcx(), + obligation.param_env, + obligation.cause.body_id, + ty, + obligation.cause.span, + ) { None => { pending_obligation.stalled_on = vec![ty]; ProcessResult::Unchanged } - Some(os) => ProcessResult::Changed(mk_pending(os)) + Some(os) => ProcessResult::Changed(mk_pending(os)), } } ty::Predicate::Subtype(ref subtype) => { - match self.selcx.infcx().subtype_predicate(&obligation.cause, - obligation.param_env, - subtype) { + match self.selcx.infcx().subtype_predicate( + &obligation.cause, + obligation.param_env, + subtype, + ) { None => { // None means that both are unresolved. - pending_obligation.stalled_on = vec![subtype.skip_binder().a, - subtype.skip_binder().b]; + pending_obligation.stalled_on = + vec![subtype.skip_binder().a, subtype.skip_binder().b]; ProcessResult::Unchanged } - Some(Ok(ok)) => { - ProcessResult::Changed(mk_pending(ok.obligations)) - } + Some(Ok(ok)) => ProcessResult::Changed(mk_pending(ok.obligations)), Some(Err(err)) => { - let expected_found = ExpectedFound::new(subtype.skip_binder().a_is_expected, - subtype.skip_binder().a, - subtype.skip_binder().b); - ProcessResult::Error( - FulfillmentErrorCode::CodeSubtypeError(expected_found, err)) + let expected_found = ExpectedFound::new( + subtype.skip_binder().a_is_expected, + subtype.skip_binder().a, + subtype.skip_binder().b, + ); + ProcessResult::Error(FulfillmentErrorCode::CodeSubtypeError( + expected_found, + err, + )) } } } ty::Predicate::ConstEvaluatable(def_id, substs) => { match self.selcx.tcx().lift_to_global(&obligation.param_env) { - None => { - ProcessResult::Unchanged - } - Some(param_env) => { - match self.selcx.tcx().lift_to_global(&substs) { - Some(substs) => { - let instance = ty::Instance::resolve( - self.selcx.tcx().global_tcx(), - param_env, - def_id, - substs, - ); - if let Some(instance) = instance { - let cid = GlobalId { - instance, - promoted: None, - }; - match self.selcx.tcx().at(obligation.cause.span) - .const_eval(param_env.and(cid)) { - Ok(_) => ProcessResult::Changed(vec![]), - Err(err) => ProcessResult::Error( - CodeSelectionError(ConstEvalFailure(err))) - } - } else { - ProcessResult::Error(CodeSelectionError( - ConstEvalFailure(ErrorHandled::TooGeneric) - )) + None => ProcessResult::Unchanged, + Some(param_env) => match self.selcx.tcx().lift_to_global(&substs) { + Some(substs) => { + let instance = ty::Instance::resolve( + 
self.selcx.tcx().global_tcx(), + param_env, + def_id, + substs, + ); + if let Some(instance) = instance { + let cid = GlobalId { + instance, + promoted: None, + }; + match self + .selcx + .tcx() + .at(obligation.cause.span) + .const_eval(param_env.and(cid)) + { + Ok(_) => ProcessResult::Changed(vec![]), + Err(err) => ProcessResult::Error(CodeSelectionError( + ConstEvalFailure(err), + )), } - }, - None => { - pending_obligation.stalled_on = substs.types().collect(); - ProcessResult::Unchanged + } else { + ProcessResult::Error(CodeSelectionError(ConstEvalFailure( + ErrorHandled::TooGeneric, + ))) } } - } + None => { + pending_obligation.stalled_on = substs.types().collect(); + ProcessResult::Unchanged + } + }, } } } } - fn process_backedge<'c, I>(&mut self, cycle: I, - _marker: PhantomData<&'c PendingPredicateObligation<'tcx>>) - where I: Clone + Iterator>, + fn process_backedge<'c, I>( + &mut self, + cycle: I, + _marker: PhantomData<&'c PendingPredicateObligation<'tcx>>, + ) where + I: Clone + Iterator>, { - if self.selcx.coinductive_match(cycle.clone().map(|s| s.obligation.predicate)) { + if self + .selcx + .coinductive_match(cycle.clone().map(|s| s.obligation.predicate)) + { debug!("process_child_obligations: coinductive match"); } else { let cycle: Vec<_> = cycle.map(|c| c.obligation.clone()).collect(); @@ -510,22 +563,25 @@ impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, } /// Return the set of type variables contained in a trait ref -fn trait_ref_type_vars<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>, - t: ty::PolyTraitRef<'tcx>) -> Vec> -{ +fn trait_ref_type_vars<'a, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'a, 'gcx, 'tcx>, + t: ty::PolyTraitRef<'tcx>, +) -> Vec> { t.skip_binder() // ok b/c this check doesn't care about regions - .input_types() - .map(|t| selcx.infcx().resolve_type_vars_if_possible(&t)) - .filter(|t| t.has_infer_types()) - .flat_map(|t| t.walk()) - .filter(|t| match t.sty { ty::Infer(_) => true, _ => false }) - .collect() + .input_types() + .map(|t| selcx.infcx().resolve_type_vars_if_possible(&t)) + .filter(|t| t.has_infer_types()) + .flat_map(|t| t.walk()) + .filter(|t| match t.sty { + ty::Infer(_) => true, + _ => false, + }) + .collect() } fn to_fulfillment_error<'tcx>( - error: Error, FulfillmentErrorCode<'tcx>>) - -> FulfillmentError<'tcx> -{ + error: Error, FulfillmentErrorCode<'tcx>>, +) -> FulfillmentError<'tcx> { let obligation = error.backtrace.into_iter().next().unwrap().obligation; FulfillmentError::new(obligation, error.error) } diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index b42d742b7f841..cbce1163a0d9c 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -5,71 +5,71 @@ #[allow(dead_code)] pub mod auto_trait; mod chalk_fulfill; +pub mod codegen; mod coherence; -pub mod error_reporting; mod engine; +pub mod error_reporting; mod fulfill; -mod project; mod object_safety; mod on_unimplemented; +mod project; +pub mod query; mod select; mod specialize; mod structural_impls; -pub mod codegen; mod util; -pub mod query; use chalk_engine; use hir; use hir::def_id::DefId; -use infer::{InferCtxt, SuppressRegionErrors}; use infer::outlives::env::OutlivesEnvironment; +use infer::{InferCtxt, SuppressRegionErrors}; use middle::region; use mir::interpret::ErrorHandled; use rustc_data_structures::sync::Lrc; use syntax::ast; use syntax_pos::{Span, DUMMY_SP}; -use ty::subst::Substs; -use ty::{self, AdtKind, List, Ty, TyCtxt, GenericParamDefKind, ToPredicate}; use 
ty::error::{ExpectedFound, TypeError}; -use ty::fold::{TypeFolder, TypeFoldable, TypeVisitor}; +use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; +use ty::subst::Substs; +use ty::{self, AdtKind, GenericParamDefKind, List, ToPredicate, Ty, TyCtxt}; use util::common::ErrorReported; use std::fmt::Debug; use std::rc::Rc; -pub use self::SelectionError::*; pub use self::FulfillmentErrorCode::*; -pub use self::Vtable::*; pub use self::ObligationCauseCode::*; +pub use self::SelectionError::*; +pub use self::Vtable::*; pub use self::coherence::{add_placeholder_note, orphan_check, overlapping_impls}; pub use self::coherence::{OrphanCheckErr, OverlapResult}; +pub use self::engine::{TraitEngine, TraitEngineExt}; pub use self::fulfill::{FulfillmentContext, PendingPredicateObligation}; -pub use self::project::MismatchedProjectionTypes; -pub use self::project::{normalize, normalize_projection_type, poly_project_and_unify_type}; -pub use self::project::{ProjectionCache, ProjectionCacheSnapshot, Reveal, Normalized}; -pub use self::object_safety::ObjectSafetyViolation; pub use self::object_safety::MethodViolationCode; +pub use self::object_safety::ObjectSafetyViolation; pub use self::on_unimplemented::{OnUnimplementedDirective, OnUnimplementedNote}; -pub use self::select::{EvaluationCache, SelectionContext, SelectionCache}; +pub use self::project::MismatchedProjectionTypes; +pub use self::project::{normalize, normalize_projection_type, poly_project_and_unify_type}; +pub use self::project::{Normalized, ProjectionCache, ProjectionCacheSnapshot, Reveal}; +pub use self::select::{EvaluationCache, SelectionCache, SelectionContext}; pub use self::select::{EvaluationResult, IntercrateAmbiguityCause, OverflowError}; -pub use self::specialize::{OverlapError, specialization_graph, translate_substs}; pub use self::specialize::find_associated_item; pub use self::specialize::specialization_graph::FutureCompatOverlapError; pub use self::specialize::specialization_graph::FutureCompatOverlapErrorKind; -pub use self::engine::{TraitEngine, TraitEngineExt}; +pub use self::specialize::{specialization_graph, translate_substs, OverlapError}; pub use self::util::{elaborate_predicates, elaborate_trait_ref, elaborate_trait_refs}; -pub use self::util::{supertraits, supertrait_def_ids, transitive_bounds, - Supertraits, SupertraitDefIds}; +pub use self::util::{ + supertrait_def_ids, supertraits, transitive_bounds, SupertraitDefIds, Supertraits, +}; pub use self::chalk_fulfill::{ - CanonicalGoal as ChalkCanonicalGoal, - FulfillmentContext as ChalkFulfillmentContext + CanonicalGoal as ChalkCanonicalGoal, FulfillmentContext as ChalkFulfillmentContext, }; -pub use self::ObligationCauseCode::*; pub use self::FulfillmentErrorCode::*; +pub use self::ObligationCauseCode::*; pub use self::SelectionError::*; pub use self::Vtable::*; @@ -77,7 +77,7 @@ pub use self::Vtable::*; #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum IntercrateMode { Issue43355, - Fixed + Fixed, } // The mode that trait queries run in @@ -134,17 +134,15 @@ pub struct ObligationCause<'tcx> { // information. pub body_id: ast::NodeId, - pub code: ObligationCauseCode<'tcx> + pub code: ObligationCauseCode<'tcx>, } impl<'tcx> ObligationCause<'tcx> { pub fn span<'a, 'gcx>(&self, tcx: &TyCtxt<'a, 'gcx, 'tcx>) -> Span { match self.code { - ObligationCauseCode::CompareImplMethodObligation { .. 
} | - ObligationCauseCode::MainFunctionType | - ObligationCauseCode::StartFunctionType => { - tcx.sess.source_map().def_span(self.span) - } + ObligationCauseCode::CompareImplMethodObligation { .. } + | ObligationCauseCode::MainFunctionType + | ObligationCauseCode::StartFunctionType => tcx.sess.source_map().def_span(self.span), _ => self.span, } } @@ -196,7 +194,10 @@ pub enum ObligationCauseCode<'tcx> { RepeatVec, /// Types of fields (other than the last, except for packed structs) in a struct must be sized. - FieldSized { adt_kind: AdtKind, last: bool }, + FieldSized { + adt_kind: AdtKind, + last: bool, + }, /// Constant expressions must be sized. ConstSized, @@ -220,8 +221,10 @@ pub enum ObligationCauseCode<'tcx> { ExprAssignable, /// Computing common supertype in the arms of a match expression - MatchExpressionArm { arm_span: Span, - source: hir::MatchSource }, + MatchExpressionArm { + arm_span: Span, + source: hir::MatchSource, + }, /// Computing common supertype in an if expression IfExpression, @@ -263,7 +266,7 @@ pub struct DerivedObligationCause<'tcx> { parent_trait_ref: ty::PolyTraitRef<'tcx>, /// The parent trait had this cause - parent_code: Rc> + parent_code: Rc>, } pub type Obligations<'tcx, O> = Vec>; @@ -356,7 +359,7 @@ impl<'tcx> GoalKind<'tcx> { Some(p) => p.into_goal(), None => GoalKind::Quantified( QuantifierKind::Universal, - domain_goal.map_bound(|p| tcx.mk_goal(p.into_goal())) + domain_goal.map_bound(|p| tcx.mk_goal(p.into_goal())), ), } } @@ -431,12 +434,14 @@ pub struct InEnvironment<'tcx, G> { pub type Selection<'tcx> = Vtable<'tcx, PredicateObligation<'tcx>>; -#[derive(Clone,Debug)] +#[derive(Clone, Debug)] pub enum SelectionError<'tcx> { Unimplemented, - OutputTypeParameterMismatch(ty::PolyTraitRef<'tcx>, - ty::PolyTraitRef<'tcx>, - ty::error::TypeError<'tcx>), + OutputTypeParameterMismatch( + ty::PolyTraitRef<'tcx>, + ty::PolyTraitRef<'tcx>, + ty::error::TypeError<'tcx>, + ), TraitNotObjectSafe(DefId), ConstEvalFailure(ErrorHandled), Overflow, @@ -444,15 +449,14 @@ pub enum SelectionError<'tcx> { pub struct FulfillmentError<'tcx> { pub obligation: PredicateObligation<'tcx>, - pub code: FulfillmentErrorCode<'tcx> + pub code: FulfillmentErrorCode<'tcx>, } #[derive(Clone)] pub enum FulfillmentErrorCode<'tcx> { CodeSelectionError(SelectionError<'tcx>), CodeProjectionError(MismatchedProjectionTypes<'tcx>), - CodeSubtypeError(ExpectedFound>, - TypeError<'tcx>), // always comes from a SubtypePredicate + CodeSubtypeError(ExpectedFound>, TypeError<'tcx>), // always comes from a SubtypePredicate CodeAmbiguity, } @@ -555,7 +559,7 @@ pub enum Vtable<'tcx, N> { pub struct VtableImplData<'tcx, N> { pub impl_def_id: DefId, pub substs: &'tcx Substs<'tcx>, - pub nested: Vec + pub nested: Vec, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable)] @@ -564,7 +568,7 @@ pub struct VtableGeneratorData<'tcx, N> { pub substs: ty::GeneratorSubsts<'tcx>, /// Nested obligations. This can be non-empty if the generator /// signature contains associated types. - pub nested: Vec + pub nested: Vec, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable)] @@ -573,18 +577,18 @@ pub struct VtableClosureData<'tcx, N> { pub substs: ty::ClosureSubsts<'tcx>, /// Nested obligations. This can be non-empty if the closure /// signature contains associated types. 
- pub nested: Vec + pub nested: Vec, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable)] pub struct VtableAutoImplData { pub trait_def_id: DefId, - pub nested: Vec + pub nested: Vec, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable)] pub struct VtableBuiltinData { - pub nested: Vec + pub nested: Vec, } /// A vtable for some object-safe trait `Foo` automatically derived @@ -605,7 +609,7 @@ pub struct VtableObjectData<'tcx, N> { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable)] pub struct VtableFnPointerData<'tcx, N> { pub fn_ty: Ty<'tcx>, - pub nested: Vec + pub nested: Vec, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable)] @@ -616,11 +620,11 @@ pub struct VtableTraitAliasData<'tcx, N> { } /// Creates predicate obligations from the generic bounds. -pub fn predicates_for_generics<'tcx>(cause: ObligationCause<'tcx>, - param_env: ty::ParamEnv<'tcx>, - generic_bounds: &ty::InstantiatedPredicates<'tcx>) - -> PredicateObligations<'tcx> -{ +pub fn predicates_for_generics<'tcx>( + cause: ObligationCause<'tcx>, + param_env: ty::ParamEnv<'tcx>, + generic_bounds: &ty::InstantiatedPredicates<'tcx>, +) -> PredicateObligations<'tcx> { util::predicates_for_generics(cause, 0, param_env, generic_bounds) } @@ -636,9 +640,11 @@ pub fn type_known_to_meet_bound_modulo_regions<'a, 'gcx, 'tcx>( def_id: DefId, span: Span, ) -> bool { - debug!("type_known_to_meet_bound_modulo_regions(ty={:?}, bound={:?})", - ty, - infcx.tcx.item_path_str(def_id)); + debug!( + "type_known_to_meet_bound_modulo_regions(ty={:?}, bound={:?})", + ty, + infcx.tcx.item_path_str(def_id) + ); let trait_ref = ty::TraitRef { def_id, @@ -652,8 +658,12 @@ pub fn type_known_to_meet_bound_modulo_regions<'a, 'gcx, 'tcx>( }; let result = infcx.predicate_must_hold_modulo_regions(&obligation); - debug!("type_known_to_meet_ty={:?} bound={} => {:?}", - ty, infcx.tcx.item_path_str(def_id), result); + debug!( + "type_known_to_meet_ty={:?} bound={} => {:?}", + ty, + infcx.tcx.item_path_str(def_id), + result + ); if result && (ty.has_infer_types() || ty.has_closure_types()) { // Because of inference "guessing", selection can sometimes claim @@ -678,16 +688,20 @@ pub fn type_known_to_meet_bound_modulo_regions<'a, 'gcx, 'tcx>( // assume it is move; linear is always ok. 
match fulfill_cx.select_all_or_error(infcx) { Ok(()) => { - debug!("type_known_to_meet_bound_modulo_regions: ty={:?} bound={} success", - ty, - infcx.tcx.item_path_str(def_id)); + debug!( + "type_known_to_meet_bound_modulo_regions: ty={:?} bound={} success", + ty, + infcx.tcx.item_path_str(def_id) + ); true } Err(e) => { - debug!("type_known_to_meet_bound_modulo_regions: ty={:?} bound={} errors={:?}", - ty, - infcx.tcx.item_path_str(def_id), - e); + debug!( + "type_known_to_meet_bound_modulo_regions: ty={:?} bound={} errors={:?}", + ty, + infcx.tcx.item_path_str(def_id), + e + ); false } } @@ -696,18 +710,16 @@ pub fn type_known_to_meet_bound_modulo_regions<'a, 'gcx, 'tcx>( } } -fn do_normalize_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - region_context: DefId, - cause: ObligationCause<'tcx>, - elaborated_env: ty::ParamEnv<'tcx>, - predicates: Vec>) - -> Result>, ErrorReported> -{ +fn do_normalize_predicates<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + region_context: DefId, + cause: ObligationCause<'tcx>, + elaborated_env: ty::ParamEnv<'tcx>, + predicates: Vec>, +) -> Result>, ErrorReported> { debug!( "do_normalize_predicates(predicates={:?}, region_context={:?}, cause={:?})", - predicates, - region_context, - cause, + predicates, region_context, cause, ); let span = cause.span; tcx.infer_ctxt().enter(|infcx| { @@ -725,21 +737,19 @@ fn do_normalize_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // them here too, and we will remove this function when // we move over to lazy normalization *anyway*. let fulfill_cx = FulfillmentContext::new_ignoring_regions(); - let predicates = match fully_normalize( - &infcx, - fulfill_cx, - cause, - elaborated_env, - &predicates, - ) { - Ok(predicates) => predicates, - Err(errors) => { - infcx.report_fulfillment_errors(&errors, None, false); - return Err(ErrorReported) - } - }; + let predicates = + match fully_normalize(&infcx, fulfill_cx, cause, elaborated_env, &predicates) { + Ok(predicates) => predicates, + Err(errors) => { + infcx.report_fulfillment_errors(&errors, None, false); + return Err(ErrorReported); + } + }; - debug!("do_normalize_predictes: normalized predicates = {:?}", predicates); + debug!( + "do_normalize_predictes: normalized predicates = {:?}", + predicates + ); let region_scope_tree = region::ScopeTree::default(); @@ -765,7 +775,7 @@ fn do_normalize_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // unconstrained variable, and it seems better not to ICE, // all things considered. tcx.sess.span_err(span, &fixup_err.to_string()); - return Err(ErrorReported) + return Err(ErrorReported); } }; @@ -781,12 +791,12 @@ fn do_normalize_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // FIXME: this is gonna need to be removed ... /// Normalizes the parameter environment, reporting errors if they occur. -pub fn normalize_param_env_or_error<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - region_context: DefId, - unnormalized_env: ty::ParamEnv<'tcx>, - cause: ObligationCause<'tcx>) - -> ty::ParamEnv<'tcx> -{ +pub fn normalize_param_env_or_error<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + region_context: DefId, + unnormalized_env: ty::ParamEnv<'tcx>, + cause: ObligationCause<'tcx>, +) -> ty::ParamEnv<'tcx> { // I'm not wild about reporting errors here; I'd prefer to // have the errors get reported at a defined place (e.g., // during typeck). 
Instead I have all parameter @@ -802,20 +812,23 @@ pub fn normalize_param_env_or_error<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // and errors will get reported then; so after typeck we // can be sure that no errors should occur. - debug!("normalize_param_env_or_error(region_context={:?}, unnormalized_env={:?}, cause={:?})", - region_context, unnormalized_env, cause); + debug!( + "normalize_param_env_or_error(region_context={:?}, unnormalized_env={:?}, cause={:?})", + region_context, unnormalized_env, cause + ); let mut predicates: Vec<_> = - util::elaborate_predicates(tcx, unnormalized_env.caller_bounds.to_vec()) - .collect(); + util::elaborate_predicates(tcx, unnormalized_env.caller_bounds.to_vec()).collect(); - debug!("normalize_param_env_or_error: elaborated-predicates={:?}", - predicates); + debug!( + "normalize_param_env_or_error: elaborated-predicates={:?}", + predicates + ); let elaborated_env = ty::ParamEnv::new( tcx.intern_predicates(&predicates), unnormalized_env.reveal, - unnormalized_env.def_id + unnormalized_env.def_id, ); // HACK: we are trying to normalize the param-env inside *itself*. The problem is that @@ -836,57 +849,79 @@ pub fn normalize_param_env_or_error<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // // This works fairly well because trait matching does not actually care about param-env // TypeOutlives predicates - these are normally used by regionck. - let outlives_predicates: Vec<_> = predicates.drain_filter(|predicate| { - match predicate { + let outlives_predicates: Vec<_> = predicates + .drain_filter(|predicate| match predicate { ty::Predicate::TypeOutlives(..) => true, - _ => false - } - }).collect(); + _ => false, + }) + .collect(); - debug!("normalize_param_env_or_error: predicates=(non-outlives={:?}, outlives={:?})", - predicates, outlives_predicates); - let non_outlives_predicates = - match do_normalize_predicates(tcx, region_context, cause.clone(), - elaborated_env, predicates) { - Ok(predicates) => predicates, - // An unnormalized env is better than nothing. - Err(ErrorReported) => { - debug!("normalize_param_env_or_error: errored resolving non-outlives predicates"); - return elaborated_env - } - }; + debug!( + "normalize_param_env_or_error: predicates=(non-outlives={:?}, outlives={:?})", + predicates, outlives_predicates + ); + let non_outlives_predicates = match do_normalize_predicates( + tcx, + region_context, + cause.clone(), + elaborated_env, + predicates, + ) { + Ok(predicates) => predicates, + // An unnormalized env is better than nothing. + Err(ErrorReported) => { + debug!("normalize_param_env_or_error: errored resolving non-outlives predicates"); + return elaborated_env; + } + }; - debug!("normalize_param_env_or_error: non-outlives predicates={:?}", non_outlives_predicates); + debug!( + "normalize_param_env_or_error: non-outlives predicates={:?}", + non_outlives_predicates + ); // Not sure whether it is better to include the unnormalized TypeOutlives predicates // here. I believe they should not matter, because we are ignoring TypeOutlives param-env // predicates here anyway. Keeping them here anyway because it seems safer. 
- let outlives_env: Vec<_> = - non_outlives_predicates.iter().chain(&outlives_predicates).cloned().collect(); + let outlives_env: Vec<_> = non_outlives_predicates + .iter() + .chain(&outlives_predicates) + .cloned() + .collect(); let outlives_env = ty::ParamEnv::new( tcx.intern_predicates(&outlives_env), unnormalized_env.reveal, - None + None, + ); + let outlives_predicates = match do_normalize_predicates( + tcx, + region_context, + cause, + outlives_env, + outlives_predicates, + ) { + Ok(predicates) => predicates, + // An unnormalized env is better than nothing. + Err(ErrorReported) => { + debug!("normalize_param_env_or_error: errored resolving outlives predicates"); + return elaborated_env; + } + }; + debug!( + "normalize_param_env_or_error: outlives predicates={:?}", + outlives_predicates ); - let outlives_predicates = - match do_normalize_predicates(tcx, region_context, cause, - outlives_env, outlives_predicates) { - Ok(predicates) => predicates, - // An unnormalized env is better than nothing. - Err(ErrorReported) => { - debug!("normalize_param_env_or_error: errored resolving outlives predicates"); - return elaborated_env - } - }; - debug!("normalize_param_env_or_error: outlives predicates={:?}", outlives_predicates); let mut predicates = non_outlives_predicates; predicates.extend(outlives_predicates); - debug!("normalize_param_env_or_error: final predicates={:?}", predicates); + debug!( + "normalize_param_env_or_error: final predicates={:?}", + predicates + ); ty::ParamEnv::new( tcx.intern_predicates(&predicates), unnormalized_env.reveal, - unnormalized_env.def_id + unnormalized_env.def_id, ) } @@ -895,17 +930,21 @@ pub fn fully_normalize<'a, 'gcx, 'tcx, T>( mut fulfill_cx: FulfillmentContext<'tcx>, cause: ObligationCause<'tcx>, param_env: ty::ParamEnv<'tcx>, - value: &T) - -> Result>> - where T : TypeFoldable<'tcx> + value: &T, +) -> Result>> +where + T: TypeFoldable<'tcx>, { debug!("fully_normalize_with_fulfillcx(value={:?})", value); let selcx = &mut SelectionContext::new(infcx); - let Normalized { value: normalized_value, obligations } = - project::normalize(selcx, param_env, cause, value); - debug!("fully_normalize: normalized_value={:?} obligations={:?}", - normalized_value, - obligations); + let Normalized { + value: normalized_value, + obligations, + } = project::normalize(selcx, param_env, cause, value); + debug!( + "fully_normalize: normalized_value={:?} obligations={:?}", + normalized_value, obligations + ); for obligation in obligations { fulfill_cx.register_predicate_obligation(selcx.infcx(), obligation); } @@ -922,20 +961,21 @@ pub fn fully_normalize<'a, 'gcx, 'tcx, T>( /// environment. If this returns false, then either normalize /// encountered an error or one of the predicates did not hold. Used /// when creating vtables to check for unsatisfiable methods. 
-fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - predicates: Vec>) - -> bool -{ - debug!("normalize_and_test_predicates(predicates={:?})", - predicates); +fn normalize_and_test_predicates<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + predicates: Vec>, +) -> bool { + debug!("normalize_and_test_predicates(predicates={:?})", predicates); let result = tcx.infer_ctxt().enter(|infcx| { let param_env = ty::ParamEnv::reveal_all(); let mut selcx = SelectionContext::new(&infcx); let mut fulfill_cx = FulfillmentContext::new(); let cause = ObligationCause::dummy(); - let Normalized { value: predicates, obligations } = - normalize(&mut selcx, param_env, cause.clone(), &predicates); + let Normalized { + value: predicates, + obligations, + } = normalize(&mut selcx, param_env, cause.clone(), &predicates); for obligation in obligations { fulfill_cx.register_predicate_obligation(&infcx, obligation); } @@ -946,23 +986,26 @@ fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, fulfill_cx.select_all_or_error(&infcx).is_ok() }); - debug!("normalize_and_test_predicates(predicates={:?}) = {:?}", - predicates, result); + debug!( + "normalize_and_test_predicates(predicates={:?}) = {:?}", + predicates, result + ); result } -fn substitute_normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - key: (DefId, &'tcx Substs<'tcx>)) - -> bool -{ - debug!("substitute_normalize_and_test_predicates(key={:?})", - key); +fn substitute_normalize_and_test_predicates<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + key: (DefId, &'tcx Substs<'tcx>), +) -> bool { + debug!("substitute_normalize_and_test_predicates(key={:?})", key); let predicates = tcx.predicates_of(key.0).instantiate(tcx, key.1).predicates; let result = normalize_and_test_predicates(tcx, predicates); - debug!("substitute_normalize_and_test_predicates(key={:?}) = {:?}", - key, result); + debug!( + "substitute_normalize_and_test_predicates(key={:?}) = {:?}", + key, result + ); result } @@ -971,114 +1014,138 @@ fn substitute_normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx #[inline] // FIXME(#35870): avoid closures being unexported due to `impl Trait`. fn vtable_methods<'a, 'tcx>( tcx: TyCtxt<'a, 'tcx, 'tcx>, - trait_ref: ty::PolyTraitRef<'tcx>) - -> Lrc)>>> -{ + trait_ref: ty::PolyTraitRef<'tcx>, +) -> Lrc)>>> { debug!("vtable_methods({:?})", trait_ref); Lrc::new( - supertraits(tcx, trait_ref).flat_map(move |trait_ref| { - let trait_methods = tcx.associated_items(trait_ref.def_id()) - .filter(|item| item.kind == ty::AssociatedKind::Method); - - // Now list each method's DefId and Substs (for within its trait). - // If the method can never be called from this object, produce None. - trait_methods.map(move |trait_method| { - debug!("vtable_methods: trait_method={:?}", trait_method); - let def_id = trait_method.def_id; - - // Some methods cannot be called on an object; skip those. - if !tcx.is_vtable_safe_method(trait_ref.def_id(), &trait_method) { - debug!("vtable_methods: not vtable safe"); - return None; - } - - // the method may have some early-bound lifetimes, add - // regions for those - let substs = trait_ref.map_bound(|trait_ref| - Substs::for_item(tcx, def_id, |param, _| - match param.kind { + supertraits(tcx, trait_ref) + .flat_map(move |trait_ref| { + let trait_methods = tcx + .associated_items(trait_ref.def_id()) + .filter(|item| item.kind == ty::AssociatedKind::Method); + + // Now list each method's DefId and Substs (for within its trait). 
+ // If the method can never be called from this object, produce None. + trait_methods.map(move |trait_method| { + debug!("vtable_methods: trait_method={:?}", trait_method); + let def_id = trait_method.def_id; + + // Some methods cannot be called on an object; skip those. + if !tcx.is_vtable_safe_method(trait_ref.def_id(), &trait_method) { + debug!("vtable_methods: not vtable safe"); + return None; + } + + // the method may have some early-bound lifetimes, add + // regions for those + let substs = trait_ref.map_bound(|trait_ref| { + Substs::for_item(tcx, def_id, |param, _| match param.kind { GenericParamDefKind::Lifetime => tcx.types.re_erased.into(), - GenericParamDefKind::Type {..} => { + GenericParamDefKind::Type { .. } => { trait_ref.substs[param.index as usize] } - } - ) - ); - - // the trait type may have higher-ranked lifetimes in it; - // so erase them if they appear, so that we get the type - // at some particular call site - let substs = tcx.normalize_erasing_late_bound_regions( - ty::ParamEnv::reveal_all(), - &substs - ); - - // It's possible that the method relies on where clauses that - // do not hold for this particular set of type parameters. - // Note that this method could then never be called, so we - // do not want to try and codegen it, in that case (see #23435). - let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs); - if !normalize_and_test_predicates(tcx, predicates.predicates) { - debug!("vtable_methods: predicates do not hold"); - return None; - } - - Some((def_id, substs)) + }) + }); + + // the trait type may have higher-ranked lifetimes in it; + // so erase them if they appear, so that we get the type + // at some particular call site + let substs = tcx + .normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &substs); + + // It's possible that the method relies on where clauses that + // do not hold for this particular set of type parameters. + // Note that this method could then never be called, so we + // do not want to try and codegen it, in that case (see #23435). 
+ let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs); + if !normalize_and_test_predicates(tcx, predicates.predicates) { + debug!("vtable_methods: predicates do not hold"); + return None; + } + + Some((def_id, substs)) + }) }) - }).collect() + .collect(), ) } -impl<'tcx,O> Obligation<'tcx,O> { - pub fn new(cause: ObligationCause<'tcx>, - param_env: ty::ParamEnv<'tcx>, - predicate: O) - -> Obligation<'tcx, O> - { - Obligation { cause, param_env, recursion_depth: 0, predicate } +impl<'tcx, O> Obligation<'tcx, O> { + pub fn new( + cause: ObligationCause<'tcx>, + param_env: ty::ParamEnv<'tcx>, + predicate: O, + ) -> Obligation<'tcx, O> { + Obligation { + cause, + param_env, + recursion_depth: 0, + predicate, + } } - fn with_depth(cause: ObligationCause<'tcx>, - recursion_depth: usize, - param_env: ty::ParamEnv<'tcx>, - predicate: O) - -> Obligation<'tcx, O> - { - Obligation { cause, param_env, recursion_depth, predicate } + fn with_depth( + cause: ObligationCause<'tcx>, + recursion_depth: usize, + param_env: ty::ParamEnv<'tcx>, + predicate: O, + ) -> Obligation<'tcx, O> { + Obligation { + cause, + param_env, + recursion_depth, + predicate, + } } - pub fn misc(span: Span, - body_id: ast::NodeId, - param_env: ty::ParamEnv<'tcx>, - trait_ref: O) - -> Obligation<'tcx, O> { + pub fn misc( + span: Span, + body_id: ast::NodeId, + param_env: ty::ParamEnv<'tcx>, + trait_ref: O, + ) -> Obligation<'tcx, O> { Obligation::new(ObligationCause::misc(span, body_id), param_env, trait_ref) } - pub fn with
<P>
(&self, value: P) -> Obligation<'tcx,P> { - Obligation { cause: self.cause.clone(), - param_env: self.param_env, - recursion_depth: self.recursion_depth, - predicate: value } + pub fn with
<P>
(&self, value: P) -> Obligation<'tcx, P> { + Obligation { + cause: self.cause.clone(), + param_env: self.param_env, + recursion_depth: self.recursion_depth, + predicate: value, + } } } impl<'tcx> ObligationCause<'tcx> { #[inline] - pub fn new(span: Span, - body_id: ast::NodeId, - code: ObligationCauseCode<'tcx>) - -> ObligationCause<'tcx> { - ObligationCause { span: span, body_id: body_id, code: code } + pub fn new( + span: Span, + body_id: ast::NodeId, + code: ObligationCauseCode<'tcx>, + ) -> ObligationCause<'tcx> { + ObligationCause { + span: span, + body_id: body_id, + code: code, + } } pub fn misc(span: Span, body_id: ast::NodeId) -> ObligationCause<'tcx> { - ObligationCause { span: span, body_id: body_id, code: MiscObligation } + ObligationCause { + span: span, + body_id: body_id, + code: MiscObligation, + } } pub fn dummy() -> ObligationCause<'tcx> { - ObligationCause { span: DUMMY_SP, body_id: ast::CRATE_NODE_ID, code: MiscObligation } + ObligationCause { + span: DUMMY_SP, + body_id: ast::CRATE_NODE_ID, + code: MiscObligation, + } } } @@ -1097,7 +1164,10 @@ impl<'tcx, N> Vtable<'tcx, N> { } } - pub fn map(self, f: F) -> Vtable<'tcx, M> where F: FnMut(N) -> M { + pub fn map(self, f: F) -> Vtable<'tcx, M> + where + F: FnMut(N) -> M, + { match self { VtableImpl(i) => VtableImpl(VtableImplData { impl_def_id: i.impl_def_id, @@ -1141,11 +1211,14 @@ impl<'tcx, N> Vtable<'tcx, N> { } impl<'tcx> FulfillmentError<'tcx> { - fn new(obligation: PredicateObligation<'tcx>, - code: FulfillmentErrorCode<'tcx>) - -> FulfillmentError<'tcx> - { - FulfillmentError { obligation: obligation, code: code } + fn new( + obligation: PredicateObligation<'tcx>, + code: FulfillmentErrorCode<'tcx>, + ) -> FulfillmentError<'tcx> { + FulfillmentError { + obligation: obligation, + code: code, + } } } diff --git a/src/librustc/traits/object_safety.rs b/src/librustc/traits/object_safety.rs index 31342c250e2bd..0989268dd45ed 100644 --- a/src/librustc/traits/object_safety.rs +++ b/src/librustc/traits/object_safety.rs @@ -12,13 +12,13 @@ use super::elaborate_predicates; use hir::def_id::DefId; use lint; -use traits::{self, Obligation, ObligationCause}; -use ty::{self, Ty, TyCtxt, TypeFoldable, Predicate, ToPredicate}; -use ty::subst::{Subst, Substs}; use std::borrow::Cow; -use std::iter::{self}; +use std::iter; use syntax::ast::{self, Name}; use syntax_pos::Span; +use traits::{self, Obligation, ObligationCause}; +use ty::subst::{Subst, Substs}; +use ty::{self, Predicate, ToPredicate, Ty, TyCtxt, TypeFoldable}; #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum ObjectSafetyViolation { @@ -39,25 +39,40 @@ pub enum ObjectSafetyViolation { impl ObjectSafetyViolation { pub fn error_msg(&self) -> Cow<'static, str> { match *self { - ObjectSafetyViolation::SizedSelf => - "the trait cannot require that `Self : Sized`".into(), - ObjectSafetyViolation::SupertraitSelf => + ObjectSafetyViolation::SizedSelf => { + "the trait cannot require that `Self : Sized`".into() + } + ObjectSafetyViolation::SupertraitSelf => { "the trait cannot use `Self` as a type parameter \ - in the supertraits or where-clauses".into(), - ObjectSafetyViolation::Method(name, MethodViolationCode::StaticMethod) => - format!("method `{}` has no receiver", name).into(), - ObjectSafetyViolation::Method(name, MethodViolationCode::ReferencesSelf) => - format!("method `{}` references the `Self` type \ - in its arguments or return type", name).into(), - ObjectSafetyViolation::Method(name, - MethodViolationCode::WhereClauseReferencesSelf(_)) => - format!("method 
`{}` references the `Self` type in where clauses", name).into(), - ObjectSafetyViolation::Method(name, MethodViolationCode::Generic) => - format!("method `{}` has generic type parameters", name).into(), - ObjectSafetyViolation::Method(name, MethodViolationCode::UndispatchableReceiver) => - format!("method `{}`'s receiver cannot be dispatched on", name).into(), - ObjectSafetyViolation::AssociatedConst(name) => - format!("the trait cannot contain associated consts like `{}`", name).into(), + in the supertraits or where-clauses" + .into() + } + ObjectSafetyViolation::Method(name, MethodViolationCode::StaticMethod) => { + format!("method `{}` has no receiver", name).into() + } + ObjectSafetyViolation::Method(name, MethodViolationCode::ReferencesSelf) => format!( + "method `{}` references the `Self` type \ + in its arguments or return type", + name + ) + .into(), + ObjectSafetyViolation::Method( + name, + MethodViolationCode::WhereClauseReferencesSelf(_), + ) => format!( + "method `{}` references the `Self` type in where clauses", + name + ) + .into(), + ObjectSafetyViolation::Method(name, MethodViolationCode::Generic) => { + format!("method `{}` has generic type parameters", name).into() + } + ObjectSafetyViolation::Method(name, MethodViolationCode::UndispatchableReceiver) => { + format!("method `{}`'s receiver cannot be dispatched on", name).into() + } + ObjectSafetyViolation::AssociatedConst(name) => { + format!("the trait cannot contain associated consts like `{}`", name).into() + } } } } @@ -82,29 +97,28 @@ pub enum MethodViolationCode { } impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { - /// Returns the object safety violations that affect /// astconv - currently, Self in supertraits. This is needed /// because `object_safety_violations` can't be used during /// type collection. - pub fn astconv_object_safety_violations(self, trait_def_id: DefId) - -> Vec - { + pub fn astconv_object_safety_violations( + self, + trait_def_id: DefId, + ) -> Vec { let violations = traits::supertrait_def_ids(self, trait_def_id) .filter(|&def_id| self.predicates_reference_self(def_id, true)) .map(|_| ObjectSafetyViolation::SupertraitSelf) .collect(); - debug!("astconv_object_safety_violations(trait_def_id={:?}) = {:?}", - trait_def_id, - violations); + debug!( + "astconv_object_safety_violations(trait_def_id={:?}) = {:?}", + trait_def_id, violations + ); violations } - pub fn object_safety_violations(self, trait_def_id: DefId) - -> Vec - { + pub fn object_safety_violations(self, trait_def_id: DefId) -> Vec { debug!("object_safety_violations: {:?}", trait_def_id); traits::supertrait_def_ids(self, trait_def_id) @@ -112,18 +126,20 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { .collect() } - fn object_safety_violations_for_trait(self, trait_def_id: DefId) - -> Vec - { + fn object_safety_violations_for_trait(self, trait_def_id: DefId) -> Vec { // Check methods for violations. 
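These diagnostics map one-to-one onto the ways a trait can fail to be object safe. As an illustrative aside (hypothetical trait, not part of this patch), a single definition can trip several of them at once:

    trait NotObjectSafe {
        const ID: u32;                           // AssociatedConst: "cannot contain associated consts"
        fn version() -> u32;                     // StaticMethod: "has no receiver"
        fn compare(&self, other: &Self) -> bool; // ReferencesSelf: `Self` outside the receiver
        fn pick<T>(&self, value: T);             // Generic: "has generic type parameters"
    }

    // error[E0038]: the trait `NotObjectSafe` cannot be made into an object
    fn use_it(_obj: &dyn NotObjectSafe) {}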
- let mut violations: Vec<_> = self.associated_items(trait_def_id) + let mut violations: Vec<_> = self + .associated_items(trait_def_id) .filter(|item| item.kind == ty::AssociatedKind::Method) - .filter_map(|item| + .filter_map(|item| { self.object_safety_violation_for_method(trait_def_id, &item) .map(|code| ObjectSafetyViolation::Method(item.ident.name, code)) - ).filter(|violation| { - if let ObjectSafetyViolation::Method(_, - MethodViolationCode::WhereClauseReferencesSelf(span)) = violation + }) + .filter(|violation| { + if let ObjectSafetyViolation::Method( + _, + MethodViolationCode::WhereClauseReferencesSelf(span), + ) = violation { // Using `CRATE_NODE_ID` is wrong, but it's hard to get a more precise id. // It's also hard to get a use site span, so we use the method definition span. @@ -131,14 +147,18 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { lint::builtin::WHERE_CLAUSES_OBJECT_SAFETY, ast::CRATE_NODE_ID, *span, - &format!("the trait `{}` cannot be made into an object", - self.item_path_str(trait_def_id)), - &violation.error_msg()); + &format!( + "the trait `{}` cannot be made into an object", + self.item_path_str(trait_def_id) + ), + &violation.error_msg(), + ); false } else { true } - }).collect(); + }) + .collect(); // Check the trait itself. if self.trait_has_sized_self(trait_def_id) { @@ -148,22 +168,21 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { violations.push(ObjectSafetyViolation::SupertraitSelf); } - violations.extend(self.associated_items(trait_def_id) - .filter(|item| item.kind == ty::AssociatedKind::Const) - .map(|item| ObjectSafetyViolation::AssociatedConst(item.ident.name))); + violations.extend( + self.associated_items(trait_def_id) + .filter(|item| item.kind == ty::AssociatedKind::Const) + .map(|item| ObjectSafetyViolation::AssociatedConst(item.ident.name)), + ); - debug!("object_safety_violations_for_trait(trait_def_id={:?}) = {:?}", - trait_def_id, - violations); + debug!( + "object_safety_violations_for_trait(trait_def_id={:?}) = {:?}", + trait_def_id, violations + ); violations } - fn predicates_reference_self( - self, - trait_def_id: DefId, - supertraits_only: bool) -> bool - { + fn predicates_reference_self(self, trait_def_id: DefId, supertraits_only: bool) -> bool { let trait_ref = ty::Binder::dummy(ty::TraitRef::identity(self, trait_def_id)); let predicates = if supertraits_only { self.super_predicates_of(trait_def_id) @@ -178,7 +197,10 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { match predicate { ty::Predicate::Trait(ref data) => { // In the case of a trait predicate, we can skip the "self" type. - data.skip_binder().input_types().skip(1).any(|t| t.has_self_ty()) + data.skip_binder() + .input_types() + .skip(1) + .any(|t| t.has_self_ty()) } ty::Predicate::Projection(ref data) => { // And similarly for projections. This should be redundant with @@ -200,15 +222,13 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { .skip(1) .any(|t| t.has_self_ty()) } - ty::Predicate::WellFormed(..) | - ty::Predicate::ObjectSafe(..) | - ty::Predicate::TypeOutlives(..) | - ty::Predicate::RegionOutlives(..) | - ty::Predicate::ClosureKind(..) | - ty::Predicate::Subtype(..) | - ty::Predicate::ConstEvaluatable(..) => { - false - } + ty::Predicate::WellFormed(..) + | ty::Predicate::ObjectSafe(..) + | ty::Predicate::TypeOutlives(..) + | ty::Predicate::RegionOutlives(..) + | ty::Predicate::ClosureKind(..) + | ty::Predicate::Subtype(..) + | ty::Predicate::ConstEvaluatable(..) 
=> false, } }) } @@ -220,38 +240,36 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { fn generics_require_sized_self(self, def_id: DefId) -> bool { let sized_def_id = match self.lang_items().sized_trait() { Some(def_id) => def_id, - None => { return false; /* No Sized trait, can't require it! */ } + None => { + return false; /* No Sized trait, can't require it! */ + } }; // Search for a predicate like `Self : Sized` amongst the trait bounds. let predicates = self.predicates_of(def_id); let predicates = predicates.instantiate_identity(self).predicates; - elaborate_predicates(self, predicates) - .any(|predicate| match predicate { - ty::Predicate::Trait(ref trait_pred) if trait_pred.def_id() == sized_def_id => { - trait_pred.skip_binder().self_ty().is_self() - } - ty::Predicate::Projection(..) | - ty::Predicate::Trait(..) | - ty::Predicate::Subtype(..) | - ty::Predicate::RegionOutlives(..) | - ty::Predicate::WellFormed(..) | - ty::Predicate::ObjectSafe(..) | - ty::Predicate::ClosureKind(..) | - ty::Predicate::TypeOutlives(..) | - ty::Predicate::ConstEvaluatable(..) => { - false - } + elaborate_predicates(self, predicates).any(|predicate| match predicate { + ty::Predicate::Trait(ref trait_pred) if trait_pred.def_id() == sized_def_id => { + trait_pred.skip_binder().self_ty().is_self() } - ) + ty::Predicate::Projection(..) + | ty::Predicate::Trait(..) + | ty::Predicate::Subtype(..) + | ty::Predicate::RegionOutlives(..) + | ty::Predicate::WellFormed(..) + | ty::Predicate::ObjectSafe(..) + | ty::Predicate::ClosureKind(..) + | ty::Predicate::TypeOutlives(..) + | ty::Predicate::ConstEvaluatable(..) => false, + }) } /// Returns `Some(_)` if this method makes the containing trait not object safe. - fn object_safety_violation_for_method(self, - trait_def_id: DefId, - method: &ty::AssociatedItem) - -> Option - { + fn object_safety_violation_for_method( + self, + trait_def_id: DefId, + method: &ty::AssociatedItem, + ) -> Option { // Any method that has a `Self : Sized` requisite is otherwise // exempt from the regulations. if self.generics_require_sized_self(method.def_id) { @@ -265,11 +283,7 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { /// object. Note that object-safe traits can have some /// non-vtable-safe methods, so long as they require `Self:Sized` or /// otherwise ensure that they cannot be used when `Self=Trait`. - pub fn is_vtable_safe_method(self, - trait_def_id: DefId, - method: &ty::AssociatedItem) - -> bool - { + pub fn is_vtable_safe_method(self, trait_def_id: DefId, method: &ty::AssociatedItem) -> bool { // Any method that has a `Self : Sized` requisite can't be called. if self.generics_require_sized_self(method.def_id) { return false; @@ -285,11 +299,11 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { /// object; this does not necessarily imply that the enclosing trait /// is not object safe, because the method might have a where clause /// `Self:Sized`. - fn virtual_call_violation_for_method(self, - trait_def_id: DefId, - method: &ty::AssociatedItem) - -> Option - { + fn virtual_call_violation_for_method( + self, + trait_def_id: DefId, + method: &ty::AssociatedItem, + ) -> Option { // The method's first parameter must be named `self` if !method.method_has_self_argument { return Some(MethodViolationCode::StaticMethod); @@ -311,23 +325,25 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { return Some(MethodViolationCode::Generic); } - if self.predicates_of(method.def_id).predicates.iter() - // A trait object can't claim to live more than the concrete type, - // so outlives predicates will always hold. 
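`generics_require_sized_self` implements the escape hatch the comments above refer to: a method bounded by `Self: Sized` can never be called on an unsized `Self`, so it is simply left out of the vtable instead of making the whole trait non-object-safe. A minimal sketch, with hypothetical names:

    trait Draw {
        fn draw(&self);

        // Would normally violate object safety (generic, returns `Self`), but the
        // `Self: Sized` bound exempts it; it is not vtable-safe and cannot be
        // called through `dyn Draw`.
        fn scaled<T: Into<f64>>(&self, factor: T) -> Self
        where
            Self: Sized;
    }

    fn render(shape: &dyn Draw) {
        shape.draw(); // fine: `draw` is in the vtable
        // shape.scaled(2.0); // error: `scaled` cannot be invoked on a trait object
    }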
- .cloned() - .filter(|(p, _)| p.to_opt_type_outlives().is_none()) - .collect::>() - // Do a shallow visit so that `contains_illegal_self_type_reference` - // may apply it's custom visiting. - .visit_tys_shallow(|t| self.contains_illegal_self_type_reference(trait_def_id, t)) { + if self + .predicates_of(method.def_id) + .predicates + .iter() + // A trait object can't claim to live more than the concrete type, + // so outlives predicates will always hold. + .cloned() + .filter(|(p, _)| p.to_opt_type_outlives().is_none()) + .collect::>() + // Do a shallow visit so that `contains_illegal_self_type_reference` + // may apply it's custom visiting. + .visit_tys_shallow(|t| self.contains_illegal_self_type_reference(trait_def_id, t)) + { let span = self.def_span(method.def_id); return Some(MethodViolationCode::WhereClauseReferencesSelf(span)); } - let receiver_ty = self.liberate_late_bound_regions( - method.def_id, - &sig.map_bound(|sig| sig.inputs()[0]), - ); + let receiver_ty = + self.liberate_late_bound_regions(method.def_id, &sig.map_bound(|sig| sig.inputs()[0])); // until `unsized_locals` is fully implemented, `self: Self` can't be dispatched on. // However, this is already considered object-safe. We allow it as a special case here. @@ -346,37 +362,38 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { let abi_of_ty = |ty: Ty<'tcx>| -> &Abi { match self.layout_of(param_env.and(ty)) { Ok(layout) => &layout.abi, - Err(err) => bug!( - "Error: {}\n while computing layout for type {:?}", err, ty - ) + Err(err) => { + bug!("Error: {}\n while computing layout for type {:?}", err, ty) + } } }; // e.g., Rc<()> - let unit_receiver_ty = self.receiver_for_self_ty( - receiver_ty, self.mk_unit(), method.def_id - ); + let unit_receiver_ty = + self.receiver_for_self_ty(receiver_ty, self.mk_unit(), method.def_id); match abi_of_ty(unit_receiver_ty) { &Abi::Scalar(..) => (), - abi => bug!("Receiver when Self = () should have a Scalar ABI, found {:?}", abi) + abi => bug!( + "Receiver when Self = () should have a Scalar ABI, found {:?}", + abi + ), } - let trait_object_ty = self.object_ty_for_trait( - trait_def_id, self.mk_region(ty::ReStatic) - ); + let trait_object_ty = + self.object_ty_for_trait(trait_def_id, self.mk_region(ty::ReStatic)); // e.g., Rc - let trait_object_receiver = self.receiver_for_self_ty( - receiver_ty, trait_object_ty, method.def_id - ); + let trait_object_receiver = + self.receiver_for_self_ty(receiver_ty, trait_object_ty, method.def_id); match abi_of_ty(trait_object_receiver) { &Abi::ScalarPair(..) 
=> (), abi => bug!( "Receiver when Self = {} should have a ScalarPair ABI, found {:?}", - trait_object_ty, abi - ) + trait_object_ty, + abi + ), } } } @@ -387,7 +404,10 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { /// performs a type substitution to produce the version of receiver_ty when `Self = self_ty` /// e.g., for receiver_ty = `Rc` and self_ty = `Foo`, returns `Rc` fn receiver_for_self_ty( - self, receiver_ty: Ty<'tcx>, self_ty: Ty<'tcx>, method_def_id: DefId + self, + receiver_ty: Ty<'tcx>, + self_ty: Ty<'tcx>, + method_def_id: DefId, ) -> Ty<'tcx> { let substs = Substs::for_item(self, method_def_id, |param, _| { if param.index == 0 { @@ -409,7 +429,7 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { let trait_ref = ty::TraitRef::identity(self, trait_def_id); let trait_predicate = ty::ExistentialPredicate::Trait( - ty::ExistentialTraitRef::erase_self_ty(self, trait_ref) + ty::ExistentialTraitRef::erase_self_ty(self, trait_ref), ); let mut associated_types = traits::supertraits(self, ty::Binder::dummy(trait_ref)) @@ -428,14 +448,10 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { }) }); - let existential_predicates = self.mk_existential_predicates( - iter::once(trait_predicate).chain(projection_predicates) - ); + let existential_predicates = self + .mk_existential_predicates(iter::once(trait_predicate).chain(projection_predicates)); - let object_ty = self.mk_dynamic( - ty::Binder::dummy(existential_predicates), - lifetime, - ); + let object_ty = self.mk_dynamic(ty::Binder::dummy(existential_predicates), lifetime); debug!("object_ty_for_trait: object_ty=`{}`", object_ty); @@ -485,15 +501,16 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { // fallback query: `Receiver: Unsize U]>` to support receivers like // `self: Wrapper`. #[allow(dead_code)] - fn receiver_is_dispatchable( - self, - method: &ty::AssociatedItem, - receiver_ty: Ty<'tcx>, - ) -> bool { - debug!("receiver_is_dispatchable: method = {:?}, receiver_ty = {:?}", method, receiver_ty); + fn receiver_is_dispatchable(self, method: &ty::AssociatedItem, receiver_ty: Ty<'tcx>) -> bool { + debug!( + "receiver_is_dispatchable: method = {:?}, receiver_ty = {:?}", + method, receiver_ty + ); - let traits = (self.lang_items().unsize_trait(), - self.lang_items().dispatch_from_dyn_trait()); + let traits = ( + self.lang_items().unsize_trait(), + self.lang_items().dispatch_from_dyn_trait(), + ); let (unsize_did, dispatch_from_dyn_did) = if let (Some(u), Some(cu)) = traits { (u, cu) } else { @@ -511,9 +528,8 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { ); // `Receiver[Self => U]` - let unsized_receiver_ty = self.receiver_for_self_ty( - receiver_ty, unsized_self_ty, method.def_id - ); + let unsized_receiver_ty = + self.receiver_for_self_ty(receiver_ty, unsized_self_ty, method.def_id); // create a modified param env, with `Self: Unsize` and `U: Trait` added to caller bounds // `U: ?Sized` is already implied here @@ -524,7 +540,8 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { let unsize_predicate = ty::TraitRef { def_id: unsize_did, substs: self.mk_substs_trait(self.mk_self_type(), &[unsized_self_ty.into()]), - }.to_predicate(); + } + .to_predicate(); // U: Trait let trait_predicate = { @@ -539,10 +556,14 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { ty::TraitRef { def_id: unsize_did, substs, - }.to_predicate() + } + .to_predicate() }; - let caller_bounds: Vec> = param_env.caller_bounds.iter().cloned() + let caller_bounds: Vec> = param_env + .caller_bounds + .iter() + .cloned() .chain(iter::once(unsize_predicate)) .chain(iter::once(trait_predicate)) .collect(); @@ -557,13 
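The two ABI probes above assert the shape that makes a receiver dispatchable: substituting `Self = ()` must leave the receiver a thin pointer (`Scalar`), and substituting the trait object type must make it a fat pointer (`ScalarPair`). `Rc<Self>` is one receiver that satisfies this and has been stable since Rust 1.33; a hedged sketch with a hypothetical trait:

    use std::rc::Rc;

    trait Greet {
        // Thin pointer when `Self = Person`, fat pointer when `Self = dyn Greet`,
        // so the UndispatchableReceiver check is satisfied.
        fn hello(self: Rc<Self>);
    }

    struct Person;

    impl Greet for Person {
        fn hello(self: Rc<Self>) {
            println!("hello");
        }
    }

    fn main() {
        let greeter: Rc<dyn Greet> = Rc::new(Person);
        greeter.hello(); // dynamic dispatch through the vtable
    }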
+578,10 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { let predicate = ty::TraitRef { def_id: dispatch_from_dyn_did, substs: self.mk_substs_trait(receiver_ty, &[unsized_receiver_ty.into()]), - }.to_predicate(); + } + .to_predicate(); - Obligation::new( - ObligationCause::dummy(), - param_env, - predicate, - ) + Obligation::new(ObligationCause::dummy(), param_env, predicate) }; self.infer_ctxt().enter(|ref infcx| { @@ -572,11 +590,7 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { }) } - fn contains_illegal_self_type_reference(self, - trait_def_id: DefId, - ty: Ty<'tcx>) - -> bool - { + fn contains_illegal_self_type_reference(self, trait_def_id: DefId, ty: Ty<'tcx>) -> bool { // This is somewhat subtle. In general, we want to forbid // references to `Self` in the argument and return types, // since the value of `Self` is erased. However, there is one @@ -633,9 +647,8 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { // Compute supertraits of current trait lazily. if supertraits.is_none() { - let trait_ref = ty::Binder::bind( - ty::TraitRef::identity(self, trait_def_id), - ); + let trait_ref = + ty::Binder::bind(ty::TraitRef::identity(self, trait_def_id)); supertraits = Some(traits::supertraits(self, trait_ref).collect()); } @@ -648,8 +661,10 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { // are part of the formal parameter listing, and // hence there should be no inference variables. let projection_trait_ref = ty::Binder::bind(data.trait_ref(self)); - let is_supertrait_of_current_trait = - supertraits.as_ref().unwrap().contains(&projection_trait_ref); + let is_supertrait_of_current_trait = supertraits + .as_ref() + .unwrap() + .contains(&projection_trait_ref); if is_supertrait_of_current_trait { false // do not walk contained types, do not report error, do collect $200 @@ -666,7 +681,9 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { } } -pub(super) fn is_object_safe_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - trait_def_id: DefId) -> bool { +pub(super) fn is_object_safe_provider<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + trait_def_id: DefId, +) -> bool { tcx.object_safety_violations(trait_def_id).is_empty() } diff --git a/src/librustc/traits/on_unimplemented.rs b/src/librustc/traits/on_unimplemented.rs index 4b188d2517511..76ce3820c0336 100644 --- a/src/librustc/traits/on_unimplemented.rs +++ b/src/librustc/traits/on_unimplemented.rs @@ -1,14 +1,14 @@ use fmt_macros::{Parser, Piece, Position}; use hir::def_id::DefId; -use ty::{self, TyCtxt, GenericParamDefKind}; +use ty::{self, GenericParamDefKind, TyCtxt}; use util::common::ErrorReported; use util::nodemap::FxHashMap; use syntax::ast::{MetaItem, NestedMetaItem}; use syntax::attr; -use syntax_pos::Span; use syntax_pos::symbol::LocalInternedString; +use syntax_pos::Span; #[derive(Clone, Debug)] pub struct OnUnimplementedFormatString(LocalInternedString); @@ -30,18 +30,22 @@ pub struct OnUnimplementedNote { impl OnUnimplementedNote { pub fn empty() -> Self { - OnUnimplementedNote { message: None, label: None, note: None } + OnUnimplementedNote { + message: None, + label: None, + note: None, + } } } -fn parse_error(tcx: TyCtxt<'_, '_, '_>, span: Span, - message: &str, - label: &str, - note: Option<&str>) - -> ErrorReported -{ - let mut diag = struct_span_err!( - tcx.sess, span, E0232, "{}", message); +fn parse_error( + tcx: TyCtxt<'_, '_, '_>, + span: Span, + message: &str, + label: &str, + note: Option<&str>, +) -> ErrorReported { + let mut diag = struct_span_err!(tcx.sess, span, E0232, "{}", message); diag.span_label(span, label); if let Some(note) = note { 
diag.note(note); @@ -51,30 +55,40 @@ fn parse_error(tcx: TyCtxt<'_, '_, '_>, span: Span, } impl<'a, 'gcx, 'tcx> OnUnimplementedDirective { - pub fn parse(tcx: TyCtxt<'a, 'gcx, 'tcx>, - trait_def_id: DefId, - items: &[NestedMetaItem], - span: Span, - is_root: bool) - -> Result - { + pub fn parse( + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_def_id: DefId, + items: &[NestedMetaItem], + span: Span, + is_root: bool, + ) -> Result { let mut errored = false; let mut item_iter = items.iter(); let condition = if is_root { None } else { - let cond = item_iter.next().ok_or_else(|| - parse_error(tcx, span, - "empty `on`-clause in `#[rustc_on_unimplemented]`", - "empty on-clause here", - None) - )?.meta_item().ok_or_else(|| - parse_error(tcx, span, - "invalid `on`-clause in `#[rustc_on_unimplemented]`", - "invalid on-clause here", - None) - )?; + let cond = item_iter + .next() + .ok_or_else(|| { + parse_error( + tcx, + span, + "empty `on`-clause in `#[rustc_on_unimplemented]`", + "empty on-clause here", + None, + ) + })? + .meta_item() + .ok_or_else(|| { + parse_error( + tcx, + span, + "invalid `on`-clause in `#[rustc_on_unimplemented]`", + "invalid on-clause here", + None, + ) + })?; attr::eval_condition(cond, &tcx.sess.parse_sess, &mut |_| true); Some(cond.clone()) }; @@ -87,56 +101,78 @@ impl<'a, 'gcx, 'tcx> OnUnimplementedDirective { if item.check_name("message") && message.is_none() { if let Some(message_) = item.value_str() { message = Some(OnUnimplementedFormatString::try_parse( - tcx, trait_def_id, message_.as_str(), span)?); + tcx, + trait_def_id, + message_.as_str(), + span, + )?); continue; } } else if item.check_name("label") && label.is_none() { if let Some(label_) = item.value_str() { label = Some(OnUnimplementedFormatString::try_parse( - tcx, trait_def_id, label_.as_str(), span)?); + tcx, + trait_def_id, + label_.as_str(), + span, + )?); continue; } } else if item.check_name("note") && note.is_none() { if let Some(note_) = item.value_str() { note = Some(OnUnimplementedFormatString::try_parse( - tcx, trait_def_id, note_.as_str(), span)?); + tcx, + trait_def_id, + note_.as_str(), + span, + )?); continue; } - } else if item.check_name("on") && is_root && - message.is_none() && label.is_none() && note.is_none() + } else if item.check_name("on") + && is_root + && message.is_none() + && label.is_none() + && note.is_none() { if let Some(items) = item.meta_item_list() { - if let Ok(subcommand) = - Self::parse(tcx, trait_def_id, &items, item.span, false) + if let Ok(subcommand) = Self::parse(tcx, trait_def_id, &items, item.span, false) { subcommands.push(subcommand); } else { errored = true; } - continue + continue; } } // nothing found - parse_error(tcx, item.span, - "this attribute must have a valid value", - "expected value here", - Some(r#"eg `#[rustc_on_unimplemented(message="foo")]`"#)); + parse_error( + tcx, + item.span, + "this attribute must have a valid value", + "expected value here", + Some(r#"eg `#[rustc_on_unimplemented(message="foo")]`"#), + ); } if errored { Err(ErrorReported) } else { - Ok(OnUnimplementedDirective { condition, message, label, subcommands, note }) + Ok(OnUnimplementedDirective { + condition, + message, + label, + subcommands, + note, + }) } } - - pub fn of_item(tcx: TyCtxt<'a, 'gcx, 'tcx>, - trait_def_id: DefId, - impl_def_id: DefId) - -> Result, ErrorReported> - { + pub fn of_item( + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_def_id: DefId, + impl_def_id: DefId, + ) -> Result, ErrorReported> { let attrs = tcx.get_attrs(impl_def_id); let attr = if let Some(item) = 
attr::find_by_name(&attrs, "rustc_on_unimplemented") { @@ -153,40 +189,53 @@ impl<'a, 'gcx, 'tcx> OnUnimplementedDirective { message: None, subcommands: vec![], label: Some(OnUnimplementedFormatString::try_parse( - tcx, trait_def_id, value.as_str(), attr.span)?), + tcx, + trait_def_id, + value.as_str(), + attr.span, + )?), note: None, })) } else { - return Err(parse_error(tcx, attr.span, - "`#[rustc_on_unimplemented]` requires a value", - "value required here", - Some(r#"eg `#[rustc_on_unimplemented(message="foo")]`"#))); + return Err(parse_error( + tcx, + attr.span, + "`#[rustc_on_unimplemented]` requires a value", + "value required here", + Some(r#"eg `#[rustc_on_unimplemented(message="foo")]`"#), + )); }; - debug!("of_item({:?}/{:?}) = {:?}", trait_def_id, impl_def_id, result); + debug!( + "of_item({:?}/{:?}) = {:?}", + trait_def_id, impl_def_id, result + ); result } - pub fn evaluate(&self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - trait_ref: ty::TraitRef<'tcx>, - options: &[(String, Option)]) - -> OnUnimplementedNote - { + pub fn evaluate( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_ref: ty::TraitRef<'tcx>, + options: &[(String, Option)], + ) -> OnUnimplementedNote { let mut message = None; let mut label = None; let mut note = None; - info!("evaluate({:?}, trait_ref={:?}, options={:?})", self, trait_ref, options); + info!( + "evaluate({:?}, trait_ref={:?}, options={:?})", + self, trait_ref, options + ); for command in self.subcommands.iter().chain(Some(self)).rev() { if let Some(ref condition) = command.condition { if !attr::eval_condition(condition, &tcx.sess.parse_sess, &mut |c| { options.contains(&( c.name().as_str().to_string(), - c.value_str().map(|s| s.as_str().to_string()) + c.value_str().map(|s| s.as_str().to_string()), )) }) { debug!("evaluate: skipping {:?} due to condition", command); - continue + continue; } } debug!("evaluate: {:?} succeeded", command); @@ -203,7 +252,8 @@ impl<'a, 'gcx, 'tcx> OnUnimplementedDirective { } } - let options: FxHashMap = options.into_iter() + let options: FxHashMap = options + .into_iter() .filter_map(|(k, v)| v.as_ref().map(|v| (k.to_owned(), v.to_owned()))) .collect(); OnUnimplementedNote { @@ -215,23 +265,23 @@ impl<'a, 'gcx, 'tcx> OnUnimplementedDirective { } impl<'a, 'gcx, 'tcx> OnUnimplementedFormatString { - pub fn try_parse(tcx: TyCtxt<'a, 'gcx, 'tcx>, - trait_def_id: DefId, - from: LocalInternedString, - err_sp: Span) - -> Result - { + pub fn try_parse( + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_def_id: DefId, + from: LocalInternedString, + err_sp: Span, + ) -> Result { let result = OnUnimplementedFormatString(from); result.verify(tcx, trait_def_id, err_sp)?; Ok(result) } - fn verify(&self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - trait_def_id: DefId, - span: Span) - -> Result<(), ErrorReported> - { + fn verify( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_def_id: DefId, + span: Span, + ) -> Result<(), ErrorReported> { let name = tcx.item_name(trait_def_id); let generics = tcx.generics_of(trait_def_id); let parser = Parser::new(&self.0, None, vec![], false); @@ -249,60 +299,72 @@ impl<'a, 'gcx, 'tcx> OnUnimplementedFormatString { // `{from_desugaring}` is allowed Position::ArgumentNamed(s) if s == "from_desugaring" => (), // So is `{A}` if A is a type parameter - Position::ArgumentNamed(s) => match generics.params.iter().find(|param| - param.name == s - ) { - Some(_) => (), - None => { - span_err!(tcx.sess, span, E0230, - "there is no parameter `{}` on trait `{}`", s, name); - result = Err(ErrorReported); + Position::ArgumentNamed(s) => { + match 
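For orientation, the attribute grammar that `parse` and `verify` accept looks roughly like the libcore usage sketched below. Only the `message`/`label`/`note`/`on` keys and the named-placeholder rule come from the code above; the trait itself, the `_Self` filter key, and the feature gate are stated from memory and should be read as an illustrative sketch rather than a definitive example:

    #![feature(rustc_attrs)] // internal attribute; needs a nightly compiler

    #[rustc_on_unimplemented(
        on(_Self = "&str", note = "string slices are iterated with `.chars()` or `.bytes()`"),
        message = "`{Self}` cannot be counted in units of `{T}`",
        label = "not countable as `{T}`"
    )]
    trait Countable<T> {
        fn count_items(&self) -> usize;
    }

`{Self}` and `{T}` pass `verify` because both are generic parameters of the trait; the checks just below reject unknown names with E0230 and positional `{}` arguments with E0231.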
generics.params.iter().find(|param| param.name == s) { + Some(_) => (), + None => { + span_err!( + tcx.sess, + span, + E0230, + "there is no parameter `{}` on trait `{}`", + s, + name + ); + result = Err(ErrorReported); + } } - }, + } // `{:1}` and `{}` are not to be used Position::ArgumentIs(_) | Position::ArgumentImplicitlyIs(_) => { - span_err!(tcx.sess, span, E0231, - "only named substitution parameters are allowed"); + span_err!( + tcx.sess, + span, + E0231, + "only named substitution parameters are allowed" + ); result = Err(ErrorReported); } - } + }, } } result } - pub fn format(&self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - trait_ref: ty::TraitRef<'tcx>, - options: &FxHashMap) - -> String - { + pub fn format( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_ref: ty::TraitRef<'tcx>, + options: &FxHashMap, + ) -> String { let name = tcx.item_name(trait_ref.def_id); let trait_str = tcx.item_path_str(trait_ref.def_id); let generics = tcx.generics_of(trait_ref.def_id); - let generic_map = generics.params.iter().filter_map(|param| { - let value = match param.kind { - GenericParamDefKind::Type {..} => { - trait_ref.substs[param.index as usize].to_string() - }, - GenericParamDefKind::Lifetime => return None - }; - let name = param.name.to_string(); - Some((name, value)) - }).collect::>(); + let generic_map = generics + .params + .iter() + .filter_map(|param| { + let value = match param.kind { + GenericParamDefKind::Type { .. } => { + trait_ref.substs[param.index as usize].to_string() + } + GenericParamDefKind::Lifetime => return None, + }; + let name = param.name.to_string(); + Some((name, value)) + }) + .collect::>(); let empty_string = String::new(); let parser = Parser::new(&self.0, None, vec![], false); - parser.map(|p| - match p { + parser + .map(|p| match p { Piece::String(s) => s, Piece::NextArgument(a) => match a.position { Position::ArgumentNamed(s) => match generic_map.get(s) { Some(val) => val, - None if s == name => { - &trait_str - } + None if s == name => &trait_str, None => { if let Some(val) = options.get(s) { val @@ -310,15 +372,19 @@ impl<'a, 'gcx, 'tcx> OnUnimplementedFormatString { // don't break messages using these two arguments incorrectly &empty_string } else { - bug!("broken on_unimplemented {:?} for {:?}: \ - no argument matching {:?}", - self.0, trait_ref, s) + bug!( + "broken on_unimplemented {:?} for {:?}: \ + no argument matching {:?}", + self.0, + trait_ref, + s + ) } } }, - _ => bug!("broken on_unimplemented {:?} - bad format arg", self.0) - } - } - ).collect() + _ => bug!("broken on_unimplemented {:?} - bad format arg", self.0), + }, + }) + .collect() } } diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index 732ca70dc78c5..cbd56ad0986af 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -3,25 +3,25 @@ use super::elaborate_predicates; use super::specialization_graph; use super::translate_substs; +use super::util; use super::Obligation; use super::ObligationCause; use super::PredicateObligation; use super::Selection; use super::SelectionContext; use super::SelectionError; -use super::{VtableImplData, VtableClosureData, VtableGeneratorData, VtableFnPointerData}; -use super::util; +use super::{VtableClosureData, VtableFnPointerData, VtableGeneratorData, VtableImplData}; use hir::def_id::DefId; -use infer::{InferCtxt, InferOk, LateBoundRegionConversionTime}; use infer::type_variable::TypeVariableOrigin; +use infer::{InferCtxt, InferOk, LateBoundRegionConversionTime}; use mir::interpret::ConstValue; -use 
mir::interpret::{GlobalId}; +use mir::interpret::GlobalId; use rustc_data_structures::snapshot_map::{Snapshot, SnapshotMap}; use syntax::ast::Ident; -use ty::subst::{Subst, Substs}; -use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt}; use ty::fold::{TypeFoldable, TypeFolder}; +use ty::subst::{Subst, Substs}; +use ty::{self, ToPolyTraitRef, ToPredicate, Ty, TyCtxt}; use util::common::FN_OUTPUT_NAME; /// Depending on the stage of compilation, we want projection to be @@ -66,14 +66,11 @@ pub enum Reveal { All, } -pub type PolyProjectionObligation<'tcx> = - Obligation<'tcx, ty::PolyProjectionPredicate<'tcx>>; +pub type PolyProjectionObligation<'tcx> = Obligation<'tcx, ty::PolyProjectionPredicate<'tcx>>; -pub type ProjectionObligation<'tcx> = - Obligation<'tcx, ty::ProjectionPredicate<'tcx>>; +pub type ProjectionObligation<'tcx> = Obligation<'tcx, ty::ProjectionPredicate<'tcx>>; -pub type ProjectionTyObligation<'tcx> = - Obligation<'tcx, ty::ProjectionTy<'tcx>>; +pub type ProjectionTyObligation<'tcx> = Obligation<'tcx, ty::ProjectionTy<'tcx>>; /// When attempting to resolve `::Name` ... #[derive(Debug)] @@ -87,7 +84,7 @@ pub enum ProjectionTyError<'tcx> { #[derive(Clone)] pub struct MismatchedProjectionTypes<'tcx> { - pub err: ty::error::TypeError<'tcx> + pub err: ty::error::TypeError<'tcx>, } #[derive(PartialEq, Eq, Debug)] @@ -122,8 +119,8 @@ impl<'tcx> ProjectionTyCandidateSet<'tcx> { // was discarded -- this could be because of ambiguity, or because // a higher-priority candidate is already there. fn push_candidate(&mut self, candidate: ProjectionTyCandidate<'tcx>) -> bool { - use self::ProjectionTyCandidateSet::*; use self::ProjectionTyCandidate::*; + use self::ProjectionTyCandidateSet::*; // This wacky variable is just used to try and // make code readable and avoid confusing paths. @@ -184,12 +181,9 @@ impl<'tcx> ProjectionTyCandidateSet<'tcx> { /// the projection cache key used to track these additional obligations. pub fn poly_project_and_unify_type<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, - obligation: &PolyProjectionObligation<'tcx>) - -> Result>>, - MismatchedProjectionTypes<'tcx>> -{ - debug!("poly_project_and_unify_type(obligation={:?})", - obligation); + obligation: &PolyProjectionObligation<'tcx>, +) -> Result>>, MismatchedProjectionTypes<'tcx>> { + debug!("poly_project_and_unify_type(obligation={:?})", obligation); let infcx = selcx.infcx(); infcx.commit_if_ok(|_| { @@ -208,38 +202,45 @@ pub fn poly_project_and_unify_type<'cx, 'gcx, 'tcx>( /// If successful, this may result in additional obligations. 
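The `for<...> <T as Trait<...>>::U == V` constraints this function evaluates are rarely written by hand; they are produced when an associated-type equality bound is lowered. A small illustration (hypothetical function, not from this patch):

    // Lowering the bound yields two predicates:
    //   `I: Iterator`                    -- a trait predicate
    //   `<I as Iterator>::Item == u32`   -- the projection predicate handled here
    fn total<I: Iterator<Item = u32>>(iter: I) -> u32 {
        iter.sum()
    }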
fn project_and_unify_type<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, - obligation: &ProjectionObligation<'tcx>) - -> Result>>, - MismatchedProjectionTypes<'tcx>> -{ - debug!("project_and_unify_type(obligation={:?})", - obligation); + obligation: &ProjectionObligation<'tcx>, +) -> Result>>, MismatchedProjectionTypes<'tcx>> { + debug!("project_and_unify_type(obligation={:?})", obligation); let mut obligations = vec![]; - let normalized_ty = - match opt_normalize_projection_type(selcx, - obligation.param_env, - obligation.predicate.projection_ty, - obligation.cause.clone(), - obligation.recursion_depth, - &mut obligations) { - Some(n) => n, - None => return Ok(None), - }; + let normalized_ty = match opt_normalize_projection_type( + selcx, + obligation.param_env, + obligation.predicate.projection_ty, + obligation.cause.clone(), + obligation.recursion_depth, + &mut obligations, + ) { + Some(n) => n, + None => return Ok(None), + }; - debug!("project_and_unify_type: normalized_ty={:?} obligations={:?}", - normalized_ty, - obligations); + debug!( + "project_and_unify_type: normalized_ty={:?} obligations={:?}", + normalized_ty, obligations + ); let infcx = selcx.infcx(); - match infcx.at(&obligation.cause, obligation.param_env) - .eq(normalized_ty, obligation.predicate.ty) { - Ok(InferOk { obligations: inferred_obligations, value: () }) => { + match infcx + .at(&obligation.cause, obligation.param_env) + .eq(normalized_ty, obligation.predicate.ty) + { + Ok(InferOk { + obligations: inferred_obligations, + value: (), + }) => { obligations.extend(inferred_obligations); Ok(Some(obligations)) - }, + } Err(err) => { - debug!("project_and_unify_type: equating types encountered error {:?}", err); + debug!( + "project_and_unify_type: equating types encountered error {:?}", + err + ); Err(MismatchedProjectionTypes { err }) } } @@ -249,12 +250,14 @@ fn project_and_unify_type<'cx, 'gcx, 'tcx>( /// them with a fully resolved type where possible. The return value /// combines the normalized result and any additional obligations that /// were incurred as result. 
-pub fn normalize<'a, 'b, 'gcx, 'tcx, T>(selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - cause: ObligationCause<'tcx>, - value: &T) - -> Normalized<'tcx, T> - where T : TypeFoldable<'tcx> +pub fn normalize<'a, 'b, 'gcx, 'tcx, T>( + selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + cause: ObligationCause<'tcx>, + value: &T, +) -> Normalized<'tcx, T> +where + T: TypeFoldable<'tcx>, { normalize_with_depth(selcx, param_env, cause, 0, value) } @@ -265,25 +268,31 @@ pub fn normalize_with_depth<'a, 'b, 'gcx, 'tcx, T>( param_env: ty::ParamEnv<'tcx>, cause: ObligationCause<'tcx>, depth: usize, - value: &T) - -> Normalized<'tcx, T> - - where T : TypeFoldable<'tcx> + value: &T, +) -> Normalized<'tcx, T> +where + T: TypeFoldable<'tcx>, { debug!("normalize_with_depth(depth={}, value={:?})", depth, value); let mut normalizer = AssociatedTypeNormalizer::new(selcx, param_env, cause, depth); let result = normalizer.fold(value); - debug!("normalize_with_depth: depth={} result={:?} with {} obligations", - depth, result, normalizer.obligations.len()); - debug!("normalize_with_depth: depth={} obligations={:?}", - depth, normalizer.obligations); + debug!( + "normalize_with_depth: depth={} result={:?} with {} obligations", + depth, + result, + normalizer.obligations.len() + ); + debug!( + "normalize_with_depth: depth={} obligations={:?}", + depth, normalizer.obligations + ); Normalized { value: result, obligations: normalizer.obligations, } } -struct AssociatedTypeNormalizer<'a, 'b: 'a, 'gcx: 'b+'tcx, 'tcx: 'b> { +struct AssociatedTypeNormalizer<'a, 'b: 'a, 'gcx: 'b + 'tcx, 'tcx: 'b> { selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>, param_env: ty::ParamEnv<'tcx>, cause: ObligationCause<'tcx>, @@ -292,12 +301,12 @@ struct AssociatedTypeNormalizer<'a, 'b: 'a, 'gcx: 'b+'tcx, 'tcx: 'b> { } impl<'a, 'b, 'gcx, 'tcx> AssociatedTypeNormalizer<'a, 'b, 'gcx, 'tcx> { - fn new(selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - cause: ObligationCause<'tcx>, - depth: usize) - -> AssociatedTypeNormalizer<'a, 'b, 'gcx, 'tcx> - { + fn new( + selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + cause: ObligationCause<'tcx>, + depth: usize, + ) -> AssociatedTypeNormalizer<'a, 'b, 'gcx, 'tcx> { AssociatedTypeNormalizer { selcx, param_env, @@ -307,7 +316,7 @@ impl<'a, 'b, 'gcx, 'tcx> AssociatedTypeNormalizer<'a, 'b, 'gcx, 'tcx> { } } - fn fold>(&mut self, value: &T) -> T { + fn fold>(&mut self, value: &T) -> T { let value = self.selcx.infcx().resolve_type_vars_if_possible(value); if !value.has_projections() { @@ -337,7 +346,8 @@ impl<'a, 'b, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for AssociatedTypeNormalizer<'a, let ty = ty.super_fold_with(self); match ty.sty { - ty::Opaque(def_id, substs) if !substs.has_escaping_bound_vars() => { // (*) + ty::Opaque(def_id, substs) if !substs.has_escaping_bound_vars() => { + // (*) // Only normalize `impl Trait` after type-checking, usually in codegen. match self.param_env.reveal { Reveal::UserFacing => ty, @@ -364,7 +374,8 @@ impl<'a, 'b, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for AssociatedTypeNormalizer<'a, } } - ty::Projection(ref data) if !data.has_escaping_bound_vars() => { // (*) + ty::Projection(ref data) if !data.has_escaping_bound_vars() => { + // (*) // (*) This is kind of hacky -- we need to be able to // handle normalization within binders because @@ -378,19 +389,26 @@ impl<'a, 'b, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for AssociatedTypeNormalizer<'a, // binder). 
It would be better to normalize in a // binding-aware fashion. - let normalized_ty = normalize_projection_type(self.selcx, - self.param_env, - data.clone(), - self.cause.clone(), - self.depth, - &mut self.obligations); - debug!("AssociatedTypeNormalizer: depth={} normalized {:?} to {:?}, \ - now with {} obligations", - self.depth, ty, normalized_ty, self.obligations.len()); + let normalized_ty = normalize_projection_type( + self.selcx, + self.param_env, + data.clone(), + self.cause.clone(), + self.depth, + &mut self.obligations, + ); + debug!( + "AssociatedTypeNormalizer: depth={} normalized {:?} to {:?}, \ + now with {} obligations", + self.depth, + ty, + normalized_ty, + self.obligations.len() + ); normalized_ty } - _ => ty + _ => ty, } } @@ -404,7 +422,7 @@ impl<'a, 'b, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for AssociatedTypeNormalizer<'a, if let Some(instance) = instance { let cid = GlobalId { instance, - promoted: None + promoted: None, }; if let Ok(evaluated) = tcx.const_eval(param_env.and(cid)) { let evaluated = evaluated.subst(self.tcx(), substs); @@ -417,10 +435,10 @@ impl<'a, 'b, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for AssociatedTypeNormalizer<'a, if let Some(instance) = instance { let cid = GlobalId { instance, - promoted: None + promoted: None, }; if let Ok(evaluated) = tcx.const_eval(param_env.and(cid)) { - return self.fold_const(evaluated) + return self.fold_const(evaluated); } } } @@ -432,16 +450,19 @@ impl<'a, 'b, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for AssociatedTypeNormalizer<'a, } #[derive(Clone)] -pub struct Normalized<'tcx,T> { +pub struct Normalized<'tcx, T> { pub value: T, pub obligations: Vec>, } pub type NormalizedTy<'tcx> = Normalized<'tcx, Ty<'tcx>>; -impl<'tcx,T> Normalized<'tcx,T> { - pub fn with(self, value: U) -> Normalized<'tcx,U> { - Normalized { value: value, obligations: self.obligations } +impl<'tcx, T> Normalized<'tcx, T> { + pub fn with(self, value: U) -> Normalized<'tcx, U> { + Normalized { + value: value, + obligations: self.obligations, + } } } @@ -457,29 +478,37 @@ pub fn normalize_projection_type<'a, 'b, 'gcx, 'tcx>( projection_ty: ty::ProjectionTy<'tcx>, cause: ObligationCause<'tcx>, depth: usize, - obligations: &mut Vec>) - -> Ty<'tcx> -{ - opt_normalize_projection_type(selcx, param_env, projection_ty.clone(), cause.clone(), depth, - obligations) - .unwrap_or_else(move || { - // if we bottom out in ambiguity, create a type variable - // and a deferred predicate to resolve this when more type - // information is available. - - let tcx = selcx.infcx().tcx; - let def_id = projection_ty.item_def_id; - let ty_var = selcx.infcx().next_ty_var( - TypeVariableOrigin::NormalizeProjectionType(tcx.def_span(def_id))); - let projection = ty::Binder::dummy(ty::ProjectionPredicate { - projection_ty, - ty: ty_var - }); - let obligation = Obligation::with_depth( - cause, depth + 1, param_env, projection.to_predicate()); - obligations.push(obligation); - ty_var - }) + obligations: &mut Vec>, +) -> Ty<'tcx> { + opt_normalize_projection_type( + selcx, + param_env, + projection_ty.clone(), + cause.clone(), + depth, + obligations, + ) + .unwrap_or_else(move || { + // if we bottom out in ambiguity, create a type variable + // and a deferred predicate to resolve this when more type + // information is available. 
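In user-facing terms, "normalizing" replaces a projection with the type the selected impl provides, while an opaque `impl Trait` type is only looked through once `param_env.reveal` is `Reveal::All` (codegen, Miri) rather than `Reveal::UserFacing` (type checking). A hedged sketch of both cases, with hypothetical names:

    trait Storage {
        type Handle;
    }

    struct Disk;

    impl Storage for Disk {
        type Handle = u64;
    }

    // `<Disk as Storage>::Handle` normalizes to `u64` through the impl above, so
    // this signature type-checks exactly as if it were `fn open(h: u64) -> u64`.
    fn open(handle: <Disk as Storage>::Handle) -> u64 {
        handle
    }

    // Under Reveal::UserFacing callers only see the opaque type; under Reveal::All
    // it is normalized to the hidden type, `std::ops::Range<u32>` here.
    fn ids() -> impl Iterator<Item = u32> {
        0..10
    }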
+ + let tcx = selcx.infcx().tcx; + let def_id = projection_ty.item_def_id; + let ty_var = selcx + .infcx() + .next_ty_var(TypeVariableOrigin::NormalizeProjectionType( + tcx.def_span(def_id), + )); + let projection = ty::Binder::dummy(ty::ProjectionPredicate { + projection_ty, + ty: ty_var, + }); + let obligation = + Obligation::with_depth(cause, depth + 1, param_env, projection.to_predicate()); + obligations.push(obligation); + ty_var + }) } /// The guts of `normalize`: normalize a specific projection like `( projection_ty: ty::ProjectionTy<'tcx>, cause: ObligationCause<'tcx>, depth: usize, - obligations: &mut Vec>) - -> Option> -{ + obligations: &mut Vec>, +) -> Option> { let infcx = selcx.infcx(); let projection_ty = infcx.resolve_type_vars_if_possible(&projection_ty); let cache_key = ProjectionCacheKey { ty: projection_ty }; - debug!("opt_normalize_projection_type(\ - projection_ty={:?}, \ - depth={})", - projection_ty, - depth); + debug!( + "opt_normalize_projection_type(\ + projection_ty={:?}, \ + depth={})", + projection_ty, depth + ); // FIXME(#20304) For now, I am caching here, which is good, but it // means we don't capture the type variables that are created in @@ -522,7 +551,7 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( let cache_result = infcx.projection_cache.borrow_mut().try_start(cache_key); match cache_result { - Ok(()) => { } + Ok(()) => {} Err(ProjectionCacheEntry::Ambiguous) => { // If we found ambiguity the last time, that generally // means we will continue to do so until some type in the @@ -533,8 +562,10 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( // // FIXME(#32286) refactor this so that closure type // changes - debug!("opt_normalize_projection_type: \ - found cache entry: ambiguous"); + debug!( + "opt_normalize_projection_type: \ + found cache entry: ambiguous" + ); if !projection_ty.has_closure_types() { return None; } @@ -560,15 +591,15 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( // return Some(NormalizedTy { value: v, obligations: vec![] }); // ``` - debug!("opt_normalize_projection_type: \ - found cache entry: in-progress"); + debug!( + "opt_normalize_projection_type: \ + found cache entry: in-progress" + ); // But for now, let's classify this as an overflow: let recursion_limit = *selcx.tcx().sess.recursion_limit.get(); - let obligation = Obligation::with_depth(cause, - recursion_limit, - param_env, - projection_ty); + let obligation = + Obligation::with_depth(cause, recursion_limit, param_env, projection_ty); selcx.infcx().report_overflow_error(&obligation, false); } Err(ProjectionCacheEntry::NormalizedTy(ty)) => { @@ -583,63 +614,73 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( // discarded as duplicated). But when doing trait // evaluation this is not the case, and dropping the trait // evaluations can causes ICEs (e.g., #43132). - debug!("opt_normalize_projection_type: \ - found normalized ty `{:?}`", - ty); + debug!( + "opt_normalize_projection_type: \ + found normalized ty `{:?}`", + ty + ); // Once we have inferred everything we need to know, we // can ignore the `obligations` from that point on. if !infcx.any_unresolved_type_vars(&ty.value) { - infcx.projection_cache.borrow_mut().complete_normalized(cache_key, &ty); - // No need to extend `obligations`. + infcx + .projection_cache + .borrow_mut() + .complete_normalized(cache_key, &ty); + // No need to extend `obligations`. 
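The `InProgress` arm above classifies a normalization cycle as overflow, so users see the same E0275 error that trait selection in general reports when it exhausts the crate's `recursion_limit`. The canonical erroneous example from the E0275 documentation, reproduced here only as a hedged illustration of that user-visible behaviour:

    trait Foo {}

    struct Bar<T>(T);

    // Proving `T: Foo` needs `Bar<T>: Foo`, which needs `Bar<Bar<T>>: Foo`, and so
    // on; the compiler stops at the recursion limit with
    // error[E0275]: overflow evaluating the requirement.
    impl<T> Foo for T where Bar<T>: Foo {}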
} else { obligations.extend(ty.obligations); } - obligations.push(get_paranoid_cache_value_obligation(infcx, - param_env, - projection_ty, - cause, - depth)); + obligations.push(get_paranoid_cache_value_obligation( + infcx, + param_env, + projection_ty, + cause, + depth, + )); return Some(ty.value); } Err(ProjectionCacheEntry::Error) => { - debug!("opt_normalize_projection_type: \ - found error"); + debug!( + "opt_normalize_projection_type: \ + found error" + ); let result = normalize_to_error(selcx, param_env, projection_ty, cause, depth); obligations.extend(result.obligations); - return Some(result.value) + return Some(result.value); } } let obligation = Obligation::with_depth(cause.clone(), depth, param_env, projection_ty); match project_type(selcx, &obligation) { - Ok(ProjectedTy::Progress(Progress { ty: projected_ty, - obligations: mut projected_obligations })) => { + Ok(ProjectedTy::Progress(Progress { + ty: projected_ty, + obligations: mut projected_obligations, + })) => { // if projection succeeded, then what we get out of this // is also non-normalized (consider: it was derived from // an impl, where-clause etc) and hence we must // re-normalize it - debug!("opt_normalize_projection_type: \ - projected_ty={:?} \ - depth={} \ - projected_obligations={:?}", - projected_ty, - depth, - projected_obligations); + debug!( + "opt_normalize_projection_type: \ + projected_ty={:?} \ + depth={} \ + projected_obligations={:?}", + projected_ty, depth, projected_obligations + ); let result = if projected_ty.has_projections() { - let mut normalizer = AssociatedTypeNormalizer::new(selcx, - param_env, - cause, - depth+1); + let mut normalizer = + AssociatedTypeNormalizer::new(selcx, param_env, cause, depth + 1); let normalized_ty = normalizer.fold(&projected_ty); - debug!("opt_normalize_projection_type: \ - normalized_ty={:?} depth={}", - normalized_ty, - depth); + debug!( + "opt_normalize_projection_type: \ + normalized_ty={:?} depth={}", + normalized_ty, depth + ); projected_obligations.extend(normalizer.obligations); Normalized { @@ -654,27 +695,36 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( }; let cache_value = prune_cache_value_obligations(infcx, &result); - infcx.projection_cache.borrow_mut().insert_ty(cache_key, cache_value); + infcx + .projection_cache + .borrow_mut() + .insert_ty(cache_key, cache_value); obligations.extend(result.obligations); Some(result.value) } Ok(ProjectedTy::NoProgress(projected_ty)) => { - debug!("opt_normalize_projection_type: \ - projected_ty={:?} no progress", - projected_ty); + debug!( + "opt_normalize_projection_type: \ + projected_ty={:?} no progress", + projected_ty + ); let result = Normalized { value: projected_ty, - obligations: vec![] + obligations: vec![], }; - infcx.projection_cache.borrow_mut().insert_ty(cache_key, result.clone()); + infcx + .projection_cache + .borrow_mut() + .insert_ty(cache_key, result.clone()); // No need to extend `obligations`. 
Some(result.value) } Err(ProjectionTyError::TooManyCandidates) => { - debug!("opt_normalize_projection_type: \ - too many candidates"); - infcx.projection_cache.borrow_mut() - .ambiguous(cache_key); + debug!( + "opt_normalize_projection_type: \ + too many candidates" + ); + infcx.projection_cache.borrow_mut().ambiguous(cache_key); None } Err(ProjectionTyError::TraitSelectionError(_)) => { @@ -684,8 +734,7 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( // Trait`, which when processed will cause the error to be // reported later - infcx.projection_cache.borrow_mut() - .error(cache_key); + infcx.projection_cache.borrow_mut().error(cache_key); let result = normalize_to_error(selcx, param_env, projection_ty, cause, depth); obligations.extend(result.obligations); Some(result.value) @@ -696,39 +745,45 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( /// If there are unresolved type variables, then we need to include /// any subobligations that bind them, at least until those type /// variables are fully resolved. -fn prune_cache_value_obligations<'a, 'gcx, 'tcx>(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, - result: &NormalizedTy<'tcx>) - -> NormalizedTy<'tcx> { +fn prune_cache_value_obligations<'a, 'gcx, 'tcx>( + infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, + result: &NormalizedTy<'tcx>, +) -> NormalizedTy<'tcx> { if !infcx.any_unresolved_type_vars(&result.value) { - return NormalizedTy { value: result.value, obligations: vec![] }; + return NormalizedTy { + value: result.value, + obligations: vec![], + }; } - let mut obligations: Vec<_> = - result.obligations - .iter() - .filter(|obligation| match obligation.predicate { - // We found a `T: Foo` predicate, let's check - // if `U` references any unresolved type - // variables. In principle, we only care if this - // projection can help resolve any of the type - // variables found in `result.value` -- but we just - // check for any type variables here, for fear of - // indirect obligations (e.g., we project to `?0`, - // but we have `T: Foo` and `?1: Bar`). - ty::Predicate::Projection(ref data) => - infcx.any_unresolved_type_vars(&data.ty()), - - // We are only interested in `T: Foo` predicates, whre - // `U` references one of `unresolved_type_vars`. =) - _ => false, - }) - .cloned() - .collect(); + let mut obligations: Vec<_> = result + .obligations + .iter() + .filter(|obligation| match obligation.predicate { + // We found a `T: Foo` predicate, let's check + // if `U` references any unresolved type + // variables. In principle, we only care if this + // projection can help resolve any of the type + // variables found in `result.value` -- but we just + // check for any type variables here, for fear of + // indirect obligations (e.g., we project to `?0`, + // but we have `T: Foo` and `?1: Bar`). + ty::Predicate::Projection(ref data) => infcx.any_unresolved_type_vars(&data.ty()), + + // We are only interested in `T: Foo` predicates, whre + // `U` references one of `unresolved_type_vars`. 
=) + _ => false, + }) + .cloned() + .collect(); obligations.shrink_to_fit(); - NormalizedTy { value: result.value, obligations } + NormalizedTy { + value: result.value, + obligations, + } } /// Whenever we give back a cache result for a projection like `( param_env: ty::ParamEnv<'tcx>, projection_ty: ty::ProjectionTy<'tcx>, cause: ObligationCause<'tcx>, - depth: usize) - -> PredicateObligation<'tcx> -{ + depth: usize, +) -> PredicateObligation<'tcx> { let trait_ref = projection_ty.trait_ref(infcx.tcx).to_poly_trait_ref(); Obligation { cause, @@ -790,25 +844,30 @@ fn get_paranoid_cache_value_obligation<'a, 'gcx, 'tcx>( /// an error for this obligation, but we legitimately should not, /// because it contains `[type error]`. Yuck! (See issue #29857 for /// one case where this arose.) -fn normalize_to_error<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - projection_ty: ty::ProjectionTy<'tcx>, - cause: ObligationCause<'tcx>, - depth: usize) - -> NormalizedTy<'tcx> -{ +fn normalize_to_error<'a, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'a, 'gcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + projection_ty: ty::ProjectionTy<'tcx>, + cause: ObligationCause<'tcx>, + depth: usize, +) -> NormalizedTy<'tcx> { let trait_ref = projection_ty.trait_ref(selcx.tcx()).to_poly_trait_ref(); - let trait_obligation = Obligation { cause, - recursion_depth: depth, - param_env, - predicate: trait_ref.to_predicate() }; + let trait_obligation = Obligation { + cause, + recursion_depth: depth, + param_env, + predicate: trait_ref.to_predicate(), + }; let tcx = selcx.infcx().tcx; let def_id = projection_ty.item_def_id; - let new_value = selcx.infcx().next_ty_var( - TypeVariableOrigin::NormalizeProjectionType(tcx.def_span(def_id))); + let new_value = selcx + .infcx() + .next_ty_var(TypeVariableOrigin::NormalizeProjectionType( + tcx.def_span(def_id), + )); Normalized { value: new_value, - obligations: vec![trait_obligation] + obligations: vec![trait_obligation], } } @@ -823,21 +882,24 @@ struct Progress<'tcx> { } impl<'tcx> Progress<'tcx> { - fn error<'a,'gcx>(tcx: TyCtxt<'a,'gcx,'tcx>) -> Self { + fn error<'a, 'gcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self { Progress { ty: tcx.types.err, obligations: vec![], } } - fn with_addl_obligations(mut self, - mut obligations: Vec>) - -> Self { - debug!("with_addl_obligations: self.obligations.len={} obligations.len={}", - self.obligations.len(), obligations.len()); + fn with_addl_obligations(mut self, mut obligations: Vec>) -> Self { + debug!( + "with_addl_obligations: self.obligations.len={} obligations.len={}", + self.obligations.len(), + obligations.len() + ); - debug!("with_addl_obligations: self.obligations={:?} obligations={:?}", - self.obligations, obligations); + debug!( + "with_addl_obligations: self.obligations={:?} obligations={:?}", + self.obligations, obligations + ); self.obligations.append(&mut obligations); self @@ -850,16 +912,16 @@ impl<'tcx> Progress<'tcx> { /// - `obligation` must be fully normalized fn project_type<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, - obligation: &ProjectionTyObligation<'tcx>) - -> Result, ProjectionTyError<'tcx>> -{ - debug!("project(obligation={:?})", - obligation); + obligation: &ProjectionTyObligation<'tcx>, +) -> Result, ProjectionTyError<'tcx>> { + debug!("project(obligation={:?})", obligation); let recursion_limit = *selcx.tcx().sess.recursion_limit.get(); if obligation.recursion_depth >= recursion_limit { debug!("project: overflow!"); - return 
Err(ProjectionTyError::TraitSelectionError(SelectionError::Overflow)); + return Err(ProjectionTyError::TraitSelectionError( + SelectionError::Overflow, + )); } let obligation_trait_ref = &obligation.predicate.trait_ref(selcx.tcx()); @@ -875,37 +937,25 @@ fn project_type<'cx, 'gcx, 'tcx>( // Make sure that the following procedures are kept in order. ParamEnv // needs to be first because it has highest priority, and Select checks // the return value of push_candidate which assumes it's ran at last. - assemble_candidates_from_param_env(selcx, - obligation, - &obligation_trait_ref, - &mut candidates); + assemble_candidates_from_param_env(selcx, obligation, &obligation_trait_ref, &mut candidates); - assemble_candidates_from_trait_def(selcx, - obligation, - &obligation_trait_ref, - &mut candidates); + assemble_candidates_from_trait_def(selcx, obligation, &obligation_trait_ref, &mut candidates); - assemble_candidates_from_impls(selcx, - obligation, - &obligation_trait_ref, - &mut candidates); + assemble_candidates_from_impls(selcx, obligation, &obligation_trait_ref, &mut candidates); match candidates { ProjectionTyCandidateSet::Single(candidate) => Ok(ProjectedTy::Progress( - confirm_candidate(selcx, - obligation, - &obligation_trait_ref, - candidate))), - ProjectionTyCandidateSet::None => Ok(ProjectedTy::NoProgress( - selcx.tcx().mk_projection( - obligation.predicate.item_def_id, - obligation.predicate.substs))), + confirm_candidate(selcx, obligation, &obligation_trait_ref, candidate), + )), + ProjectionTyCandidateSet::None => Ok(ProjectedTy::NoProgress(selcx.tcx().mk_projection( + obligation.predicate.item_def_id, + obligation.predicate.substs, + ))), // Error occurred while trying to processing impls. ProjectionTyCandidateSet::Error(e) => Err(ProjectionTyError::TraitSelectionError(e)), // Inherent ambiguity that prevents us from even enumerating the // candidates. ProjectionTyCandidateSet::Ambiguous => Err(ProjectionTyError::TooManyCandidates), - } } @@ -916,15 +966,17 @@ fn assemble_candidates_from_param_env<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, - candidate_set: &mut ProjectionTyCandidateSet<'tcx>) -{ + candidate_set: &mut ProjectionTyCandidateSet<'tcx>, +) { debug!("assemble_candidates_from_param_env(..)"); - assemble_candidates_from_predicates(selcx, - obligation, - obligation_trait_ref, - candidate_set, - ProjectionTyCandidate::ParamEnv, - obligation.param_env.caller_bounds.iter().cloned()); + assemble_candidates_from_predicates( + selcx, + obligation, + obligation_trait_ref, + candidate_set, + ProjectionTyCandidate::ParamEnv, + obligation.param_env.caller_bounds.iter().cloned(), + ); } /// In the case of a nested projection like <::FooT as Bar>::BarT, we may find @@ -941,16 +993,14 @@ fn assemble_candidates_from_trait_def<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, - candidate_set: &mut ProjectionTyCandidateSet<'tcx>) -{ + candidate_set: &mut ProjectionTyCandidateSet<'tcx>, +) { debug!("assemble_candidates_from_trait_def(..)"); let tcx = selcx.tcx(); // Check whether the self-type is itself a projection. 
let (def_id, substs) = match obligation_trait_ref.self_ty().sty { - ty::Projection(ref data) => { - (data.trait_ref(tcx).def_id, data.substs) - } + ty::Projection(ref data) => (data.trait_ref(tcx).def_id, data.substs), ty::Opaque(def_id, substs) => (def_id, substs), ty::Infer(ty::TyVar(_)) => { // If the self-type is an inference variable, then it MAY wind up @@ -958,19 +1008,21 @@ fn assemble_candidates_from_trait_def<'cx, 'gcx, 'tcx>( candidate_set.mark_ambiguous(); return; } - _ => return + _ => return, }; // If so, extract what we know from the trait and try to come up with a good answer. let trait_predicates = tcx.predicates_of(def_id); let bounds = trait_predicates.instantiate(tcx, substs); let bounds = elaborate_predicates(tcx, bounds.predicates); - assemble_candidates_from_predicates(selcx, - obligation, - obligation_trait_ref, - candidate_set, - ProjectionTyCandidate::TraitDef, - bounds) + assemble_candidates_from_predicates( + selcx, + obligation, + obligation_trait_ref, + candidate_set, + ProjectionTyCandidate::TraitDef, + bounds, + ) } fn assemble_candidates_from_predicates<'cx, 'gcx, 'tcx, I>( @@ -979,35 +1031,47 @@ fn assemble_candidates_from_predicates<'cx, 'gcx, 'tcx, I>( obligation_trait_ref: &ty::TraitRef<'tcx>, candidate_set: &mut ProjectionTyCandidateSet<'tcx>, ctor: fn(ty::PolyProjectionPredicate<'tcx>) -> ProjectionTyCandidate<'tcx>, - env_predicates: I) - where I: IntoIterator> + env_predicates: I, +) where + I: IntoIterator>, { - debug!("assemble_candidates_from_predicates(obligation={:?})", - obligation); + debug!( + "assemble_candidates_from_predicates(obligation={:?})", + obligation + ); let infcx = selcx.infcx(); for predicate in env_predicates { - debug!("assemble_candidates_from_predicates: predicate={:?}", - predicate); + debug!( + "assemble_candidates_from_predicates: predicate={:?}", + predicate + ); if let ty::Predicate::Projection(data) = predicate { let same_def_id = data.projection_def_id() == obligation.predicate.item_def_id; - let is_match = same_def_id && infcx.probe(|_| { - let data_poly_trait_ref = - data.to_poly_trait_ref(infcx.tcx); - let obligation_poly_trait_ref = - obligation_trait_ref.to_poly_trait_ref(); - infcx.at(&obligation.cause, obligation.param_env) - .sup(obligation_poly_trait_ref, data_poly_trait_ref) - .map(|InferOk { obligations: _, value: () }| { - // FIXME(#32730) -- do we need to take obligations - // into account in any way? At the moment, no. - }) - .is_ok() - }); - - debug!("assemble_candidates_from_predicates: candidate={:?} \ - is_match={} same_def_id={}", - data, is_match, same_def_id); + let is_match = same_def_id + && infcx.probe(|_| { + let data_poly_trait_ref = data.to_poly_trait_ref(infcx.tcx); + let obligation_poly_trait_ref = obligation_trait_ref.to_poly_trait_ref(); + infcx + .at(&obligation.cause, obligation.param_env) + .sup(obligation_poly_trait_ref, data_poly_trait_ref) + .map( + |InferOk { + obligations: _, + value: (), + }| { + // FIXME(#32730) -- do we need to take obligations + // into account in any way? At the moment, no. 
+ }, + ) + .is_ok() + }); + + debug!( + "assemble_candidates_from_predicates: candidate={:?} \ + is_match={} same_def_id={}", + data, is_match, same_def_id + ); if is_match { candidate_set.push_candidate(ctor(data)); @@ -1020,8 +1084,8 @@ fn assemble_candidates_from_impls<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, - candidate_set: &mut ProjectionTyCandidateSet<'tcx>) -{ + candidate_set: &mut ProjectionTyCandidateSet<'tcx>, +) { // If we are resolving `>::Item == Type`, // start out by selecting the predicate `T as TraitRef<...>`: let poly_trait_ref = obligation_trait_ref.to_poly_trait_ref(); @@ -1041,13 +1105,12 @@ fn assemble_candidates_from_impls<'cx, 'gcx, 'tcx>( }; let eligible = match &vtable { - super::VtableClosure(_) | - super::VtableGenerator(_) | - super::VtableFnPointer(_) | - super::VtableObject(_) | - super::VtableTraitAlias(_) => { - debug!("assemble_candidates_from_impls: vtable={:?}", - vtable); + super::VtableClosure(_) + | super::VtableGenerator(_) + | super::VtableFnPointer(_) + | super::VtableObject(_) + | super::VtableTraitAlias(_) => { + debug!("assemble_candidates_from_impls: vtable={:?}", vtable); true } super::VtableImpl(impl_data) => { @@ -1070,9 +1133,11 @@ fn assemble_candidates_from_impls<'cx, 'gcx, 'tcx>( // In either case, we handle this by not adding a // candidate for an impl if it contains a `default` // type. - let node_item = assoc_ty_def(selcx, - impl_data.impl_def_id, - obligation.predicate.item_def_id); + let node_item = assoc_ty_def( + selcx, + impl_data.impl_def_id, + obligation.predicate.item_def_id, + ); let is_default = if node_item.node.is_from_trait() { // If true, the impl inherited a `type Foo = Bar` @@ -1093,8 +1158,8 @@ fn assemble_candidates_from_impls<'cx, 'gcx, 'tcx>( // being invoked). node_item.item.defaultness.has_value() } else { - node_item.item.defaultness.is_default() || - selcx.tcx().impl_is_default(node_item.node.def_id()) + node_item.item.defaultness.is_default() + || selcx.tcx().impl_is_default(node_item.node.def_id()) }; // Only reveal a specializable default if we're past type-checking @@ -1142,13 +1207,13 @@ fn assemble_candidates_from_impls<'cx, 'gcx, 'tcx>( // in `assemble_candidates_from_param_env`. false } - super::VtableAutoImpl(..) | - super::VtableBuiltin(..) => { + super::VtableAutoImpl(..) | super::VtableBuiltin(..) => { // These traits have no associated types. 
span_bug!( obligation.cause.span, "Cannot project an associated type from `{:?}`", - vtable); + vtable + ); } }; @@ -1168,16 +1233,16 @@ fn confirm_candidate<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, - candidate: ProjectionTyCandidate<'tcx>) - -> Progress<'tcx> -{ - debug!("confirm_candidate(candidate={:?}, obligation={:?})", - candidate, - obligation); + candidate: ProjectionTyCandidate<'tcx>, +) -> Progress<'tcx> { + debug!( + "confirm_candidate(candidate={:?}, obligation={:?})", + candidate, obligation + ); match candidate { - ProjectionTyCandidate::ParamEnv(poly_projection) | - ProjectionTyCandidate::TraitDef(poly_projection) => { + ProjectionTyCandidate::ParamEnv(poly_projection) + | ProjectionTyCandidate::TraitDef(poly_projection) => { confirm_param_env_candidate(selcx, obligation, poly_projection) } @@ -1191,78 +1256,76 @@ fn confirm_select_candidate<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, - vtable: Selection<'tcx>) - -> Progress<'tcx> -{ + vtable: Selection<'tcx>, +) -> Progress<'tcx> { match vtable { - super::VtableImpl(data) => - confirm_impl_candidate(selcx, obligation, data), - super::VtableGenerator(data) => - confirm_generator_candidate(selcx, obligation, data), - super::VtableClosure(data) => - confirm_closure_candidate(selcx, obligation, data), - super::VtableFnPointer(data) => - confirm_fn_pointer_candidate(selcx, obligation, data), - super::VtableObject(_) => - confirm_object_candidate(selcx, obligation, obligation_trait_ref), - super::VtableAutoImpl(..) | - super::VtableParam(..) | - super::VtableBuiltin(..) | - super::VtableTraitAlias(..) => - // we don't create Select candidates with this kind of resolution + super::VtableImpl(data) => confirm_impl_candidate(selcx, obligation, data), + super::VtableGenerator(data) => confirm_generator_candidate(selcx, obligation, data), + super::VtableClosure(data) => confirm_closure_candidate(selcx, obligation, data), + super::VtableFnPointer(data) => confirm_fn_pointer_candidate(selcx, obligation, data), + super::VtableObject(_) => confirm_object_candidate(selcx, obligation, obligation_trait_ref), + super::VtableAutoImpl(..) + | super::VtableParam(..) + | super::VtableBuiltin(..) + | super::VtableTraitAlias(..) => + // we don't create Select candidates with this kind of resolution + { span_bug!( obligation.cause.span, "Cannot project an associated type from `{:?}`", - vtable), + vtable + ) + } } } fn confirm_object_candidate<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, - obligation: &ProjectionTyObligation<'tcx>, - obligation_trait_ref: &ty::TraitRef<'tcx>) - -> Progress<'tcx> -{ + obligation: &ProjectionTyObligation<'tcx>, + obligation_trait_ref: &ty::TraitRef<'tcx>, +) -> Progress<'tcx> { let self_ty = obligation_trait_ref.self_ty(); let object_ty = selcx.infcx().shallow_resolve(self_ty); - debug!("confirm_object_candidate(object_ty={:?})", - object_ty); + debug!("confirm_object_candidate(object_ty={:?})", object_ty); let data = match object_ty.sty { ty::Dynamic(ref data, ..) 
=> data, - _ => { - span_bug!( - obligation.cause.span, - "confirm_object_candidate called with non-object: {:?}", - object_ty) - } + _ => span_bug!( + obligation.cause.span, + "confirm_object_candidate called with non-object: {:?}", + object_ty + ), }; - let env_predicates = data.projection_bounds().map(|p| { - p.with_self_ty(selcx.tcx(), object_ty).to_predicate() - }).collect(); + let env_predicates = data + .projection_bounds() + .map(|p| p.with_self_ty(selcx.tcx(), object_ty).to_predicate()) + .collect(); let env_predicate = { let env_predicates = elaborate_predicates(selcx.tcx(), env_predicates); // select only those projections that are actually projecting an // item with the correct name let env_predicates = env_predicates.filter_map(|p| match p { - ty::Predicate::Projection(data) => + ty::Predicate::Projection(data) => { if data.projection_def_id() == obligation.predicate.item_def_id { Some(data) } else { None - }, - _ => None + } + } + _ => None, }); // select those with a relevant trait-ref let mut env_predicates = env_predicates.filter(|data| { let data_poly_trait_ref = data.to_poly_trait_ref(selcx.tcx()); let obligation_poly_trait_ref = obligation_trait_ref.to_poly_trait_ref(); - selcx.infcx().probe(|_| - selcx.infcx().at(&obligation.cause, obligation.param_env) - .sup(obligation_poly_trait_ref, data_poly_trait_ref) - .is_ok() - ) + selcx.infcx().probe(|_| { + selcx + .infcx() + .at(&obligation.cause, obligation.param_env) + .sup(obligation_poly_trait_ref, data_poly_trait_ref) + .is_ok() + }) }); // select the first matching one; there really ought to be one or @@ -1271,9 +1334,11 @@ fn confirm_object_candidate<'cx, 'gcx, 'tcx>( match env_predicates.next() { Some(env_predicate) => env_predicate, None => { - debug!("confirm_object_candidate: no env-predicate \ - found in object type `{:?}`; ill-formed", - object_ty); + debug!( + "confirm_object_candidate: no env-predicate \ + found in object type `{:?}`; ill-formed", + object_ty + ); return Progress::error(selcx.tcx()); } } @@ -1285,34 +1350,36 @@ fn confirm_object_candidate<'cx, 'gcx, 'tcx>( fn confirm_generator_candidate<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, - vtable: VtableGeneratorData<'tcx, PredicateObligation<'tcx>>) - -> Progress<'tcx> -{ + vtable: VtableGeneratorData<'tcx, PredicateObligation<'tcx>>, +) -> Progress<'tcx> { let gen_sig = vtable.substs.poly_sig(vtable.generator_def_id, selcx.tcx()); let Normalized { value: gen_sig, - obligations - } = normalize_with_depth(selcx, - obligation.param_env, - obligation.cause.clone(), - obligation.recursion_depth+1, - &gen_sig); - - debug!("confirm_generator_candidate: obligation={:?},gen_sig={:?},obligations={:?}", - obligation, - gen_sig, - obligations); + obligations, + } = normalize_with_depth( + selcx, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth + 1, + &gen_sig, + ); + + debug!( + "confirm_generator_candidate: obligation={:?},gen_sig={:?},obligations={:?}", + obligation, gen_sig, obligations + ); let tcx = selcx.tcx(); let gen_def_id = tcx.lang_items().gen_trait().unwrap(); - let predicate = - tcx.generator_trait_ref_and_outputs(gen_def_id, - obligation.predicate.self_ty(), - gen_sig) + let predicate = tcx + .generator_trait_ref_and_outputs(gen_def_id, obligation.predicate.self_ty(), gen_sig) .map_bound(|(trait_ref, yield_ty, return_ty)| { - let name = tcx.associated_item(obligation.predicate.item_def_id).ident.name; + let name = tcx + 
.associated_item(obligation.predicate.item_def_id) + .ident + .name; let ty = if name == "Return" { return_ty } else if name == "Yield" { @@ -1326,7 +1393,7 @@ fn confirm_generator_candidate<'cx, 'gcx, 'tcx>( substs: trait_ref.substs, item_def_id: obligation.predicate.item_def_id, }, - ty: ty + ty: ty, } }); @@ -1338,19 +1405,20 @@ fn confirm_generator_candidate<'cx, 'gcx, 'tcx>( fn confirm_fn_pointer_candidate<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, - fn_pointer_vtable: VtableFnPointerData<'tcx, PredicateObligation<'tcx>>) - -> Progress<'tcx> -{ + fn_pointer_vtable: VtableFnPointerData<'tcx, PredicateObligation<'tcx>>, +) -> Progress<'tcx> { let fn_type = selcx.infcx().shallow_resolve(fn_pointer_vtable.fn_ty); let sig = fn_type.fn_sig(selcx.tcx()); let Normalized { value: sig, - obligations - } = normalize_with_depth(selcx, - obligation.param_env, - obligation.cause.clone(), - obligation.recursion_depth+1, - &sig); + obligations, + } = normalize_with_depth( + selcx, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth + 1, + &sig, + ); confirm_callable_candidate(selcx, obligation, sig, util::TupleArgumentsFlag::Yes) .with_addl_obligations(fn_pointer_vtable.nested) @@ -1360,31 +1428,29 @@ fn confirm_fn_pointer_candidate<'cx, 'gcx, 'tcx>( fn confirm_closure_candidate<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, - vtable: VtableClosureData<'tcx, PredicateObligation<'tcx>>) - -> Progress<'tcx> -{ + vtable: VtableClosureData<'tcx, PredicateObligation<'tcx>>, +) -> Progress<'tcx> { let tcx = selcx.tcx(); let infcx = selcx.infcx(); let closure_sig_ty = vtable.substs.closure_sig_ty(vtable.closure_def_id, tcx); let closure_sig = infcx.shallow_resolve(&closure_sig_ty).fn_sig(tcx); let Normalized { value: closure_sig, - obligations - } = normalize_with_depth(selcx, - obligation.param_env, - obligation.cause.clone(), - obligation.recursion_depth+1, - &closure_sig); - - debug!("confirm_closure_candidate: obligation={:?},closure_sig={:?},obligations={:?}", - obligation, - closure_sig, - obligations); - - confirm_callable_candidate(selcx, - obligation, - closure_sig, - util::TupleArgumentsFlag::No) + obligations, + } = normalize_with_depth( + selcx, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth + 1, + &closure_sig, + ); + + debug!( + "confirm_closure_candidate: obligation={:?},closure_sig={:?},obligations={:?}", + obligation, closure_sig, obligations + ); + + confirm_callable_candidate(selcx, obligation, closure_sig, util::TupleArgumentsFlag::No) .with_addl_obligations(vtable.nested) .with_addl_obligations(obligations) } @@ -1393,33 +1459,30 @@ fn confirm_callable_candidate<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, fn_sig: ty::PolyFnSig<'tcx>, - flag: util::TupleArgumentsFlag) - -> Progress<'tcx> -{ + flag: util::TupleArgumentsFlag, +) -> Progress<'tcx> { let tcx = selcx.tcx(); - debug!("confirm_callable_candidate({:?},{:?})", - obligation, - fn_sig); + debug!("confirm_callable_candidate({:?},{:?})", obligation, fn_sig); // the `Output` associated type is declared on `FnOnce` let fn_once_def_id = tcx.lang_items().fn_once_trait().unwrap(); - let predicate = - tcx.closure_trait_ref_and_return_type(fn_once_def_id, - obligation.predicate.self_ty(), - fn_sig, - flag) - .map_bound(|(trait_ref, ret_type)| - ty::ProjectionPredicate { - projection_ty: 
ty::ProjectionTy::from_ref_and_name( - tcx, - trait_ref, - Ident::from_str(FN_OUTPUT_NAME), - ), - ty: ret_type - } - ); + let predicate = tcx + .closure_trait_ref_and_return_type( + fn_once_def_id, + obligation.predicate.self_ty(), + fn_sig, + flag, + ) + .map_bound(|(trait_ref, ret_type)| ty::ProjectionPredicate { + projection_ty: ty::ProjectionTy::from_ref_and_name( + tcx, + trait_ref, + Ident::from_str(FN_OUTPUT_NAME), + ), + ty: ret_type, + }); confirm_param_env_candidate(selcx, obligation, predicate) } @@ -1427,28 +1490,31 @@ fn confirm_callable_candidate<'cx, 'gcx, 'tcx>( fn confirm_param_env_candidate<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, - poly_cache_entry: ty::PolyProjectionPredicate<'tcx>) - -> Progress<'tcx> -{ + poly_cache_entry: ty::PolyProjectionPredicate<'tcx>, +) -> Progress<'tcx> { let infcx = selcx.infcx(); let cause = &obligation.cause; let param_env = obligation.param_env; - let (cache_entry, _) = - infcx.replace_bound_vars_with_fresh_vars( - cause.span, - LateBoundRegionConversionTime::HigherRankedType, - &poly_cache_entry); + let (cache_entry, _) = infcx.replace_bound_vars_with_fresh_vars( + cause.span, + LateBoundRegionConversionTime::HigherRankedType, + &poly_cache_entry, + ); let cache_trait_ref = cache_entry.projection_ty.trait_ref(infcx.tcx); let obligation_trait_ref = obligation.predicate.trait_ref(infcx.tcx); - match infcx.at(cause, param_env).eq(cache_trait_ref, obligation_trait_ref) { - Ok(InferOk { value: _, obligations }) => { - Progress { - ty: cache_entry.ty, - obligations, - } - } + match infcx + .at(cause, param_env) + .eq(cache_trait_ref, obligation_trait_ref) + { + Ok(InferOk { + value: _, + obligations, + }) => Progress { + ty: cache_entry.ty, + obligations, + }, Err(e) => { span_bug!( obligation.cause.span, @@ -1456,7 +1522,8 @@ fn confirm_param_env_candidate<'cx, 'gcx, 'tcx>( with poly_projection `{:?}`: {:?}", obligation, poly_cache_entry, - e); + e + ); } } } @@ -1464,10 +1531,13 @@ fn confirm_param_env_candidate<'cx, 'gcx, 'tcx>( fn confirm_impl_candidate<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, - impl_vtable: VtableImplData<'tcx, PredicateObligation<'tcx>>) - -> Progress<'tcx> -{ - let VtableImplData { impl_def_id, substs, nested } = impl_vtable; + impl_vtable: VtableImplData<'tcx, PredicateObligation<'tcx>>, +) -> Progress<'tcx> { + let VtableImplData { + impl_def_id, + substs, + nested, + } = impl_vtable; let tcx = selcx.tcx(); let param_env = obligation.param_env; @@ -1478,9 +1548,10 @@ fn confirm_impl_candidate<'cx, 'gcx, 'tcx>( // associated type. This error will be reported by the type // checker method `check_impl_items_against_trait`, so here we // just return Error. 
- debug!("confirm_impl_candidate: no associated type {:?} for {:?}", - assoc_ty.item.ident, - obligation.predicate); + debug!( + "confirm_impl_candidate: no associated type {:?} for {:?}", + assoc_ty.item.ident, obligation.predicate + ); return Progress { ty: tcx.types.err, obligations: nested, @@ -1507,9 +1578,8 @@ fn confirm_impl_candidate<'cx, 'gcx, 'tcx>( fn assoc_ty_def<'cx, 'gcx, 'tcx>( selcx: &SelectionContext<'cx, 'gcx, 'tcx>, impl_def_id: DefId, - assoc_ty_def_id: DefId) - -> specialization_graph::NodeItem -{ + assoc_ty_def_id: DefId, +) -> specialization_graph::NodeItem { let tcx = selcx.tcx(); let assoc_ty_name = tcx.associated_item(assoc_ty_def_id).ident; let trait_def_id = tcx.impl_trait_ref(impl_def_id).unwrap().def_id; @@ -1523,8 +1593,9 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>( // cycle error if the specialization graph is currently being built. let impl_node = specialization_graph::Node::Impl(impl_def_id); for item in impl_node.items(tcx) { - if item.kind == ty::AssociatedKind::Type && - tcx.hygienic_eq(item.ident, assoc_ty_name, trait_def_id) { + if item.kind == ty::AssociatedKind::Type + && tcx.hygienic_eq(item.ident, assoc_ty_name, trait_def_id) + { return specialization_graph::NodeItem { node: specialization_graph::Node::Impl(impl_def_id), item, @@ -1535,7 +1606,8 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>( if let Some(assoc_item) = trait_def .ancestors(tcx, impl_def_id) .defs(tcx, assoc_ty_name, ty::AssociatedKind::Type, trait_def_id) - .next() { + .next() + { assoc_item } else { // This is saying that neither the trait nor @@ -1544,9 +1616,11 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>( // could only arise through a compiler bug -- // if the user wrote a bad item name, it // should have failed in astconv. - bug!("No associated type `{}` for {}", - assoc_ty_name, - tcx.item_path_str(impl_def_id)) + bug!( + "No associated type `{}` for {}", + assoc_ty_name, + tcx.item_path_str(impl_def_id) + ) } } @@ -1589,24 +1663,25 @@ pub struct ProjectionCache<'tcx> { #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] pub struct ProjectionCacheKey<'tcx> { - ty: ty::ProjectionTy<'tcx> + ty: ty::ProjectionTy<'tcx>, } impl<'cx, 'gcx, 'tcx> ProjectionCacheKey<'tcx> { - pub fn from_poly_projection_predicate(selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, - predicate: &ty::PolyProjectionPredicate<'tcx>) - -> Option - { + pub fn from_poly_projection_predicate( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, + predicate: &ty::PolyProjectionPredicate<'tcx>, + ) -> Option { let infcx = selcx.infcx(); // We don't do cross-snapshot caching of obligations with escaping regions, // so there's no cache key to use - predicate.no_bound_vars() + predicate + .no_bound_vars() .map(|predicate| ProjectionCacheKey { // We don't attempt to match up with a specific type-variable state // from a specific call to `opt_normalize_projection_type` - if // there's no precise match, the original cache entry is "stranded" // anyway. 
- ty: infcx.resolve_type_vars_if_possible(&predicate.projection_ty) + ty: infcx.resolve_type_vars_if_possible(&predicate.projection_ty), }) } } @@ -1630,7 +1705,9 @@ impl<'tcx> ProjectionCache<'tcx> { } pub fn snapshot(&mut self) -> ProjectionCacheSnapshot { - ProjectionCacheSnapshot { snapshot: self.map.snapshot() } + ProjectionCacheSnapshot { + snapshot: self.map.snapshot(), + } } pub fn rollback_to(&mut self, snapshot: ProjectionCacheSnapshot) { @@ -1638,7 +1715,8 @@ impl<'tcx> ProjectionCache<'tcx> { } pub fn rollback_placeholder(&mut self, snapshot: &ProjectionCacheSnapshot) { - self.map.partial_rollback(&snapshot.snapshot, &|k| k.ty.has_re_placeholders()); + self.map + .partial_rollback(&snapshot.snapshot, &|k| k.ty.has_re_placeholders()); } pub fn commit(&mut self, snapshot: ProjectionCacheSnapshot) { @@ -1648,8 +1726,10 @@ impl<'tcx> ProjectionCache<'tcx> { /// Try to start normalize `key`; returns an error if /// normalization already occurred (this error corresponds to a /// cache hit, so it's actually a good thing). - fn try_start(&mut self, key: ProjectionCacheKey<'tcx>) - -> Result<(), ProjectionCacheEntry<'tcx>> { + fn try_start( + &mut self, + key: ProjectionCacheKey<'tcx>, + ) -> Result<(), ProjectionCacheEntry<'tcx>> { if let Some(entry) = self.map.get(&key) { return Err(entry.clone()); } @@ -1660,9 +1740,13 @@ impl<'tcx> ProjectionCache<'tcx> { /// Indicates that `key` was normalized to `value`. fn insert_ty(&mut self, key: ProjectionCacheKey<'tcx>, value: NormalizedTy<'tcx>) { - debug!("ProjectionCacheEntry::insert_ty: adding cache entry: key={:?}, value={:?}", - key, value); - let fresh_key = self.map.insert(key, ProjectionCacheEntry::NormalizedTy(value)); + debug!( + "ProjectionCacheEntry::insert_ty: adding cache entry: key={:?}, value={:?}", + key, value + ); + let fresh_key = self + .map + .insert(key, ProjectionCacheEntry::NormalizedTy(value)); assert!(!fresh_key, "never started projecting `{:?}`", key); } @@ -1673,23 +1757,30 @@ impl<'tcx> ProjectionCache<'tcx> { pub fn complete(&mut self, key: ProjectionCacheKey<'tcx>) { let ty = match self.map.get(&key) { Some(&ProjectionCacheEntry::NormalizedTy(ref ty)) => { - debug!("ProjectionCacheEntry::complete({:?}) - completing {:?}", - key, ty); + debug!( + "ProjectionCacheEntry::complete({:?}) - completing {:?}", + key, ty + ); ty.value } ref value => { // Type inference could "strand behind" old cache entries. Leave // them alone for now. - debug!("ProjectionCacheEntry::complete({:?}) - ignoring {:?}", - key, value); - return + debug!( + "ProjectionCacheEntry::complete({:?}) - ignoring {:?}", + key, value + ); + return; } }; - self.map.insert(key, ProjectionCacheEntry::NormalizedTy(Normalized { - value: ty, - obligations: vec![] - })); + self.map.insert( + key, + ProjectionCacheEntry::NormalizedTy(Normalized { + value: ty, + obligations: vec![], + }), + ); } /// A specialized version of `complete` for when the key's value is known @@ -1698,10 +1789,13 @@ impl<'tcx> ProjectionCache<'tcx> { // We want to insert `ty` with no obligations. If the existing value // already has no obligations (as is common) we don't insert anything. 
if !ty.obligations.is_empty() { - self.map.insert(key, ProjectionCacheEntry::NormalizedTy(Normalized { - value: ty.value, - obligations: vec![] - })); + self.map.insert( + key, + ProjectionCacheEntry::NormalizedTy(Normalized { + value: ty.value, + obligations: vec![], + }), + ); } } diff --git a/src/librustc/traits/query/dropck_outlives.rs b/src/librustc/traits/query/dropck_outlives.rs index 1fd2172212d3c..61bc9e5035815 100644 --- a/src/librustc/traits/query/dropck_outlives.rs +++ b/src/librustc/traits/query/dropck_outlives.rs @@ -1,6 +1,6 @@ use infer::at::At; -use infer::InferOk; use infer::canonical::OriginalQueryValues; +use infer::InferOk; use std::iter::FromIterator; use syntax::source_map::Span; use ty::subst::Kind; @@ -42,17 +42,21 @@ impl<'cx, 'gcx, 'tcx> At<'cx, 'gcx, 'tcx> { let gcx = tcx.global_tcx(); let mut orig_values = OriginalQueryValues::default(); - let c_ty = self.infcx.canonicalize_query(&self.param_env.and(ty), &mut orig_values); + let c_ty = self + .infcx + .canonicalize_query(&self.param_env.and(ty), &mut orig_values); let span = self.cause.span; debug!("c_ty = {:?}", c_ty); if let Ok(result) = &gcx.dropck_outlives(c_ty) { if result.is_proven() { - if let Ok(InferOk { value, obligations }) = - self.infcx.instantiate_query_response_and_region_obligations( - self.cause, - self.param_env, - &orig_values, - result) + if let Ok(InferOk { value, obligations }) = self + .infcx + .instantiate_query_response_and_region_obligations( + self.cause, + self.param_env, + &orig_values, + result, + ) { let ty = self.infcx.resolve_type_vars_if_possible(&ty); let kinds = value.into_kinds_reporting_overflows(tcx, span, ty); @@ -85,12 +89,7 @@ pub struct DropckOutlivesResult<'tcx> { } impl<'tcx> DropckOutlivesResult<'tcx> { - pub fn report_overflows( - &self, - tcx: TyCtxt<'_, '_, 'tcx>, - span: Span, - ty: Ty<'tcx>, - ) { + pub fn report_overflows(&self, tcx: TyCtxt<'_, '_, 'tcx>, span: Span, ty: Ty<'tcx>) { if let Some(overflow_ty) = self.overflows.iter().next() { let mut err = struct_span_err!( tcx.sess, @@ -111,7 +110,10 @@ impl<'tcx> DropckOutlivesResult<'tcx> { ty: Ty<'tcx>, ) -> Vec> { self.report_overflows(tcx, span, ty); - let DropckOutlivesResult { kinds, overflows: _ } = self; + let DropckOutlivesResult { + kinds, + overflows: _, + } = self; kinds } } @@ -147,7 +149,12 @@ impl<'tcx> FromIterator> for DtorckConstraint<'tcx> { fn from_iter>>(iter: I) -> Self { let mut result = Self::empty(); - for DtorckConstraint { outlives, dtorck_types, overflows } in iter { + for DtorckConstraint { + outlives, + dtorck_types, + overflows, + } in iter + { result.outlives.extend(outlives); result.dtorck_types.extend(dtorck_types); result.overflows.extend(overflows); diff --git a/src/librustc/traits/query/evaluate_obligation.rs b/src/librustc/traits/query/evaluate_obligation.rs index fdae7d833734e..ce8a24b9fe33b 100644 --- a/src/librustc/traits/query/evaluate_obligation.rs +++ b/src/librustc/traits/query/evaluate_obligation.rs @@ -1,15 +1,13 @@ -use infer::InferCtxt; use infer::canonical::OriginalQueryValues; -use traits::{EvaluationResult, PredicateObligation, SelectionContext, - TraitQueryMode, OverflowError}; +use infer::InferCtxt; +use traits::{ + EvaluationResult, OverflowError, PredicateObligation, SelectionContext, TraitQueryMode, +}; impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { /// Evaluates whether the predicate can be satisfied (by any means) /// in the given `ParamEnv`. 
- pub fn predicate_may_hold( - &self, - obligation: &PredicateObligation<'tcx>, - ) -> bool { + pub fn predicate_may_hold(&self, obligation: &PredicateObligation<'tcx>) -> bool { self.evaluate_obligation_no_overflow(obligation).may_apply() } @@ -23,7 +21,8 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { &self, obligation: &PredicateObligation<'tcx>, ) -> bool { - self.evaluate_obligation_no_overflow(obligation).must_apply_considering_regions() + self.evaluate_obligation_no_overflow(obligation) + .must_apply_considering_regions() } /// Evaluates whether the predicate can be satisfied in the given @@ -35,7 +34,8 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { &self, obligation: &PredicateObligation<'tcx>, ) -> bool { - self.evaluate_obligation_no_overflow(obligation).must_apply_modulo_regions() + self.evaluate_obligation_no_overflow(obligation) + .must_apply_modulo_regions() } /// Evaluate a given predicate, capturing overflow and propagating it back. @@ -44,8 +44,10 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { obligation: &PredicateObligation<'tcx>, ) -> Result { let mut _orig_values = OriginalQueryValues::default(); - let c_pred = self.canonicalize_query(&obligation.param_env.and(obligation.predicate), - &mut _orig_values); + let c_pred = self.canonicalize_query( + &obligation.param_env.and(obligation.predicate), + &mut _orig_values, + ); // Run canonical query. If overflow occurs, rerun from scratch but this time // in standard trait query mode so that overflow is handled appropriately // within `SelectionContext`. @@ -62,9 +64,9 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { match self.evaluate_obligation(obligation) { Ok(result) => result, Err(OverflowError) => { - let mut selcx = - SelectionContext::with_query_mode(&self, TraitQueryMode::Standard); - selcx.evaluate_obligation_recursively(obligation) + let mut selcx = SelectionContext::with_query_mode(&self, TraitQueryMode::Standard); + selcx + .evaluate_obligation_recursively(obligation) .unwrap_or_else(|r| { span_bug!( obligation.cause.span, diff --git a/src/librustc/traits/query/method_autoderef.rs b/src/librustc/traits/query/method_autoderef.rs index b4984e1237857..7be6a187a77fc 100644 --- a/src/librustc/traits/query/method_autoderef.rs +++ b/src/librustc/traits/query/method_autoderef.rs @@ -1,5 +1,5 @@ -use rustc_data_structures::sync::Lrc; use infer::canonical::{Canonical, QueryResponse}; +use rustc_data_structures::sync::Lrc; use ty::Ty; #[derive(Debug)] diff --git a/src/librustc/traits/query/mod.rs b/src/librustc/traits/query/mod.rs index 59f786025b224..a386887fd4e4d 100644 --- a/src/librustc/traits/query/mod.rs +++ b/src/librustc/traits/query/mod.rs @@ -22,8 +22,7 @@ pub type CanonicalProjectionGoal<'tcx> = pub type CanonicalTyGoal<'tcx> = Canonical<'tcx, ty::ParamEnvAnd<'tcx, Ty<'tcx>>>; -pub type CanonicalPredicateGoal<'tcx> = - Canonical<'tcx, ty::ParamEnvAnd<'tcx, ty::Predicate<'tcx>>>; +pub type CanonicalPredicateGoal<'tcx> = Canonical<'tcx, ty::ParamEnvAnd<'tcx, ty::Predicate<'tcx>>>; pub type CanonicalTypeOpAscribeUserTypeGoal<'tcx> = Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::ascribe_user_type::AscribeUserType<'tcx>>>; @@ -51,4 +50,6 @@ impl<'tcx> From> for NoSolution { } } -impl_stable_hash_for!(struct NoSolution { }); +impl_stable_hash_for!( + struct NoSolution {} +); diff --git a/src/librustc/traits/query/normalize.rs b/src/librustc/traits/query/normalize.rs index 0d126d37546d6..6f56204ce0993 100644 --- a/src/librustc/traits/query/normalize.rs +++ 
b/src/librustc/traits/query/normalize.rs @@ -145,8 +145,9 @@ impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for QueryNormalizer<'cx, 'gcx, 'tcx let gcx = self.infcx.tcx.global_tcx(); let mut orig_values = OriginalQueryValues::default(); - let c_data = self.infcx.canonicalize_query( - &self.param_env.and(*data), &mut orig_values); + let c_data = self + .infcx + .canonicalize_query(&self.param_env.and(*data), &mut orig_values); debug!("QueryNormalizer: c_data = {:#?}", c_data); debug!("QueryNormalizer: orig_values = {:#?}", orig_values); match gcx.normalize_projection_ty(c_data) { @@ -157,13 +158,18 @@ impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for QueryNormalizer<'cx, 'gcx, 'tcx return ty; } - match self.infcx.instantiate_query_response_and_region_obligations( - self.cause, - self.param_env, - &orig_values, - &result) - { - Ok(InferOk { value: result, obligations }) => { + match self + .infcx + .instantiate_query_response_and_region_obligations( + self.cause, + self.param_env, + &orig_values, + &result, + ) { + Ok(InferOk { + value: result, + obligations, + }) => { debug!("QueryNormalizer: result = {:#?}", result); debug!("QueryNormalizer: obligations = {:#?}", obligations); self.obligations.extend(obligations); @@ -214,7 +220,7 @@ impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for QueryNormalizer<'cx, 'gcx, 'tcx promoted: None, }; if let Ok(evaluated) = tcx.const_eval(param_env.and(cid)) { - return self.fold_const(evaluated) + return self.fold_const(evaluated); } } } diff --git a/src/librustc/traits/query/normalize_erasing_regions.rs b/src/librustc/traits/query/normalize_erasing_regions.rs index e7034065bdf2e..b149c7c5601fc 100644 --- a/src/librustc/traits/query/normalize_erasing_regions.rs +++ b/src/librustc/traits/query/normalize_erasing_regions.rs @@ -7,8 +7,8 @@ //! `normalize_ty_after_erasing_regions` query for each type found //! within. (This underlying query is what is cached.) -use ty::{self, Ty, TyCtxt}; use ty::fold::{TypeFoldable, TypeFolder}; +use ty::{self, Ty, TyCtxt}; impl<'cx, 'tcx> TyCtxt<'cx, 'tcx, 'tcx> { /// Erase the regions in `value` and then fully normalize all the @@ -73,6 +73,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx, 'tcx> for NormalizeAfterErasingRegionsFolder<'c } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { - self.tcx.normalize_ty_after_erasing_regions(self.param_env.and(ty)) + self.tcx + .normalize_ty_after_erasing_regions(self.param_env.and(ty)) } } diff --git a/src/librustc/traits/query/outlives_bounds.rs b/src/librustc/traits/query/outlives_bounds.rs index 1134cb1b2f5d0..8270d7055ba5e 100644 --- a/src/librustc/traits/query/outlives_bounds.rs +++ b/src/librustc/traits/query/outlives_bounds.rs @@ -1,14 +1,13 @@ -use infer::InferCtxt; use infer::canonical::OriginalQueryValues; +use infer::InferCtxt; use syntax::ast; use syntax::source_map::Span; -use traits::{FulfillmentContext, ObligationCause, TraitEngine, TraitEngineExt}; use traits::query::NoSolution; +use traits::{FulfillmentContext, ObligationCause, TraitEngine, TraitEngineExt}; use ty::{self, Ty, TyCtxt}; use ich::StableHashingContext; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, - StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; use std::mem; /// Outlives bounds are relationships between generic parameters, @@ -43,9 +42,11 @@ EnumTypeFoldableImpl! 
{ } impl<'a, 'tcx> HashStable> for OutlivesBound<'tcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { OutlivesBound::RegionSubRegion(ref a, ref b) => { @@ -102,7 +103,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { Err(NoSolution) => { self.tcx.sess.delay_span_bug( span, - "implied_outlives_bounds failed to solve all obligations" + "implied_outlives_bounds failed to solve all obligations", ); return vec![]; } @@ -110,15 +111,18 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { assert!(result.value.is_proven()); let result = self.instantiate_query_response_and_region_obligations( - &ObligationCause::misc(span, body_id), param_env, &orig_values, &result); + &ObligationCause::misc(span, body_id), + param_env, + &orig_values, + &result, + ); debug!("implied_outlives_bounds for {:?}: {:#?}", ty, result); let result = match result { Ok(v) => v, Err(_) => { - self.tcx.sess.delay_span_bug( - span, - "implied_outlives_bounds failed to instantiate" - ); + self.tcx + .sess + .delay_span_bug(span, "implied_outlives_bounds failed to instantiate"); return vec![]; } }; @@ -130,7 +134,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { if fulfill_cx.select_all_or_error(self).is_err() { self.tcx.sess.delay_span_bug( span, - "implied_outlives_bounds failed to solve obligations from instantiation" + "implied_outlives_bounds failed to solve obligations from instantiation", ); } @@ -146,16 +150,16 @@ pub fn explicit_outlives_bounds<'tcx>( .caller_bounds .into_iter() .filter_map(move |predicate| match predicate { - ty::Predicate::Projection(..) | - ty::Predicate::Trait(..) | - ty::Predicate::Subtype(..) | - ty::Predicate::WellFormed(..) | - ty::Predicate::ObjectSafe(..) | - ty::Predicate::ClosureKind(..) | - ty::Predicate::TypeOutlives(..) | - ty::Predicate::ConstEvaluatable(..) => None, - ty::Predicate::RegionOutlives(ref data) => data.no_bound_vars().map( - |ty::OutlivesPredicate(r_a, r_b)| OutlivesBound::RegionSubRegion(r_b, r_a), - ), + ty::Predicate::Projection(..) + | ty::Predicate::Trait(..) + | ty::Predicate::Subtype(..) + | ty::Predicate::WellFormed(..) + | ty::Predicate::ObjectSafe(..) + | ty::Predicate::ClosureKind(..) + | ty::Predicate::TypeOutlives(..) + | ty::Predicate::ConstEvaluatable(..) 
=> None, + ty::Predicate::RegionOutlives(ref data) => data + .no_bound_vars() + .map(|ty::OutlivesPredicate(r_a, r_b)| OutlivesBound::RegionSubRegion(r_b, r_a)), }) } diff --git a/src/librustc/traits/query/type_op/ascribe_user_type.rs b/src/librustc/traits/query/type_op/ascribe_user_type.rs index b2f30564de93a..562893426cbdf 100644 --- a/src/librustc/traits/query/type_op/ascribe_user_type.rs +++ b/src/librustc/traits/query/type_op/ascribe_user_type.rs @@ -1,9 +1,9 @@ -use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse}; -use traits::query::Fallible; use hir::def_id::DefId; +use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse}; use mir::ProjectionKind; -use ty::{self, ParamEnvAnd, Ty, TyCtxt}; +use traits::query::Fallible; use ty::subst::UserSubsts; +use ty::{self, ParamEnvAnd, Ty, TyCtxt}; #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] pub struct AscribeUserType<'tcx> { @@ -22,7 +22,13 @@ impl<'tcx> AscribeUserType<'tcx> { user_substs: UserSubsts<'tcx>, projs: &'tcx ty::List>, ) -> Self { - Self { mir_ty, variance, def_id, user_substs, projs } + Self { + mir_ty, + variance, + def_id, + user_substs, + projs, + } } } diff --git a/src/librustc/traits/query/type_op/subtype.rs b/src/librustc/traits/query/type_op/subtype.rs index f001c7ea10a17..0b9d260566ec5 100644 --- a/src/librustc/traits/query/type_op/subtype.rs +++ b/src/librustc/traits/query/type_op/subtype.rs @@ -10,10 +10,7 @@ pub struct Subtype<'tcx> { impl<'tcx> Subtype<'tcx> { pub fn new(sub: Ty<'tcx>, sup: Ty<'tcx>) -> Self { - Self { - sub, - sup, - } + Self { sub, sup } } } diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs index 373ec2d5e490f..e050ca15d3463 100644 --- a/src/librustc/traits/select.rs +++ b/src/librustc/traits/select.rs @@ -33,7 +33,7 @@ use infer::{InferCtxt, InferOk, TypeFreshener}; use middle::lang_items; use mir::interpret::GlobalId; use ty::fast_reject; -use ty::relate::{TypeRelation, TraitObjectMode}; +use ty::relate::{TraitObjectMode, TypeRelation}; use ty::subst::{Subst, Substs}; use ty::{self, ToPolyTraitRef, ToPredicate, Ty, TyCtxt, TypeFoldable}; @@ -445,7 +445,9 @@ impl_stable_hash_for!(enum self::EvaluationResult { /// Indicates that trait evaluation caused overflow. pub struct OverflowError; -impl_stable_hash_for!(struct OverflowError {}); +impl_stable_hash_for!( + struct OverflowError {} +); impl<'tcx> From for SelectionError<'tcx> { fn from(OverflowError: OverflowError) -> SelectionError<'tcx> { @@ -637,13 +639,14 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { &mut self, op: impl FnOnce(&mut Self) -> Result, ) -> Result { - self.infcx.probe(|snapshot| -> Result { - let result = op(self)?; - match self.infcx.region_constraints_added_in_snapshot(snapshot) { - None => Ok(result), - Some(_) => Ok(result.max(EvaluatedToOkModuloRegions)), - } - }) + self.infcx + .probe(|snapshot| -> Result { + let result = op(self)?; + match self.infcx.region_constraints_added_in_snapshot(snapshot) { + None => Ok(result), + Some(_) => Ok(result.max(EvaluatedToOkModuloRegions)), + } + }) } /// Evaluates the predicates in `predicates` recursively. Note that @@ -692,7 +695,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { ty::Predicate::Subtype(ref p) => { // does this code ever run? - match self.infcx + match self + .infcx .subtype_predicate(&obligation.cause, obligation.param_env, p) { Some(Ok(InferOk { obligations, .. 
})) => { @@ -734,10 +738,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { let project_obligation = obligation.with(data.clone()); match project::poly_project_and_unify_type(self, &project_obligation) { Ok(Some(subobligations)) => { - let result = self.evaluate_predicates_recursively( - previous_stack, - subobligations.iter(), - ); + let result = self + .evaluate_predicates_recursively(previous_stack, subobligations.iter()); if let Some(key) = ProjectionCacheKey::from_poly_projection_predicate(self, data) { @@ -798,7 +800,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { ) -> Result { debug!("evaluate_trait_predicate_recursively({:?})", obligation); - if self.intercrate.is_none() && obligation.is_global() + if self.intercrate.is_none() + && obligation.is_global() && obligation .param_env .caller_bounds @@ -896,10 +899,12 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { } return Ok(EvaluatedToAmbig); } - if unbound_input_types && stack.iter().skip(1).any(|prev| { - stack.obligation.param_env == prev.obligation.param_env - && self.match_fresh_trait_refs(&stack.fresh_trait_ref, &prev.fresh_trait_ref) - }) { + if unbound_input_types + && stack.iter().skip(1).any(|prev| { + stack.obligation.param_env == prev.obligation.param_env + && self.match_fresh_trait_refs(&stack.fresh_trait_ref, &prev.fresh_trait_ref) + }) + { debug!( "evaluate_stack({:?}) --> unbound argument, recursive --> giving up", stack.fresh_trait_ref @@ -926,10 +931,13 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // affect the inferencer state and (b) that if we see two // fresh regions with the same index, they refer to the same // unbound type variable. - if let Some(rec_index) = stack.iter() - .skip(1) // skip top-most frame - .position(|prev| stack.obligation.param_env == prev.obligation.param_env && - stack.fresh_trait_ref == prev.fresh_trait_ref) + if let Some(rec_index) = stack + .iter() + .skip(1) // skip top-most frame + .position(|prev| { + stack.obligation.param_env == prev.obligation.param_env + && stack.fresh_trait_ref == prev.fresh_trait_ref + }) { debug!("evaluate_stack({:?}) --> recursive", stack.fresh_trait_ref); @@ -1147,7 +1155,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { where OP: FnOnce(&mut Self) -> R, { - let (result, dep_node) = self.tcx() + let (result, dep_node) = self + .tcx() .dep_graph .with_anon_task(DepKind::TraitSelect, || op(self)); self.tcx().dep_graph.read_index(dep_node); @@ -1367,7 +1376,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { } let obligation = &stack.obligation; - let predicate = self.infcx() + let predicate = self + .infcx() .resolve_type_vars_if_possible(&obligation.predicate); // OK to skip binder because of the nature of the @@ -1419,8 +1429,10 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // Same idea as the above, but for alt trait object modes. These // should only be used in intercrate mode - better safe than sorry. if self.infcx.trait_object_mode() != TraitObjectMode::NoSquash { - bug!("using squashing TraitObjectMode outside of intercrate mode? param_env={:?}", - param_env); + bug!( + "using squashing TraitObjectMode outside of intercrate mode? param_env={:?}", + param_env + ); } // Otherwise, we can use the global cache. 
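The overflow machinery threaded through these hunks (the `recursion_limit` check in `project_type`, the `OverflowError` fallback in `evaluate_obligation_no_overflow`, and the recursive-stack detection in `evaluate_stack`) all guard against obligation chains that never bottom out. A minimal user-level sketch of such a chain, using hypothetical `Wrap`/`Marker` types rather than anything from this patch, is:

struct Wrap<T>(T);

trait Marker {}

// Proving `Wrap<T>: Marker` requires proving `Wrap<Wrap<T>>: Marker`,
// which requires `Wrap<Wrap<Wrap<T>>>: Marker`, and so on without end.
impl<T> Marker for Wrap<T> where Wrap<Wrap<T>>: Marker {}

fn assert_marker<T: Marker>() {}

fn main() {
    // Uncommenting this call asks trait selection to build an unbounded
    // chain of obligations; it is rejected with an "overflow evaluating
    // the requirement" error once the recursion limit is hit.
    // assert_marker::<Wrap<u8>>();
}
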
@@ -1464,14 +1476,16 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { /// Because of this, we always want to re-run the full selection /// process for our obligation the next time we see it, since /// we might end up picking a different SelectionCandidate (or none at all) - fn can_cache_candidate(&self, - result: &SelectionResult<'tcx, SelectionCandidate<'tcx>> - ) -> bool { + fn can_cache_candidate( + &self, + result: &SelectionResult<'tcx, SelectionCandidate<'tcx>>, + ) -> bool { match result { - Ok(Some(SelectionCandidate::ParamCandidate(trait_ref))) => { - !trait_ref.skip_binder().input_types().any(|t| t.walk().any(|t_| t_.is_ty_infer())) - }, - _ => true + Ok(Some(SelectionCandidate::ParamCandidate(trait_ref))) => !trait_ref + .skip_binder() + .input_types() + .any(|t| t.walk().any(|t_| t_.is_ty_infer())), + _ => true, } } @@ -1486,10 +1500,12 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { let trait_ref = cache_fresh_trait_pred.skip_binder().trait_ref; if !self.can_cache_candidate(&candidate) { - debug!("insert_candidate_cache(trait_ref={:?}, candidate={:?} -\ - candidate is not cacheable", trait_ref, candidate); + debug!( + "insert_candidate_cache(trait_ref={:?}, candidate={:?} -\ + candidate is not cacheable", + trait_ref, candidate + ); return; - } if self.can_use_global_caches(param_env) { @@ -1532,7 +1548,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { param_env: obligation.param_env, cause: obligation.cause.clone(), recursion_depth: obligation.recursion_depth, - predicate: self.infcx() + predicate: self + .infcx() .resolve_type_vars_if_possible(&obligation.predicate), }; @@ -1631,9 +1648,9 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { _ => return, } - let result = self.infcx.probe(|_| { - self.match_projection_obligation_against_definition_bounds(obligation) - }); + let result = self + .infcx + .probe(|_| self.match_projection_obligation_against_definition_bounds(obligation)); if result { candidates.vec.push(ProjectionCandidate); @@ -1644,9 +1661,11 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { &mut self, obligation: &TraitObligation<'tcx>, ) -> bool { - let poly_trait_predicate = self.infcx() + let poly_trait_predicate = self + .infcx() .resolve_type_vars_if_possible(&obligation.predicate); - let (skol_trait_predicate, _) = self.infcx() + let (skol_trait_predicate, _) = self + .infcx() .replace_bound_vars_with_placeholders(&poly_trait_predicate); debug!( "match_projection_obligation_against_definition_bounds: \ @@ -1824,7 +1843,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { obligation: &TraitObligation<'tcx>, candidates: &mut SelectionCandidateSet<'tcx>, ) -> Result<(), SelectionError<'tcx>> { - let kind = match self.tcx() + let kind = match self + .tcx() .lang_items() .fn_trait_kind(obligation.predicate.def_id()) { @@ -1876,7 +1896,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { candidates: &mut SelectionCandidateSet<'tcx>, ) -> Result<(), SelectionError<'tcx>> { // We provide impl of all fn traits for fn pointers. 
- if self.tcx() + if self + .tcx() .lang_items() .fn_trait_kind(obligation.predicate.def_id()) .is_none() @@ -1925,8 +1946,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { obligation.predicate.skip_binder().trait_ref.self_ty(), |impl_def_id| { self.infcx.probe(|_| { - if let Ok(_substs) = self.match_impl(impl_def_id, obligation) - { + if let Ok(_substs) = self.match_impl(impl_def_id, obligation) { candidates.vec.push(ImplCandidate(impl_def_id)); } }); @@ -2005,7 +2025,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { let self_ty = self.tcx().erase_late_bound_regions(&obligation.self_ty()); let poly_trait_ref = match self_ty.sty { ty::Dynamic(ref data, ..) => { - if data.auto_traits() + if data + .auto_traits() .any(|did| did == obligation.predicate.def_id()) { debug!( @@ -2109,9 +2130,10 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // We always upcast when we can because of reason // #2 (region bounds). data_a.principal().def_id() == data_b.principal().def_id() - && data_b.auto_traits() - // All of a's auto traits need to be in b's auto traits. - .all(|b| data_a.auto_traits().any(|a| a == b)) + && data_b + .auto_traits() + // All of a's auto traits need to be in b's auto traits. + .all(|b| data_a.auto_traits().any(|a| a == b)) } // T -> Trait. @@ -2328,7 +2350,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { use self::BuiltinImplConditions::{Ambiguous, None, Where}; // NOTE: binder moved to (*) - let self_ty = self.infcx + let self_ty = self + .infcx .shallow_resolve(obligation.predicate.skip_binder().self_ty()); match self_ty.sty { @@ -2390,7 +2413,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { obligation: &TraitObligation<'tcx>, ) -> BuiltinImplConditions<'tcx> { // NOTE: binder moved to (*) - let self_ty = self.infcx + let self_ty = self + .infcx .shallow_resolve(obligation.predicate.skip_binder().self_ty()); use self::BuiltinImplConditions::{Ambiguous, None, Where}; @@ -2586,8 +2610,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { let ty: ty::Binder> = ty::Binder::bind(ty); // <----/ self.infcx.in_snapshot(|_| { - let (skol_ty, _) = self.infcx - .replace_bound_vars_with_placeholders(&ty); + let (skol_ty, _) = self.infcx.replace_bound_vars_with_placeholders(&ty); let Normalized { value: normalized_ty, mut obligations, @@ -2641,10 +2664,9 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { Ok(VtableParam(obligations)) } - ImplCandidate(impl_def_id) => Ok(VtableImpl(self.confirm_impl_candidate( - obligation, - impl_def_id, - ))), + ImplCandidate(impl_def_id) => Ok(VtableImpl( + self.confirm_impl_candidate(obligation, impl_def_id), + )), AutoImplCandidate(trait_def_id) => { let data = self.confirm_auto_impl_candidate(obligation, trait_def_id); @@ -2699,8 +2721,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { fn confirm_projection_candidate(&mut self, obligation: &TraitObligation<'tcx>) { self.infcx.in_snapshot(|_| { - let result = - self.match_projection_obligation_against_definition_bounds(obligation); + let result = self.match_projection_obligation_against_definition_bounds(obligation); assert!(result); }) } @@ -2819,7 +2840,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { let trait_obligations: Vec> = self.infcx.in_snapshot(|_| { let poly_trait_ref = obligation.predicate.to_poly_trait_ref(); - let (trait_ref, _) = self.infcx + let (trait_ref, _) = self + .infcx .replace_bound_vars_with_placeholders(&poly_trait_ref); let cause = 
obligation.derived_cause(ImplDerivedObligation); self.impl_or_trait_obligations( @@ -2916,7 +2938,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // probably flatten the binder from the obligation and the binder // from the object. Have to try to make a broken test case that // results. - let self_ty = self.infcx + let self_ty = self + .infcx .shallow_resolve(*obligation.self_ty().skip_binder()); let poly_trait_ref = match self_ty.sty { ty::Dynamic(ref data, ..) => data.principal().with_self_ty(self.tcx(), self_ty), @@ -2936,16 +2959,19 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // where we can unify because otherwise select would have // reported an ambiguity. (When we do find a match, also // record it for later.) - let nonmatching = util::supertraits(tcx, poly_trait_ref).take_while( - |&t| match self.infcx.commit_if_ok(|_| self.match_poly_trait_ref(obligation, t)) { + let nonmatching = util::supertraits(tcx, poly_trait_ref).take_while(|&t| { + match self + .infcx + .commit_if_ok(|_| self.match_poly_trait_ref(obligation, t)) + { Ok(obligations) => { upcast_trait_ref = Some(t); nested.extend(obligations); false } Err(_) => true, - }, - ); + } + }); // Additionally, for each of the nonmatching predicates that // we pass over, we sum up the set of number of vtable @@ -2968,10 +2994,12 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { debug!("confirm_fn_pointer_candidate({:?})", obligation); // OK to skip binder; it is reintroduced below - let self_ty = self.infcx + let self_ty = self + .infcx .shallow_resolve(*obligation.self_ty().skip_binder()); let sig = self_ty.fn_sig(self.tcx()); - let trait_ref = self.tcx() + let trait_ref = self + .tcx() .closure_trait_ref_and_return_type( obligation.predicate.def_id(), self_ty, @@ -3014,7 +3042,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { ); self.infcx.in_snapshot(|_| { - let (predicate, _) = self.infcx() + let (predicate, _) = self + .infcx() .replace_bound_vars_with_placeholders(&obligation.predicate); let trait_ref = predicate.trait_ref; let trait_def_id = trait_ref.def_id; @@ -3048,7 +3077,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // OK to skip binder because the substs on generator types never // touch bound regions, they just capture the in-scope // type/region parameters - let self_ty = self.infcx + let self_ty = self + .infcx .shallow_resolve(obligation.self_ty().skip_binder()); let (generator_def_id, substs) = match self_ty.sty { ty::Generator(id, substs, _) => (id, substs), @@ -3098,7 +3128,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { ) -> Result>, SelectionError<'tcx>> { debug!("confirm_closure_candidate({:?})", obligation); - let kind = self.tcx() + let kind = self + .tcx() .lang_items() .fn_trait_kind(obligation.predicate.def_id()) .unwrap_or_else(|| bug!("closure candidate for non-fn trait {:?}", obligation)); @@ -3106,7 +3137,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // OK to skip binder because the substs on closure types never // touch bound regions, they just capture the in-scope // type/region parameters - let self_ty = self.infcx + let self_ty = self + .infcx .shallow_resolve(obligation.self_ty().skip_binder()); let (closure_def_id, substs) = match self_ty.sty { ty::Closure(id, substs) => (id, substs), @@ -3201,7 +3233,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // assemble_candidates_for_unsizing should ensure there are no late bound // regions here. See the comment there for more details. 
- let source = self.infcx + let source = self + .infcx .shallow_resolve(obligation.self_ty().no_bound_vars().unwrap()); let target = obligation .predicate @@ -3236,7 +3269,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { tcx.mk_existential_predicates(iter) }); let source_trait = tcx.mk_dynamic(existential_predicates, r_b); - let InferOk { obligations, .. } = self.infcx + let InferOk { obligations, .. } = self + .infcx .at(&obligation.cause, obligation.param_env) .sup(target, source_trait) .map_err(|_| Unimplemented)?; @@ -3259,7 +3293,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // T -> Trait. (_, &ty::Dynamic(ref data, r)) => { - let mut object_dids = data.auto_traits() + let mut object_dids = data + .auto_traits() .chain(iter::once(data.principal().def_id())); if let Some(did) = object_dids.find(|did| !tcx.is_object_safe(*did)) { return Err(TraitNotObjectSafe(did)); @@ -3308,7 +3343,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // [T; n] -> [T]. (&ty::Array(a, _), &ty::Slice(b)) => { - let InferOk { obligations, .. } = self.infcx + let InferOk { obligations, .. } = self + .infcx .at(&obligation.cause, obligation.param_env) .eq(b, a) .map_err(|_| Unimplemented)?; @@ -3317,7 +3353,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // Struct -> Struct. (&ty::Adt(def, substs_a), &ty::Adt(_, substs_b)) => { - let fields = def.all_fields() + let fields = def + .all_fields() .map(|f| tcx.type_of(f.did)) .collect::>(); @@ -3371,7 +3408,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { } }); let new_struct = tcx.mk_adt(def, tcx.mk_substs(params)); - let InferOk { obligations, .. } = self.infcx + let InferOk { obligations, .. } = self + .infcx .at(&obligation.cause, obligation.param_env) .eq(target, new_struct) .map_err(|_| Unimplemented)?; @@ -3403,7 +3441,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // Check that the source tuple with the target's // last element is equal to the target. let new_tuple = tcx.mk_tup(a_mid.iter().cloned().chain(iter::once(b_last))); - let InferOk { obligations, .. } = self.infcx + let InferOk { obligations, .. } = self + .infcx .at(&obligation.cause, obligation.param_env) .eq(target, new_tuple) .map_err(|_| Unimplemented)?; @@ -3467,11 +3506,13 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { return Err(()); } - let (skol_obligation, _) = self.infcx() + let (skol_obligation, _) = self + .infcx() .replace_bound_vars_with_placeholders(&obligation.predicate); let skol_obligation_trait_ref = skol_obligation.trait_ref; - let impl_substs = self.infcx + let impl_substs = self + .infcx .fresh_substs_for_item(obligation.cause.span, impl_def_id); let impl_trait_ref = impl_trait_ref.subst(self.tcx(), impl_substs); @@ -3493,7 +3534,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { impl_def_id, obligation, impl_trait_ref, skol_obligation_trait_ref ); - let InferOk { obligations, .. } = self.infcx + let InferOk { obligations, .. 
} = self + .infcx .at(&obligation.cause, obligation.param_env) .eq(skol_obligation_trait_ref, impl_trait_ref) .map_err(|e| debug!("match_impl: failed eq_trait_refs due to `{}`", e))?; @@ -3571,8 +3613,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { previous: &ty::PolyTraitRef<'tcx>, current: &ty::PolyTraitRef<'tcx>, ) -> bool { - let mut matcher = ty::_match::Match::new( - self.tcx(), self.infcx.trait_object_mode()); + let mut matcher = ty::_match::Match::new(self.tcx(), self.infcx.trait_object_mode()); matcher.relate(previous, current).is_ok() } diff --git a/src/librustc/traits/specialize/mod.rs b/src/librustc/traits/specialize/mod.rs index 63f52a34dfa70..fc6a61cd36d19 100644 --- a/src/librustc/traits/specialize/mod.rs +++ b/src/librustc/traits/specialize/mod.rs @@ -14,17 +14,17 @@ pub mod specialization_graph; use hir::def_id::DefId; use infer::{InferCtxt, InferOk}; use lint; -use traits::{self, FutureCompatOverlapErrorKind, ObligationCause, TraitEngine}; -use traits::coherence; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::sync::Lrc; use syntax_pos::DUMMY_SP; +use traits::coherence; use traits::select::IntercrateAmbiguityCause; -use ty::{self, TyCtxt, TypeFoldable}; +use traits::{self, FutureCompatOverlapErrorKind, ObligationCause, TraitEngine}; use ty::subst::{Subst, Substs}; +use ty::{self, TyCtxt, TypeFoldable}; -use super::{SelectionContext, FulfillmentContext}; use super::util::impl_trait_ref_and_oblig; +use super::{FulfillmentContext, SelectionContext}; /// Information pertinent to an overlapping impl error. #[derive(Debug)] @@ -71,18 +71,22 @@ pub struct OverlapError { /// through associated type projection. We deal with such cases by using /// *fulfillment* to relate the two impls, requiring that all projections are /// resolved. -pub fn translate_substs<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - source_impl: DefId, - source_substs: &'tcx Substs<'tcx>, - target_node: specialization_graph::Node) - -> &'tcx Substs<'tcx> { - debug!("translate_substs({:?}, {:?}, {:?}, {:?})", - param_env, source_impl, source_substs, target_node); - let source_trait_ref = infcx.tcx - .impl_trait_ref(source_impl) - .unwrap() - .subst(infcx.tcx, &source_substs); +pub fn translate_substs<'a, 'gcx, 'tcx>( + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + source_impl: DefId, + source_substs: &'tcx Substs<'tcx>, + target_node: specialization_graph::Node, +) -> &'tcx Substs<'tcx> { + debug!( + "translate_substs({:?}, {:?}, {:?}, {:?})", + param_env, source_impl, source_substs, target_node + ); + let source_trait_ref = infcx + .tcx + .impl_trait_ref(source_impl) + .unwrap() + .subst(infcx.tcx, &source_substs); // translate the Self and Param parts of the substitution, since those // vary across impls @@ -93,11 +97,14 @@ pub fn translate_substs<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, return source_substs; } - fulfill_implication(infcx, param_env, source_trait_ref, target_impl) - .unwrap_or_else(|_| - bug!("When translating substitutions for specialization, the expected \ - specialization failed to hold") - ) + fulfill_implication(infcx, param_env, source_trait_ref, target_impl).unwrap_or_else( + |_| { + bug!( + "When translating substitutions for specialization, the expected \ + specialization failed to hold" + ) + }, + ) } specialization_graph::Node::Trait(..) 
=> source_trait_ref.substs, }; @@ -118,31 +125,43 @@ pub fn find_associated_item<'a, 'tcx>( substs: &'tcx Substs<'tcx>, impl_data: &super::VtableImplData<'tcx, ()>, ) -> (DefId, &'tcx Substs<'tcx>) { - debug!("find_associated_item({:?}, {:?}, {:?}, {:?})", - param_env, item, substs, impl_data); + debug!( + "find_associated_item({:?}, {:?}, {:?}, {:?})", + param_env, item, substs, impl_data + ); assert!(!substs.needs_infer()); let trait_def_id = tcx.trait_id_of_impl(impl_data.impl_def_id).unwrap(); let trait_def = tcx.trait_def(trait_def_id); let ancestors = trait_def.ancestors(tcx, impl_data.impl_def_id); - match ancestors.defs(tcx, item.ident, item.kind, trait_def_id).next() { + match ancestors + .defs(tcx, item.ident, item.kind, trait_def_id) + .next() + { Some(node_item) => { let substs = tcx.infer_ctxt().enter(|infcx| { let param_env = param_env.with_reveal_all(); let substs = substs.rebase_onto(tcx, trait_def_id, impl_data.substs); - let substs = translate_substs(&infcx, param_env, impl_data.impl_def_id, - substs, node_item.node); + let substs = translate_substs( + &infcx, + param_env, + impl_data.impl_def_id, + substs, + node_item.node, + ); let substs = infcx.tcx.erase_regions(&substs); - tcx.lift(&substs).unwrap_or_else(|| - bug!("find_method: translate_substs \ - returned {:?} which contains inference types/regions", - substs) - ) + tcx.lift(&substs).unwrap_or_else(|| { + bug!( + "find_method: translate_substs \ + returned {:?} which contains inference types/regions", + substs + ) + }) }); (node_item.item.def_id, substs) } - None => bug!("{:?} not found in {:?}", item, impl_data.impl_def_id) + None => bug!("{:?} not found in {:?}", item, impl_data.impl_def_id), } } @@ -151,16 +170,15 @@ pub fn find_associated_item<'a, 'tcx>( /// Specialization is determined by the sets of types to which the impls apply; /// impl1 specializes impl2 if it applies to a subset of the types impl2 applies /// to. -pub(super) fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - (impl1_def_id, impl2_def_id): (DefId, DefId)) - -> bool -{ +pub(super) fn specializes<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + (impl1_def_id, impl2_def_id): (DefId, DefId), +) -> bool { debug!("specializes({:?}, {:?})", impl1_def_id, impl2_def_id); // The feature gate should prevent introducing new specializations, but not // taking advantage of upstream ones. - if !tcx.features().specialization && - (impl1_def_id.is_local() || impl2_def_id.is_local()) { + if !tcx.features().specialization && (impl1_def_id.is_local() || impl2_def_id.is_local()) { return false; } @@ -189,17 +207,18 @@ pub(super) fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, tcx.infer_ctxt().enter(|infcx| { // Normalize the trait reference. The WF rules ought to ensure // that this always succeeds. - let impl1_trait_ref = - match traits::fully_normalize(&infcx, - FulfillmentContext::new(), - ObligationCause::dummy(), - penv, - &impl1_trait_ref) { - Ok(impl1_trait_ref) => impl1_trait_ref, - Err(err) => { - bug!("failed to fully normalize {:?}: {:?}", impl1_trait_ref, err); - } - }; + let impl1_trait_ref = match traits::fully_normalize( + &infcx, + FulfillmentContext::new(), + ObligationCause::dummy(), + penv, + &impl1_trait_ref, + ) { + Ok(impl1_trait_ref) => impl1_trait_ref, + Err(err) => { + bug!("failed to fully normalize {:?}: {:?}", impl1_trait_ref, err); + } + }; // Attempt to prove that impl2 applies, given all of the above. 
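`specializes` above decides whether one impl applies to a subset of the types another applies to. A minimal, nightly-only sketch of that relationship (the trait and type names are invented for the example, and it needs the `specialization` feature gate):

#![feature(specialization)]

trait Describe {
    fn describe(&self) -> String;
}

// The blanket impl applies to every type...
impl<T> Describe for T {
    default fn describe(&self) -> String {
        "something".to_string()
    }
}

// ...while this impl applies only to `String`, a strict subset, so it
// specializes the blanket impl instead of conflicting with it.
impl Describe for String {
    fn describe(&self) -> String {
        format!("the string {:?}", self)
    }
}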
fulfill_implication(&infcx, penv, impl1_trait_ref, impl2_def_id).is_ok() @@ -211,33 +230,39 @@ pub(super) fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, /// generics of `target_impl`, including both those needed to unify with /// `source_trait_ref` and those whose identity is determined via a where /// clause in the impl. -fn fulfill_implication<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - source_trait_ref: ty::TraitRef<'tcx>, - target_impl: DefId) - -> Result<&'tcx Substs<'tcx>, ()> { - debug!("fulfill_implication({:?}, trait_ref={:?} |- {:?} applies)", - param_env, source_trait_ref, target_impl); +fn fulfill_implication<'a, 'gcx, 'tcx>( + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + source_trait_ref: ty::TraitRef<'tcx>, + target_impl: DefId, +) -> Result<&'tcx Substs<'tcx>, ()> { + debug!( + "fulfill_implication({:?}, trait_ref={:?} |- {:?} applies)", + param_env, source_trait_ref, target_impl + ); let selcx = &mut SelectionContext::new(&infcx); let target_substs = infcx.fresh_substs_for_item(DUMMY_SP, target_impl); - let (target_trait_ref, mut obligations) = impl_trait_ref_and_oblig(selcx, - param_env, - target_impl, - target_substs); - debug!("fulfill_implication: target_trait_ref={:?}, obligations={:?}", - target_trait_ref, obligations); + let (target_trait_ref, mut obligations) = + impl_trait_ref_and_oblig(selcx, param_env, target_impl, target_substs); + debug!( + "fulfill_implication: target_trait_ref={:?}, obligations={:?}", + target_trait_ref, obligations + ); // do the impls unify? If not, no specialization. - match infcx.at(&ObligationCause::dummy(), param_env) - .eq(source_trait_ref, target_trait_ref) { + match infcx + .at(&ObligationCause::dummy(), param_env) + .eq(source_trait_ref, target_trait_ref) + { Ok(InferOk { obligations: o, .. }) => { obligations.extend(o); } Err(_) => { - debug!("fulfill_implication: {:?} does not unify with {:?}", - source_trait_ref, - target_trait_ref); + debug!( + "fulfill_implication: {:?} does not unify with {:?}", + source_trait_ref, target_trait_ref + ); return Err(()); } } @@ -264,19 +289,19 @@ fn fulfill_implication<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, match fulfill_cx.select_all_or_error(infcx) { Err(errors) => { // no dice! - debug!("fulfill_implication: for impls on {:?} and {:?}, \ - could not fulfill: {:?} given {:?}", - source_trait_ref, - target_trait_ref, - errors, - param_env.caller_bounds); + debug!( + "fulfill_implication: for impls on {:?} and {:?}, \ + could not fulfill: {:?} given {:?}", + source_trait_ref, target_trait_ref, errors, param_env.caller_bounds + ); Err(()) } Ok(()) => { - debug!("fulfill_implication: an impl for {:?} specializes {:?}", - source_trait_ref, - target_trait_ref); + debug!( + "fulfill_implication: an impl for {:?} specializes {:?}", + source_trait_ref, target_trait_ref + ); // Now resolve the *substitution* we built for the target earlier, replacing // the inference variables inside with whatever we got from fulfillment. @@ -300,9 +325,11 @@ pub(super) fn specialization_graph_provider<'a, 'tcx>( // negated `CrateNum` (so remote definitions are visited first) and then // by a flattened version of the `DefIndex`. 
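When insertion into the specialization graph finds two impls that overlap without either specializing the other, the code below reports E0119. A stable-Rust sketch of the kind of program that reaches that path (names invented; the quoted error text is approximate):

trait Greet {
    fn hi(&self) -> &'static str;
}

struct Visitor;

impl Greet for Visitor {
    fn hi(&self) -> &'static str { "hello" }
}

// Uncommenting this second impl makes both impls apply to the same type while
// neither specializes the other, so the compiler emits roughly:
// error[E0119]: conflicting implementations of trait `Greet` for type `Visitor`
//
// impl Greet for Visitor {
//     fn hi(&self) -> &'static str { "hi again" }
// }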
trait_impls.sort_unstable_by_key(|def_id| { - (-(def_id.krate.as_u32() as i64), - def_id.index.address_space().index(), - def_id.index.as_array_index()) + ( + -(def_id.krate.as_u32() as i64), + def_id.index.address_space().index(), + def_id.index.as_array_index(), + ) }); for impl_def_id in trait_impls { @@ -313,55 +340,67 @@ pub(super) fn specialization_graph_provider<'a, 'tcx>( let (overlap, used_to_be_allowed) = match insert_result { Err(overlap) => (Some(overlap), None), Ok(Some(overlap)) => (Some(overlap.error), Some(overlap.kind)), - Ok(None) => (None, None) + Ok(None) => (None, None), }; if let Some(overlap) = overlap { - let msg = format!("conflicting implementations of trait `{}`{}:{}", + let msg = format!( + "conflicting implementations of trait `{}`{}:{}", overlap.trait_desc, - overlap.self_desc.clone().map_or( - String::new(), |ty| { - format!(" for type `{}`", ty) - }), - if used_to_be_allowed.is_some() { " (E0119)" } else { "" } - ); - let impl_span = tcx.sess.source_map().def_span( - tcx.span_of_impl(impl_def_id).unwrap() + overlap + .self_desc + .clone() + .map_or(String::new(), |ty| format!(" for type `{}`", ty)), + if used_to_be_allowed.is_some() { + " (E0119)" + } else { + "" + } ); + let impl_span = tcx + .sess + .source_map() + .def_span(tcx.span_of_impl(impl_def_id).unwrap()); let mut err = if let Some(kind) = used_to_be_allowed { let lint = match kind { - FutureCompatOverlapErrorKind::Issue43355 => - lint::builtin::INCOHERENT_FUNDAMENTAL_IMPLS, - FutureCompatOverlapErrorKind::Issue33140 => - lint::builtin::ORDER_DEPENDENT_TRAIT_OBJECTS, + FutureCompatOverlapErrorKind::Issue43355 => { + lint::builtin::INCOHERENT_FUNDAMENTAL_IMPLS + } + FutureCompatOverlapErrorKind::Issue33140 => { + lint::builtin::ORDER_DEPENDENT_TRAIT_OBJECTS + } }; tcx.struct_span_lint_node( lint, tcx.hir().as_local_node_id(impl_def_id).unwrap(), impl_span, - &msg) + &msg, + ) } else { - struct_span_err!(tcx.sess, - impl_span, - E0119, - "{}", - msg) + struct_span_err!(tcx.sess, impl_span, E0119, "{}", msg) }; match tcx.span_of_impl(overlap.with_impl) { Ok(span) => { - err.span_label(tcx.sess.source_map().def_span(span), - "first implementation here".to_string()); - err.span_label(impl_span, - format!("conflicting implementation{}", - overlap.self_desc - .map_or(String::new(), - |ty| format!(" for `{}`", ty)))); + err.span_label( + tcx.sess.source_map().def_span(span), + "first implementation here".to_string(), + ); + err.span_label( + impl_span, + format!( + "conflicting implementation{}", + overlap + .self_desc + .map_or(String::new(), |ty| format!(" for `{}`", ty)) + ), + ); } Err(cname) => { let msg = match to_pretty_impl_header(tcx, overlap.with_impl) { - Some(s) => format!( - "conflicting implementation in crate `{}`:\n- {}", cname, s), + Some(s) => { + format!("conflicting implementation in crate `{}`:\n- {}", cname, s) + } None => format!("conflicting implementation in crate `{}`", cname), }; err.note(&msg); @@ -410,10 +449,14 @@ fn to_pretty_impl_header(tcx: TyCtxt<'_, '_, '_>, impl_def_id: DefId) -> Option< if !substs.is_noop() { types_without_default_bounds.extend(substs.types()); w.push('<'); - w.push_str(&substs.iter() - .map(|k| k.to_string()) - .filter(|k| &k[..] != "'_") - .collect::>().join(", ")); + w.push_str( + &substs + .iter() + .map(|k| k.to_string()) + .filter(|k| &k[..] 
!= "'_") + .collect::>() + .join(", "), + ); w.push('>'); } @@ -422,8 +465,8 @@ fn to_pretty_impl_header(tcx: TyCtxt<'_, '_, '_>, impl_def_id: DefId) -> Option< // The predicates will contain default bounds like `T: Sized`. We need to // remove these bounds, and add `T: ?Sized` to any untouched type parameters. let predicates = &tcx.predicates_of(impl_def_id).predicates; - let mut pretty_predicates = Vec::with_capacity( - predicates.len() + types_without_default_bounds.len()); + let mut pretty_predicates = + Vec::with_capacity(predicates.len() + types_without_default_bounds.len()); for (p, _) in predicates { if let Some(poly_trait_ref) = p.to_opt_poly_trait_ref() { @@ -436,7 +479,9 @@ fn to_pretty_impl_header(tcx: TyCtxt<'_, '_, '_>, impl_def_id: DefId) -> Option< } pretty_predicates.extend( - types_without_default_bounds.iter().map(|ty| format!("{}: ?Sized", ty)) + types_without_default_bounds + .iter() + .map(|ty| format!("{}: ?Sized", ty)), ); if !pretty_predicates.is_empty() { diff --git a/src/librustc/traits/specialize/specialization_graph.rs b/src/librustc/traits/specialize/specialization_graph.rs index db3547b2b7479..3a4fd2da0f1ac 100644 --- a/src/librustc/traits/specialize/specialization_graph.rs +++ b/src/librustc/traits/specialize/specialization_graph.rs @@ -2,14 +2,13 @@ use super::OverlapError; use hir::def_id::DefId; use ich::{self, StableHashingContext}; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, - StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; +use rustc_data_structures::sync::Lrc; +use syntax::ast::Ident; use traits; -use ty::{self, TyCtxt, TypeFoldable}; use ty::fast_reject::{self, SimplifiedType}; use ty::relate::TraitObjectMode; -use rustc_data_structures::sync::Lrc; -use syntax::ast::Ident; +use ty::{self, TyCtxt, TypeFoldable}; use util::captures::Captures; use util::nodemap::{DefIdMap, FxHashMap}; @@ -51,7 +50,6 @@ struct Children { // A similar division is used within `TraitDef`, but the lists there collect // together *all* the impls for a trait, and are populated prior to building // the specialization graph. - /// Impls of the trait. nonblanket_impls: FxHashMap>, @@ -68,7 +66,7 @@ pub enum FutureCompatOverlapErrorKind { #[derive(Debug)] pub struct FutureCompatOverlapError { pub error: OverlapError, - pub kind: FutureCompatOverlapErrorKind + pub kind: FutureCompatOverlapErrorKind, } /// The result of attempting to insert an impl into a group of children. @@ -85,13 +83,17 @@ enum Inserted { impl<'a, 'gcx, 'tcx> Children { /// Insert an impl into this set of children without comparing to any existing impls. - fn insert_blindly(&mut self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - impl_def_id: DefId) { + fn insert_blindly(&mut self, tcx: TyCtxt<'a, 'gcx, 'tcx>, impl_def_id: DefId) { let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap(); if let Some(sty) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), false) { - debug!("insert_blindly: impl_def_id={:?} sty={:?}", impl_def_id, sty); - self.nonblanket_impls.entry(sty).or_default().push(impl_def_id) + debug!( + "insert_blindly: impl_def_id={:?} sty={:?}", + impl_def_id, sty + ); + self.nonblanket_impls + .entry(sty) + .or_default() + .push(impl_def_id) } else { debug!("insert_blindly: impl_def_id={:?} sty=None", impl_def_id); self.blanket_impls.push(impl_def_id) @@ -101,13 +103,14 @@ impl<'a, 'gcx, 'tcx> Children { /// Remove an impl from this set of children. Used when replacing /// an impl with a parent. 
The impl must be present in the list of /// children already. - fn remove_existing(&mut self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - impl_def_id: DefId) { + fn remove_existing(&mut self, tcx: TyCtxt<'a, 'gcx, 'tcx>, impl_def_id: DefId) { let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap(); let vec: &mut Vec; if let Some(sty) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), false) { - debug!("remove_existing: impl_def_id={:?} sty={:?}", impl_def_id, sty); + debug!( + "remove_existing: impl_def_id={:?} sty={:?}", + impl_def_id, sty + ); vec = self.nonblanket_impls.get_mut(&sty).unwrap(); } else { debug!("remove_existing: impl_def_id={:?} sty=None", impl_def_id); @@ -120,19 +123,18 @@ impl<'a, 'gcx, 'tcx> Children { /// Attempt to insert an impl into this set of children, while comparing for /// specialization relationships. - fn insert(&mut self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - impl_def_id: DefId, - simplified_self: Option) - -> Result - { + fn insert( + &mut self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + impl_def_id: DefId, + simplified_self: Option, + ) -> Result { let mut last_lint = None; let mut replace_children = Vec::new(); debug!( "insert(impl_def_id={:?}, simplified_self={:?})", - impl_def_id, - simplified_self, + impl_def_id, simplified_self, ); let possible_siblings = match simplified_self { @@ -143,9 +145,7 @@ impl<'a, 'gcx, 'tcx> Children { for possible_sibling in possible_siblings { debug!( "insert: impl_def_id={:?}, simplified_self={:?}, possible_sibling={:?}", - impl_def_id, - simplified_self, - possible_sibling, + impl_def_id, simplified_self, possible_sibling, ); let overlap_error = |overlap: traits::coherence::OverlapResult<'_>| { @@ -193,14 +193,18 @@ impl<'a, 'gcx, 'tcx> Children { )?; if le && !ge { - debug!("descending as child of TraitRef {:?}", - tcx.impl_trait_ref(possible_sibling).unwrap()); + debug!( + "descending as child of TraitRef {:?}", + tcx.impl_trait_ref(possible_sibling).unwrap() + ); // The impl specializes `possible_sibling`. return Ok(Inserted::ShouldRecurseOn(possible_sibling)); } else if ge && !le { - debug!("placing as parent of TraitRef {:?}", - tcx.impl_trait_ref(possible_sibling).unwrap()); + debug!( + "placing as parent of TraitRef {:?}", + tcx.impl_trait_ref(possible_sibling).unwrap() + ); replace_children.push(possible_sibling); } else { @@ -217,7 +221,7 @@ impl<'a, 'gcx, 'tcx> Children { |overlap| { last_lint = Some(FutureCompatOverlapError { error: overlap_error(overlap), - kind: FutureCompatOverlapErrorKind::Issue33140 + kind: FutureCompatOverlapErrorKind::Issue33140, }); }, || (), @@ -232,7 +236,7 @@ impl<'a, 'gcx, 'tcx> Children { |overlap| { last_lint = Some(FutureCompatOverlapError { error: overlap_error(overlap), - kind: FutureCompatOverlapErrorKind::Issue43355 + kind: FutureCompatOverlapErrorKind::Issue43355, }); }, || (), @@ -266,23 +270,25 @@ impl<'a, 'gcx, 'tcx> Children { // A custom iterator used by Children::insert enum PotentialSiblings - where I: Iterator, - J: Iterator +where + I: Iterator, + J: Iterator, { Unfiltered(I), - Filtered(J) + Filtered(J), } impl Iterator for PotentialSiblings - where I: Iterator, - J: Iterator +where + I: Iterator, + J: Iterator, { type Item = DefId; fn next(&mut self) -> Option { match *self { PotentialSiblings::Unfiltered(ref mut iter) => iter.next(), - PotentialSiblings::Filtered(ref mut iter) => iter.next() + PotentialSiblings::Filtered(ref mut iter) => iter.next(), } } } @@ -298,29 +304,36 @@ impl<'a, 'gcx, 'tcx> Graph { /// Insert a local impl into the specialization graph. 
If an existing impl /// conflicts with it (has overlap, but neither specializes the other), /// information about the area of overlap is returned in the `Err`. - pub fn insert(&mut self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - impl_def_id: DefId) - -> Result, OverlapError> { + pub fn insert( + &mut self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + impl_def_id: DefId, + ) -> Result, OverlapError> { assert!(impl_def_id.is_local()); let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap(); let trait_def_id = trait_ref.def_id; - debug!("insert({:?}): inserting TraitRef {:?} into specialization graph", - impl_def_id, trait_ref); + debug!( + "insert({:?}): inserting TraitRef {:?} into specialization graph", + impl_def_id, trait_ref + ); // If the reference itself contains an earlier error (e.g., due to a // resolution failure), then we just insert the impl at the top level of // the graph and claim that there's no overlap (in order to suppress // bogus errors). if trait_ref.references_error() { - debug!("insert: inserting dummy node for erroneous TraitRef {:?}, \ - impl_def_id={:?}, trait_def_id={:?}", - trait_ref, impl_def_id, trait_def_id); + debug!( + "insert: inserting dummy node for erroneous TraitRef {:?}, \ + impl_def_id={:?}, trait_def_id={:?}", + trait_ref, impl_def_id, trait_def_id + ); self.parent.insert(impl_def_id, trait_def_id); - self.children.entry(trait_def_id).or_default() + self.children + .entry(trait_def_id) + .or_default() .insert_blindly(tcx, impl_def_id); return Ok(None); } @@ -333,8 +346,11 @@ impl<'a, 'gcx, 'tcx> Graph { loop { use self::Inserted::*; - let insert_result = self.children.entry(parent).or_default() - .insert(tcx, impl_def_id, simplified)?; + let insert_result = + self.children + .entry(parent) + .or_default() + .insert(tcx, impl_def_id, simplified)?; match insert_result { BecameNewSibling(opt_lint) => { @@ -358,9 +374,7 @@ impl<'a, 'gcx, 'tcx> Graph { // Adjust P's list of children: remove G and then add N. { - let siblings = self.children - .get_mut(&parent) - .unwrap(); + let siblings = self.children.get_mut(&parent).unwrap(); for &grand_child_to_be in &grand_children_to_be { siblings.remove_existing(tcx, grand_child_to_be); } @@ -375,7 +389,9 @@ impl<'a, 'gcx, 'tcx> Graph { // Add G as N's child. for &grand_child_to_be in &grand_children_to_be { - self.children.entry(impl_def_id).or_default() + self.children + .entry(impl_def_id) + .or_default() .insert_blindly(tcx, grand_child_to_be); } break; @@ -391,16 +407,23 @@ impl<'a, 'gcx, 'tcx> Graph { } /// Insert cached metadata mapping from a child impl back to its parent. - pub fn record_impl_from_cstore(&mut self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - parent: DefId, - child: DefId) { + pub fn record_impl_from_cstore( + &mut self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + parent: DefId, + child: DefId, + ) { if self.parent.insert(child, parent).is_some() { - bug!("When recording an impl from the crate store, information about its parent \ - was already present."); + bug!( + "When recording an impl from the crate store, information about its parent \ + was already present." 
+ ); } - self.children.entry(parent).or_default().insert_blindly(tcx, child); + self.children + .entry(parent) + .or_default() + .insert_blindly(tcx, child); } /// The parent of a given impl, which is the def id of the trait when the @@ -491,32 +514,33 @@ impl<'a, 'gcx, 'tcx> Ancestors { trait_item_name: Ident, trait_item_kind: ty::AssociatedKind, trait_def_id: DefId, - ) -> impl Iterator> + Captures<'gcx> + Captures<'tcx> + 'a { + ) -> impl Iterator> + Captures<'gcx> + Captures<'tcx> + 'a + { self.flat_map(move |node| { use ty::AssociatedKind::*; - node.items(tcx).filter(move |impl_item| match (trait_item_kind, impl_item.kind) { - | (Const, Const) - | (Method, Method) - | (Type, Type) - | (Type, Existential) - => tcx.hygienic_eq(impl_item.ident, trait_item_name, trait_def_id), - - | (Const, _) - | (Method, _) - | (Type, _) - | (Existential, _) - => false, - }).map(move |item| NodeItem { node: node, item: item }) + node.items(tcx) + .filter(move |impl_item| match (trait_item_kind, impl_item.kind) { + (Const, Const) | (Method, Method) | (Type, Type) | (Type, Existential) => { + tcx.hygienic_eq(impl_item.ident, trait_item_name, trait_def_id) + } + + (Const, _) | (Method, _) | (Type, _) | (Existential, _) => false, + }) + .map(move |item| NodeItem { + node: node, + item: item, + }) }) } } /// Walk up the specialization ancestors of a given impl, starting with that /// impl itself. -pub fn ancestors(tcx: TyCtxt<'_, '_, '_>, - trait_def_id: DefId, - start_from_impl: DefId) - -> Ancestors { +pub fn ancestors( + tcx: TyCtxt<'_, '_, '_>, + trait_def_id: DefId, + start_from_impl: DefId, +) -> Ancestors { let specialization_graph = tcx.specialization_graph_of(trait_def_id); Ancestors { trait_def_id, @@ -526,9 +550,11 @@ pub fn ancestors(tcx: TyCtxt<'_, '_, '_>, } impl<'a> HashStable> for Children { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let Children { ref nonblanket_impls, ref blanket_impls, diff --git a/src/librustc/traits/structural_impls.rs b/src/librustc/traits/structural_impls.rs index ae2b83e105773..06f28f84c119f 100644 --- a/src/librustc/traits/structural_impls.rs +++ b/src/librustc/traits/structural_impls.rs @@ -1,14 +1,14 @@ use chalk_engine; use smallvec::SmallVec; +use syntax::symbol::InternedString; use traits; use traits::project::Normalized; use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; use ty::{self, Lift, TyCtxt}; -use syntax::symbol::InternedString; +use std::collections::{BTreeMap, BTreeSet}; use std::fmt; use std::rc::Rc; -use std::collections::{BTreeSet, BTreeMap}; // structural impls for the structs in traits @@ -168,7 +168,7 @@ impl<'tcx> fmt::Display for traits::WhereClause<'tcx> { // Bypass ppaux because it does not print out anonymous regions. 
fn write_region_name<'tcx>( r: ty::Region<'tcx>, - fmt: &mut fmt::Formatter<'_> + fmt: &mut fmt::Formatter<'_>, ) -> fmt::Result { match r { ty::ReLateBound(index, br) => match br { @@ -181,7 +181,7 @@ impl<'tcx> fmt::Display for traits::WhereClause<'tcx> { } } _ => write!(fmt, "'_"), - } + }, _ => write!(fmt, "{}", r), } @@ -237,8 +237,7 @@ impl<'tcx> fmt::Display for traits::DomainGoal<'tcx> { Normalize(projection) => write!( fmt, "Normalize({} -> {})", - projection.projection_ty, - projection.ty + projection.projection_ty, projection.ty ), } } @@ -319,10 +318,10 @@ impl<'tcx> TypeVisitor<'tcx> for BoundNamesCollector { bound_ty.var.as_u32(), match bound_ty.kind { ty::BoundTyKind::Param(name) => name, - ty::BoundTyKind::Anon => Symbol::intern( - &format!("^{}", bound_ty.var.as_u32()) - ).as_interned_str(), - } + ty::BoundTyKind::Anon => { + Symbol::intern(&format!("^{}", bound_ty.var.as_u32())).as_interned_str() + } + }, ); } @@ -336,21 +335,18 @@ impl<'tcx> TypeVisitor<'tcx> for BoundNamesCollector { use syntax::symbol::Symbol; match r { - ty::ReLateBound(index, br) if *index == self.binder_index => { - match br { - ty::BoundRegion::BrNamed(_, name) => { - self.regions.insert(*name); - } - - ty::BoundRegion::BrAnon(var) => { - self.regions.insert(Symbol::intern( - &format!("'^{}", var) - ).as_interned_str()); - } + ty::ReLateBound(index, br) if *index == self.binder_index => match br { + ty::BoundRegion::BrNamed(_, name) => { + self.regions.insert(*name); + } - _ => (), + ty::BoundRegion::BrAnon(var) => { + self.regions + .insert(Symbol::intern(&format!("'^{}", var)).as_interned_str()); } - } + + _ => (), + }, _ => (), }; @@ -403,7 +399,9 @@ impl<'tcx> fmt::Display for traits::Goal<'tcx> { impl<'tcx> fmt::Display for traits::ProgramClause<'tcx> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - let traits::ProgramClause { goal, hypotheses, .. } = self; + let traits::ProgramClause { + goal, hypotheses, .. 
+ } = self; write!(fmt, "{}", goal)?; if !hypotheses.is_empty() { write!(fmt, " :- ")?; @@ -455,10 +453,10 @@ impl<'a, 'tcx> Lift<'tcx> for traits::SelectionError<'a> { match *self { super::Unimplemented => Some(super::Unimplemented), super::OutputTypeParameterMismatch(a, b, ref err) => { - tcx.lift(&(a, b)).and_then(|(a, b)| + tcx.lift(&(a, b)).and_then(|(a, b)| { tcx.lift(err) .map(|err| super::OutputTypeParameterMismatch(a, b, err)) - ) + }) } super::TraitNotObjectSafe(def_id) => Some(super::TraitNotObjectSafe(def_id)), super::ConstEvalFailure(err) => Some(super::ConstEvalFailure(err)), @@ -480,10 +478,10 @@ impl<'a, 'tcx> Lift<'tcx> for traits::ObligationCauseCode<'a> { super::ReferenceOutlivesReferent(ty) => { tcx.lift(&ty).map(super::ReferenceOutlivesReferent) } - super::ObjectTypeBound(ty, r) => tcx.lift(&ty).and_then(|ty| + super::ObjectTypeBound(ty, r) => tcx.lift(&ty).and_then(|ty| { tcx.lift(&r) - .and_then(|r| Some(super::ObjectTypeBound(ty, r))) - ), + .and_then(|r| Some(super::ObjectTypeBound(ty, r))) + }), super::ObjectCastObligation(ty) => tcx.lift(&ty).map(super::ObjectCastObligation), super::AssignmentLhsSized => Some(super::AssignmentLhsSized), super::TupleInitializerSized => Some(super::TupleInitializerSized), @@ -532,13 +530,13 @@ impl<'a, 'tcx> Lift<'tcx> for traits::ObligationCauseCode<'a> { impl<'a, 'tcx> Lift<'tcx> for traits::DerivedObligationCause<'a> { type Lifted = traits::DerivedObligationCause<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { - tcx.lift(&self.parent_trait_ref).and_then(|trait_ref| + tcx.lift(&self.parent_trait_ref).and_then(|trait_ref| { tcx.lift(&*self.parent_code) - .map(|code| traits::DerivedObligationCause { - parent_trait_ref: trait_ref, - parent_code: Rc::new(code), - }) - ) + .map(|code| traits::DerivedObligationCause { + parent_trait_ref: trait_ref, + parent_code: Rc::new(code), + }) + }) } } @@ -562,40 +560,40 @@ impl<'a, 'tcx> Lift<'tcx> for traits::Vtable<'a, ()> { impl_def_id, substs, nested, - }) => tcx.lift(&substs).map(|substs| + }) => tcx.lift(&substs).map(|substs| { traits::VtableImpl(traits::VtableImplData { impl_def_id, substs, nested, }) - ), + }), traits::VtableAutoImpl(t) => Some(traits::VtableAutoImpl(t)), traits::VtableGenerator(traits::VtableGeneratorData { generator_def_id, substs, nested, - }) => tcx.lift(&substs).map(|substs| + }) => tcx.lift(&substs).map(|substs| { traits::VtableGenerator(traits::VtableGeneratorData { generator_def_id: generator_def_id, substs: substs, nested: nested, }) - ), + }), traits::VtableClosure(traits::VtableClosureData { closure_def_id, substs, nested, - }) => tcx.lift(&substs).map(|substs| + }) => tcx.lift(&substs).map(|substs| { traits::VtableClosure(traits::VtableClosureData { closure_def_id, substs, nested, }) - ), + }), traits::VtableFnPointer(traits::VtableFnPointerData { fn_ty, nested }) => { - tcx.lift(&fn_ty).map(|fn_ty| + tcx.lift(&fn_ty).map(|fn_ty| { traits::VtableFnPointer(traits::VtableFnPointerData { fn_ty, nested }) - ) + }) } traits::VtableParam(n) => Some(traits::VtableParam(n)), traits::VtableBuiltin(n) => Some(traits::VtableBuiltin(n)), @@ -603,24 +601,24 @@ impl<'a, 'tcx> Lift<'tcx> for traits::Vtable<'a, ()> { upcast_trait_ref, vtable_base, nested, - }) => tcx.lift(&upcast_trait_ref).map(|trait_ref| + }) => tcx.lift(&upcast_trait_ref).map(|trait_ref| { traits::VtableObject(traits::VtableObjectData { upcast_trait_ref: trait_ref, vtable_base, nested, }) - ), + }), traits::VtableTraitAlias(traits::VtableTraitAliasData { alias_def_id, 
substs, nested, - }) => tcx.lift(&substs).map(|substs| + }) => tcx.lift(&substs).map(|substs| { traits::VtableTraitAlias(traits::VtableTraitAliasData { alias_def_id, substs, nested, }) - ), + }), } } } @@ -677,11 +675,8 @@ EnumLiftImpl! { impl<'a, 'tcx> Lift<'tcx> for traits::Environment<'a> { type Lifted = traits::Environment<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { - tcx.lift(&self.clauses).map(|clauses| { - traits::Environment { - clauses, - } - }) + tcx.lift(&self.clauses) + .map(|clauses| traits::Environment { clauses }) } } @@ -689,12 +684,8 @@ impl<'a, 'tcx, G: Lift<'tcx>> Lift<'tcx> for traits::InEnvironment<'a, G> { type Lifted = traits::InEnvironment<'tcx, G::Lifted>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { tcx.lift(&self.environment).and_then(|environment| { - tcx.lift(&self.goal).map(|goal| { - traits::InEnvironment { - environment, - goal, - } - }) + tcx.lift(&self.goal) + .map(|goal| traits::InEnvironment { environment, goal }) }) } } @@ -873,7 +864,8 @@ EnumTypeFoldableImpl! { impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { - let v = self.iter() + let v = self + .iter() .map(|t| t.fold_with(folder)) .collect::>(); folder.tcx().intern_goals(&v) @@ -927,7 +919,8 @@ BraceStructTypeFoldableImpl! { impl<'tcx> TypeFoldable<'tcx> for traits::Clauses<'tcx> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { - let v = self.iter() + let v = self + .iter() .map(|t| t.fold_with(folder)) .collect::>(); folder.tcx().intern_clauses(&v) @@ -945,17 +938,11 @@ where C::RegionConstraint: Clone, { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { - ::fold_ex_clause_with( - self, - folder, - ) + ::fold_ex_clause_with(self, folder) } fn super_visit_with>(&self, visitor: &mut V) -> bool { - ::visit_ex_clause_with( - self, - visitor, - ) + ::visit_ex_clause_with(self, visitor) } } diff --git a/src/librustc/traits/util.rs b/src/librustc/traits/util.rs index 5b7ba5386725e..2df53e7b63ad3 100644 --- a/src/librustc/traits/util.rs +++ b/src/librustc/traits/util.rs @@ -1,55 +1,63 @@ use hir; use hir::def_id::DefId; use traits::specialize::specialization_graph::NodeItem; -use ty::{self, Ty, TyCtxt, ToPredicate, ToPolyTraitRef}; use ty::outlives::Component; use ty::subst::{Kind, Subst, Substs}; +use ty::{self, ToPolyTraitRef, ToPredicate, Ty, TyCtxt}; use util::nodemap::FxHashSet; -use super::{Obligation, ObligationCause, PredicateObligation, SelectionContext, Normalized}; +use super::{Normalized, Obligation, ObligationCause, PredicateObligation, SelectionContext}; -fn anonymize_predicate<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, - pred: &ty::Predicate<'tcx>) - -> ty::Predicate<'tcx> { +fn anonymize_predicate<'a, 'gcx, 'tcx>( + tcx: TyCtxt<'a, 'gcx, 'tcx>, + pred: &ty::Predicate<'tcx>, +) -> ty::Predicate<'tcx> { match *pred { - ty::Predicate::Trait(ref data) => - ty::Predicate::Trait(tcx.anonymize_late_bound_regions(data)), + ty::Predicate::Trait(ref data) => { + ty::Predicate::Trait(tcx.anonymize_late_bound_regions(data)) + } - ty::Predicate::RegionOutlives(ref data) => - ty::Predicate::RegionOutlives(tcx.anonymize_late_bound_regions(data)), + ty::Predicate::RegionOutlives(ref data) => { + ty::Predicate::RegionOutlives(tcx.anonymize_late_bound_regions(data)) + } - ty::Predicate::TypeOutlives(ref data) => - 
ty::Predicate::TypeOutlives(tcx.anonymize_late_bound_regions(data)), + ty::Predicate::TypeOutlives(ref data) => { + ty::Predicate::TypeOutlives(tcx.anonymize_late_bound_regions(data)) + } - ty::Predicate::Projection(ref data) => - ty::Predicate::Projection(tcx.anonymize_late_bound_regions(data)), + ty::Predicate::Projection(ref data) => { + ty::Predicate::Projection(tcx.anonymize_late_bound_regions(data)) + } - ty::Predicate::WellFormed(data) => - ty::Predicate::WellFormed(data), + ty::Predicate::WellFormed(data) => ty::Predicate::WellFormed(data), - ty::Predicate::ObjectSafe(data) => - ty::Predicate::ObjectSafe(data), + ty::Predicate::ObjectSafe(data) => ty::Predicate::ObjectSafe(data), - ty::Predicate::ClosureKind(closure_def_id, closure_substs, kind) => - ty::Predicate::ClosureKind(closure_def_id, closure_substs, kind), + ty::Predicate::ClosureKind(closure_def_id, closure_substs, kind) => { + ty::Predicate::ClosureKind(closure_def_id, closure_substs, kind) + } - ty::Predicate::Subtype(ref data) => - ty::Predicate::Subtype(tcx.anonymize_late_bound_regions(data)), + ty::Predicate::Subtype(ref data) => { + ty::Predicate::Subtype(tcx.anonymize_late_bound_regions(data)) + } - ty::Predicate::ConstEvaluatable(def_id, substs) => - ty::Predicate::ConstEvaluatable(def_id, substs), + ty::Predicate::ConstEvaluatable(def_id, substs) => { + ty::Predicate::ConstEvaluatable(def_id, substs) + } } } - -struct PredicateSet<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +struct PredicateSet<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'tcx>, set: FxHashSet>, } impl<'a, 'gcx, 'tcx> PredicateSet<'a, 'gcx, 'tcx> { fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PredicateSet<'a, 'gcx, 'tcx> { - PredicateSet { tcx: tcx, set: Default::default() } + PredicateSet { + tcx: tcx, + set: Default::default(), + } } fn insert(&mut self, pred: &ty::Predicate<'tcx>) -> bool { @@ -78,37 +86,38 @@ impl<'a, 'gcx, 'tcx> PredicateSet<'a, 'gcx, 'tcx> { /// that `T : PartialOrd` holds as well. Similarly, if we have `trait /// Foo : 'static`, and we know that `T : Foo`, then we know that `T : /// 'static`. -pub struct Elaborator<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +pub struct Elaborator<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { stack: Vec>, visited: PredicateSet<'a, 'gcx, 'tcx>, } pub fn elaborate_trait_ref<'cx, 'gcx, 'tcx>( tcx: TyCtxt<'cx, 'gcx, 'tcx>, - trait_ref: ty::PolyTraitRef<'tcx>) - -> Elaborator<'cx, 'gcx, 'tcx> -{ + trait_ref: ty::PolyTraitRef<'tcx>, +) -> Elaborator<'cx, 'gcx, 'tcx> { elaborate_predicates(tcx, vec![trait_ref.to_predicate()]) } pub fn elaborate_trait_refs<'cx, 'gcx, 'tcx>( tcx: TyCtxt<'cx, 'gcx, 'tcx>, - trait_refs: impl Iterator>) - -> Elaborator<'cx, 'gcx, 'tcx> -{ - let predicates = trait_refs.map(|trait_ref| trait_ref.to_predicate()) - .collect(); + trait_refs: impl Iterator>, +) -> Elaborator<'cx, 'gcx, 'tcx> { + let predicates = trait_refs + .map(|trait_ref| trait_ref.to_predicate()) + .collect(); elaborate_predicates(tcx, predicates) } pub fn elaborate_predicates<'cx, 'gcx, 'tcx>( tcx: TyCtxt<'cx, 'gcx, 'tcx>, - mut predicates: Vec>) - -> Elaborator<'cx, 'gcx, 'tcx> -{ + mut predicates: Vec>, +) -> Elaborator<'cx, 'gcx, 'tcx> { let mut visited = PredicateSet::new(tcx); predicates.retain(|pred| visited.insert(pred)); - Elaborator { stack: predicates, visited: visited } + Elaborator { + stack: predicates, + visited: visited, + } } impl<'cx, 'gcx, 'tcx> Elaborator<'cx, 'gcx, 'tcx> { @@ -123,14 +132,16 @@ impl<'cx, 'gcx, 'tcx> Elaborator<'cx, 'gcx, 'tcx> { // Predicates declared on the trait. 
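The elaboration described in the comments above is visible at the surface level: a single written bound brings its supertrait predicates along. A small stable-Rust sketch with an invented trait name:

use std::fmt::Debug;

trait Event: Debug + Clone {}

// The one written bound `T: Event` elaborates to `T: Debug` and `T: Clone` as
// well, which is why both supertrait capabilities are usable here without
// being spelled out.
fn log_twice<T: Event>(event: &T) {
    let copy = event.clone();
    println!("{:?} / {:?}", event, copy);
}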
let predicates = tcx.super_predicates_of(data.def_id()); - let mut predicates: Vec<_> = - predicates.predicates - .iter() - .map(|(p, _)| p.subst_supertrait(tcx, &data.to_poly_trait_ref())) - .collect(); + let mut predicates: Vec<_> = predicates + .predicates + .iter() + .map(|(p, _)| p.subst_supertrait(tcx, &data.to_poly_trait_ref())) + .collect(); - debug!("super_predicates: data={:?} predicates={:?}", - data, predicates); + debug!( + "super_predicates: data={:?} predicates={:?}", + data, predicates + ); // Only keep those bounds that we haven't already // seen. This is necessary to prevent infinite @@ -194,34 +205,36 @@ impl<'cx, 'gcx, 'tcx> Elaborator<'cx, 'gcx, 'tcx> { tcx.push_outlives_components(ty_max, &mut components); self.stack.extend( components - .into_iter() - .filter_map(|component| match component { - Component::Region(r) => if r.is_late_bound() { - None - } else { - Some(ty::Predicate::RegionOutlives( - ty::Binder::dummy(ty::OutlivesPredicate(r, r_min)))) - }, - - Component::Param(p) => { - let ty = tcx.mk_ty_param(p.idx, p.name); - Some(ty::Predicate::TypeOutlives( - ty::Binder::dummy(ty::OutlivesPredicate(ty, r_min)))) - }, - - Component::UnresolvedInferenceVariable(_) => { - None - }, - - Component::Projection(_) | - Component::EscapingProjection(_) => { - // We can probably do more here. This - // corresponds to a case like `>::U: 'b`. - None - }, - }) - .filter(|p| visited.insert(p))); + .into_iter() + .filter_map(|component| match component { + Component::Region(r) => { + if r.is_late_bound() { + None + } else { + Some(ty::Predicate::RegionOutlives(ty::Binder::dummy( + ty::OutlivesPredicate(r, r_min), + ))) + } + } + + Component::Param(p) => { + let ty = tcx.mk_ty_param(p.idx, p.name); + Some(ty::Predicate::TypeOutlives(ty::Binder::dummy( + ty::OutlivesPredicate(ty, r_min), + ))) + } + + Component::UnresolvedInferenceVariable(_) => None, + + Component::Projection(_) | Component::EscapingProjection(_) => { + // We can probably do more here. This + // corresponds to a case like `>::U: 'b`. 
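For the `TypeOutlives` case handled above, a type-outlives predicate is broken into per-component predicates before being pushed back onto the stack. A hedged sketch of what that decomposition means at the surface, with an invented struct name:

struct Wrapper<'a, T>(&'a T);

// Roughly, `Wrapper<'a, T>: 'b` holds when each component does, i.e. `'a: 'b`
// and `T: 'b`; pushing those component predicates is what the elaborator does
// with the outlives obligation above.
fn assert_outlives<'a, 'b, T>(w: &'b Wrapper<'a, T>) -> &'b Wrapper<'a, T>
where
    Wrapper<'a, T>: 'b,
{
    w
}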
+ None + } + }) + .filter(|p| visited.insert(p)), + ); } } } @@ -254,33 +267,33 @@ impl<'cx, 'gcx, 'tcx> Iterator for Elaborator<'cx, 'gcx, 'tcx> { pub type Supertraits<'cx, 'gcx, 'tcx> = FilterToTraits>; -pub fn supertraits<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>, - trait_ref: ty::PolyTraitRef<'tcx>) - -> Supertraits<'cx, 'gcx, 'tcx> -{ +pub fn supertraits<'cx, 'gcx, 'tcx>( + tcx: TyCtxt<'cx, 'gcx, 'tcx>, + trait_ref: ty::PolyTraitRef<'tcx>, +) -> Supertraits<'cx, 'gcx, 'tcx> { elaborate_trait_ref(tcx, trait_ref).filter_to_traits() } -pub fn transitive_bounds<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>, - bounds: impl Iterator>) - -> Supertraits<'cx, 'gcx, 'tcx> -{ +pub fn transitive_bounds<'cx, 'gcx, 'tcx>( + tcx: TyCtxt<'cx, 'gcx, 'tcx>, + bounds: impl Iterator>, +) -> Supertraits<'cx, 'gcx, 'tcx> { elaborate_trait_refs(tcx, bounds).filter_to_traits() } /////////////////////////////////////////////////////////////////////////// // Iterator over def-ids of supertraits -pub struct SupertraitDefIds<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +pub struct SupertraitDefIds<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'tcx>, stack: Vec, visited: FxHashSet, } -pub fn supertrait_def_ids<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>, - trait_def_id: DefId) - -> SupertraitDefIds<'cx, 'gcx, 'tcx> -{ +pub fn supertrait_def_ids<'cx, 'gcx, 'tcx>( + tcx: TyCtxt<'cx, 'gcx, 'tcx>, + trait_def_id: DefId, +) -> SupertraitDefIds<'cx, 'gcx, 'tcx> { SupertraitDefIds { tcx, stack: vec![trait_def_id], @@ -294,17 +307,21 @@ impl<'cx, 'gcx, 'tcx> Iterator for SupertraitDefIds<'cx, 'gcx, 'tcx> { fn next(&mut self) -> Option { let def_id = match self.stack.pop() { Some(def_id) => def_id, - None => { return None; } + None => { + return None; + } }; let predicates = self.tcx.super_predicates_of(def_id); let visited = &mut self.visited; self.stack.extend( - predicates.predicates - .iter() - .filter_map(|(p, _)| p.to_opt_poly_trait_ref()) - .map(|t| t.def_id()) - .filter(|&super_def_id| visited.insert(super_def_id))); + predicates + .predicates + .iter() + .filter_map(|(p, _)| p.to_opt_poly_trait_ref()) + .map(|t| t.def_id()) + .filter(|&super_def_id| visited.insert(super_def_id)), + ); Some(def_id) } } @@ -316,12 +333,14 @@ impl<'cx, 'gcx, 'tcx> Iterator for SupertraitDefIds<'cx, 'gcx, 'tcx> { /// A filter around an iterator of predicates that makes it yield up /// just trait references. pub struct FilterToTraits { - base_iterator: I + base_iterator: I, } impl FilterToTraits { fn new(base: I) -> FilterToTraits { - FilterToTraits { base_iterator: base } + FilterToTraits { + base_iterator: base, + } } } @@ -355,29 +374,30 @@ impl<'tcx, I: Iterator>> Iterator for FilterToTraits< /// Instantiate all bound parameters of the impl with the given substs, /// returning the resulting trait ref and all obligations that arise. /// The obligations are closed under normalization. 
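A rough picture of the result `impl_trait_ref_and_oblig` is documented to return, using invented names: instantiating the impl below at `T = u32` yields the trait ref for `Pair<u32>` together with the impl's instantiated where-clauses as obligations.

struct Pair<T>(T, T);

trait Sum {
    fn total(&self) -> u64;
}

// Substituting `T = u32` into this impl gives the trait ref `Pair<u32>: Sum`
// plus the instantiated where-clauses (`u32: Into<u64>` and `u32: Copy`) as
// obligations, which is the shape of value the helper above produces.
impl<T: Into<u64> + Copy> Sum for Pair<T> {
    fn total(&self) -> u64 {
        self.0.into() + self.1.into()
    }
}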
-pub fn impl_trait_ref_and_oblig<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - impl_def_id: DefId, - impl_substs: &Substs<'tcx>) - -> (ty::TraitRef<'tcx>, - Vec>) -{ - let impl_trait_ref = - selcx.tcx().impl_trait_ref(impl_def_id).unwrap(); - let impl_trait_ref = - impl_trait_ref.subst(selcx.tcx(), impl_substs); - let Normalized { value: impl_trait_ref, obligations: normalization_obligations1 } = - super::normalize(selcx, param_env, ObligationCause::dummy(), &impl_trait_ref); +pub fn impl_trait_ref_and_oblig<'a, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'a, 'gcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + impl_def_id: DefId, + impl_substs: &Substs<'tcx>, +) -> (ty::TraitRef<'tcx>, Vec>) { + let impl_trait_ref = selcx.tcx().impl_trait_ref(impl_def_id).unwrap(); + let impl_trait_ref = impl_trait_ref.subst(selcx.tcx(), impl_substs); + let Normalized { + value: impl_trait_ref, + obligations: normalization_obligations1, + } = super::normalize(selcx, param_env, ObligationCause::dummy(), &impl_trait_ref); let predicates = selcx.tcx().predicates_of(impl_def_id); let predicates = predicates.instantiate(selcx.tcx(), impl_substs); - let Normalized { value: predicates, obligations: normalization_obligations2 } = - super::normalize(selcx, param_env, ObligationCause::dummy(), &predicates); + let Normalized { + value: predicates, + obligations: normalization_obligations2, + } = super::normalize(selcx, param_env, ObligationCause::dummy(), &predicates); let impl_obligations = predicates_for_generics(ObligationCause::dummy(), 0, param_env, &predicates); - let impl_obligations: Vec<_> = - impl_obligations.into_iter() + let impl_obligations: Vec<_> = impl_obligations + .into_iter() .chain(normalization_obligations1) .chain(normalization_obligations2) .collect(); @@ -386,30 +406,35 @@ pub fn impl_trait_ref_and_oblig<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, } /// See `super::obligations_for_generics` -pub fn predicates_for_generics<'tcx>(cause: ObligationCause<'tcx>, - recursion_depth: usize, - param_env: ty::ParamEnv<'tcx>, - generic_bounds: &ty::InstantiatedPredicates<'tcx>) - -> Vec> -{ - debug!("predicates_for_generics(generic_bounds={:?})", - generic_bounds); - - generic_bounds.predicates.iter().map(|predicate| { - Obligation { cause: cause.clone(), - recursion_depth, - param_env, - predicate: predicate.clone() } - }).collect() +pub fn predicates_for_generics<'tcx>( + cause: ObligationCause<'tcx>, + recursion_depth: usize, + param_env: ty::ParamEnv<'tcx>, + generic_bounds: &ty::InstantiatedPredicates<'tcx>, +) -> Vec> { + debug!( + "predicates_for_generics(generic_bounds={:?})", + generic_bounds + ); + + generic_bounds + .predicates + .iter() + .map(|predicate| Obligation { + cause: cause.clone(), + recursion_depth, + param_env, + predicate: predicate.clone(), + }) + .collect() } pub fn predicate_for_trait_ref<'tcx>( cause: ObligationCause<'tcx>, param_env: ty::ParamEnv<'tcx>, trait_ref: ty::TraitRef<'tcx>, - recursion_depth: usize) - -> PredicateObligation<'tcx> -{ + recursion_depth: usize, +) -> PredicateObligation<'tcx> { Obligation { cause, param_env, @@ -419,18 +444,18 @@ pub fn predicate_for_trait_ref<'tcx>( } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - pub fn predicate_for_trait_def(self, - param_env: ty::ParamEnv<'tcx>, - cause: ObligationCause<'tcx>, - trait_def_id: DefId, - recursion_depth: usize, - self_ty: Ty<'tcx>, - params: &[Kind<'tcx>]) - -> PredicateObligation<'tcx> - { + pub fn predicate_for_trait_def( + self, + param_env: 
ty::ParamEnv<'tcx>, + cause: ObligationCause<'tcx>, + trait_def_id: DefId, + recursion_depth: usize, + self_ty: Ty<'tcx>, + params: &[Kind<'tcx>], + ) -> PredicateObligation<'tcx> { let trait_ref = ty::TraitRef { def_id: trait_def_id, - substs: self.mk_substs_trait(self_ty, params) + substs: self.mk_substs_trait(self_ty, params), }; predicate_for_trait_ref(cause, param_env, trait_ref, recursion_depth) } @@ -438,11 +463,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Cast a trait reference into a reference to one of its super /// traits; returns `None` if `target_trait_def_id` is not a /// supertrait. - pub fn upcast_choices(self, - source_trait_ref: ty::PolyTraitRef<'tcx>, - target_trait_def_id: DefId) - -> Vec> - { + pub fn upcast_choices( + self, + source_trait_ref: ty::PolyTraitRef<'tcx>, + target_trait_def_id: DefId, + ) -> Vec> { if source_trait_ref.def_id() == target_trait_def_id { return vec![source_trait_ref]; // shorcut the most common case } @@ -470,9 +495,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Given an upcast trait object described by `object`, returns the /// index of the method `method_def_id` (which should be part of /// `object.upcast_trait_ref`) within the vtable for `object`. - pub fn get_vtable_index_of_object_method(self, - object: &super::VtableObjectData<'tcx, N>, - method_def_id: DefId) -> usize { + pub fn get_vtable_index_of_object_method( + self, + object: &super::VtableObjectData<'tcx, N>, + method_def_id: DefId, + ) -> usize { // Count number of methods preceding the one we are selecting and // add them to the total offset. // Skip over associated types and constants. @@ -488,21 +515,22 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - bug!("get_vtable_index_of_object_method: {:?} was not found", - method_def_id); + bug!( + "get_vtable_index_of_object_method: {:?} was not found", + method_def_id + ); } - pub fn closure_trait_ref_and_return_type(self, + pub fn closure_trait_ref_and_return_type( + self, fn_trait_def_id: DefId, self_ty: Ty<'tcx>, sig: ty::PolyFnSig<'tcx>, - tuple_arguments: TupleArgumentsFlag) - -> ty::Binder<(ty::TraitRef<'tcx>, Ty<'tcx>)> - { + tuple_arguments: TupleArgumentsFlag, + ) -> ty::Binder<(ty::TraitRef<'tcx>, Ty<'tcx>)> { let arguments_tuple = match tuple_arguments { TupleArgumentsFlag::No => sig.skip_binder().inputs()[0], - TupleArgumentsFlag::Yes => - self.intern_tup(sig.skip_binder().inputs()), + TupleArgumentsFlag::Yes => self.intern_tup(sig.skip_binder().inputs()), }; let trait_ref = ty::TraitRef { def_id: fn_trait_def_id, @@ -511,17 +539,21 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { ty::Binder::bind((trait_ref, sig.skip_binder().output())) } - pub fn generator_trait_ref_and_outputs(self, + pub fn generator_trait_ref_and_outputs( + self, fn_trait_def_id: DefId, self_ty: Ty<'tcx>, - sig: ty::PolyGenSig<'tcx>) - -> ty::Binder<(ty::TraitRef<'tcx>, Ty<'tcx>, Ty<'tcx>)> - { + sig: ty::PolyGenSig<'tcx>, + ) -> ty::Binder<(ty::TraitRef<'tcx>, Ty<'tcx>, Ty<'tcx>)> { let trait_ref = ty::TraitRef { def_id: fn_trait_def_id, substs: self.mk_substs_trait(self_ty, &[]), }; - ty::Binder::bind((trait_ref, sig.skip_binder().yield_ty, sig.skip_binder().return_ty)) + ty::Binder::bind(( + trait_ref, + sig.skip_binder().yield_ty, + sig.skip_binder().return_ty, + )) } pub fn impl_is_default(self, node_item_def_id: DefId) -> bool { @@ -534,11 +566,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { false } } - None => { - self.global_tcx() - .impl_defaultness(node_item_def_id) - .is_default() - } + None => self + 
.global_tcx() + .impl_defaultness(node_item_def_id) + .is_default(), } } @@ -547,4 +578,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } -pub enum TupleArgumentsFlag { Yes, No } +pub enum TupleArgumentsFlag { + Yes, + No, +} diff --git a/src/librustc/ty/_match.rs b/src/librustc/ty/_match.rs index 3657a880804e1..00aaf29c02df1 100644 --- a/src/librustc/ty/_match.rs +++ b/src/librustc/ty/_match.rs @@ -1,6 +1,6 @@ -use ty::{self, Ty, TyCtxt}; use ty::error::TypeError; -use ty::relate::{self, Relate, TypeRelation, RelateResult}; +use ty::relate::{self, Relate, RelateResult, TypeRelation}; +use ty::{self, Ty, TyCtxt}; /// A type "A" *matches* "B" if the fresh types in B could be /// substituted with values so as to make it equal to A. Matching is @@ -18,77 +18,87 @@ use ty::relate::{self, Relate, TypeRelation, RelateResult}; /// Like subtyping, matching is really a binary relation, so the only /// important thing about the result is Ok/Err. Also, matching never /// affects any type variables or unification state. -pub struct Match<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +pub struct Match<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'tcx>, - trait_object_mode: relate::TraitObjectMode + trait_object_mode: relate::TraitObjectMode, } impl<'a, 'gcx, 'tcx> Match<'a, 'gcx, 'tcx> { - pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, - trait_object_mode: relate::TraitObjectMode) - -> Match<'a, 'gcx, 'tcx> { - Match { tcx, trait_object_mode } + pub fn new( + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_object_mode: relate::TraitObjectMode, + ) -> Match<'a, 'gcx, 'tcx> { + Match { + tcx, + trait_object_mode, + } } } impl<'a, 'gcx, 'tcx> TypeRelation<'a, 'gcx, 'tcx> for Match<'a, 'gcx, 'tcx> { - fn tag(&self) -> &'static str { "Match" } + fn tag(&self) -> &'static str { + "Match" + } fn trait_object_mode(&self) -> relate::TraitObjectMode { self.trait_object_mode } - fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.tcx } - fn a_is_expected(&self) -> bool { true } // irrelevant + fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { + self.tcx + } + fn a_is_expected(&self) -> bool { + true + } // irrelevant - fn relate_with_variance>(&mut self, - _: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T> - { + fn relate_with_variance>( + &mut self, + _: ty::Variance, + a: &T, + b: &T, + ) -> RelateResult<'tcx, T> { self.relate(a, b) } - fn regions(&mut self, a: ty::Region<'tcx>, b: ty::Region<'tcx>) - -> RelateResult<'tcx, ty::Region<'tcx>> { - debug!("{}.regions({:?}, {:?})", - self.tag(), - a, - b); + fn regions( + &mut self, + a: ty::Region<'tcx>, + b: ty::Region<'tcx>, + ) -> RelateResult<'tcx, ty::Region<'tcx>> { + debug!("{}.regions({:?}, {:?})", self.tag(), a, b); Ok(a) } fn tys(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> { - debug!("{}.tys({:?}, {:?})", self.tag(), - a, b); - if a == b { return Ok(a); } + debug!("{}.tys({:?}, {:?})", self.tag(), a, b); + if a == b { + return Ok(a); + } match (&a.sty, &b.sty) { - (_, &ty::Infer(ty::FreshTy(_))) | - (_, &ty::Infer(ty::FreshIntTy(_))) | - (_, &ty::Infer(ty::FreshFloatTy(_))) => { - Ok(a) - } + (_, &ty::Infer(ty::FreshTy(_))) + | (_, &ty::Infer(ty::FreshIntTy(_))) + | (_, &ty::Infer(ty::FreshFloatTy(_))) => Ok(a), - (&ty::Infer(_), _) | - (_, &ty::Infer(_)) => { + (&ty::Infer(_), _) | (_, &ty::Infer(_)) => { Err(TypeError::Sorts(relate::expected_found(self, &a, &b))) } - (&ty::Error, _) | (_, &ty::Error) => { - Ok(self.tcx().types.err) - } + (&ty::Error, _) | (_, &ty::Error) => Ok(self.tcx().types.err), - _ => { - relate::super_relate_tys(self, a, b) - 
} + _ => relate::super_relate_tys(self, a, b), } } - fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) - -> RelateResult<'tcx, ty::Binder> - where T: Relate<'tcx> + fn binders( + &mut self, + a: &ty::Binder, + b: &ty::Binder, + ) -> RelateResult<'tcx, ty::Binder> + where + T: Relate<'tcx>, { - Ok(ty::Binder::bind(self.relate(a.skip_binder(), b.skip_binder())?)) + Ok(ty::Binder::bind( + self.relate(a.skip_binder(), b.skip_binder())?, + )) } } diff --git a/src/librustc/ty/adjustment.rs b/src/librustc/ty/adjustment.rs index 117112c0c75f4..6ce45f4cc4f49 100644 --- a/src/librustc/ty/adjustment.rs +++ b/src/librustc/ty/adjustment.rs @@ -1,8 +1,7 @@ use hir; use hir::def_id::DefId; -use ty::{self, Ty, TyCtxt}; use ty::subst::Substs; - +use ty::{self, Ty, TyCtxt}; /// Represents coercing a value to a different type of value. /// @@ -97,14 +96,20 @@ pub struct OverloadedDeref<'tcx> { } impl<'a, 'gcx, 'tcx> OverloadedDeref<'tcx> { - pub fn method_call(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, source: Ty<'tcx>) - -> (DefId, &'tcx Substs<'tcx>) { + pub fn method_call( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + source: Ty<'tcx>, + ) -> (DefId, &'tcx Substs<'tcx>) { let trait_def_id = match self.mutbl { hir::MutImmutable => tcx.lang_items().deref_trait(), - hir::MutMutable => tcx.lang_items().deref_mut_trait() + hir::MutMutable => tcx.lang_items().deref_mut_trait(), }; - let method_def_id = tcx.associated_items(trait_def_id.unwrap()) - .find(|m| m.kind == ty::AssociatedKind::Method).unwrap().def_id; + let method_def_id = tcx + .associated_items(trait_def_id.unwrap()) + .find(|m| m.kind == ty::AssociatedKind::Method) + .unwrap() + .def_id; (method_def_id, tcx.mk_substs_trait(source, &[])) } } @@ -124,12 +129,14 @@ impl<'a, 'gcx, 'tcx> OverloadedDeref<'tcx> { #[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)] pub enum AllowTwoPhase { Yes, - No + No, } #[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)] pub enum AutoBorrowMutability { - Mutable { allow_two_phase_borrow: AllowTwoPhase }, + Mutable { + allow_two_phase_borrow: AllowTwoPhase, + }, Immutable, } @@ -163,11 +170,11 @@ pub struct CoerceUnsizedInfo { /// coercion is it? This applies to impls of `CoerceUnsized` for /// structs, primarily, where we store a bit of info about which /// fields need to be coerced. - pub custom_kind: Option + pub custom_kind: Option, } #[derive(Clone, Copy, RustcEncodable, RustcDecodable, Debug)] pub enum CustomCoerceUnsized { /// Records the index of the field being coerced. 
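The `CustomCoerceUnsized::Struct` kind above records which field of a struct the unsizing coercion goes through. A nightly-only sketch of such a struct, with an invented `MyBox` wrapper; it needs the `coerce_unsized` and `unsize` feature gates:

#![feature(coerce_unsized, unsize)]

use std::marker::Unsize;
use std::ops::CoerceUnsized;

// A pointer-like wrapper can unsize through exactly one of its fields; for an
// impl like this one, the recorded kind would note that field index (here, 0).
struct MyBox<T: ?Sized>(Box<T>);

impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<MyBox<U>> for MyBox<T> {}

fn unsize_it(fixed: MyBox<[u8; 3]>) -> MyBox<[u8]> {
    fixed // coercion site: MyBox<[u8; 3]> -> MyBox<[u8]>
}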
- Struct(usize) + Struct(usize), } diff --git a/src/librustc/ty/binding.rs b/src/librustc/ty/binding.rs index 2ab14642406b1..f34e1ac2b5d04 100644 --- a/src/librustc/ty/binding.rs +++ b/src/librustc/ty/binding.rs @@ -1,5 +1,5 @@ -use hir::BindingAnnotation::*; use hir::BindingAnnotation; +use hir::BindingAnnotation::*; use hir::Mutability; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] diff --git a/src/librustc/ty/cast.rs b/src/librustc/ty/cast.rs index 0f067de3649bf..a2f6ddb841705 100644 --- a/src/librustc/ty/cast.rs +++ b/src/librustc/ty/cast.rs @@ -12,7 +12,7 @@ pub enum IntTy { I, CEnum, Bool, - Char + Char, } // Valid types for the result of a non-coercion cast @@ -44,7 +44,7 @@ pub enum CastKind { U8CharCast, ArrayPtrCast, FnPtrPtrCast, - FnPtrAddrCast + FnPtrAddrCast, } impl<'tcx> CastTy<'tcx> { @@ -59,8 +59,7 @@ impl<'tcx> CastTy<'tcx> { ty::Infer(ty::InferTy::FloatVar(_)) => Some(CastTy::Float), ty::Uint(u) => Some(CastTy::Int(IntTy::U(u))), ty::Float(_) => Some(CastTy::Float), - ty::Adt(d,_) if d.is_enum() && d.is_payloadfree() => - Some(CastTy::Int(IntTy::CEnum)), + ty::Adt(d, _) if d.is_enum() && d.is_payloadfree() => Some(CastTy::Int(IntTy::CEnum)), ty::RawPtr(mt) => Some(CastTy::Ptr(mt)), ty::Ref(_, ty, mutbl) => Some(CastTy::RPtr(ty::TypeAndMut { ty, mutbl })), ty::FnPtr(..) => Some(CastTy::FnPtr), diff --git a/src/librustc/ty/codec.rs b/src/librustc/ty/codec.rs index 6429e3249c4c5..b228af2256ef1 100644 --- a/src/librustc/ty/codec.rs +++ b/src/librustc/ty/codec.rs @@ -6,15 +6,15 @@ // The functionality in here is shared between persisting to crate metadata and // persisting to incr. comp. caches. -use hir::def_id::{DefId, CrateNum}; +use hir::def_id::{CrateNum, DefId}; use infer::canonical::{CanonicalVarInfo, CanonicalVarInfos}; +use mir::interpret::Allocation; use rustc_data_structures::fx::FxHashMap; -use rustc_serialize::{Decodable, Decoder, Encoder, Encodable, opaque}; +use rustc_serialize::{opaque, Decodable, Decoder, Encodable, Encoder}; use std::hash::Hash; use std::intrinsics; -use ty::{self, Ty, TyCtxt}; use ty::subst::Substs; -use mir::interpret::Allocation; +use ty::{self, Ty, TyCtxt}; /// The shorthand encoding uses an enum's variant index `usize` /// and is offset by this value so it never matches a real variant. @@ -52,13 +52,11 @@ impl TyEncoder for opaque::Encoder { } /// Encode the given value or a previously cached shorthand. 
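// Editorial sketch (standalone, not the `TyEncoder`/`TyDecoder` API): the
// shorthand scheme in ty/codec.rs caches where a value was first encoded and,
// for later occurrences, emits `SHORTHAND_OFFSET + position` instead of
// re-encoding it; on decode, any tag >= SHORTHAND_OFFSET is a back-reference.
// A simplified analogue using entry indices rather than byte positions:
use std::collections::HashMap;

const SHORTHAND_OFFSET: usize = 0x80;

#[derive(Debug, PartialEq)]
enum Entry {
    Value(String),    // a full encoding of the value
    Shorthand(usize), // SHORTHAND_OFFSET + position of the full encoding
}

fn encode(values: &[&str], out: &mut Vec<Entry>, cache: &mut HashMap<String, usize>) {
    for v in values {
        if let Some(&pos) = cache.get(*v) {
            // Seen before: emit a back-reference instead of re-encoding.
            out.push(Entry::Shorthand(SHORTHAND_OFFSET + pos));
        } else {
            cache.insert((*v).to_string(), out.len());
            out.push(Entry::Value((*v).to_string()));
        }
    }
}

fn decode(stream: &[Entry]) -> Vec<String> {
    stream
        .iter()
        .map(|entry| match entry {
            Entry::Value(s) => s.clone(),
            // A tag >= SHORTHAND_OFFSET means "re-read the entry stored at
            // tag - SHORTHAND_OFFSET", mirroring `with_position` above.
            Entry::Shorthand(tag) => match &stream[*tag - SHORTHAND_OFFSET] {
                Entry::Value(s) => s.clone(),
                Entry::Shorthand(_) => unreachable!("shorthands never point at shorthands"),
            },
        })
        .collect()
}

fn main() {
    let mut out = Vec::new();
    let mut cache = HashMap::new();
    encode(&["u32", "bool", "u32"], &mut out, &mut cache);
    assert_eq!(out[2], Entry::Shorthand(SHORTHAND_OFFSET));
    assert_eq!(decode(&out), vec!["u32", "bool", "u32"]);
}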
-pub fn encode_with_shorthand(encoder: &mut E, - value: &T, - cache: M) - -> Result<(), E::Error> - where E: TyEncoder, - M: for<'b> Fn(&'b mut E) -> &'b mut FxHashMap, - T: EncodableWithShorthand, +pub fn encode_with_shorthand(encoder: &mut E, value: &T, cache: M) -> Result<(), E::Error> +where + E: TyEncoder, + M: for<'b> Fn(&'b mut E) -> &'b mut FxHashMap, + T: EncodableWithShorthand, { let existing_shorthand = cache(encoder).get(value).cloned(); if let Some(shorthand) = existing_shorthand { @@ -90,12 +88,14 @@ pub fn encode_with_shorthand(encoder: &mut E, Ok(()) } -pub fn encode_predicates<'tcx, E, C>(encoder: &mut E, - predicates: &ty::GenericPredicates<'tcx>, - cache: C) - -> Result<(), E::Error> - where E: TyEncoder, - C: for<'b> Fn(&'b mut E) -> &'b mut FxHashMap, usize>, +pub fn encode_predicates<'tcx, E, C>( + encoder: &mut E, + predicates: &ty::GenericPredicates<'tcx>, + cache: C, +) -> Result<(), E::Error> +where + E: TyEncoder, + C: for<'b> Fn(&'b mut E) -> &'b mut FxHashMap, usize>, { predicates.parent.encode(encoder)?; predicates.predicates.len().encode(encoder)?; @@ -107,21 +107,23 @@ pub fn encode_predicates<'tcx, E, C>(encoder: &mut E, } pub trait TyDecoder<'a, 'tcx: 'a>: Decoder { - fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx>; fn peek_byte(&self) -> u8; fn position(&self) -> usize; - fn cached_ty_for_shorthand(&mut self, - shorthand: usize, - or_insert_with: F) - -> Result, Self::Error> - where F: FnOnce(&mut Self) -> Result, Self::Error>; + fn cached_ty_for_shorthand( + &mut self, + shorthand: usize, + or_insert_with: F, + ) -> Result, Self::Error> + where + F: FnOnce(&mut Self) -> Result, Self::Error>; fn with_position(&mut self, pos: usize, f: F) -> R - where F: FnOnce(&mut Self) -> R; + where + F: FnOnce(&mut Self) -> R; fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum; @@ -132,8 +134,9 @@ pub trait TyDecoder<'a, 'tcx: 'a>: Decoder { #[inline] pub fn decode_cnum<'a, 'tcx, D>(decoder: &mut D) -> Result - where D: TyDecoder<'a, 'tcx>, - 'tcx: 'a, +where + D: TyDecoder<'a, 'tcx>, + 'tcx: 'a, { let cnum = CrateNum::from_u32(u32::decode(decoder)?); Ok(decoder.map_encoded_cnum_to_current(cnum)) @@ -141,8 +144,9 @@ pub fn decode_cnum<'a, 'tcx, D>(decoder: &mut D) -> Result #[inline] pub fn decode_ty<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> - where D: TyDecoder<'a, 'tcx>, - 'tcx: 'a, +where + D: TyDecoder<'a, 'tcx>, + 'tcx: 'a, { // Handle shorthands first, if we have an usize > 0x80. if decoder.positioned_at_shorthand() { @@ -160,34 +164,38 @@ pub fn decode_ty<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> } #[inline] -pub fn decode_predicates<'a, 'tcx, D>(decoder: &mut D) - -> Result, D::Error> - where D: TyDecoder<'a, 'tcx>, - 'tcx: 'a, +pub fn decode_predicates<'a, 'tcx, D>( + decoder: &mut D, +) -> Result, D::Error> +where + D: TyDecoder<'a, 'tcx>, + 'tcx: 'a, { Ok(ty::GenericPredicates { parent: Decodable::decode(decoder)?, - predicates: (0..decoder.read_usize()?).map(|_| { - // Handle shorthands first, if we have an usize > 0x80. - let predicate = if decoder.positioned_at_shorthand() { - let pos = decoder.read_usize()?; - assert!(pos >= SHORTHAND_OFFSET); - let shorthand = pos - SHORTHAND_OFFSET; - - decoder.with_position(shorthand, ty::Predicate::decode) - } else { - ty::Predicate::decode(decoder) - }?; - Ok((predicate, Decodable::decode(decoder)?)) - }) - .collect::, _>>()?, + predicates: (0..decoder.read_usize()?) + .map(|_| { + // Handle shorthands first, if we have an usize > 0x80. 
+ let predicate = if decoder.positioned_at_shorthand() { + let pos = decoder.read_usize()?; + assert!(pos >= SHORTHAND_OFFSET); + let shorthand = pos - SHORTHAND_OFFSET; + + decoder.with_position(shorthand, ty::Predicate::decode) + } else { + ty::Predicate::decode(decoder) + }?; + Ok((predicate, Decodable::decode(decoder)?)) + }) + .collect::, _>>()?, }) } #[inline] pub fn decode_substs<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx Substs<'tcx>, D::Error> - where D: TyDecoder<'a, 'tcx>, - 'tcx: 'a, +where + D: TyDecoder<'a, 'tcx>, + 'tcx: 'a, { let len = decoder.read_usize()?; let tcx = decoder.tcx(); @@ -196,72 +204,83 @@ pub fn decode_substs<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx Substs<'tcx>, #[inline] pub fn decode_region<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> - where D: TyDecoder<'a, 'tcx>, - 'tcx: 'a, +where + D: TyDecoder<'a, 'tcx>, + 'tcx: 'a, { Ok(decoder.tcx().mk_region(Decodable::decode(decoder)?)) } #[inline] -pub fn decode_ty_slice<'a, 'tcx, D>(decoder: &mut D) - -> Result<&'tcx ty::List>, D::Error> - where D: TyDecoder<'a, 'tcx>, - 'tcx: 'a, +pub fn decode_ty_slice<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx ty::List>, D::Error> +where + D: TyDecoder<'a, 'tcx>, + 'tcx: 'a, { let len = decoder.read_usize()?; - Ok(decoder.tcx().mk_type_list((0..len).map(|_| Decodable::decode(decoder)))?) + Ok(decoder + .tcx() + .mk_type_list((0..len).map(|_| Decodable::decode(decoder)))?) } #[inline] -pub fn decode_adt_def<'a, 'tcx, D>(decoder: &mut D) - -> Result<&'tcx ty::AdtDef, D::Error> - where D: TyDecoder<'a, 'tcx>, - 'tcx: 'a, +pub fn decode_adt_def<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx ty::AdtDef, D::Error> +where + D: TyDecoder<'a, 'tcx>, + 'tcx: 'a, { let def_id = DefId::decode(decoder)?; Ok(decoder.tcx().adt_def(def_id)) } #[inline] -pub fn decode_existential_predicate_slice<'a, 'tcx, D>(decoder: &mut D) - -> Result<&'tcx ty::List>, D::Error> - where D: TyDecoder<'a, 'tcx>, - 'tcx: 'a, +pub fn decode_existential_predicate_slice<'a, 'tcx, D>( + decoder: &mut D, +) -> Result<&'tcx ty::List>, D::Error> +where + D: TyDecoder<'a, 'tcx>, + 'tcx: 'a, { let len = decoder.read_usize()?; - Ok(decoder.tcx() - .mk_existential_predicates((0..len).map(|_| Decodable::decode(decoder)))?) + Ok(decoder + .tcx() + .mk_existential_predicates((0..len).map(|_| Decodable::decode(decoder)))?) 
} #[inline] -pub fn decode_canonical_var_infos<'a, 'tcx, D>(decoder: &mut D) - -> Result, D::Error> - where D: TyDecoder<'a, 'tcx>, - 'tcx: 'a, +pub fn decode_canonical_var_infos<'a, 'tcx, D>( + decoder: &mut D, +) -> Result, D::Error> +where + D: TyDecoder<'a, 'tcx>, + 'tcx: 'a, { let len = decoder.read_usize()?; - let interned: Result, _> = (0..len).map(|_| Decodable::decode(decoder)) - .collect(); - Ok(decoder.tcx() - .intern_canonical_var_infos(interned?.as_slice())) + let interned: Result, _> = + (0..len).map(|_| Decodable::decode(decoder)).collect(); + Ok(decoder + .tcx() + .intern_canonical_var_infos(interned?.as_slice())) } #[inline] -pub fn decode_const<'a, 'tcx, D>(decoder: &mut D) - -> Result<&'tcx ty::Const<'tcx>, D::Error> - where D: TyDecoder<'a, 'tcx>, - 'tcx: 'a, +pub fn decode_const<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx ty::Const<'tcx>, D::Error> +where + D: TyDecoder<'a, 'tcx>, + 'tcx: 'a, { Ok(decoder.tcx().mk_const(Decodable::decode(decoder)?)) } #[inline] -pub fn decode_allocation<'a, 'tcx, D>(decoder: &mut D) - -> Result<&'tcx Allocation, D::Error> - where D: TyDecoder<'a, 'tcx>, - 'tcx: 'a, +pub fn decode_allocation<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx Allocation, D::Error> +where + D: TyDecoder<'a, 'tcx>, + 'tcx: 'a, { - Ok(decoder.tcx().intern_const_alloc(Decodable::decode(decoder)?)) + Ok(decoder + .tcx() + .intern_const_alloc(Decodable::decode(decoder)?)) } #[macro_export] diff --git a/src/librustc/ty/constness.rs b/src/librustc/ty/constness.rs index 3741f4051b896..12bd544660b92 100644 --- a/src/librustc/ty/constness.rs +++ b/src/librustc/ty/constness.rs @@ -1,31 +1,33 @@ -use ty::query::Providers; -use hir::def_id::DefId; use hir; -use ty::TyCtxt; -use syntax_pos::symbol::Symbol; +use hir::def_id::DefId; use hir::map::blocks::FnLikeNode; use syntax::attr; +use syntax_pos::symbol::Symbol; +use ty::query::Providers; +use ty::TyCtxt; impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { /// Whether the `def_id` counts as const fn in your current crate, considering all active /// feature gates pub fn is_const_fn(self, def_id: DefId) -> bool { - self.is_const_fn_raw(def_id) && match self.lookup_stability(def_id) { - Some(stab) => match stab.const_stability { - // has a `rustc_const_unstable` attribute, check whether the user enabled the - // corresponding feature gate - Some(feature_name) => self.features() - .declared_lib_features - .iter() - .any(|&(sym, _)| sym == feature_name), - // the function has no stability attribute, it is stable as const fn or the user - // needs to use feature gates to use the function at all + self.is_const_fn_raw(def_id) + && match self.lookup_stability(def_id) { + Some(stab) => match stab.const_stability { + // has a `rustc_const_unstable` attribute, check whether the user enabled the + // corresponding feature gate + Some(feature_name) => self + .features() + .declared_lib_features + .iter() + .any(|&(sym, _)| sym == feature_name), + // the function has no stability attribute, it is stable as const fn or the user + // needs to use feature gates to use the function at all + None => true, + }, + // functions without stability are either stable user written const fn or the user is + // using feature gates and we thus don't care what they do None => true, - }, - // functions without stability are either stable user written const fn or the user is - // using feature gates and we thus don't care what they do - None => true, - } + } } /// Whether the `def_id` is an unstable const fn and what feature gate is necessary to enable it @@ -49,7 
+51,10 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { // it needs to be stable and have no `rustc_const_unstable` attribute match self.lookup_stability(def_id) { // stable functions with unstable const fn aren't `min_const_fn` - Some(&attr::Stability { const_stability: Some(_), .. }) => false, + Some(&attr::Stability { + const_stability: Some(_), + .. + }) => false, // unstable functions don't need to conform Some(&attr::Stability { ref level, .. }) if level.is_unstable() => false, // everything else needs to conform, because it would be callable from @@ -63,12 +68,13 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { } } - pub fn provide<'tcx>(providers: &mut Providers<'tcx>) { /// only checks whether the function has a `const` modifier fn is_const_fn_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool { - let node_id = tcx.hir().as_local_node_id(def_id) - .expect("Non-local call to local provider is_const_fn"); + let node_id = tcx + .hir() + .as_local_node_id(def_id) + .expect("Non-local call to local provider is_const_fn"); if let Some(fn_like) = FnLikeNode::from_node(tcx.hir().get(node_id)) { fn_like.constness() == hir::Constness::Const @@ -78,21 +84,22 @@ pub fn provide<'tcx>(providers: &mut Providers<'tcx>) { } fn is_promotable_const_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool { - tcx.is_const_fn(def_id) && match tcx.lookup_stability(def_id) { - Some(stab) => { - if cfg!(debug_assertions) && stab.promotable { - let sig = tcx.fn_sig(def_id); - assert_eq!( - sig.unsafety(), - hir::Unsafety::Normal, - "don't mark const unsafe fns as promotable", - // https://github.com/rust-lang/rust/pull/53851#issuecomment-418760682 - ); + tcx.is_const_fn(def_id) + && match tcx.lookup_stability(def_id) { + Some(stab) => { + if cfg!(debug_assertions) && stab.promotable { + let sig = tcx.fn_sig(def_id); + assert_eq!( + sig.unsafety(), + hir::Unsafety::Normal, + "don't mark const unsafe fns as promotable", + // https://github.com/rust-lang/rust/pull/53851#issuecomment-418760682 + ); + } + stab.promotable } - stab.promotable - }, - None => false, - } + None => false, + } } *providers = Providers { diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 32348e2e5046d..19fd6dca7b93d 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -1,76 +1,76 @@ //! 
type context book-keeping +use arena::{SyncDroplessArena, TypedArena}; use dep_graph::DepGraph; -use dep_graph::{DepNode, DepConstructor}; +use dep_graph::{DepConstructor, DepNode}; use errors::DiagnosticBuilder; -use session::Session; -use session::config::{BorrowckMode, OutputFilenames}; -use session::config::CrateType; -use middle; -use hir::{TraitCandidate, HirId, ItemKind, ItemLocalId, Node}; use hir::def::{Def, Export}; use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE}; use hir::map as hir_map; use hir::map::DefPathHash; -use lint::{self, Lint}; -use ich::{StableHashingContext, NodeIdHashingMode}; +use hir::{HirId, ItemKind, ItemLocalId, Node, TraitCandidate}; +use ich::{NodeIdHashingMode, StableHashingContext}; use infer::canonical::{Canonical, CanonicalVarInfo, CanonicalVarInfos}; use infer::outlives::free_region_map::FreeRegionMap; +use lint::{self, Lint}; +use middle; use middle::cstore::CrateStoreDyn; use middle::cstore::EncodedMetadata; use middle::lang_items; use middle::resolve_lifetime::{self, ObjectLifetimeDefault}; use middle::stability; -use mir::{self, Mir, interpret, ProjectionKind}; use mir::interpret::Allocation; -use ty::subst::{Kind, Substs, Subst}; -use ty::ReprOptions; -use traits; -use traits::{Clause, Clauses, GoalKind, Goal, Goals}; -use ty::{self, Ty, TypeAndMut}; -use ty::{TyS, TyKind, List}; -use ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorSubsts, Region, Const}; -use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate}; -use ty::RegionKind; -use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid}; -use ty::TyKind::*; -use ty::GenericParamDefKind; -use ty::layout::{LayoutDetails, TargetDataLayout, VariantIdx}; -use ty::query; -use ty::steal::Steal; -use ty::subst::{UserSubsts, UnpackedKind}; -use ty::{BoundVar, BindingMode}; -use ty::CanonicalPolyFnSig; -use util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap}; -use util::nodemap::{FxHashMap, FxHashSet}; +use mir::{self, interpret, Mir, ProjectionKind}; +use rustc_data_structures::indexed_vec::{Idx, IndexVec}; use rustc_data_structures::interner::HashInterner; +use rustc_data_structures::stable_hasher::{ + hash_stable_hashmap, HashStable, StableHasher, StableHasherResult, StableVec, +}; +use rustc_data_structures::sync::{self, Lock, Lrc, WorkerLocal}; +use rustc_target::spec::abi; +use session::config::CrateType; +use session::config::{BorrowckMode, OutputFilenames}; +use session::Session; use smallvec::SmallVec; -use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap, - StableHasher, StableHasherResult, - StableVec}; -use arena::{TypedArena, SyncDroplessArena}; -use rustc_data_structures::indexed_vec::{Idx, IndexVec}; -use rustc_data_structures::sync::{self, Lrc, Lock, WorkerLocal}; use std::any::Any; use std::borrow::Borrow; use std::cmp::Ordering; use std::collections::hash_map::{self, Entry}; -use std::hash::{Hash, Hasher}; use std::fmt; -use std::mem; -use std::ops::{Deref, Bound}; +use std::hash::{Hash, Hasher}; use std::iter; +use std::marker::PhantomData; +use std::mem; +use std::ops::{Bound, Deref}; use std::sync::mpsc; use std::sync::Arc; -use std::marker::PhantomData; -use rustc_target::spec::abi; use syntax::ast::{self, NodeId}; use syntax::attr; -use syntax::source_map::MultiSpan; use syntax::edition::Edition; use syntax::feature_gate; -use syntax::symbol::{Symbol, keywords, InternedString}; +use syntax::source_map::MultiSpan; +use syntax::symbol::{keywords, InternedString, Symbol}; use syntax_pos::Span; +use traits; +use traits::{Clause, 
Clauses, Goal, GoalKind, Goals}; +use ty::layout::{LayoutDetails, TargetDataLayout, VariantIdx}; +use ty::query; +use ty::steal::Steal; +use ty::subst::{Kind, Subst, Substs}; +use ty::subst::{UnpackedKind, UserSubsts}; +use ty::CanonicalPolyFnSig; +use ty::GenericParamDefKind; +use ty::RegionKind; +use ty::ReprOptions; +use ty::TyKind::*; +use ty::{self, Ty, TypeAndMut}; +use ty::{AdtDef, AdtKind, ClosureSubsts, Const, GeneratorSubsts, Region}; +use ty::{BindingMode, BoundVar}; +use ty::{ExistentialPredicate, InferTy, ParamTy, PolyFnSig, Predicate, ProjectionTy}; +use ty::{FloatVar, FloatVid, IntVar, IntVid, TyVar, TyVid}; +use ty::{List, TyKind, TyS}; +use util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap}; +use util::nodemap::{FxHashMap, FxHashSet}; use hir; @@ -153,7 +153,7 @@ impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> { fn intern_ty( local: &CtxtInterners<'tcx>, global: &CtxtInterners<'gcx>, - st: TyKind<'tcx> + st: TyKind<'tcx>, ) -> Ty<'tcx> { let flags = super::flags::FlagComputation::for_sty(&st); @@ -161,39 +161,47 @@ impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> { // determine that all contents are in the global tcx. // See comments on Lift for why we can't use that. if flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) { - local.type_.borrow_mut().intern(st, |st| { - let ty_struct = TyS { - sty: st, - flags: flags.flags, - outer_exclusive_binder: flags.outer_exclusive_binder, - }; + local + .type_ + .borrow_mut() + .intern(st, |st| { + let ty_struct = TyS { + sty: st, + flags: flags.flags, + outer_exclusive_binder: flags.outer_exclusive_binder, + }; - // Make sure we don't end up with inference - // types/regions in the global interner - if local as *const _ as usize == global as *const _ as usize { - bug!("Attempted to intern `{:?}` which contains \ - inference types/regions in the global type context", - &ty_struct); - } + // Make sure we don't end up with inference + // types/regions in the global interner + if local as *const _ as usize == global as *const _ as usize { + bug!( + "Attempted to intern `{:?}` which contains \ + inference types/regions in the global type context", + &ty_struct + ); + } - Interned(local.arena.alloc(ty_struct)) - }).0 + Interned(local.arena.alloc(ty_struct)) + }) + .0 } else { - global.type_.borrow_mut().intern(st, |st| { - let ty_struct = TyS { - sty: st, - flags: flags.flags, - outer_exclusive_binder: flags.outer_exclusive_binder, - }; + global + .type_ + .borrow_mut() + .intern(st, |st| { + let ty_struct = TyS { + sty: st, + flags: flags.flags, + outer_exclusive_binder: flags.outer_exclusive_binder, + }; - // This is safe because all the types the ty_struct can point to - // already is in the global arena - let ty_struct: TyS<'gcx> = unsafe { - mem::transmute(ty_struct) - }; + // This is safe because all the types the ty_struct can point to + // already is in the global arena + let ty_struct: TyS<'gcx> = unsafe { mem::transmute(ty_struct) }; - Interned(global.arena.alloc(ty_struct)) - }).0 + Interned(global.arena.alloc(ty_struct)) + }) + .0 } } } @@ -226,7 +234,7 @@ pub struct CommonTypes<'tcx> { pub struct LocalTableInContext<'a, V: 'a> { local_id_root: Option, - data: &'a ItemLocalMap + data: &'a ItemLocalMap, } /// Validate that the given HirId (respectively its `local_id` part) can be @@ -236,20 +244,24 @@ pub struct LocalTableInContext<'a, V: 'a> { /// would be in a different frame of reference and using its `local_id` /// would result in lookup errors, or worse, in silently wrong data being /// stored/returned. 
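// Editorial sketch (standalone): `TypeckTables` keys its side tables by the
// item-local part of a `HirId` and stores the owner separately, so every
// access first checks that the id's owner matches `local_id_root` (the
// debug-assertions check reformatted below); otherwise a local index would be
// silently interpreted in the wrong item's frame of reference. A simplified
// analogue with hypothetical types:
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct HirId {
    owner: u32,
    local_id: u32,
}

struct LocalTable<V> {
    owner: u32,            // analogue of `local_id_root`
    data: HashMap<u32, V>, // keyed only by `local_id`
}

impl<V> LocalTable<V> {
    fn get(&self, id: HirId) -> Option<&V> {
        assert_eq!(id.owner, self.owner, "{:?} belongs to a different owner", id);
        self.data.get(&id.local_id)
    }
}

fn main() {
    let mut data = HashMap::new();
    data.insert(3, "u8");
    let table = LocalTable { owner: 7, data };
    assert_eq!(table.get(HirId { owner: 7, local_id: 3 }), Some(&"u8"));
    // table.get(HirId { owner: 8, local_id: 3 }) would panic in this sketch.
}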
-fn validate_hir_id_for_typeck_tables(local_id_root: Option, - hir_id: hir::HirId, - mut_access: bool) { +fn validate_hir_id_for_typeck_tables( + local_id_root: Option, + hir_id: hir::HirId, + mut_access: bool, +) { if cfg!(debug_assertions) { if let Some(local_id_root) = local_id_root { if hir_id.owner != local_id_root.index { ty::tls::with(|tcx| { let node_id = tcx.hir().hir_to_node_id(hir_id); - bug!("node {} with HirId::owner {:?} cannot be placed in \ - TypeckTables with local_id_root {:?}", - tcx.hir().node_to_string(node_id), - DefId::local(hir_id.owner), - local_id_root) + bug!( + "node {} with HirId::owner {:?} cannot be placed in \ + TypeckTables with local_id_root {:?}", + tcx.hir().node_to_string(node_id), + DefId::local(hir_id.owner), + local_id_root + ) }); } } else { @@ -291,7 +303,7 @@ impl<'a, V> ::std::ops::Index for LocalTableInContext<'a, V> { pub struct LocalTableInContextMut<'a, V: 'a> { local_id_root: Option, - data: &'a mut ItemLocalMap + data: &'a mut ItemLocalMap, } impl<'a, V> LocalTableInContextMut<'a, V> { @@ -452,7 +464,10 @@ impl<'tcx> TypeckTables<'tcx> { hir::QPath::Resolved(_, ref path) => path.def, hir::QPath::TypeRelative(..) => { validate_hir_id_for_typeck_tables(self.local_id_root, id, false); - self.type_dependent_defs.get(&id.local_id).cloned().unwrap_or(Def::Err) + self.type_dependent_defs + .get(&id.local_id) + .cloned() + .unwrap_or(Def::Err) } } } @@ -460,71 +475,73 @@ impl<'tcx> TypeckTables<'tcx> { pub fn type_dependent_defs(&self) -> LocalTableInContext<'_, Def> { LocalTableInContext { local_id_root: self.local_id_root, - data: &self.type_dependent_defs + data: &self.type_dependent_defs, } } pub fn type_dependent_defs_mut(&mut self) -> LocalTableInContextMut<'_, Def> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.type_dependent_defs + data: &mut self.type_dependent_defs, } } pub fn field_indices(&self) -> LocalTableInContext<'_, usize> { LocalTableInContext { local_id_root: self.local_id_root, - data: &self.field_indices + data: &self.field_indices, } } pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<'_, usize> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.field_indices + data: &mut self.field_indices, } } pub fn user_provided_types( - &self + &self, ) -> LocalTableInContext<'_, CanonicalUserTypeAnnotation<'tcx>> { LocalTableInContext { local_id_root: self.local_id_root, - data: &self.user_provided_types + data: &self.user_provided_types, } } pub fn user_provided_types_mut( - &mut self + &mut self, ) -> LocalTableInContextMut<'_, CanonicalUserTypeAnnotation<'tcx>> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.user_provided_types + data: &mut self.user_provided_types, } } pub fn node_types(&self) -> LocalTableInContext<'_, Ty<'tcx>> { LocalTableInContext { local_id_root: self.local_id_root, - data: &self.node_types + data: &self.node_types, } } pub fn node_types_mut(&mut self) -> LocalTableInContextMut<'_, Ty<'tcx>> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.node_types + data: &mut self.node_types, } } pub fn node_id_to_type(&self, id: hir::HirId) -> Ty<'tcx> { - self.node_id_to_type_opt(id).unwrap_or_else(|| - bug!("node_id_to_type: no type for node `{}`", - tls::with(|tcx| { - let id = tcx.hir().hir_to_node_id(id); - tcx.hir().node_to_string(id) - })) - ) + self.node_id_to_type_opt(id).unwrap_or_else(|| { + bug!( + "node_id_to_type: no type for node `{}`", + tls::with(|tcx| { + let id = 
tcx.hir().hir_to_node_id(id); + tcx.hir().node_to_string(id) + }) + ) + }) } pub fn node_id_to_type_opt(&self, id: hir::HirId) -> Option> { @@ -535,13 +552,16 @@ impl<'tcx> TypeckTables<'tcx> { pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<'_, &'tcx Substs<'tcx>> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.node_substs + data: &mut self.node_substs, } } pub fn node_substs(&self, id: hir::HirId) -> &'tcx Substs<'tcx> { validate_hir_id_for_typeck_tables(self.local_id_root, id, false); - self.node_substs.get(&id.local_id).cloned().unwrap_or_else(|| Substs::empty()) + self.node_substs + .get(&id.local_id) + .cloned() + .unwrap_or_else(|| Substs::empty()) } pub fn node_substs_opt(&self, id: hir::HirId) -> Option<&'tcx Substs<'tcx>> { @@ -580,22 +600,24 @@ impl<'tcx> TypeckTables<'tcx> { pub fn adjustments(&self) -> LocalTableInContext<'_, Vec>> { LocalTableInContext { local_id_root: self.local_id_root, - data: &self.adjustments + data: &self.adjustments, } } - pub fn adjustments_mut(&mut self) - -> LocalTableInContextMut<'_, Vec>> { + pub fn adjustments_mut( + &mut self, + ) -> LocalTableInContextMut<'_, Vec>> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.adjustments + data: &mut self.adjustments, } } - pub fn expr_adjustments(&self, expr: &hir::Expr) - -> &[ty::adjustment::Adjustment<'tcx>] { + pub fn expr_adjustments(&self, expr: &hir::Expr) -> &[ty::adjustment::Adjustment<'tcx>] { validate_hir_id_for_typeck_tables(self.local_id_root, expr.hir_id, false); - self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..]) + self.adjustments + .get(&expr.hir_id.local_id) + .map_or(&[], |a| &a[..]) } /// Returns the type of `expr`, considering any `Adjustment` @@ -622,22 +644,21 @@ impl<'tcx> TypeckTables<'tcx> { match self.type_dependent_defs().get(expr.hir_id) { Some(&Def::Method(_)) => true, - _ => false + _ => false, } } pub fn pat_binding_modes(&self) -> LocalTableInContext<'_, BindingMode> { LocalTableInContext { local_id_root: self.local_id_root, - data: &self.pat_binding_modes + data: &self.pat_binding_modes, } } - pub fn pat_binding_modes_mut(&mut self) - -> LocalTableInContextMut<'_, BindingMode> { + pub fn pat_binding_modes_mut(&mut self) -> LocalTableInContextMut<'_, BindingMode> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.pat_binding_modes + data: &mut self.pat_binding_modes, } } @@ -648,8 +669,7 @@ impl<'tcx> TypeckTables<'tcx> { } } - pub fn pat_adjustments_mut(&mut self) - -> LocalTableInContextMut<'_, Vec>> { + pub fn pat_adjustments_mut(&mut self) -> LocalTableInContextMut<'_, Vec>> { LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.pat_adjustments, @@ -663,64 +683,66 @@ impl<'tcx> TypeckTables<'tcx> { pub fn closure_kind_origins(&self) -> LocalTableInContext<'_, (Span, ast::Name)> { LocalTableInContext { local_id_root: self.local_id_root, - data: &self.closure_kind_origins + data: &self.closure_kind_origins, } } pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<'_, (Span, ast::Name)> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.closure_kind_origins + data: &mut self.closure_kind_origins, } } pub fn liberated_fn_sigs(&self) -> LocalTableInContext<'_, ty::FnSig<'tcx>> { LocalTableInContext { local_id_root: self.local_id_root, - data: &self.liberated_fn_sigs + data: &self.liberated_fn_sigs, } } pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<'_, ty::FnSig<'tcx>> 
{ LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.liberated_fn_sigs + data: &mut self.liberated_fn_sigs, } } pub fn fru_field_types(&self) -> LocalTableInContext<'_, Vec>> { LocalTableInContext { local_id_root: self.local_id_root, - data: &self.fru_field_types + data: &self.fru_field_types, } } pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<'_, Vec>> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.fru_field_types + data: &mut self.fru_field_types, } } pub fn cast_kinds(&self) -> LocalTableInContext<'_, ty::cast::CastKind> { LocalTableInContext { local_id_root: self.local_id_root, - data: &self.cast_kinds + data: &self.cast_kinds, } } pub fn cast_kinds_mut(&mut self) -> LocalTableInContextMut<'_, ty::cast::CastKind> { LocalTableInContextMut { local_id_root: self.local_id_root, - data: &mut self.cast_kinds + data: &mut self.cast_kinds, } } } impl<'a, 'gcx> HashStable> for TypeckTables<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let ty::TypeckTables { local_id_root, ref type_dependent_defs, @@ -758,11 +780,10 @@ impl<'a, 'gcx> HashStable> for TypeckTables<'gcx> { hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| { let ty::UpvarId { var_path, - closure_expr_id + closure_expr_id, } = *up_var_id; - let local_id_root = - local_id_root.expect("trying to hash invalid TypeckTables"); + let local_id_root = local_id_root.expect("trying to hash invalid TypeckTables"); let var_owner_def_id = DefId { krate: local_id_root.krate, @@ -772,9 +793,11 @@ impl<'a, 'gcx> HashStable> for TypeckTables<'gcx> { krate: local_id_root.krate, index: closure_expr_id.to_def_id().index, }; - (hcx.def_path_hash(var_owner_def_id), - var_path.hir_id.local_id, - hcx.def_path_hash(closure_def_id)) + ( + hcx.def_path_hash(var_owner_def_id), + var_path.hir_id.local_id, + hcx.def_path_hash(closure_def_id), + ) }); closure_kind_origins.hash_stable(hcx, hasher); @@ -814,28 +837,32 @@ impl CanonicalUserTypeAnnotation<'gcx> { return false; } - user_substs.substs.iter().zip(BoundVar::new(0)..).all(|(kind, cvar)| { - match kind.unpack() { - UnpackedKind::Type(ty) => match ty.sty { - ty::Bound(debruijn, b) => { - // We only allow a `ty::INNERMOST` index in substitutions. - assert_eq!(debruijn, ty::INNERMOST); - cvar == b.var - } - _ => false, - }, - - UnpackedKind::Lifetime(r) => match r { - ty::ReLateBound(debruijn, br) => { - // We only allow a `ty::INNERMOST` index in substitutions. - assert_eq!(*debruijn, ty::INNERMOST); - cvar == br.assert_bound_var() - } - _ => false, - }, - } - }) - }, + user_substs + .substs + .iter() + .zip(BoundVar::new(0)..) + .all(|(kind, cvar)| { + match kind.unpack() { + UnpackedKind::Type(ty) => match ty.sty { + ty::Bound(debruijn, b) => { + // We only allow a `ty::INNERMOST` index in substitutions. + assert_eq!(debruijn, ty::INNERMOST); + cvar == b.var + } + _ => false, + }, + + UnpackedKind::Lifetime(r) => match r { + ty::ReLateBound(debruijn, br) => { + // We only allow a `ty::INNERMOST` index in substitutions. 
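// Editorial sketch (standalone): the `CanonicalUserTypeAnnotation` check
// reformatted nearby walks the user substs in lockstep with
// `BoundVar::new(0)..` and only accepts the identity substitution, i.e. the
// i-th argument must be the i-th innermost bound variable. A simplified
// analogue over a hypothetical `Arg` type:
enum Arg {
    BoundVar(usize), // stands in for `ty::Bound(ty::INNERMOST, b)` / `ReLateBound`
    Concrete(&'static str),
}

fn is_identity(substs: &[Arg]) -> bool {
    substs.iter().zip(0usize..).all(|(arg, var)| match arg {
        Arg::BoundVar(b) => *b == var,
        Arg::Concrete(_) => false,
    })
}

fn main() {
    assert!(is_identity(&[Arg::BoundVar(0), Arg::BoundVar(1)]));
    assert!(!is_identity(&[Arg::BoundVar(1), Arg::BoundVar(0)]));
    assert!(!is_identity(&[Arg::Concrete("u32")]));
}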
+ assert_eq!(*debruijn, ty::INNERMOST); + cvar == br.assert_bound_var() + } + _ => false, + }, + } + }) + } } } } @@ -871,9 +898,11 @@ impl<'tcx> CommonTypes<'tcx> { fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> { let mk = |sty| CtxtInterners::intern_ty(interners, interners, sty); let mk_region = |r| { - interners.region.borrow_mut().intern(r, |r| { - Interned(interners.arena.alloc(r)) - }).0 + interners + .region + .borrow_mut() + .intern(r, |r| Interned(interners.arena.alloc(r))) + .0 }; CommonTypes { @@ -952,9 +981,7 @@ pub struct GlobalCtxt<'tcx> { /// Map indicating what traits are in scope for places where this /// is relevant; generated by resolve. - trait_map: FxHashMap>>>>, + trait_map: FxHashMap>>>>, /// Export map produced by name resolution. export_map: FxHashMap>>, @@ -1053,12 +1080,13 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.global_arenas.trait_def.alloc(def) } - pub fn alloc_adt_def(self, - did: DefId, - kind: AdtKind, - variants: IndexVec, - repr: ReprOptions) - -> &'gcx ty::AdtDef { + pub fn alloc_adt_def( + self, + did: DefId, + kind: AdtKind, + variants: IndexVec, + repr: ReprOptions, + ) -> &'gcx ty::AdtDef { let def = ty::AdtDef::new(self, did, kind, variants, repr); self.global_arenas.adt_def.alloc(def) } @@ -1071,8 +1099,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - pub fn alloc_const_slice(self, values: &[&'tcx ty::Const<'tcx>]) - -> &'tcx [&'tcx ty::Const<'tcx>] { + pub fn alloc_const_slice( + self, + values: &[&'tcx ty::Const<'tcx>], + ) -> &'tcx [&'tcx ty::Const<'tcx>] { if values.is_empty() { &[] } else { @@ -1080,8 +1110,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - pub fn alloc_name_const_slice(self, values: &[(ast::Name, &'tcx ty::Const<'tcx>)]) - -> &'tcx [(ast::Name, &'tcx ty::Const<'tcx>)] { + pub fn alloc_name_const_slice( + self, + values: &[(ast::Name, &'tcx ty::Const<'tcx>)], + ) -> &'tcx [(ast::Name, &'tcx ty::Const<'tcx>)] { if values.is_empty() { &[] } else { @@ -1089,13 +1121,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - pub fn intern_const_alloc( - self, - alloc: Allocation, - ) -> &'gcx Allocation { - self.allocation_interner.borrow_mut().intern(alloc, |alloc| { - self.global_arenas.const_allocs.alloc(alloc) - }) + pub fn intern_const_alloc(self, alloc: Allocation) -> &'gcx Allocation { + self.allocation_interner + .borrow_mut() + .intern(alloc, |alloc| self.global_arenas.const_allocs.alloc(alloc)) } /// Allocates a byte or string literal for `mir::interpret`, read-only @@ -1107,15 +1136,15 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability { - self.stability_interner.borrow_mut().intern(stab, |stab| { - self.global_interners.arena.alloc(stab) - }) + self.stability_interner + .borrow_mut() + .intern(stab, |stab| self.global_interners.arena.alloc(stab)) } pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails { - self.layout_interner.borrow_mut().intern(layout, |layout| { - self.global_arenas.layout.alloc(layout) - }) + self.layout_interner + .borrow_mut() + .intern(layout, |layout| self.global_arenas.layout.alloc(layout)) } /// Returns a range of the start/end indices specified with the @@ -1127,15 +1156,24 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { Some(attr) => attr, None => return Bound::Unbounded, }; - for meta in attr.meta_item_list().expect("rustc_layout_scalar_valid_range takes args") { + for meta in attr + .meta_item_list() + .expect("rustc_layout_scalar_valid_range takes 
args") + { match meta.literal().expect("attribute takes lit").node { ast::LitKind::Int(a, _) => return Bound::Included(a), _ => span_bug!(attr.span, "rustc_layout_scalar_valid_range expects int arg"), } } - span_bug!(attr.span, "no arguments to `rustc_layout_scalar_valid_range` attribute"); + span_bug!( + attr.span, + "no arguments to `rustc_layout_scalar_valid_range` attribute" + ); }; - (get("rustc_layout_scalar_valid_range_start"), get("rustc_layout_scalar_valid_range_end")) + ( + get("rustc_layout_scalar_valid_range_start"), + get("rustc_layout_scalar_valid_range_end"), + ) } pub fn lift>(self, value: &T) -> Option { @@ -1158,19 +1196,22 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// to the context. The closure enforces that the type context and any interned /// value (types, substs, etc.) can only be used while `ty::tls` has a valid /// reference to the context, to allow formatting values that need it. - pub fn create_and_enter(s: &'tcx Session, - cstore: &'tcx CrateStoreDyn, - local_providers: ty::query::Providers<'tcx>, - extern_providers: ty::query::Providers<'tcx>, - arenas: &'tcx mut AllArenas<'tcx>, - resolutions: ty::Resolutions, - hir: hir_map::Map<'tcx>, - on_disk_query_result_cache: query::OnDiskCache<'tcx>, - crate_name: &str, - tx: mpsc::Sender>, - output_filenames: &OutputFilenames, - f: F) -> R - where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R + pub fn create_and_enter( + s: &'tcx Session, + cstore: &'tcx CrateStoreDyn, + local_providers: ty::query::Providers<'tcx>, + extern_providers: ty::query::Providers<'tcx>, + arenas: &'tcx mut AllArenas<'tcx>, + resolutions: ty::Resolutions, + hir: hir_map::Map<'tcx>, + on_disk_query_result_cache: query::OnDiskCache<'tcx>, + crate_name: &str, + tx: mpsc::Sender>, + output_filenames: &OutputFilenames, + f: F, + ) -> R + where + F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R, { let data_layout = TargetDataLayout::parse(&s.target.target).unwrap_or_else(|err| { s.fatal(&err); @@ -1178,7 +1219,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { let interners = CtxtInterners::new(&arenas.interner); let common_types = CommonTypes::new(&interners); let dep_graph = hir.dep_graph.clone(); - let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0); + let max_cnum = cstore + .crates_untracked() + .iter() + .map(|c| c.as_usize()) + .max() + .unwrap_or(0); let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1); providers[LOCAL_CRATE] = local_providers; @@ -1193,17 +1239,18 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { upstream_def_path_tables .iter() .map(|&(cnum, ref rc)| (cnum, &**rc)) - .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table()))) + .chain(iter::once(( + LOCAL_CRATE, + hir.definitions().def_path_table(), + ))) }; // Precompute the capacity of the hashmap so we don't have to // re-allocate when populating it. 
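// Editorial sketch (standalone, not the arena-backed rustc interners): the
// `intern_*` helpers reformatted above deduplicate values through a map so
// that equal values share a single stored copy and can later be handled by
// identity. A minimal index-based interner over `String`, names hypothetical:
use std::collections::HashMap;

#[derive(Default)]
struct Interner {
    map: HashMap<String, usize>, // value -> index of its single stored copy
    storage: Vec<String>,        // plays the role of the arena
}

impl Interner {
    fn intern(&mut self, value: &str) -> usize {
        if let Some(&idx) = self.map.get(value) {
            return idx; // already interned: reuse the existing copy
        }
        let idx = self.storage.len();
        self.storage.push(value.to_string());
        self.map.insert(value.to_string(), idx);
        idx
    }

    fn get(&self, idx: usize) -> &str {
        &self.storage[idx]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("LayoutDetails { .. }");
    let b = interner.intern("LayoutDetails { .. }");
    assert_eq!(a, b);                      // equal values intern to the same id
    assert_eq!(interner.get(a), "LayoutDetails { .. }");
    assert_eq!(interner.storage.len(), 1); // only one copy is stored
}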
let capacity = def_path_tables().map(|(_, t)| t.size()).sum::(); - let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher( - capacity, - ::std::default::Default::default() - ); + let mut map: FxHashMap<_, _> = + FxHashMap::with_capacity_and_hasher(capacity, ::std::default::Default::default()); for (cnum, def_path_table) in def_path_tables() { def_path_table.add_def_path_hashes_to(cnum, &mut map); @@ -1218,9 +1265,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { for (k, v) in resolutions.trait_map { let hir_id = hir.node_to_hir_id(k); let map = trait_map.entry(hir_id.owner).or_default(); - Lrc::get_mut(map).unwrap() - .insert(hir_id.local_id, - Lrc::new(StableVec::new(v))); + Lrc::get_mut(map) + .unwrap() + .insert(hir_id.local_id, Lrc::new(StableVec::new(v))); } arenas.global_ctxt = Some(GlobalCtxt { @@ -1231,30 +1278,30 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { dep_graph, types: common_types, trait_map, - export_map: resolutions.export_map.into_iter().map(|(k, v)| { - (k, Lrc::new(v)) - }).collect(), - freevars: resolutions.freevars.into_iter().map(|(k, v)| { - (hir.local_def_id(k), Lrc::new(v)) - }).collect(), - maybe_unused_trait_imports: - resolutions.maybe_unused_trait_imports - .into_iter() - .map(|id| hir.local_def_id(id)) - .collect(), - maybe_unused_extern_crates: - resolutions.maybe_unused_extern_crates - .into_iter() - .map(|(id, sp)| (hir.local_def_id(id), sp)) - .collect(), + export_map: resolutions + .export_map + .into_iter() + .map(|(k, v)| (k, Lrc::new(v))) + .collect(), + freevars: resolutions + .freevars + .into_iter() + .map(|(k, v)| (hir.local_def_id(k), Lrc::new(v))) + .collect(), + maybe_unused_trait_imports: resolutions + .maybe_unused_trait_imports + .into_iter() + .map(|id| hir.local_def_id(id)) + .collect(), + maybe_unused_extern_crates: resolutions + .maybe_unused_extern_crates + .into_iter() + .map(|(id, sp)| (hir.local_def_id(id), sp)) + .collect(), extern_prelude: resolutions.extern_prelude, hir_map: hir, def_path_hash_to_def_id, - queries: query::Queries::new( - providers, - extern_providers, - on_disk_query_result_cache, - ), + queries: query::Queries::new(providers, extern_providers, on_disk_query_result_cache), rcache: Default::default(), selection_cache: Default::default(), evaluation_cache: Default::default(), @@ -1295,31 +1342,57 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn is_binop_lang_item(&self, def_id: DefId) -> Option<(mir::BinOp, bool)> { let items = self.lang_items(); let def_id = Some(def_id); - if items.i128_add_fn() == def_id { Some((mir::BinOp::Add, false)) } - else if items.u128_add_fn() == def_id { Some((mir::BinOp::Add, false)) } - else if items.i128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) } - else if items.u128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) } - else if items.i128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) } - else if items.u128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) } - else if items.i128_div_fn() == def_id { Some((mir::BinOp::Div, false)) } - else if items.u128_div_fn() == def_id { Some((mir::BinOp::Div, false)) } - else if items.i128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) } - else if items.u128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) } - else if items.i128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) } - else if items.u128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) } - else if items.i128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) } - else if items.u128_shr_fn() == def_id { Some((mir::BinOp::Shr, 
false)) } - else if items.i128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) } - else if items.u128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) } - else if items.i128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) } - else if items.u128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) } - else if items.i128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) } - else if items.u128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) } - else if items.i128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) } - else if items.u128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) } - else if items.i128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) } - else if items.u128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) } - else { None } + if items.i128_add_fn() == def_id { + Some((mir::BinOp::Add, false)) + } else if items.u128_add_fn() == def_id { + Some((mir::BinOp::Add, false)) + } else if items.i128_sub_fn() == def_id { + Some((mir::BinOp::Sub, false)) + } else if items.u128_sub_fn() == def_id { + Some((mir::BinOp::Sub, false)) + } else if items.i128_mul_fn() == def_id { + Some((mir::BinOp::Mul, false)) + } else if items.u128_mul_fn() == def_id { + Some((mir::BinOp::Mul, false)) + } else if items.i128_div_fn() == def_id { + Some((mir::BinOp::Div, false)) + } else if items.u128_div_fn() == def_id { + Some((mir::BinOp::Div, false)) + } else if items.i128_rem_fn() == def_id { + Some((mir::BinOp::Rem, false)) + } else if items.u128_rem_fn() == def_id { + Some((mir::BinOp::Rem, false)) + } else if items.i128_shl_fn() == def_id { + Some((mir::BinOp::Shl, false)) + } else if items.u128_shl_fn() == def_id { + Some((mir::BinOp::Shl, false)) + } else if items.i128_shr_fn() == def_id { + Some((mir::BinOp::Shr, false)) + } else if items.u128_shr_fn() == def_id { + Some((mir::BinOp::Shr, false)) + } else if items.i128_addo_fn() == def_id { + Some((mir::BinOp::Add, true)) + } else if items.u128_addo_fn() == def_id { + Some((mir::BinOp::Add, true)) + } else if items.i128_subo_fn() == def_id { + Some((mir::BinOp::Sub, true)) + } else if items.u128_subo_fn() == def_id { + Some((mir::BinOp::Sub, true)) + } else if items.i128_mulo_fn() == def_id { + Some((mir::BinOp::Mul, true)) + } else if items.u128_mulo_fn() == def_id { + Some((mir::BinOp::Mul, true)) + } else if items.i128_shlo_fn() == def_id { + Some((mir::BinOp::Shl, true)) + } else if items.u128_shlo_fn() == def_id { + Some((mir::BinOp::Shl, true)) + } else if items.i128_shro_fn() == def_id { + Some((mir::BinOp::Shr, true)) + } else if items.u128_shro_fn() == def_id { + Some((mir::BinOp::Shr, true)) + } else { + None + } } pub fn stability(self) -> Lrc> { @@ -1370,19 +1443,25 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // statements within the query system and we'd run into endless // recursion otherwise. let (crate_name, crate_disambiguator) = if def_id.is_local() { - (self.crate_name.clone(), - self.sess.local_crate_disambiguator()) + ( + self.crate_name.clone(), + self.sess.local_crate_disambiguator(), + ) } else { - (self.cstore.crate_name_untracked(def_id.krate), - self.cstore.crate_disambiguator_untracked(def_id.krate)) + ( + self.cstore.crate_name_untracked(def_id.krate), + self.cstore.crate_disambiguator_untracked(def_id.krate), + ) }; - format!("{}[{}]{}", - crate_name, - // Don't print the whole crate disambiguator. That's just - // annoying in debug output. 
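// Editorial sketch (standalone): `is_binop_lang_item` above maps the 128-bit
// arithmetic lang items to a `(BinOp, overflow_checked)` pair through a long
// if/else chain. The same mapping expressed as a small lookup table, using
// hypothetical string keys in place of lang-item `DefId`s (note that only the
// `addo`/`subo`/`mulo`/`shlo`/`shro` variants carry overflow checking):
use std::collections::HashMap;

#[derive(Clone, Copy, Debug, PartialEq)]
enum BinOp { Add, Sub, Mul, Div, Rem, Shl, Shr }

fn binop_table() -> HashMap<&'static str, (BinOp, bool)> {
    let mut table = HashMap::new();
    for (name, op) in [
        ("add", BinOp::Add), ("sub", BinOp::Sub), ("mul", BinOp::Mul),
        ("div", BinOp::Div), ("rem", BinOp::Rem), ("shl", BinOp::Shl),
        ("shr", BinOp::Shr),
    ] {
        // Plain intrinsics are unchecked.
        table.insert(name, (op, false));
    }
    for (name, op) in [
        ("addo", BinOp::Add), ("subo", BinOp::Sub), ("mulo", BinOp::Mul),
        ("shlo", BinOp::Shl), ("shro", BinOp::Shr),
    ] {
        // The `*o` variants report overflow.
        table.insert(name, (op, true));
    }
    table
}

fn main() {
    let table = binop_table();
    assert_eq!(table.get("mulo"), Some(&(BinOp::Mul, true)));
    assert_eq!(table.get("shr"), Some(&(BinOp::Shr, false)));
    assert_eq!(table.get("not_a_lang_item"), None);
}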
- &(crate_disambiguator.to_fingerprint().to_hex())[..4], - self.def_path(def_id).to_string_no_crate()) + format!( + "{}[{}]{}", + crate_name, + // Don't print the whole crate disambiguator. That's just + // annoying in debug output. + &(crate_disambiguator.to_fingerprint().to_hex())[..4], + self.def_path(def_id).to_string_no_crate() + ) } pub fn metadata_encoding_version(self) -> Vec { @@ -1399,10 +1478,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn create_stable_hashing_context(self) -> StableHashingContext<'a> { let krate = self.gcx.hir_map.forest.untracked_krate(); - StableHashingContext::new(self.sess, - krate, - self.hir().definitions(), - self.cstore) + StableHashingContext::new(self.sess, krate, self.hir().definitions(), self.cstore) } // This method makes sure that we have a DepNode and a Fingerprint for @@ -1416,10 +1492,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { for cnum in self.cstore.crates_untracked() { let dep_node = DepNode::new(self, DepConstructor::CrateMetadata(cnum)); let crate_hash = self.cstore.crate_hash_untracked(cnum); - self.dep_graph.with_task(dep_node, - self, - crate_hash, - |_, x| x // No transformation needed + self.dep_graph.with_task( + dep_node, + self, + crate_hash, + |_, x| x, // No transformation needed ); } } @@ -1434,12 +1511,13 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - pub fn serialize_query_result_cache(self, - encoder: &mut E) - -> Result<(), E::Error> - where E: ty::codec::TyEncoder + pub fn serialize_query_result_cache(self, encoder: &mut E) -> Result<(), E::Error> + where + E: ty::codec::TyEncoder, { - self.queries.on_disk_cache.serialize(self.global_tcx(), encoder) + self.queries + .on_disk_cache + .serialize(self.global_tcx(), encoder) } /// This checks whether one is allowed to have pattern bindings @@ -1513,8 +1591,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// done with either: `-Ztwo-phase-borrows`, `#![feature(nll)]`, /// or by opting into an edition after 2015. pub fn two_phase_borrows(self) -> bool { - self.sess.rust_2018() || self.features().nll || - self.sess.opts.debugging_opts.two_phase_borrows + self.sess.rust_2018() + || self.features().nll + || self.sess.opts.debugging_opts.two_phase_borrows } /// What mode(s) of borrowck should we run? AST? MIR? both? 
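// Editorial sketch (standalone): `two_phase_borrows` above enables the feature
// if any one of three switches is on: the 2018 edition, `#![feature(nll)]`, or
// the `-Z two-phase-borrows` debug flag. The same gating as a plain function
// over a hypothetical `SessionFlags` snapshot:
struct SessionFlags {
    rust_2018: bool,
    nll_feature: bool,
    two_phase_borrows_flag: bool, // -Z two-phase-borrows
}

fn two_phase_borrows(flags: &SessionFlags) -> bool {
    flags.rust_2018 || flags.nll_feature || flags.two_phase_borrows_flag
}

fn main() {
    let edition_2015_default = SessionFlags {
        rust_2018: false,
        nll_feature: false,
        two_phase_borrows_flag: false,
    };
    let edition_2018 = SessionFlags { rust_2018: true, ..edition_2015_default };
    assert!(!two_phase_borrows(&edition_2015_default));
    assert!(two_phase_borrows(&edition_2018));
}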
@@ -1548,12 +1627,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // // * Otherwise, use the behavior requested via `-Z borrowck=...` - if self.features().nll { return BorrowckMode::Mir; } + if self.features().nll { + return BorrowckMode::Mir; + } match self.sess.opts.borrowck_mode { - mode @ BorrowckMode::Mir | - mode @ BorrowckMode::Compare | - mode @ BorrowckMode::Migrate => mode, + mode @ BorrowckMode::Mir + | mode @ BorrowckMode::Compare + | mode @ BorrowckMode::Migrate => mode, BorrowckMode::Ast => match self.sess.edition() { Edition::Edition2015 => BorrowckMode::Ast, @@ -1566,16 +1647,17 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn local_crate_exports_generics(self) -> bool { debug_assert!(self.sess.opts.share_generics()); - self.sess.crate_types.borrow().iter().any(|crate_type| { - match crate_type { - CrateType::Executable | - CrateType::Staticlib | - CrateType::ProcMacro | - CrateType::Cdylib => false, - CrateType::Rlib | - CrateType::Dylib => true, - } - }) + self.sess + .crate_types + .borrow() + .iter() + .any(|crate_type| match crate_type { + CrateType::Executable + | CrateType::Staticlib + | CrateType::ProcMacro + | CrateType::Cdylib => false, + CrateType::Rlib | CrateType::Dylib => true, + }) } // This method returns the DefId and the BoundRegion corresponding to the given region. @@ -1589,7 +1671,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { _ => return None, // not a free region }; - let node_id = self.hir() + let node_id = self + .hir() .as_local_node_id(suitable_region_binding_scope) .unwrap(); let is_impl_item = match self.hir().find(node_id) { @@ -1607,10 +1690,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { }); } - pub fn return_type_impl_trait( - &self, - scope_def_id: DefId, - ) -> Option> { + pub fn return_type_impl_trait(&self, scope_def_id: DefId) -> Option> { // HACK: `type_of_def_id()` will fail on these (#55796), so return None let node_id = self.hir().as_local_node_id(scope_def_id).unwrap(); match self.hir().get(node_id) { @@ -1636,16 +1716,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { None } } - _ => None + _ => None, } } // Here we check if the bound region is in Impl Item. 
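// Editorial sketch (standalone): in `borrowck_mode` above, an enabled `nll`
// feature overrides whatever was requested with `-Z borrowck=...`; only when
// the feature is off does the requested mode take effect (with the
// edition-dependent handling of `Ast` elided by this hunk). The core
// precedence, with a hypothetical `Request` snapshot:
#[derive(Clone, Copy, Debug, PartialEq)]
enum BorrowckMode { Ast, Mir, Compare, Migrate }

struct Request {
    nll_feature: bool,       // #![feature(nll)]
    requested: BorrowckMode, // -Z borrowck=...
}

fn borrowck_mode(req: &Request) -> BorrowckMode {
    if req.nll_feature {
        // The feature gate wins unconditionally.
        return BorrowckMode::Mir;
    }
    req.requested
}

fn main() {
    let with_nll = Request { nll_feature: true, requested: BorrowckMode::Ast };
    let compare = Request { nll_feature: false, requested: BorrowckMode::Compare };
    let migrate = Request { nll_feature: false, requested: BorrowckMode::Migrate };
    assert_eq!(borrowck_mode(&with_nll), BorrowckMode::Mir);
    assert_eq!(borrowck_mode(&compare), BorrowckMode::Compare);
    assert_eq!(borrowck_mode(&migrate), BorrowckMode::Migrate);
}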
- pub fn is_bound_region_in_impl_item( - &self, - suitable_region_binding_scope: DefId, - ) -> bool { - let container_id = self.associated_item(suitable_region_binding_scope) + pub fn is_bound_region_in_impl_item(&self, suitable_region_binding_scope: DefId) -> bool { + let container_id = self + .associated_item(suitable_region_binding_scope) .container .id(); if self.impl_trait_ref(container_id).is_some() { @@ -1662,9 +1740,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { - pub fn encode_metadata(self) - -> EncodedMetadata - { + pub fn encode_metadata(self) -> EncodedMetadata { self.cstore.encode_metadata(self) } } @@ -1677,7 +1753,7 @@ impl<'gcx> GlobalCtxt<'gcx> { &'gcx self, arena: &'tcx SyncDroplessArena, interners: &'tcx mut Option>, - f: F + f: F, ) -> R where F: FnOnce(TyCtxt<'tcx, 'gcx, 'tcx>) -> R, @@ -1696,9 +1772,7 @@ impl<'gcx> GlobalCtxt<'gcx> { layout_depth: icx.layout_depth, task: icx.task, }; - ty::tls::enter_context(&new_icx, |_| { - f(tcx) - }) + ty::tls::enter_context(&new_icx, |_| f(tcx)) }) } } @@ -1772,10 +1846,7 @@ impl<'a, 'tcx> Lift<'tcx> for Goal<'a> { impl<'a, 'tcx> Lift<'tcx> for &'a List> { type Lifted = &'tcx List>; - fn lift_to_tcx<'b, 'gcx>( - &self, - tcx: TyCtxt<'b, 'gcx, 'tcx>, - ) -> Option<&'tcx List>> { + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx List>> { if self.is_empty() { return Some(List::empty()); } @@ -1849,8 +1920,7 @@ impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> { impl<'a, 'tcx> Lift<'tcx> for &'a List> { type Lifted = &'tcx List>; - fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) - -> Option<&'tcx List>> { + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx List>> { if self.len() == 0 { return Some(List::empty()); } @@ -1868,8 +1938,10 @@ impl<'a, 'tcx> Lift<'tcx> for &'a List> { impl<'a, 'tcx> Lift<'tcx> for &'a List> { type Lifted = &'tcx List>; - fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) - -> Option<&'tcx List>> { + fn lift_to_tcx<'b, 'gcx>( + &self, + tcx: TyCtxt<'b, 'gcx, 'tcx>, + ) -> Option<&'tcx List>> { if self.is_empty() { return Some(List::empty()); } @@ -1887,8 +1959,10 @@ impl<'a, 'tcx> Lift<'tcx> for &'a List> { impl<'a, 'tcx> Lift<'tcx> for &'a List> { type Lifted = &'tcx List>; - fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) - -> Option<&'tcx List>> { + fn lift_to_tcx<'b, 'gcx>( + &self, + tcx: TyCtxt<'b, 'gcx, 'tcx>, + ) -> Option<&'tcx List>> { if self.is_empty() { return Some(List::empty()); } @@ -1943,15 +2017,15 @@ impl<'a, 'tcx> Lift<'tcx> for &'a List> { pub mod tls { use super::{GlobalCtxt, TyCtxt}; + use dep_graph::OpenTask; + use errors::{Diagnostic, TRACK_DIAGNOSTICS}; + use rustc_data_structures::sync::{self, Lock, Lrc}; + use rustc_data_structures::OnDrop; use std::fmt; - use std::mem; use std::marker::PhantomData; + use std::mem; use syntax_pos; use ty::query; - use errors::{Diagnostic, TRACK_DIAGNOSTICS}; - use rustc_data_structures::OnDrop; - use rustc_data_structures::sync::{self, Lrc, Lock}; - use dep_graph::OpenTask; #[cfg(not(parallel_queries))] use std::cell::Cell; @@ -2048,7 +2122,8 @@ pub mod tls { /// Sets up the callbacks from libsyntax on the current thread pub fn with_thread_locals(f: F) -> R - where F: FnOnce() -> R + where + F: FnOnce() -> R, { syntax_pos::SPAN_DEBUG.with(|span_dbg| { let original_span_debug = span_dbg.get(); @@ -2073,13 +2148,14 @@ pub mod tls { /// Sets `context` as the new current ImplicitCtxt for the duration of the function 
`f` #[inline] - pub fn enter_context<'a, 'gcx: 'tcx, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'gcx, 'tcx>, - f: F) -> R - where F: FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R + pub fn enter_context<'a, 'gcx: 'tcx, 'tcx, F, R>( + context: &ImplicitCtxt<'a, 'gcx, 'tcx>, + f: F, + ) -> R + where + F: FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R, { - set_tlv(context as *const _ as usize, || { - f(&context) - }) + set_tlv(context as *const _ as usize, || f(&context)) } /// Enters GlobalCtxt by setting up libsyntax callbacks and @@ -2087,7 +2163,8 @@ pub mod tls { /// This happens once per rustc session and TyCtxts only exists /// inside the `f` function. pub fn enter_global<'gcx, F, R>(gcx: &'gcx GlobalCtxt<'gcx>, f: F) -> R - where F: FnOnce(TyCtxt<'gcx, 'gcx, 'gcx>) -> R + where + F: FnOnce(TyCtxt<'gcx, 'gcx, 'gcx>) -> R, { with_thread_locals(|| { // Update GCX_PTR to indicate there's a GlobalCtxt available @@ -2110,9 +2187,7 @@ pub mod tls { layout_depth: 0, task: &OpenTask::Ignore, }; - enter_context(&icx, |_| { - f(tcx) - }) + enter_context(&icx, |_| f(tcx)) }) } @@ -2124,7 +2199,8 @@ pub mod tls { /// Creates a TyCtxt and ImplicitCtxt based on the GCX_PTR thread local. /// This is used in the deadlock handler. pub unsafe fn with_global(f: F) -> R - where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R + where + F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R, { let gcx = GCX_PTR.with(|lock| *lock.lock()); assert!(gcx != 0); @@ -2146,7 +2222,8 @@ pub mod tls { /// Allows access to the current ImplicitCtxt in a closure if one is available #[inline] pub fn with_context_opt(f: F) -> R - where F: for<'a, 'gcx, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'gcx, 'tcx>>) -> R + where + F: for<'a, 'gcx, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'gcx, 'tcx>>) -> R, { let context = get_tlv(); if context == 0 { @@ -2164,7 +2241,8 @@ pub mod tls { /// Panics if there is no ImplicitCtxt available #[inline] pub fn with_context(f: F) -> R - where F: for<'a, 'gcx, 'tcx> FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R + where + F: for<'a, 'gcx, 'tcx> FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R, { with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls"))) } @@ -2176,15 +2254,14 @@ pub mod tls { /// the current ImplicitCtxt's tcx field. #[inline] pub fn with_related_context<'a, 'gcx, 'tcx1, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx1>, f: F) -> R - where F: for<'b, 'tcx2> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx2>) -> R + where + F: for<'b, 'tcx2> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx2>) -> R, { - with_context(|context| { - unsafe { - let gcx = tcx.gcx as *const _ as usize; - assert!(context.tcx.gcx as *const _ as usize == gcx); - let context: &ImplicitCtxt<'_, '_, '_> = mem::transmute(context); - f(context) - } + with_context(|context| unsafe { + let gcx = tcx.gcx as *const _ as usize; + assert!(context.tcx.gcx as *const _ as usize == gcx); + let context: &ImplicitCtxt<'_, '_, '_> = mem::transmute(context); + f(context) }) } @@ -2195,17 +2272,16 @@ pub mod tls { /// a different local interner from the current ImplicitCtxt's tcx field. 
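// Editorial sketch (standalone, deliberately avoiding the pointer-as-usize
// trick used in `ty::tls` above): `enter_context` stashes the current context
// in a thread local for the duration of a closure, and `with_context` reads it
// back, panicking if nothing is stored. A safe, simplified analogue that
// stores a small Copy token instead of an `ImplicitCtxt` reference:
use std::cell::Cell;

thread_local! {
    static TLV: Cell<Option<u32>> = Cell::new(None); // "current context" token
}

fn enter_context<R>(ctxt: u32, f: impl FnOnce() -> R) -> R {
    let old = TLV.with(|tlv| tlv.replace(Some(ctxt)));
    let result = f();
    // Restore the previous value so nested and outer scopes keep working.
    TLV.with(|tlv| tlv.set(old));
    result
}

fn with_context<R>(f: impl FnOnce(u32) -> R) -> R {
    let ctxt = TLV.with(|tlv| tlv.get()).expect("no context stored in tls");
    f(ctxt)
}

fn main() {
    enter_context(42, || {
        assert_eq!(with_context(|c| c), 42);
        enter_context(7, || assert_eq!(with_context(|c| c), 7));
        assert_eq!(with_context(|c| c), 42); // outer context restored
    });
}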
#[inline] pub fn with_fully_related_context<'a, 'gcx, 'tcx, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx>, f: F) -> R - where F: for<'b> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx>) -> R + where + F: for<'b> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx>) -> R, { - with_context(|context| { - unsafe { - let gcx = tcx.gcx as *const _ as usize; - let interners = tcx.interners as *const _ as usize; - assert!(context.tcx.gcx as *const _ as usize == gcx); - assert!(context.tcx.interners as *const _ as usize == interners); - let context: &ImplicitCtxt<'_, '_, '_> = mem::transmute(context); - f(context) - } + with_context(|context| unsafe { + let gcx = tcx.gcx as *const _ as usize; + let interners = tcx.interners as *const _ as usize; + assert!(context.tcx.gcx as *const _ as usize == gcx); + assert!(context.tcx.interners as *const _ as usize == interners); + let context: &ImplicitCtxt<'_, '_, '_> = mem::transmute(context); + f(context) }) } @@ -2213,7 +2289,8 @@ pub mod tls { /// Panics if there is no ImplicitCtxt available #[inline] pub fn with(f: F) -> R - where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R + where + F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R, { with_context(|context| f(context.tcx)) } @@ -2222,7 +2299,8 @@ pub mod tls { /// The closure is passed None if there is no ImplicitCtxt available #[inline] pub fn with_opt(f: F) -> R - where F: for<'a, 'gcx, 'tcx> FnOnce(Option>) -> R + where + F: for<'a, 'gcx, 'tcx> FnOnce(Option>) -> R, { with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx))) } @@ -2295,28 +2373,51 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { pub fn print_debug_stats(self) { sty_debug_print!( self, - Adt, Array, Slice, RawPtr, Ref, FnDef, FnPtr, Placeholder, - Generator, GeneratorWitness, Dynamic, Closure, Tuple, Bound, - Param, Infer, UnnormalizedProjection, Projection, Opaque, Foreign); + Adt, + Array, + Slice, + RawPtr, + Ref, + FnDef, + FnPtr, + Placeholder, + Generator, + GeneratorWitness, + Dynamic, + Closure, + Tuple, + Bound, + Param, + Infer, + UnnormalizedProjection, + Projection, + Opaque, + Foreign + ); println!("Substs interner: #{}", self.interners.substs.borrow().len()); println!("Region interner: #{}", self.interners.region.borrow().len()); - println!("Stability interner: #{}", self.stability_interner.borrow().len()); - println!("Allocation interner: #{}", self.allocation_interner.borrow().len()); + println!( + "Stability interner: #{}", + self.stability_interner.borrow().len() + ); + println!( + "Allocation interner: #{}", + self.allocation_interner.borrow().len() + ); println!("Layout interner: #{}", self.layout_interner.borrow().len()); } } - /// An entry in an interner. -struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T); +struct Interned<'tcx, T: 'tcx + ?Sized>(&'tcx T); -impl<'tcx, T: 'tcx+?Sized> Clone for Interned<'tcx, T> { +impl<'tcx, T: 'tcx + ?Sized> Clone for Interned<'tcx, T> { fn clone(&self) -> Self { Interned(self.0) } } -impl<'tcx, T: 'tcx+?Sized> Copy for Interned<'tcx, T> {} +impl<'tcx, T: 'tcx + ?Sized> Copy for Interned<'tcx, T> {} // N.B., an `Interned` compares and hashes as a sty. impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> { @@ -2373,7 +2474,8 @@ impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> { } impl<'tcx: 'lcx, 'lcx> Borrow<[ProjectionKind<'lcx>]> - for Interned<'tcx, List>> { + for Interned<'tcx, List>> +{ fn borrow<'a>(&'a self) -> &'a [ProjectionKind<'lcx>] { &self.0[..] 
} @@ -2392,14 +2494,14 @@ impl<'tcx: 'lcx, 'lcx> Borrow> for Interned<'tcx, GoalKind<'tcx>> } impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]> - for Interned<'tcx, List>> { + for Interned<'tcx, List>> +{ fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] { &self.0[..] } } -impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]> - for Interned<'tcx, List>> { +impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]> for Interned<'tcx, List>> { fn borrow<'a>(&'a self) -> &'a [Predicate<'lcx>] { &self.0[..] } @@ -2411,15 +2513,13 @@ impl<'tcx: 'lcx, 'lcx> Borrow> for Interned<'tcx, Const<'tcx>> { } } -impl<'tcx: 'lcx, 'lcx> Borrow<[Clause<'lcx>]> -for Interned<'tcx, List>> { +impl<'tcx: 'lcx, 'lcx> Borrow<[Clause<'lcx>]> for Interned<'tcx, List>> { fn borrow<'a>(&'a self) -> &'a [Clause<'lcx>] { &self.0[..] } } -impl<'tcx: 'lcx, 'lcx> Borrow<[Goal<'lcx>]> -for Interned<'tcx, List>> { +impl<'tcx: 'lcx, 'lcx> Borrow<[Goal<'lcx>]> for Interned<'tcx, List>> { fn borrow<'a>(&'a self) -> &'a [Goal<'lcx>] { &self.0[..] } @@ -2555,9 +2655,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn coerce_closure_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> { let converted_sig = sig.map_bound(|s| { let params_iter = match s.inputs()[0].sty { - ty::Tuple(params) => { - params.into_iter().cloned() - } + ty::Tuple(params) => params.into_iter().cloned(), _ => bug!(), }; self.mk_fn_sig( @@ -2579,30 +2677,30 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> { match tm { - ast::IntTy::Isize => self.types.isize, - ast::IntTy::I8 => self.types.i8, - ast::IntTy::I16 => self.types.i16, - ast::IntTy::I32 => self.types.i32, - ast::IntTy::I64 => self.types.i64, - ast::IntTy::I128 => self.types.i128, + ast::IntTy::Isize => self.types.isize, + ast::IntTy::I8 => self.types.i8, + ast::IntTy::I16 => self.types.i16, + ast::IntTy::I32 => self.types.i32, + ast::IntTy::I64 => self.types.i64, + ast::IntTy::I128 => self.types.i128, } } pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> { match tm { - ast::UintTy::Usize => self.types.usize, - ast::UintTy::U8 => self.types.u8, - ast::UintTy::U16 => self.types.u16, - ast::UintTy::U32 => self.types.u32, - ast::UintTy::U64 => self.types.u64, - ast::UintTy::U128 => self.types.u128, + ast::UintTy::Usize => self.types.usize, + ast::UintTy::U8 => self.types.u8, + ast::UintTy::U16 => self.types.u16, + ast::UintTy::U32 => self.types.u32, + ast::UintTy::U64 => self.types.u64, + ast::UintTy::U128 => self.types.u128, } } pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> { match tm { - ast::FloatTy::F32 => self.types.f32, - ast::FloatTy::F64 => self.types.f64, + ast::FloatTy::F32 => self.types.f32, + ast::FloatTy::F64 => self.types.f64, } } @@ -2630,16 +2728,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> { let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem); let adt_def = self.adt_def(def_id); - let substs = Substs::for_item(self, def_id, |param, substs| { - match param.kind { - GenericParamDefKind::Lifetime => bug!(), - GenericParamDefKind::Type { has_default, .. } => { - if param.index == 0 { - ty.into() - } else { - assert!(has_default); - self.type_of(param.def_id).subst(self, substs).into() - } + let substs = Substs::for_item(self, def_id, |param, substs| match param.kind { + GenericParamDefKind::Lifetime => bug!(), + GenericParamDefKind::Type { has_default, .. 
} => { + if param.index == 0 { + ty.into() + } else { + assert!(has_default); + self.type_of(param.def_id).subst(self, substs).into() } } }); @@ -2658,22 +2754,40 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { #[inline] pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { - self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable}) + self.mk_ref( + r, + TypeAndMut { + ty: ty, + mutbl: hir::MutMutable, + }, + ) } #[inline] pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { - self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable}) + self.mk_ref( + r, + TypeAndMut { + ty: ty, + mutbl: hir::MutImmutable, + }, + ) } #[inline] pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> { - self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable}) + self.mk_ptr(TypeAndMut { + ty: ty, + mutbl: hir::MutMutable, + }) } #[inline] pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> { - self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable}) + self.mk_ptr(TypeAndMut { + ty: ty, + mutbl: hir::MutImmutable, + }) } #[inline] @@ -2720,8 +2834,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } #[inline] - pub fn mk_fn_def(self, def_id: DefId, - substs: &'tcx Substs<'tcx>) -> Ty<'tcx> { + pub fn mk_fn_def(self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> { self.mk_ty(FnDef(def_id, substs)) } @@ -2734,34 +2847,31 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn mk_dynamic( self, obj: ty::Binder<&'tcx List>>, - reg: ty::Region<'tcx> + reg: ty::Region<'tcx>, ) -> Ty<'tcx> { self.mk_ty(Dynamic(obj, reg)) } #[inline] - pub fn mk_projection(self, - item_def_id: DefId, - substs: &'tcx Substs<'tcx>) - -> Ty<'tcx> { - self.mk_ty(Projection(ProjectionTy { - item_def_id, - substs, - })) - } + pub fn mk_projection(self, item_def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> { + self.mk_ty(Projection(ProjectionTy { + item_def_id, + substs, + })) + } #[inline] - pub fn mk_closure(self, closure_id: DefId, closure_substs: ClosureSubsts<'tcx>) - -> Ty<'tcx> { + pub fn mk_closure(self, closure_id: DefId, closure_substs: ClosureSubsts<'tcx>) -> Ty<'tcx> { self.mk_ty(Closure(closure_id, closure_substs)) } #[inline] - pub fn mk_generator(self, - id: DefId, - generator_substs: GeneratorSubsts<'tcx>, - movability: hir::GeneratorMovability) - -> Ty<'tcx> { + pub fn mk_generator( + self, + id: DefId, + generator_substs: GeneratorSubsts<'tcx>, + movability: hir::GeneratorMovability, + ) -> Ty<'tcx> { self.mk_ty(Generator(id, generator_substs, movability)) } @@ -2791,10 +2901,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } #[inline] - pub fn mk_ty_param(self, - index: u32, - name: InternedString) -> Ty<'tcx> { - self.mk_ty(Param(ParamTy { idx: index, name: name })) + pub fn mk_ty_param(self, index: u32, name: InternedString) -> Ty<'tcx> { + self.mk_ty(Param(ParamTy { + idx: index, + name: name, + })) } #[inline] @@ -2804,10 +2915,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> Kind<'tcx> { match param.kind { - GenericParamDefKind::Lifetime => { - self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into() - } - GenericParamDefKind::Type {..} => self.mk_ty_param(param.index, param.name).into(), + GenericParamDefKind::Lifetime => self + .mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())) + .into(), + GenericParamDefKind::Type { .. 
} => self.mk_ty_param(param.index, param.name).into(), } } @@ -2816,15 +2927,18 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.mk_ty(Opaque(def_id, substs)) } - pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>]) - -> &'tcx List> { + pub fn intern_existential_predicates( + self, + eps: &[ExistentialPredicate<'tcx>], + ) -> &'tcx List> { assert!(!eps.is_empty()); - assert!(eps.windows(2).all(|w| w[0].stable_cmp(self, &w[1]) != Ordering::Greater)); + assert!(eps + .windows(2) + .all(|w| w[0].stable_cmp(self, &w[1]) != Ordering::Greater)); self._intern_existential_predicates(eps) } - pub fn intern_predicates(self, preds: &[Predicate<'tcx>]) - -> &'tcx List> { + pub fn intern_predicates(self, preds: &[Predicate<'tcx>]) -> &'tcx List> { // FIXME consider asking the input slice to be sorted to avoid // re-interning permutations, in which case that would be asserted // here. @@ -2884,49 +2998,56 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - pub fn mk_fn_sig(self, - inputs: I, - output: I::Item, - variadic: bool, - unsafety: hir::Unsafety, - abi: abi::Abi) - -> , ty::FnSig<'tcx>>>::Output - where I: Iterator, - I::Item: InternIteratorElement, ty::FnSig<'tcx>> + pub fn mk_fn_sig( + self, + inputs: I, + output: I::Item, + variadic: bool, + unsafety: hir::Unsafety, + abi: abi::Abi, + ) -> , ty::FnSig<'tcx>>>::Output + where + I: Iterator, + I::Item: InternIteratorElement, ty::FnSig<'tcx>>, { - inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig { - inputs_and_output: self.intern_type_list(xs), - variadic, unsafety, abi - }) + inputs + .chain(iter::once(output)) + .intern_with(|xs| ty::FnSig { + inputs_and_output: self.intern_type_list(xs), + variadic, + unsafety, + abi, + }) } - pub fn mk_existential_predicates], - &'tcx List>>>(self, iter: I) - -> I::Output { + pub fn mk_existential_predicates< + I: InternAs<[ExistentialPredicate<'tcx>], &'tcx List>>, + >( + self, + iter: I, + ) -> I::Output { iter.intern_with(|xs| self.intern_existential_predicates(xs)) } - pub fn mk_predicates], - &'tcx List>>>(self, iter: I) - -> I::Output { + pub fn mk_predicates], &'tcx List>>>( + self, + iter: I, + ) -> I::Output { iter.intern_with(|xs| self.intern_predicates(xs)) } - pub fn mk_type_list], - &'tcx List>>>(self, iter: I) -> I::Output { + pub fn mk_type_list], &'tcx List>>>(self, iter: I) -> I::Output { iter.intern_with(|xs| self.intern_type_list(xs)) } - pub fn mk_substs], - &'tcx List>>>(self, iter: I) -> I::Output { + pub fn mk_substs], &'tcx List>>>( + self, + iter: I, + ) -> I::Output { iter.intern_with(|xs| self.intern_substs(xs)) } - pub fn mk_substs_trait(self, - self_ty: Ty<'tcx>, - rest: &[Kind<'tcx>]) - -> &'tcx Substs<'tcx> - { + pub fn mk_substs_trait(self, self_ty: Ty<'tcx>, rest: &[Kind<'tcx>]) -> &'tcx Substs<'tcx> { self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned())) } @@ -2938,47 +3059,59 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { iter.intern_with(|xs| self.intern_goals(xs)) } - pub fn lint_hir>(self, - lint: &'static Lint, - hir_id: HirId, - span: S, - msg: &str) { - self.struct_span_lint_hir(lint, hir_id, span.into(), msg).emit() + pub fn lint_hir>( + self, + lint: &'static Lint, + hir_id: HirId, + span: S, + msg: &str, + ) { + self.struct_span_lint_hir(lint, hir_id, span.into(), msg) + .emit() } - pub fn lint_node>(self, - lint: &'static Lint, - id: NodeId, - span: S, - msg: &str) { - self.struct_span_lint_node(lint, id, span.into(), msg).emit() + pub fn lint_node>( + self, + lint: &'static Lint, + id: NodeId, + 
span: S, + msg: &str, + ) { + self.struct_span_lint_node(lint, id, span.into(), msg) + .emit() } - pub fn lint_hir_note>(self, - lint: &'static Lint, - hir_id: HirId, - span: S, - msg: &str, - note: &str) { + pub fn lint_hir_note>( + self, + lint: &'static Lint, + hir_id: HirId, + span: S, + msg: &str, + note: &str, + ) { let mut err = self.struct_span_lint_hir(lint, hir_id, span.into(), msg); err.note(note); err.emit() } - pub fn lint_node_note>(self, - lint: &'static Lint, - id: NodeId, - span: S, - msg: &str, - note: &str) { + pub fn lint_node_note>( + self, + lint: &'static Lint, + id: NodeId, + span: S, + msg: &str, + note: &str, + ) { let mut err = self.struct_span_lint_node(lint, id, span.into(), msg); err.note(note); err.emit() } - pub fn lint_level_at_node(self, lint: &'static Lint, mut id: NodeId) - -> (lint::Level, lint::LintSource) - { + pub fn lint_level_at_node( + self, + lint: &'static Lint, + mut id: NodeId, + ) -> (lint::Level, lint::LintSource) { // Right now we insert a `with_ignore` node in the dep graph here to // ignore the fact that `lint_levels` below depends on the entire crate. // For now this'll prevent false positives of recompiling too much when @@ -2992,7 +3125,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { loop { let hir_id = self.hir().definitions().node_to_hir_id(id); if let Some(pair) = sets.level_and_source(lint, hir_id, self.sess) { - return pair + return pair; } let next = self.hir().get_parent_node(id); if next == id { @@ -3003,32 +3136,35 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { }) } - pub fn struct_span_lint_hir>(self, - lint: &'static Lint, - hir_id: HirId, - span: S, - msg: &str) - -> DiagnosticBuilder<'tcx> - { + pub fn struct_span_lint_hir>( + self, + lint: &'static Lint, + hir_id: HirId, + span: S, + msg: &str, + ) -> DiagnosticBuilder<'tcx> { let node_id = self.hir().hir_to_node_id(hir_id); let (level, src) = self.lint_level_at_node(lint, node_id); lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg) } - pub fn struct_span_lint_node>(self, - lint: &'static Lint, - id: NodeId, - span: S, - msg: &str) - -> DiagnosticBuilder<'tcx> - { + pub fn struct_span_lint_node>( + self, + lint: &'static Lint, + id: NodeId, + span: S, + msg: &str, + ) -> DiagnosticBuilder<'tcx> { let (level, src) = self.lint_level_at_node(lint, id); lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg) } - pub fn struct_lint_node(self, lint: &'static Lint, id: NodeId, msg: &str) - -> DiagnosticBuilder<'tcx> - { + pub fn struct_lint_node( + self, + lint: &'static Lint, + id: NodeId, + msg: &str, + ) -> DiagnosticBuilder<'tcx> { let (level, src) = self.lint_level_at_node(lint, id); lint::struct_lint_level(self.sess, lint, level, src, None, msg) } @@ -3049,9 +3185,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { .unwrap_or(false) } - pub fn object_lifetime_defaults(self, id: HirId) - -> Option>> - { + pub fn object_lifetime_defaults(self, id: HirId) -> Option>> { self.object_lifetime_defaults_map(id.owner) .and_then(|map| map.get(&id.local_id).cloned()) } @@ -3060,43 +3194,49 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub trait InternAs { type Output; fn intern_with(self, f: F) -> Self::Output - where F: FnOnce(&T) -> R; + where + F: FnOnce(&T) -> R; } impl InternAs<[T], R> for I - where E: InternIteratorElement, - I: Iterator { +where + E: InternIteratorElement, + I: Iterator, +{ type Output = E::Output; fn intern_with(self, f: F) -> Self::Output - where F: FnOnce(&[T]) -> R { + where + F: FnOnce(&[T]) -> R, + { 
E::intern_with(self, f) } } pub trait InternIteratorElement: Sized { type Output; - fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output; + fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output; } impl InternIteratorElement for T { type Output = R; - fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { + fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { f(&iter.collect::>()) } } impl<'a, T, R> InternIteratorElement for &'a T - where T: Clone + 'a +where + T: Clone + 'a, { type Output = R; - fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { + fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { f(&iter.cloned().collect::>()) } } impl InternIteratorElement for Result { type Output = Result; - fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { + fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { Ok(f(&iter.collect::, _>>()?)) } } @@ -3120,9 +3260,7 @@ pub fn provide(providers: &mut ty::query::Providers<'_>) { Lrc::new(middle::lang_items::collect(tcx)) }; providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned(); - providers.maybe_unused_trait_import = |tcx, id| { - tcx.maybe_unused_trait_imports.contains(&id) - }; + providers.maybe_unused_trait_import = |tcx, id| tcx.maybe_unused_trait_imports.contains(&id); providers.maybe_unused_extern_crates = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); Lrc::new(tcx.maybe_unused_extern_crates.clone()) diff --git a/src/librustc/ty/erase_regions.rs b/src/librustc/ty/erase_regions.rs index bbf71c62ca69d..a9418ad2a210d 100644 --- a/src/librustc/ty/erase_regions.rs +++ b/src/librustc/ty/erase_regions.rs @@ -1,5 +1,5 @@ +use ty::fold::{TypeFoldable, TypeFolder}; use ty::{self, Ty, TyCtxt}; -use ty::fold::{TypeFolder, TypeFoldable}; pub(super) fn provide(providers: &mut ty::query::Providers<'_>) { *providers = ty::query::Providers { @@ -19,7 +19,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// that late-bound regions remain, because they are important for /// subtyping, but they are anonymized and normalized as well).. pub fn erase_regions(self, value: &T) -> T - where T : TypeFoldable<'tcx> + where + T: TypeFoldable<'tcx>, { let value1 = value.fold_with(&mut RegionEraserVisitor { tcx: self }); debug!("erase_regions({:?}) = {:?}", value, value1); @@ -45,7 +46,8 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionEraserVisitor<'a, 'gcx, 't } fn fold_binder(&mut self, t: &ty::Binder) -> ty::Binder - where T : TypeFoldable<'tcx> + where + T: TypeFoldable<'tcx>, { let u = self.tcx.anonymize_late_bound_regions(t); u.super_fold_with(self) @@ -62,7 +64,7 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionEraserVisitor<'a, 'gcx, 't // whenever a substitution occurs. match *r { ty::ReLateBound(..) 
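The InternAs / InternIteratorElement plumbing above (used by mk_fn_sig and the other mk_* helpers) boils down to "collect the iterator into a buffer, then hand the whole slice to one interning callback". A minimal sketch of that shape with invented names, using string joining as a stand-in for the real interner:

fn intern_with<T, R>(iter: impl IntoIterator<Item = T>, f: impl FnOnce(&[T]) -> R) -> R {
    // Gather every element first so the callback sees the complete slice.
    let buf: Vec<T> = iter.into_iter().collect();
    f(&buf)
}

fn main() {
    let sig = intern_with(["&str", "usize", "bool"], |xs| xs.join(" -> "));
    assert_eq!(sig, "&str -> usize -> bool");
}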
=> r, - _ => self.tcx.types.re_erased + _ => self.tcx.types.re_erased, } } } diff --git a/src/librustc/ty/error.rs b/src/librustc/ty/error.rs index 76e102d88d7ce..534802065fe71 100644 --- a/src/librustc/ty/error.rs +++ b/src/librustc/ty/error.rs @@ -1,11 +1,11 @@ +use errors::{Applicability, DiagnosticBuilder}; use hir::def_id::DefId; -use ty::{self, Region, Ty, TyCtxt}; +use rustc_target::spec::abi; use std::borrow::Cow; use std::fmt; -use rustc_target::spec::abi; use syntax::ast; -use errors::{Applicability, DiagnosticBuilder}; use syntax_pos::Span; +use ty::{self, Region, Ty, TyCtxt}; use hir; @@ -58,8 +58,11 @@ pub enum UnconstrainedNumeric { impl<'tcx> fmt::Display for TypeError<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use self::TypeError::*; - fn report_maybe_different(f: &mut fmt::Formatter<'_>, - expected: &str, found: &str) -> fmt::Result { + fn report_maybe_different( + f: &mut fmt::Formatter<'_>, + expected: &str, + found: &str, + ) -> fmt::Result { // A naive approach to making sure that we're not reporting silly errors such as: // (expected closure, found closure). if expected == found { @@ -72,78 +75,88 @@ impl<'tcx> fmt::Display for TypeError<'tcx> { match *self { CyclicTy(_) => write!(f, "cyclic type of infinite size"), Mismatch => write!(f, "types differ"), - UnsafetyMismatch(values) => { - write!(f, "expected {} fn, found {} fn", - values.expected, - values.found) - } - AbiMismatch(values) => { - write!(f, "expected {} fn, found {} fn", - values.expected, - values.found) - } + UnsafetyMismatch(values) => write!( + f, + "expected {} fn, found {} fn", + values.expected, values.found + ), + AbiMismatch(values) => write!( + f, + "expected {} fn, found {} fn", + values.expected, values.found + ), Mutability => write!(f, "types differ in mutability"), - FixedArraySize(values) => { - write!(f, "expected an array with a fixed size of {} elements, \ - found one with {} elements", - values.expected, - values.found) - } - TupleSize(values) => { - write!(f, "expected a tuple with {} elements, \ - found one with {} elements", - values.expected, - values.found) - } - ArgCount => { - write!(f, "incorrect number of function parameters") - } - RegionsDoesNotOutlive(..) => { - write!(f, "lifetime mismatch") - } - RegionsPlaceholderMismatch => { - write!(f, "one type is more general than the other") - } + FixedArraySize(values) => write!( + f, + "expected an array with a fixed size of {} elements, \ + found one with {} elements", + values.expected, values.found + ), + TupleSize(values) => write!( + f, + "expected a tuple with {} elements, \ + found one with {} elements", + values.expected, values.found + ), + ArgCount => write!(f, "incorrect number of function parameters"), + RegionsDoesNotOutlive(..) 
=> write!(f, "lifetime mismatch"), + RegionsPlaceholderMismatch => write!(f, "one type is more general than the other"), Sorts(values) => ty::tls::with(|tcx| { - report_maybe_different(f, &values.expected.sort_string(tcx), - &values.found.sort_string(tcx)) + report_maybe_different( + f, + &values.expected.sort_string(tcx), + &values.found.sort_string(tcx), + ) }), Traits(values) => ty::tls::with(|tcx| { - report_maybe_different(f, - &format!("trait `{}`", - tcx.item_path_str(values.expected)), - &format!("trait `{}`", - tcx.item_path_str(values.found))) + report_maybe_different( + f, + &format!("trait `{}`", tcx.item_path_str(values.expected)), + &format!("trait `{}`", tcx.item_path_str(values.found)), + ) }), - IntMismatch(ref values) => { - write!(f, "expected `{:?}`, found `{:?}`", - values.expected, - values.found) - } - FloatMismatch(ref values) => { - write!(f, "expected `{:?}`, found `{:?}`", - values.expected, - values.found) - } - VariadicMismatch(ref values) => { - write!(f, "expected {} fn, found {} function", - if values.expected { "variadic" } else { "non-variadic" }, - if values.found { "variadic" } else { "non-variadic" }) - } + IntMismatch(ref values) => write!( + f, + "expected `{:?}`, found `{:?}`", + values.expected, values.found + ), + FloatMismatch(ref values) => write!( + f, + "expected `{:?}`, found `{:?}`", + values.expected, values.found + ), + VariadicMismatch(ref values) => write!( + f, + "expected {} fn, found {} function", + if values.expected { + "variadic" + } else { + "non-variadic" + }, + if values.found { + "variadic" + } else { + "non-variadic" + } + ), ProjectionMismatched(ref values) => ty::tls::with(|tcx| { - write!(f, "expected {}, found {}", - tcx.item_path_str(values.expected), - tcx.item_path_str(values.found)) + write!( + f, + "expected {}, found {}", + tcx.item_path_str(values.expected), + tcx.item_path_str(values.found) + ) }), - ProjectionBoundsLength(ref values) => { - write!(f, "expected {} associated type bindings, found {}", - values.expected, - values.found) - }, - ExistentialMismatch(ref values) => { - report_maybe_different(f, &format!("trait `{}`", values.expected), - &format!("trait `{}`", values.found)) - } + ProjectionBoundsLength(ref values) => write!( + f, + "expected {} associated type bindings, found {}", + values.expected, values.found + ), + ExistentialMismatch(ref values) => report_maybe_different( + f, + &format!("trait `{}`", values.expected), + &format!("trait `{}`", values.found), + ), } } } @@ -151,18 +164,17 @@ impl<'tcx> fmt::Display for TypeError<'tcx> { impl<'a, 'gcx, 'lcx, 'tcx> ty::TyS<'tcx> { pub fn sort_string(&self, tcx: TyCtxt<'a, 'gcx, 'lcx>) -> Cow<'static, str> { match self.sty { - ty::Bool | ty::Char | ty::Int(_) | - ty::Uint(_) | ty::Float(_) | ty::Str | ty::Never => self.to_string().into(), + ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Str | ty::Never => { + self.to_string().into() + } ty::Tuple(ref tys) if tys.is_empty() => self.to_string().into(), ty::Adt(def, _) => format!("{} `{}`", def.descr(), tcx.item_path_str(def.did)).into(), ty::Foreign(def_id) => format!("extern type `{}`", tcx.item_path_str(def_id)).into(), - ty::Array(_, n) => { - match n.assert_usize(tcx) { - Some(n) => format!("array of {} elements", n).into(), - None => "array".into(), - } - } + ty::Array(_, n) => match n.assert_usize(tcx) { + Some(n) => format!("array of {} elements", n).into(), + None => "array".into(), + }, ty::Slice(_) => "slice".into(), ty::RawPtr(_) => "*-ptr".into(), ty::Ref(region, ty, mutbl) 
=> { @@ -170,12 +182,17 @@ impl<'a, 'gcx, 'lcx, 'tcx> ty::TyS<'tcx> { let tymut_string = tymut.to_string(); if tymut_string == "_" || //unknown type name, tymut_string.len() > 10 || //name longer than saying "reference", - region.to_string() != "" //... or a complex type + region.to_string() != "" + //... or a complex type { - format!("{}reference", match mutbl { - hir::Mutability::MutMutable => "mutable ", - _ => "" - }).into() + format!( + "{}reference", + match mutbl { + hir::Mutability::MutMutable => "mutable ", + _ => "", + } + ) + .into() } else { format!("&{}", tymut_string).into() } @@ -213,10 +230,12 @@ impl<'a, 'gcx, 'lcx, 'tcx> ty::TyS<'tcx> { } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - pub fn note_and_explain_type_err(self, - db: &mut DiagnosticBuilder<'_>, - err: &TypeError<'tcx>, - sp: Span) { + pub fn note_and_explain_type_err( + self, + db: &mut DiagnosticBuilder<'_>, + err: &TypeError<'tcx>, + sp: Span, + ) { use self::TypeError::*; match err.clone() { @@ -228,26 +247,32 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { db.help("consider boxing your closure and/or using it as a trait object"); } if let (ty::Infer(ty::IntVar(_)), ty::Float(_)) = - (&values.found.sty, &values.expected.sty) // Issue #53280 + (&values.found.sty, &values.expected.sty) + // Issue #53280 { if let Ok(snippet) = self.sess.source_map().span_to_snippet(sp) { - if snippet.chars().all(|c| c.is_digit(10) || c == '-' || c == '_') { + if snippet + .chars() + .all(|c| c.is_digit(10) || c == '-' || c == '_') + { db.span_suggestion_with_applicability( sp, "use a float literal", format!("{}.0", snippet), - Applicability::MachineApplicable + Applicability::MachineApplicable, ); } } } - }, + } CyclicTy(ty) => { // Watch out for various cases of cyclic types and try to explain. if ty.is_closure() || ty.is_generator() { - db.note("closures cannot capture themselves or take themselves as argument;\n\ - this error may be the result of a recent compiler bug-fix,\n\ - see https://github.com/rust-lang/rust/issues/46062 for more details"); + db.note( + "closures cannot capture themselves or take themselves as argument;\n\ + this error may be the result of a recent compiler bug-fix,\n\ + see https://github.com/rust-lang/rust/issues/46062 for more details", + ); } } _ => {} diff --git a/src/librustc/ty/fast_reject.rs b/src/librustc/ty/fast_reject.rs index 7fbbfb337ea4f..1b1f61fca0bd1 100644 --- a/src/librustc/ty/fast_reject.rs +++ b/src/librustc/ty/fast_reject.rs @@ -1,7 +1,6 @@ use hir::def_id::DefId; use ich::StableHashingContext; -use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult, - HashStable}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; use std::fmt::Debug; use std::hash::Hash; use std::mem; @@ -18,9 +17,12 @@ pub type SimplifiedType = SimplifiedTypeGen; /// because we sometimes need to use SimplifiedTypeGen values as stable sorting /// keys (in which case we use a DefPathHash as id-type) but in the general case /// the non-stable but fast to construct DefId-version is the better choice. 
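For reference, the reformatted suggestion logic above (the issue #53280 case) hinges on one small check: if the offending snippet looks like an integer literal, propose the same literal with ".0" appended. A hypothetical, stand-alone version of just that check (the function name is invented):

fn suggest_float_literal(snippet: &str) -> Option<String> {
    // Digits, sign, and underscores only — anything else is not a plain int literal.
    if !snippet.is_empty()
        && snippet.chars().all(|c| c.is_digit(10) || c == '-' || c == '_')
    {
        Some(format!("{}.0", snippet))
    } else {
        None
    }
}

fn main() {
    assert_eq!(suggest_float_literal("1_000"), Some("1_000.0".to_string()));
    assert_eq!(suggest_float_literal("1e3"), None);
}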
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, RustcEncodable, RustcDecodable)] +#[derive( + Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, RustcEncodable, RustcDecodable, +)] pub enum SimplifiedTypeGen - where D: Copy + Debug + Ord + Eq + Hash +where + D: Copy + Debug + Ord + Eq + Hash, { BoolSimplifiedType, CharSimplifiedType, @@ -55,11 +57,11 @@ pub enum SimplifiedTypeGen /// then we can't say much about whether two types would unify. Put another way, /// `can_simplify_params` should be true if type parameters appear free in `ty` and `false` if they /// are to be considered bound. -pub fn simplify_type<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, - ty: Ty<'_>, - can_simplify_params: bool) - -> Option -{ +pub fn simplify_type<'a, 'gcx, 'tcx>( + tcx: TyCtxt<'a, 'gcx, 'tcx>, + ty: Ty<'_>, + can_simplify_params: bool, +) -> Option { match ty.sty { ty::Bool => Some(BoolSimplifiedType), ty::Char => Some(CharSimplifiedType), @@ -84,23 +86,14 @@ pub fn simplify_type<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, // view of possibly unifying simplify_type(tcx, ty, can_simplify_params) } - ty::FnDef(def_id, _) | - ty::Closure(def_id, _) => { - Some(ClosureSimplifiedType(def_id)) - } - ty::Generator(def_id, _, _) => { - Some(GeneratorSimplifiedType(def_id)) - } + ty::FnDef(def_id, _) | ty::Closure(def_id, _) => Some(ClosureSimplifiedType(def_id)), + ty::Generator(def_id, _, _) => Some(GeneratorSimplifiedType(def_id)), ty::GeneratorWitness(ref tys) => { Some(GeneratorWitnessSimplifiedType(tys.skip_binder().len())) } ty::Never => Some(NeverSimplifiedType), - ty::Tuple(ref tys) => { - Some(TupleSimplifiedType(tys.len())) - } - ty::FnPtr(ref f) => { - Some(FunctionSimplifiedType(f.skip_binder().inputs().len())) - } + ty::Tuple(ref tys) => Some(TupleSimplifiedType(tys.len())), + ty::FnPtr(ref f) => Some(FunctionSimplifiedType(f.skip_binder().inputs().len())), ty::UnnormalizedProjection(..) => bug!("only used with chalk-engine"), ty::Projection(_) | ty::Param(_) => { if can_simplify_params { @@ -114,20 +107,17 @@ pub fn simplify_type<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, None } } - ty::Opaque(def_id, _) => { - Some(OpaqueSimplifiedType(def_id)) - } - ty::Foreign(def_id) => { - Some(ForeignSimplifiedType(def_id)) - } + ty::Opaque(def_id, _) => Some(OpaqueSimplifiedType(def_id)), + ty::Foreign(def_id) => Some(ForeignSimplifiedType(def_id)), ty::Placeholder(..) | ty::Bound(..) 
| ty::Infer(_) | ty::Error => None, } } impl SimplifiedTypeGen { pub fn map_def(self, map: F) -> SimplifiedTypeGen - where F: Fn(D) -> U, - U: Copy + Debug + Ord + Eq + Hash, + where + F: Fn(D) -> U, + U: Copy + Debug + Ord + Eq + Hash, { match self { BoolSimplifiedType => BoolSimplifiedType, @@ -155,22 +145,24 @@ impl SimplifiedTypeGen { } impl<'a, 'gcx, D> HashStable> for SimplifiedTypeGen - where D: Copy + Debug + Ord + Eq + Hash + - HashStable>, +where + D: Copy + Debug + Ord + Eq + Hash + HashStable>, { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { - BoolSimplifiedType | - CharSimplifiedType | - StrSimplifiedType | - ArraySimplifiedType | - PtrSimplifiedType | - NeverSimplifiedType | - ParameterSimplifiedType | - MarkerTraitObjectSimplifiedType => { + BoolSimplifiedType + | CharSimplifiedType + | StrSimplifiedType + | ArraySimplifiedType + | PtrSimplifiedType + | NeverSimplifiedType + | ParameterSimplifiedType + | MarkerTraitObjectSimplifiedType => { // nothing to do } IntSimplifiedType(t) => t.hash_stable(hcx, hasher), diff --git a/src/librustc/ty/flags.rs b/src/librustc/ty/flags.rs index 6ee24187e3326..48ad0a9e09901 100644 --- a/src/librustc/ty/flags.rs +++ b/src/librustc/ty/flags.rs @@ -57,15 +57,14 @@ impl FlagComputation { fn add_sty(&mut self, st: &ty::TyKind<'_>) { match st { - &ty::Bool | - &ty::Char | - &ty::Int(_) | - &ty::Float(_) | - &ty::Uint(_) | - &ty::Never | - &ty::Str | - &ty::Foreign(..) => { - } + &ty::Bool + | &ty::Char + | &ty::Int(_) + | &ty::Float(_) + | &ty::Uint(_) + | &ty::Never + | &ty::Str + | &ty::Foreign(..) => {} // You might think that we could just return Error for // any type containing Error as a component, and get @@ -74,9 +73,7 @@ impl FlagComputation { // But doing so caused sporadic memory corruption, and // neither I (tjc) nor nmatsakis could figure out why, // so we're doing it this way. - &ty::Error => { - self.add_flags(TypeFlags::HAS_TY_ERR) - } + &ty::Error => self.add_flags(TypeFlags::HAS_TY_ERR), &ty::Param(ref p) => { self.add_flags(TypeFlags::HAS_FREE_LOCAL_NAMES); @@ -117,14 +114,9 @@ impl FlagComputation { self.add_flags(TypeFlags::HAS_FREE_LOCAL_NAMES); // it might, right? self.add_flags(TypeFlags::HAS_TY_INFER); match infer { - ty::FreshTy(_) | - ty::FreshIntTy(_) | - ty::FreshFloatTy(_) => { - } + ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_) => {} - ty::TyVar(_) | - ty::IntVar(_) | - ty::FloatVar(_) => { + ty::TyVar(_) | ty::IntVar(_) | ty::FloatVar(_) => { self.add_flags(TypeFlags::KEEP_IN_LOCAL_TCX) } } @@ -147,7 +139,7 @@ impl FlagComputation { &ty::UnnormalizedProjection(ref data) => { self.add_flags(TypeFlags::HAS_PROJECTION); self.add_projection_ty(data); - }, + } &ty::Opaque(_, substs) => { self.add_flags(TypeFlags::HAS_PROJECTION); @@ -176,9 +168,7 @@ impl FlagComputation { self.add_const(len); } - &ty::Slice(tt) => { - self.add_ty(tt) - } + &ty::Slice(tt) => self.add_ty(tt), &ty::RawPtr(ref m) => { self.add_ty(m.ty); diff --git a/src/librustc/ty/fold.rs b/src/librustc/ty/fold.rs index 0a72f733b51e5..ab670c229f728 100644 --- a/src/librustc/ty/fold.rs +++ b/src/librustc/ty/fold.rs @@ -29,8 +29,8 @@ //! These methods return true to indicate that the visitor has found what it is looking for //! and does not need to visit anything else. 
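The fast_reject::simplify_type hunks a little above implement a classic fast-reject key: map each type to a coarse, cheap-to-compare shape so obviously incompatible candidates can be discarded before any expensive matching. A hypothetical illustration of the same idea on a toy value type (Json and Shape are invented for the example, not rustc types):

#[derive(Debug, Clone, PartialEq)]
enum Json {
    Null,
    Bool(bool),
    Number(f64),
    Array(Vec<Json>),
}

#[derive(Debug, PartialEq, Eq, Hash)]
enum Shape {
    Null,
    Bool,
    Number,
    // Keep a little structure, like TupleSimplifiedType(len) does for tuples.
    Array(usize),
}

fn simplify(v: &Json) -> Shape {
    match v {
        Json::Null => Shape::Null,
        Json::Bool(_) => Shape::Bool,
        Json::Number(_) => Shape::Number,
        Json::Array(xs) => Shape::Array(xs.len()),
    }
}

fn main() {
    let pattern = Json::Array(vec![Json::Null, Json::Bool(true)]);
    let candidate = Json::Number(1.0);
    // Different shapes: no need to compare element by element.
    assert_ne!(simplify(&pattern), simplify(&candidate));
}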
-use mir::interpret::ConstValue; use hir::def_id::DefId; +use mir::interpret::ConstValue; use ty::{self, Binder, Ty, TyCtxt, TypeFlags}; use std::collections::BTreeMap; @@ -58,7 +58,9 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { /// If `binder` is `ty::INNERMOST`, this indicates whether /// there are any late-bound regions that appear free. fn has_vars_bound_at_or_above(&self, binder: ty::DebruijnIndex) -> bool { - self.visit_with(&mut HasEscapingVarsVisitor { outer_index: binder }) + self.visit_with(&mut HasEscapingVarsVisitor { + outer_index: binder, + }) } /// True if this `self` has any regions that escape `binder` (and @@ -129,7 +131,6 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { /// A visitor that does not recurse into types, works like `fn walk_shallow` in `Ty`. fn visit_tys_shallow(&self, visit: impl FnMut(Ty<'tcx>) -> bool) -> bool { - pub struct Visitor(F); impl<'tcx, F: FnMut(Ty<'tcx>) -> bool> TypeVisitor<'tcx> for Visitor { @@ -147,11 +148,12 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { /// default implementation that does an "identity" fold. Within each /// identity fold, it should invoke `foo.fold_with(self)` to fold each /// sub-item. -pub trait TypeFolder<'gcx: 'tcx, 'tcx> : Sized { +pub trait TypeFolder<'gcx: 'tcx, 'tcx>: Sized { fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx>; fn fold_binder(&mut self, t: &Binder) -> Binder - where T : TypeFoldable<'tcx> + where + T: TypeFoldable<'tcx>, { t.super_fold_with(self) } @@ -169,7 +171,7 @@ pub trait TypeFolder<'gcx: 'tcx, 'tcx> : Sized { } } -pub trait TypeVisitor<'tcx> : Sized { +pub trait TypeVisitor<'tcx>: Sized { fn visit_binder>(&mut self, t: &Binder) -> bool { t.super_visit_with(self) } @@ -190,9 +192,10 @@ pub trait TypeVisitor<'tcx> : Sized { /////////////////////////////////////////////////////////////////////////// // Some sample folders -pub struct BottomUpFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a, F, G> - where F: FnMut(Ty<'tcx>) -> Ty<'tcx>, - G: FnMut(ty::Region<'tcx>) -> ty::Region<'tcx>, +pub struct BottomUpFolder<'a, 'gcx: 'a + 'tcx, 'tcx: 'a, F, G> +where + F: FnMut(Ty<'tcx>) -> Ty<'tcx>, + G: FnMut(ty::Region<'tcx>) -> ty::Region<'tcx>, { pub tcx: TyCtxt<'a, 'gcx, 'tcx>, pub fldop: F, @@ -200,10 +203,13 @@ pub struct BottomUpFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a, F, G> } impl<'a, 'gcx, 'tcx, F, G> TypeFolder<'gcx, 'tcx> for BottomUpFolder<'a, 'gcx, 'tcx, F, G> - where F: FnMut(Ty<'tcx>) -> Ty<'tcx>, - G: FnMut(ty::Region<'tcx>) -> ty::Region<'tcx>, +where + F: FnMut(Ty<'tcx>) -> Ty<'tcx>, + G: FnMut(ty::Region<'tcx>) -> ty::Region<'tcx>, { - fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx } + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { + self.tcx + } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { let t1 = ty.super_fold_with(self); @@ -222,11 +228,9 @@ impl<'a, 'gcx, 'tcx, F, G> TypeFolder<'gcx, 'tcx> for BottomUpFolder<'a, 'gcx, ' impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Collects the free and escaping regions in `value` into `region_set`. 
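BottomUpFolder, reformatted above, is the simplest of the folders: rebuild the children first, then let the caller's closure rewrite the rebuilt node. A minimal sketch of that traversal order on a toy expression tree (Expr and fold_bottom_up are invented names, standing in for Ty and super_fold_with):

#[derive(Debug, Clone, PartialEq)]
enum Expr {
    Num(i64),
    Add(Box<Expr>, Box<Expr>),
}

fn fold_bottom_up(e: &Expr, f: &mut impl FnMut(Expr) -> Expr) -> Expr {
    // "Super fold": recurse into children before touching this node.
    let rebuilt = match e {
        Expr::Num(n) => Expr::Num(*n),
        Expr::Add(a, b) => Expr::Add(
            Box::new(fold_bottom_up(a, f)),
            Box::new(fold_bottom_up(b, f)),
        ),
    };
    f(rebuilt)
}

fn main() {
    // Constant-fold additions of two literals, innermost first.
    let expr = Expr::Add(
        Box::new(Expr::Add(Box::new(Expr::Num(1)), Box::new(Expr::Num(2)))),
        Box::new(Expr::Num(3)),
    );
    let folded = fold_bottom_up(&expr, &mut |e| match e {
        Expr::Add(a, b) => match (*a, *b) {
            (Expr::Num(x), Expr::Num(y)) => Expr::Num(x + y),
            (a, b) => Expr::Add(Box::new(a), Box::new(b)),
        },
        other => other,
    });
    assert_eq!(folded, Expr::Num(6));
}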
Returns /// whether any late-bound regions were skipped - pub fn collect_regions(self, - value: &T, - region_set: &mut FxHashSet>) - -> bool - where T : TypeFoldable<'tcx> + pub fn collect_regions(self, value: &T, region_set: &mut FxHashSet>) -> bool + where + T: TypeFoldable<'tcx>, { let mut have_bound_regions = false; self.fold_regions(value, &mut have_bound_regions, |r, d| { @@ -246,7 +250,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { mut f: impl FnMut(ty::Region<'tcx>, ty::DebruijnIndex) -> ty::Region<'tcx>, ) -> T where - T : TypeFoldable<'tcx>, + T: TypeFoldable<'tcx>, { value.fold_with(&mut RegionFolder::new(self, skipped_regions, &mut f)) } @@ -280,7 +284,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { ) -> bool { return value.visit_with(&mut RegionVisitor { outer_index: ty::INNERMOST, - callback + callback, }); struct RegionVisitor { @@ -306,7 +310,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } impl<'tcx, F> TypeVisitor<'tcx> for RegionVisitor - where F: FnMut(ty::Region<'tcx>) -> bool + where + F: FnMut(ty::Region<'tcx>) -> bool, { fn visit_binder>(&mut self, t: &Binder) -> bool { self.outer_index.shift_in(1); @@ -345,7 +350,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// visited by this folder; only regions that occur free will be /// visited by `fld_r`. -pub struct RegionFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +pub struct RegionFolder<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'tcx>, skipped_regions: &'a mut bool, @@ -357,10 +362,8 @@ pub struct RegionFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { /// Callback invokes for each free region. The `DebruijnIndex` /// points to the binder *just outside* the ones we have passed /// through. - fold_region_fn: &'a mut (dyn FnMut( - ty::Region<'tcx>, - ty::DebruijnIndex, - ) -> ty::Region<'tcx> + 'a), + fold_region_fn: + &'a mut (dyn FnMut(ty::Region<'tcx>, ty::DebruijnIndex) -> ty::Region<'tcx> + 'a), } impl<'a, 'gcx, 'tcx> RegionFolder<'a, 'gcx, 'tcx> { @@ -380,7 +383,9 @@ impl<'a, 'gcx, 'tcx> RegionFolder<'a, 'gcx, 'tcx> { } impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionFolder<'a, 'gcx, 'tcx> { - fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx } + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { + self.tcx + } fn fold_binder>(&mut self, t: &ty::Binder) -> ty::Binder { self.current_index.shift_in(1); @@ -392,14 +397,18 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionFolder<'a, 'gcx, 'tcx> { fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { match *r { ty::ReLateBound(debruijn, _) if debruijn < self.current_index => { - debug!("RegionFolder.fold_region({:?}) skipped bound region (current index={:?})", - r, self.current_index); + debug!( + "RegionFolder.fold_region({:?}) skipped bound region (current index={:?})", + r, self.current_index + ); *self.skipped_regions = true; r } _ => { - debug!("RegionFolder.fold_region({:?}) folding free region (current_index={:?})", - r, self.current_index); + debug!( + "RegionFolder.fold_region({:?}) folding free region (current_index={:?})", + r, self.current_index + ); (self.fold_region_fn)(r, self.current_index) } } @@ -422,13 +431,10 @@ struct BoundVarReplacer<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { } impl<'a, 'gcx, 'tcx> BoundVarReplacer<'a, 'gcx, 'tcx> { - fn new( - tcx: TyCtxt<'a, 'gcx, 'tcx>, - fld_r: &'a mut F, - fld_t: &'a mut G - ) -> Self - where F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>, - G: FnMut(ty::BoundTy) -> ty::Ty<'tcx> + fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, fld_r: &'a mut F, fld_t: &'a mut G) -> Self + where + F: 
FnMut(ty::BoundRegion) -> ty::Region<'tcx>, + G: FnMut(ty::BoundTy) -> ty::Ty<'tcx>, { BoundVarReplacer { tcx, @@ -440,7 +446,9 @@ impl<'a, 'gcx, 'tcx> BoundVarReplacer<'a, 'gcx, 'tcx> { } impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for BoundVarReplacer<'a, 'gcx, 'tcx> { - fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx } + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { + self.tcx + } fn fold_binder>(&mut self, t: &ty::Binder) -> ty::Binder { self.current_index.shift_in(1); @@ -455,11 +463,7 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for BoundVarReplacer<'a, 'gcx, 'tcx> if debruijn == self.current_index { let fld_t = &mut self.fld_t; let ty = fld_t(bound_ty); - ty::fold::shift_vars( - self.tcx, - &ty, - self.current_index.as_u32() - ) + ty::fold::shift_vars(self.tcx, &ty, self.current_index.as_u32()) } else { t } @@ -491,7 +495,7 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for BoundVarReplacer<'a, 'gcx, 'tcx> region } } - _ => r + _ => r, } } } @@ -511,10 +515,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn replace_late_bound_regions( self, value: &Binder, - fld_r: F + fld_r: F, ) -> (T, BTreeMap>) - where F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>, - T: TypeFoldable<'tcx> + where + F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>, + T: TypeFoldable<'tcx>, { // identity for bound types let fld_t = |bound_ty| self.mk_ty(ty::Bound(ty::INNERMOST, bound_ty)); @@ -527,11 +532,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self, value: &T, mut fld_r: F, - mut fld_t: G + mut fld_t: G, ) -> (T, BTreeMap>) - where F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>, - G: FnMut(ty::BoundTy) -> ty::Ty<'tcx>, - T: TypeFoldable<'tcx> + where + F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>, + G: FnMut(ty::BoundTy) -> ty::Ty<'tcx>, + T: TypeFoldable<'tcx>, { use rustc_data_structures::fx::FxHashMap; @@ -541,13 +547,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { if !value.has_escaping_bound_vars() { (value.clone(), region_map) } else { - let mut real_fld_r = |br| { - *region_map.entry(br).or_insert_with(|| fld_r(br)) - }; + let mut real_fld_r = |br| *region_map.entry(br).or_insert_with(|| fld_r(br)); - let mut real_fld_t = |bound_ty| { - *type_map.entry(bound_ty).or_insert_with(|| fld_t(bound_ty)) - }; + let mut real_fld_t = + |bound_ty| *type_map.entry(bound_ty).or_insert_with(|| fld_t(bound_ty)); let mut replacer = BoundVarReplacer::new(self, &mut real_fld_r, &mut real_fld_t); let result = value.fold_with(&mut replacer); @@ -562,11 +565,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self, value: &Binder, fld_r: F, - fld_t: G + fld_t: G, ) -> (T, BTreeMap>) - where F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>, - G: FnMut(ty::BoundTy) -> ty::Ty<'tcx>, - T: TypeFoldable<'tcx> + where + F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>, + G: FnMut(ty::BoundTy) -> ty::Ty<'tcx>, + T: TypeFoldable<'tcx>, { self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t) } @@ -576,22 +580,25 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn liberate_late_bound_regions( &self, all_outlive_scope: DefId, - value: &ty::Binder + value: &ty::Binder, ) -> T - where T: TypeFoldable<'tcx> { + where + T: TypeFoldable<'tcx>, + { self.replace_late_bound_regions(value, |br| { self.mk_region(ty::ReFree(ty::FreeRegion { scope: all_outlive_scope, - bound_region: br + bound_region: br, })) - }).0 + }) + .0 } /// Flattens multiple binding levels into one. So `for<'a> for<'b> Foo` /// becomes `for<'a,'b> Foo`. 
- pub fn flatten_late_bound_regions(self, bound2_value: &Binder>) - -> Binder - where T: TypeFoldable<'tcx> + pub fn flatten_late_bound_regions(self, bound2_value: &Binder>) -> Binder + where + T: TypeFoldable<'tcx>, { let bound0_value = bound2_value.skip_binder().skip_binder(); let value = self.fold_regions(bound0_value, &mut false, |region, current_depth| { @@ -604,9 +611,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { assert!(debruijn == current_depth); self.mk_region(ty::ReLateBound(current_depth, br)) } - _ => { - region - } + _ => region, } }); Binder::bind(value) @@ -616,24 +621,34 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// by `value`, meaning that if we instantiate those LBR with /// variables and equate `value` with something else, those /// variables will also be equated. - pub fn collect_constrained_late_bound_regions(&self, value: &Binder) - -> FxHashSet - where T : TypeFoldable<'tcx> + pub fn collect_constrained_late_bound_regions( + &self, + value: &Binder, + ) -> FxHashSet + where + T: TypeFoldable<'tcx>, { self.collect_late_bound_regions(value, true) } /// Returns a set of all late-bound regions that appear in `value` anywhere. - pub fn collect_referenced_late_bound_regions(&self, value: &Binder) - -> FxHashSet - where T : TypeFoldable<'tcx> + pub fn collect_referenced_late_bound_regions( + &self, + value: &Binder, + ) -> FxHashSet + where + T: TypeFoldable<'tcx>, { self.collect_late_bound_regions(value, false) } - fn collect_late_bound_regions(&self, value: &Binder, just_constraint: bool) - -> FxHashSet - where T : TypeFoldable<'tcx> + fn collect_late_bound_regions( + &self, + value: &Binder, + just_constraint: bool, + ) -> FxHashSet + where + T: TypeFoldable<'tcx>, { let mut collector = LateBoundRegionsCollector::new(just_constraint); let result = value.skip_binder().visit_with(&mut collector); @@ -644,9 +659,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Replace any late-bound regions bound in `value` with `'erased`. Useful in codegen but also /// method lookup and a few other places where precise region relationships are not required. pub fn erase_late_bound_regions(self, value: &Binder) -> T - where T : TypeFoldable<'tcx> + where + T: TypeFoldable<'tcx>, { - self.replace_late_bound_regions(value, |_| self.types.re_erased).0 + self.replace_late_bound_regions(value, |_| self.types.re_erased) + .0 } /// Rewrite any late-bound regions so that they are anonymous. Region numbers are @@ -658,13 +675,17 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// structurally identical. For example, `for<'a, 'b> fn(&'a isize, &'b isize)` and /// `for<'a, 'b> fn(&'b isize, &'a isize)` will become identical after anonymization. 
pub fn anonymize_late_bound_regions(self, sig: &Binder) -> Binder - where T : TypeFoldable<'tcx>, + where + T: TypeFoldable<'tcx>, { let mut counter = 0; - Binder::bind(self.replace_late_bound_regions(sig, |_| { - counter += 1; - self.mk_region(ty::ReLateBound(ty::INNERMOST, ty::BrAnon(counter))) - }).0) + Binder::bind( + self.replace_late_bound_regions(sig, |_| { + counter += 1; + self.mk_region(ty::ReLateBound(ty::INNERMOST, ty::BrAnon(counter))) + }) + .0, + ) } } @@ -683,7 +704,7 @@ enum Direction { Out, } -struct Shifter<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +struct Shifter<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'tcx>, current_index: ty::DebruijnIndex, amount: u32, @@ -702,7 +723,9 @@ impl Shifter<'a, 'gcx, 'tcx> { } impl TypeFolder<'gcx, 'tcx> for Shifter<'a, 'gcx, 'tcx> { - fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx } + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { + self.tcx + } fn fold_binder>(&mut self, t: &ty::Binder) -> ty::Binder { self.current_index.shift_in(1); @@ -728,7 +751,7 @@ impl TypeFolder<'gcx, 'tcx> for Shifter<'a, 'gcx, 'tcx> { self.tcx.mk_region(shifted) } } - _ => r + _ => r, } } @@ -745,9 +768,7 @@ impl TypeFolder<'gcx, 'tcx> for Shifter<'a, 'gcx, 'tcx> { debruijn.shifted_out(self.amount) } }; - self.tcx.mk_ty( - ty::Bound(debruijn, bound_ty) - ) + self.tcx.mk_ty(ty::Bound(debruijn, bound_ty)) } } @@ -759,36 +780,30 @@ impl TypeFolder<'gcx, 'tcx> for Shifter<'a, 'gcx, 'tcx> { pub fn shift_region<'a, 'gcx, 'tcx>( tcx: TyCtxt<'a, 'gcx, 'tcx>, region: ty::Region<'tcx>, - amount: u32 + amount: u32, ) -> ty::Region<'tcx> { match region { ty::ReLateBound(debruijn, br) if amount > 0 => { tcx.mk_region(ty::ReLateBound(debruijn.shifted_in(amount), *br)) } - _ => { - region - } + _ => region, } } -pub fn shift_vars<'a, 'gcx, 'tcx, T>( - tcx: TyCtxt<'a, 'gcx, 'tcx>, - value: &T, - amount: u32 -) -> T where T: TypeFoldable<'tcx> { - debug!("shift_vars(value={:?}, amount={})", - value, amount); +pub fn shift_vars<'a, 'gcx, 'tcx, T>(tcx: TyCtxt<'a, 'gcx, 'tcx>, value: &T, amount: u32) -> T +where + T: TypeFoldable<'tcx>, +{ + debug!("shift_vars(value={:?}, amount={})", value, amount); value.fold_with(&mut Shifter::new(tcx, amount, Direction::In)) } -pub fn shift_out_vars<'a, 'gcx, 'tcx, T>( - tcx: TyCtxt<'a, 'gcx, 'tcx>, - value: &T, - amount: u32 -) -> T where T: TypeFoldable<'tcx> { - debug!("shift_out_vars(value={:?}, amount={})", - value, amount); +pub fn shift_out_vars<'a, 'gcx, 'tcx, T>(tcx: TyCtxt<'a, 'gcx, 'tcx>, value: &T, amount: u32) -> T +where + T: TypeFoldable<'tcx>, +{ + debug!("shift_out_vars(value={:?}, amount={})", value, amount); value.fold_with(&mut Shifter::new(tcx, amount, Direction::Out)) } @@ -854,20 +869,26 @@ struct HasTypeFlagsVisitor { impl<'tcx> TypeVisitor<'tcx> for HasTypeFlagsVisitor { fn visit_ty(&mut self, t: Ty<'_>) -> bool { - debug!("HasTypeFlagsVisitor: t={:?} t.flags={:?} self.flags={:?}", t, t.flags, self.flags); + debug!( + "HasTypeFlagsVisitor: t={:?} t.flags={:?} self.flags={:?}", + t, t.flags, self.flags + ); t.flags.intersects(self.flags) } fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool { let flags = r.type_flags(); - debug!("HasTypeFlagsVisitor: r={:?} r.flags={:?} self.flags={:?}", r, flags, self.flags); + debug!( + "HasTypeFlagsVisitor: r={:?} r.flags={:?} self.flags={:?}", + r, flags, self.flags + ); flags.intersects(self.flags) } fn visit_const(&mut self, c: &'tcx ty::Const<'tcx>) -> bool { if let ConstValue::Unevaluated(..) 
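The Shifter / shift_vars / shift_out_vars hunks above adjust De Bruijn indices when a value is moved under additional binders. The rule is the one sketched below on a toy lambda calculus: indices at or above the current cutoff point outside the term and must be shifted, indices below it are bound inside and stay put, and entering a binder raises the cutoff (the analogue of DebruijnIndex::shift_in(1)). Term and shift_in here are invented names, not rustc's Ty/Region machinery.

#[derive(Debug, Clone, PartialEq)]
enum Term {
    Var(u32),              // De Bruijn index
    Lam(Box<Term>),        // one binder
    App(Box<Term>, Box<Term>),
}

fn shift_in(t: &Term, amount: u32, cutoff: u32) -> Term {
    match t {
        // Only indices at or above the cutoff escape the current term.
        Term::Var(i) if *i >= cutoff => Term::Var(i + amount),
        Term::Var(i) => Term::Var(*i),
        // Passing under a binder raises the cutoff.
        Term::Lam(body) => Term::Lam(Box::new(shift_in(body, amount, cutoff + 1))),
        Term::App(a, b) => Term::App(
            Box::new(shift_in(a, amount, cutoff)),
            Box::new(shift_in(b, amount, cutoff)),
        ),
    }
}

fn main() {
    // λ. (0 1): index 0 is bound by the lambda, index 1 escapes it.
    let t = Term::Lam(Box::new(Term::App(
        Box::new(Term::Var(0)),
        Box::new(Term::Var(1)),
    )));
    // Moving the term under one extra binder shifts only the escaping index.
    let shifted = shift_in(&t, 1, 0);
    assert_eq!(
        shifted,
        Term::Lam(Box::new(Term::App(
            Box::new(Term::Var(0)),
            Box::new(Term::Var(2)),
        )))
    );
}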
= c.val { - let projection_flags = TypeFlags::HAS_NORMALIZABLE_PROJECTION | - TypeFlags::HAS_PROJECTION; + let projection_flags = + TypeFlags::HAS_NORMALIZABLE_PROJECTION | TypeFlags::HAS_PROJECTION; if projection_flags.intersects(self.flags) { return true; } @@ -916,8 +937,10 @@ impl<'tcx> TypeVisitor<'tcx> for LateBoundRegionsCollector { // in the normalized form if self.just_constrained { match t.sty { - ty::Projection(..) | ty::Opaque(..) => { return false; } - _ => { } + ty::Projection(..) | ty::Opaque(..) => { + return false; + } + _ => {} } } @@ -926,7 +949,7 @@ impl<'tcx> TypeVisitor<'tcx> for LateBoundRegionsCollector { fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool { if let ty::ReLateBound(debruijn, br) = *r { - if debruijn == self.current_index { + if debruijn == self.current_index { self.regions.insert(br); } } diff --git a/src/librustc/ty/inhabitedness/def_id_forest.rs b/src/librustc/ty/inhabitedness/def_id_forest.rs index 41fd88607e893..3b16a31c4710b 100644 --- a/src/librustc/ty/inhabitedness/def_id_forest.rs +++ b/src/librustc/ty/inhabitedness/def_id_forest.rs @@ -1,5 +1,5 @@ -use std::mem; use smallvec::SmallVec; +use std::mem; use syntax::ast::CRATE_NODE_ID; use ty::context::TyCtxt; use ty::{DefId, DefIdTree}; @@ -41,9 +41,7 @@ impl<'a, 'gcx, 'tcx> DefIdForest { pub fn from_id(id: DefId) -> DefIdForest { let mut root_ids = SmallVec::new(); root_ids.push(id); - DefIdForest { - root_ids, - } + DefIdForest { root_ids } } /// Test whether the forest is empty. @@ -52,17 +50,16 @@ impl<'a, 'gcx, 'tcx> DefIdForest { } /// Test whether the forest contains a given DefId. - pub fn contains(&self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - id: DefId) -> bool - { - self.root_ids.iter().any(|root_id| tcx.is_descendant_of(id, *root_id)) + pub fn contains(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, id: DefId) -> bool { + self.root_ids + .iter() + .any(|root_id| tcx.is_descendant_of(id, *root_id)) } /// Calculate the intersection of a collection of forests. - pub fn intersection(tcx: TyCtxt<'a, 'gcx, 'tcx>, - iter: I) -> DefIdForest - where I: IntoIterator + pub fn intersection(tcx: TyCtxt<'a, 'gcx, 'tcx>, iter: I) -> DefIdForest + where + I: IntoIterator, { let mut iter = iter.into_iter(); let mut ret = if let Some(first) = iter.next() { @@ -88,7 +85,12 @@ impl<'a, 'gcx, 'tcx> DefIdForest { } ret.root_ids.extend(old_ret.drain()); - next_ret.extend(next_forest.root_ids.into_iter().filter(|&id| ret.contains(tcx, id))); + next_ret.extend( + next_forest + .root_ids + .into_iter() + .filter(|&id| ret.contains(tcx, id)), + ); mem::swap(&mut next_ret, &mut ret.root_ids); next_ret.drain(); @@ -97,14 +99,18 @@ impl<'a, 'gcx, 'tcx> DefIdForest { } /// Calculate the union of a collection of forests. 
- pub fn union(tcx: TyCtxt<'a, 'gcx, 'tcx>, - iter: I) -> DefIdForest - where I: IntoIterator + pub fn union(tcx: TyCtxt<'a, 'gcx, 'tcx>, iter: I) -> DefIdForest + where + I: IntoIterator, { let mut ret = DefIdForest::empty(); let mut next_ret = SmallVec::new(); for next_forest in iter { - next_ret.extend(ret.root_ids.drain().filter(|&id| !next_forest.contains(tcx, id))); + next_ret.extend( + ret.root_ids + .drain() + .filter(|&id| !next_forest.contains(tcx, id)), + ); for id in next_forest.root_ids { if !next_ret.contains(&id) { @@ -118,4 +124,3 @@ impl<'a, 'gcx, 'tcx> DefIdForest { ret } } - diff --git a/src/librustc/ty/inhabitedness/mod.rs b/src/librustc/ty/inhabitedness/mod.rs index 29d201c1179e5..1db8aab57da50 100644 --- a/src/librustc/ty/inhabitedness/mod.rs +++ b/src/librustc/ty/inhabitedness/mod.rs @@ -1,8 +1,8 @@ use ty::context::TyCtxt; -use ty::{AdtDef, VariantDef, FieldDef, Ty, TyS}; -use ty::{DefId, Substs}; -use ty::{AdtKind, Visibility}; use ty::TyKind::*; +use ty::{AdtDef, FieldDef, Ty, TyS, VariantDef}; +use ty::{AdtKind, Visibility}; +use ty::{DefId, Substs}; pub use self::def_id_forest::DefIdForest; @@ -105,25 +105,31 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { ty.uninhabited_from(self) } - pub fn is_enum_variant_uninhabited_from(self, - module: DefId, - variant: &'tcx VariantDef, - substs: &'tcx Substs<'tcx>) - -> bool - { - self.variant_inhabitedness_forest(variant, substs).contains(self, module) + pub fn is_enum_variant_uninhabited_from( + self, + module: DefId, + variant: &'tcx VariantDef, + substs: &'tcx Substs<'tcx>, + ) -> bool { + self.variant_inhabitedness_forest(variant, substs) + .contains(self, module) } - pub fn is_variant_uninhabited_from_all_modules(self, - variant: &'tcx VariantDef, - substs: &'tcx Substs<'tcx>) - -> bool - { - !self.variant_inhabitedness_forest(variant, substs).is_empty() + pub fn is_variant_uninhabited_from_all_modules( + self, + variant: &'tcx VariantDef, + substs: &'tcx Substs<'tcx>, + ) -> bool { + !self + .variant_inhabitedness_forest(variant, substs) + .is_empty() } - fn variant_inhabitedness_forest(self, variant: &'tcx VariantDef, substs: &'tcx Substs<'tcx>) - -> DefIdForest { + fn variant_inhabitedness_forest( + self, + variant: &'tcx VariantDef, + substs: &'tcx Substs<'tcx>, + ) -> DefIdForest { // Determine the ADT kind: let adt_def_id = self.adt_def_id_of_variant(variant); let adt_kind = self.adt_def(adt_def_id).adt_kind(); @@ -138,11 +144,14 @@ impl<'a, 'gcx, 'tcx> AdtDef { fn uninhabited_from( &self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - substs: &'tcx Substs<'tcx>) -> DefIdForest - { - DefIdForest::intersection(tcx, self.variants.iter().map(|v| { - v.uninhabited_from(tcx, substs, self.adt_kind()) - })) + substs: &'tcx Substs<'tcx>, + ) -> DefIdForest { + DefIdForest::intersection( + tcx, + self.variants + .iter() + .map(|v| v.uninhabited_from(tcx, substs, self.adt_kind())), + ) } } @@ -152,8 +161,8 @@ impl<'a, 'gcx, 'tcx> VariantDef { &self, tcx: TyCtxt<'a, 'gcx, 'tcx>, substs: &'tcx Substs<'tcx>, - adt_kind: AdtKind) -> DefIdForest - { + adt_kind: AdtKind, + ) -> DefIdForest { let is_enum = match adt_kind { // For now, `union`s are never considered uninhabited. // The precise semantics of inhabitedness with respect to unions is currently undecided. 
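The DefIdForest hunks above rely on one compact representation: a set of whole subtrees is stored as just its root ids, and membership means "some root is an ancestor of this id". A small sketch of that idea over a made-up parent map (Tree, Forest, and the u32 ids are all hypothetical):

use std::collections::HashMap;

struct Tree {
    parent: HashMap<u32, u32>, // child -> parent; the root has no entry
}

impl Tree {
    fn is_descendant_of(&self, mut id: u32, ancestor: u32) -> bool {
        // Walk up the parent chain until we hit the ancestor or the root.
        loop {
            if id == ancestor {
                return true;
            }
            match self.parent.get(&id) {
                Some(&p) => id = p,
                None => return false,
            }
        }
    }
}

struct Forest {
    root_ids: Vec<u32>,
}

impl Forest {
    fn contains(&self, tree: &Tree, id: u32) -> bool {
        self.root_ids.iter().any(|&r| tree.is_descendant_of(id, r))
    }
}

fn main() {
    // 0 is the crate root; 1 and 2 are its children; 3 is a child of 1.
    let tree = Tree {
        parent: [(1, 0), (2, 0), (3, 1)].into_iter().collect(),
    };
    let forest = Forest { root_ids: vec![1] }; // the subtree rooted at module 1
    assert!(forest.contains(&tree, 3));  // 3 lives under 1
    assert!(!forest.contains(&tree, 2)); // 2 does not
}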
@@ -161,9 +170,12 @@ impl<'a, 'gcx, 'tcx> VariantDef { AdtKind::Enum => true, AdtKind::Struct => false, }; - DefIdForest::union(tcx, self.fields.iter().map(|f| { - f.uninhabited_from(tcx, substs, is_enum) - })) + DefIdForest::union( + tcx, + self.fields + .iter() + .map(|f| f.uninhabited_from(tcx, substs, is_enum)), + ) } } @@ -175,9 +187,7 @@ impl<'a, 'gcx, 'tcx> FieldDef { substs: &'tcx Substs<'tcx>, is_enum: bool, ) -> DefIdForest { - let data_uninhabitedness = move || { - self.ty(tcx, substs).uninhabited_from(tcx) - }; + let data_uninhabitedness = move || self.ty(tcx, substs).uninhabited_from(tcx); // FIXME(canndrew): Currently enum fields are (incorrectly) stored with // Visibility::Invisible so we need to override self.vis if we're // dealing with an enum. @@ -190,7 +200,7 @@ impl<'a, 'gcx, 'tcx> FieldDef { let forest = DefIdForest::from_id(from); let iter = Some(forest).into_iter().chain(Some(data_uninhabitedness())); DefIdForest::intersection(tcx, iter) - }, + } Visibility::Public => data_uninhabitedness(), } } @@ -199,17 +209,14 @@ impl<'a, 'gcx, 'tcx> FieldDef { impl<'a, 'gcx, 'tcx> TyS<'tcx> { /// Calculate the forest of DefIds from which this type is visibly uninhabited. - fn uninhabited_from(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> DefIdForest - { + fn uninhabited_from(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> DefIdForest { match self.sty { Adt(def, substs) => def.uninhabited_from(tcx, substs), Never => DefIdForest::full(tcx), Tuple(ref tys) => { - DefIdForest::union(tcx, tys.iter().map(|ty| { - ty.uninhabited_from(tcx) - })) + DefIdForest::union(tcx, tys.iter().map(|ty| ty.uninhabited_from(tcx))) } Array(ty, len) => { @@ -217,7 +224,7 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { // If the array is definitely non-empty, it's uninhabited if // the type of its elements is uninhabited. Some(n) if n != 0 => ty.uninhabited_from(tcx), - _ => DefIdForest::empty() + _ => DefIdForest::empty(), } } @@ -232,4 +239,3 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { } } } - diff --git a/src/librustc/ty/instance.rs b/src/librustc/ty/instance.rs index 78b6ffaa54e17..378105aa670cf 100644 --- a/src/librustc/ty/instance.rs +++ b/src/librustc/ty/instance.rs @@ -1,8 +1,8 @@ -use hir::Unsafety; use hir::def_id::DefId; -use ty::{self, Ty, PolyFnSig, TypeFoldable, Substs, TyCtxt}; -use traits; +use hir::Unsafety; use rustc_target::spec::abi::Abi; +use traits; +use ty::{self, PolyFnSig, Substs, Ty, TyCtxt, TypeFoldable}; use util::ppaux; use std::fmt; @@ -30,7 +30,9 @@ pub enum InstanceDef<'tcx> { Virtual(DefId, usize), /// <[mut closure] as FnOnce>::call_once - ClosureOnceShim { call_once: DefId }, + ClosureOnceShim { + call_once: DefId, + }, /// drop_in_place::; None for empty drop glue. 
DropGlue(DefId, Option>), @@ -40,16 +42,9 @@ pub enum InstanceDef<'tcx> { } impl<'a, 'tcx> Instance<'tcx> { - pub fn ty(&self, - tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> Ty<'tcx> - { + pub fn ty(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> { let ty = tcx.type_of(self.def.def_id()); - tcx.subst_and_normalize_erasing_regions( - self.substs, - ty::ParamEnv::reveal_all(), - &ty, - ) + tcx.subst_and_normalize_erasing_regions(self.substs, ty::ParamEnv::reveal_all(), &ty) } fn fn_sig_noadjust(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> PolyFnSig<'tcx> { @@ -116,14 +111,14 @@ impl<'tcx> InstanceDef<'tcx> { #[inline] pub fn def_id(&self) -> DefId { match *self { - InstanceDef::Item(def_id) | - InstanceDef::VtableShim(def_id) | - InstanceDef::FnPtrShim(def_id, _) | - InstanceDef::Virtual(def_id, _) | - InstanceDef::Intrinsic(def_id, ) | - InstanceDef::ClosureOnceShim { call_once: def_id } | - InstanceDef::DropGlue(def_id, _) | - InstanceDef::CloneShim(def_id, _) => def_id + InstanceDef::Item(def_id) + | InstanceDef::VtableShim(def_id) + | InstanceDef::FnPtrShim(def_id, _) + | InstanceDef::Virtual(def_id, _) + | InstanceDef::Intrinsic(def_id) + | InstanceDef::ClosureOnceShim { call_once: def_id } + | InstanceDef::DropGlue(def_id, _) + | InstanceDef::CloneShim(def_id, _) => def_id, } } @@ -132,36 +127,30 @@ impl<'tcx> InstanceDef<'tcx> { tcx.get_attrs(self.def_id()) } - pub fn is_inline<'a>( - &self, - tcx: TyCtxt<'a, 'tcx, 'tcx> - ) -> bool { + pub fn is_inline<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool { use hir::map::DefPathData; let def_id = match *self { ty::InstanceDef::Item(def_id) => def_id, ty::InstanceDef::DropGlue(_, Some(_)) => return false, - _ => return true + _ => return true, }; match tcx.def_key(def_id).disambiguated_data.data { - DefPathData::StructCtor | - DefPathData::EnumVariant(..) | - DefPathData::ClosureExpr => true, - _ => false + DefPathData::StructCtor | DefPathData::EnumVariant(..) | DefPathData::ClosureExpr => { + true + } + _ => false, } } - pub fn requires_local<'a>( - &self, - tcx: TyCtxt<'a, 'tcx, 'tcx> - ) -> bool { + pub fn requires_local<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool { if self.is_inline(tcx) { - return true + return true; } if let ty::InstanceDef::DropGlue(..) = *self { // Drop glue wants to be instantiated at every codegen // unit, but without an #[inline] hint. We should make this // available to normal end-users. - return true + return true; } tcx.codegen_fn_attrs(self.def_id()).requests_inline() } @@ -172,38 +161,29 @@ impl<'tcx> fmt::Display for Instance<'tcx> { ppaux::parameterized(f, self.substs, self.def_id(), &[])?; match self.def { InstanceDef::Item(_) => Ok(()), - InstanceDef::VtableShim(_) => { - write!(f, " - shim(vtable)") - } - InstanceDef::Intrinsic(_) => { - write!(f, " - intrinsic") - } - InstanceDef::Virtual(_, num) => { - write!(f, " - shim(#{})", num) - } - InstanceDef::FnPtrShim(_, ty) => { - write!(f, " - shim({:?})", ty) - } - InstanceDef::ClosureOnceShim { .. } => { - write!(f, " - shim") - } - InstanceDef::DropGlue(_, ty) => { - write!(f, " - shim({:?})", ty) - } - InstanceDef::CloneShim(_, ty) => { - write!(f, " - shim({:?})", ty) - } + InstanceDef::VtableShim(_) => write!(f, " - shim(vtable)"), + InstanceDef::Intrinsic(_) => write!(f, " - intrinsic"), + InstanceDef::Virtual(_, num) => write!(f, " - shim(#{})", num), + InstanceDef::FnPtrShim(_, ty) => write!(f, " - shim({:?})", ty), + InstanceDef::ClosureOnceShim { .. 
} => write!(f, " - shim"), + InstanceDef::DropGlue(_, ty) => write!(f, " - shim({:?})", ty), + InstanceDef::CloneShim(_, ty) => write!(f, " - shim({:?})", ty), } } } impl<'a, 'b, 'tcx> Instance<'tcx> { - pub fn new(def_id: DefId, substs: &'tcx Substs<'tcx>) - -> Instance<'tcx> { - assert!(!substs.has_escaping_bound_vars(), - "substs of instance {:?} not normalized for codegen: {:?}", - def_id, substs); - Instance { def: InstanceDef::Item(def_id), substs: substs } + pub fn new(def_id: DefId, substs: &'tcx Substs<'tcx>) -> Instance<'tcx> { + assert!( + !substs.has_escaping_bound_vars(), + "substs of instance {:?} not normalized for codegen: {:?}", + def_id, + substs + ); + Instance { + def: InstanceDef::Item(def_id), + substs: substs, + } } pub fn mono(tcx: TyCtxt<'a, 'tcx, 'b>, def_id: DefId) -> Instance<'tcx> { @@ -233,29 +213,30 @@ impl<'a, 'b, 'tcx> Instance<'tcx> { /// Presuming that coherence and type-check have succeeded, if this method is invoked /// in a monomorphic context (i.e., like during codegen), then it is guaranteed to return /// `Some`. - pub fn resolve(tcx: TyCtxt<'a, 'tcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - def_id: DefId, - substs: &'tcx Substs<'tcx>) -> Option> { + pub fn resolve( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + def_id: DefId, + substs: &'tcx Substs<'tcx>, + ) -> Option> { debug!("resolve(def_id={:?}, substs={:?})", def_id, substs); let result = if let Some(trait_def_id) = tcx.trait_of_item(def_id) { - debug!(" => associated item, attempting to find impl in param_env {:#?}", param_env); + debug!( + " => associated item, attempting to find impl in param_env {:#?}", + param_env + ); let item = tcx.associated_item(def_id); resolve_associated_item(tcx, &item, param_env, trait_def_id, substs) } else { let ty = tcx.type_of(def_id); - let item_type = tcx.subst_and_normalize_erasing_regions( - substs, - param_env, - &ty, - ); + let item_type = tcx.subst_and_normalize_erasing_regions(substs, param_env, &ty); let def = match item_type.sty { - ty::FnDef(..) if { - let f = item_type.fn_sig(tcx); - f.abi() == Abi::RustIntrinsic || - f.abi() == Abi::PlatformIntrinsic - } => + ty::FnDef(..) 
+ if { + let f = item_type.fn_sig(tcx); + f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic + } => { debug!(" => intrinsic"); ty::InstanceDef::Intrinsic(def_id) @@ -278,17 +259,22 @@ impl<'a, 'b, 'tcx> Instance<'tcx> { }; Some(Instance { def: def, - substs: substs + substs: substs, }) }; - debug!("resolve(def_id={:?}, substs={:?}) = {:?}", def_id, substs, result); + debug!( + "resolve(def_id={:?}, substs={:?}) = {:?}", + def_id, substs, result + ); result } - pub fn resolve_for_vtable(tcx: TyCtxt<'a, 'tcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - def_id: DefId, - substs: &'tcx Substs<'tcx>) -> Option> { + pub fn resolve_for_vtable( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + def_id: DefId, + substs: &'tcx Substs<'tcx>, + ) -> Option> { debug!("resolve(def_id={:?}, substs={:?})", def_id, substs); let fn_sig = tcx.fn_sig(def_id); let is_vtable_shim = @@ -308,14 +294,13 @@ impl<'a, 'b, 'tcx> Instance<'tcx> { tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId, substs: ty::ClosureSubsts<'tcx>, - requested_kind: ty::ClosureKind) - -> Instance<'tcx> - { + requested_kind: ty::ClosureKind, + ) -> Instance<'tcx> { let actual_kind = substs.closure_kind(def_id, tcx); match needs_fn_once_adapter_shim(actual_kind, requested_kind) { Ok(true) => fn_once_adapter_instance(tcx, def_id, substs), - _ => Instance::new(def_id, substs.substs) + _ => Instance::new(def_id, substs.substs), } } @@ -336,11 +321,13 @@ fn resolve_associated_item<'a, 'tcx>( rcvr_substs: &'tcx Substs<'tcx>, ) -> Option> { let def_id = trait_item.def_id; - debug!("resolve_associated_item(trait_item={:?}, \ - param_env={:?}, \ - trait_id={:?}, \ - rcvr_substs={:?})", - def_id, param_env, trait_id, rcvr_substs); + debug!( + "resolve_associated_item(trait_item={:?}, \ + param_env={:?}, \ + trait_id={:?}, \ + rcvr_substs={:?})", + def_id, param_env, trait_id, rcvr_substs + ); let trait_ref = ty::TraitRef::from_method(tcx, trait_id, rcvr_substs); let vtbl = tcx.codegen_fulfill_obligation((param_env, ty::Binder::bind(trait_ref))); @@ -349,98 +336,94 @@ fn resolve_associated_item<'a, 'tcx>( // the actual function: match vtbl { traits::VtableImpl(impl_data) => { - let (def_id, substs) = traits::find_associated_item( - tcx, param_env, trait_item, rcvr_substs, &impl_data); + let (def_id, substs) = + traits::find_associated_item(tcx, param_env, trait_item, rcvr_substs, &impl_data); let substs = tcx.erase_regions(&substs); Some(ty::Instance::new(def_id, substs)) } - traits::VtableGenerator(generator_data) => { - Some(Instance { - def: ty::InstanceDef::Item(generator_data.generator_def_id), - substs: generator_data.substs.substs - }) - } + traits::VtableGenerator(generator_data) => Some(Instance { + def: ty::InstanceDef::Item(generator_data.generator_def_id), + substs: generator_data.substs.substs, + }), traits::VtableClosure(closure_data) => { let trait_closure_kind = tcx.lang_items().fn_trait_kind(trait_id).unwrap(); - Some(Instance::resolve_closure(tcx, closure_data.closure_def_id, closure_data.substs, - trait_closure_kind)) - } - traits::VtableFnPointer(ref data) => { - Some(Instance { - def: ty::InstanceDef::FnPtrShim(trait_item.def_id, data.fn_ty), - substs: rcvr_substs - }) + Some(Instance::resolve_closure( + tcx, + closure_data.closure_def_id, + closure_data.substs, + trait_closure_kind, + )) } + traits::VtableFnPointer(ref data) => Some(Instance { + def: ty::InstanceDef::FnPtrShim(trait_item.def_id, data.fn_ty), + substs: rcvr_substs, + }), traits::VtableObject(ref data) => { let index = 
tcx.get_vtable_index_of_object_method(data, def_id); Some(Instance { def: ty::InstanceDef::Virtual(def_id, index), - substs: rcvr_substs + substs: rcvr_substs, }) } traits::VtableBuiltin(..) => { if tcx.lang_items().clone_trait().is_some() { Some(Instance { def: ty::InstanceDef::CloneShim(def_id, trait_ref.self_ty()), - substs: rcvr_substs + substs: rcvr_substs, }) } else { None } } - traits::VtableAutoImpl(..) | - traits::VtableParam(..) | - traits::VtableTraitAlias(..) => None + traits::VtableAutoImpl(..) | traits::VtableParam(..) | traits::VtableTraitAlias(..) => None, } } -fn needs_fn_once_adapter_shim<'a, 'tcx>(actual_closure_kind: ty::ClosureKind, - trait_closure_kind: ty::ClosureKind) - -> Result -{ +fn needs_fn_once_adapter_shim<'a, 'tcx>( + actual_closure_kind: ty::ClosureKind, + trait_closure_kind: ty::ClosureKind, +) -> Result { match (actual_closure_kind, trait_closure_kind) { - (ty::ClosureKind::Fn, ty::ClosureKind::Fn) | - (ty::ClosureKind::FnMut, ty::ClosureKind::FnMut) | - (ty::ClosureKind::FnOnce, ty::ClosureKind::FnOnce) => { - // No adapter needed. - Ok(false) - } + (ty::ClosureKind::Fn, ty::ClosureKind::Fn) + | (ty::ClosureKind::FnMut, ty::ClosureKind::FnMut) + | (ty::ClosureKind::FnOnce, ty::ClosureKind::FnOnce) => { + // No adapter needed. + Ok(false) + } (ty::ClosureKind::Fn, ty::ClosureKind::FnMut) => { // The closure fn `llfn` is a `fn(&self, ...)`. We want a // `fn(&mut self, ...)`. In fact, at codegen time, these are // basically the same thing, so we can just return llfn. Ok(false) } - (ty::ClosureKind::Fn, ty::ClosureKind::FnOnce) | - (ty::ClosureKind::FnMut, ty::ClosureKind::FnOnce) => { - // The closure fn `llfn` is a `fn(&self, ...)` or `fn(&mut - // self, ...)`. We want a `fn(self, ...)`. We can produce - // this by doing something like: - // - // fn call_once(self, ...) { call_mut(&self, ...) } - // fn call_once(mut self, ...) { call_mut(&mut self, ...) } - // - // These are both the same at codegen time. - Ok(true) + (ty::ClosureKind::Fn, ty::ClosureKind::FnOnce) + | (ty::ClosureKind::FnMut, ty::ClosureKind::FnOnce) => { + // The closure fn `llfn` is a `fn(&self, ...)` or `fn(&mut + // self, ...)`. We want a `fn(self, ...)`. We can produce + // this by doing something like: + // + // fn call_once(self, ...) { call_mut(&self, ...) } + // fn call_once(mut self, ...) { call_mut(&mut self, ...) } + // + // These are both the same at codegen time. 
+ Ok(true) } - (ty::ClosureKind::FnMut, _) | - (ty::ClosureKind::FnOnce, _) => Err(()) + (ty::ClosureKind::FnMut, _) | (ty::ClosureKind::FnOnce, _) => Err(()), } } fn fn_once_adapter_instance<'a, 'tcx>( tcx: TyCtxt<'a, 'tcx, 'tcx>, closure_did: DefId, - substs: ty::ClosureSubsts<'tcx>) - -> Instance<'tcx> -{ - debug!("fn_once_adapter_shim({:?}, {:?})", - closure_did, - substs); + substs: ty::ClosureSubsts<'tcx>, +) -> Instance<'tcx> { + debug!("fn_once_adapter_shim({:?}, {:?})", closure_did, substs); let fn_once = tcx.lang_items().fn_once_trait().unwrap(); - let call_once = tcx.associated_items(fn_once) + let call_once = tcx + .associated_items(fn_once) .find(|it| it.kind == ty::AssociatedKind::Method) - .unwrap().def_id; + .unwrap() + .def_id; let def = ty::InstanceDef::ClosureOnceShim { call_once }; let self_ty = tcx.mk_closure(closure_did, substs); diff --git a/src/librustc/ty/item_path.rs b/src/librustc/ty/item_path.rs index da467f57d2544..0efad4a6c3a33 100644 --- a/src/librustc/ty/item_path.rs +++ b/src/librustc/ty/item_path.rs @@ -1,9 +1,9 @@ -use hir::map::DefPathData; use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE}; -use ty::{self, DefIdTree, Ty, TyCtxt}; +use hir::map::DefPathData; use middle::cstore::{ExternCrate, ExternCrateSource}; use syntax::ast; use syntax::symbol::{keywords, LocalInternedString, Symbol}; +use ty::{self, DefIdTree, Ty, TyCtxt}; use std::cell::Cell; use std::fmt::Debug; @@ -80,7 +80,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// suitable for user output. It always begins with a crate identifier. pub fn absolute_item_path_str(self, def_id: DefId) -> String { let mut buffer = LocalPathBuffer::new(RootMode::Absolute); - debug!("absolute_item_path_str: buffer={:?} def_id={:?}", buffer, def_id); + debug!( + "absolute_item_path_str: buffer={:?} def_id={:?}", + buffer, def_id + ); self.push_item_path(&mut buffer, def_id, false); buffer.into_string() } @@ -94,7 +97,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// we do not want to prepend `crate::` (as that would not be a valid /// path). 
pub fn push_krate_path(self, buffer: &mut T, cnum: CrateNum, pushed_prelude_crate: bool) - where T: ItemPathBuffer + Debug + where + T: ItemPathBuffer + Debug, { debug!( "push_krate_path: buffer={:?} cnum={:?} LOCAL_CRATE={:?}", @@ -158,7 +162,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { external_def_id: DefId, pushed_prelude_crate: bool, ) -> bool - where T: ItemPathBuffer + Debug + where + T: ItemPathBuffer + Debug, { debug!( "try_push_visible_item_path: buffer={:?} external_def_id={:?}", @@ -183,15 +188,21 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { }) => { debug!("try_push_visible_item_path: def_id={:?}", def_id); self.push_item_path(buffer, def_id, pushed_prelude_crate); - cur_path.iter().rev().for_each(|segment| buffer.push(&segment)); + cur_path + .iter() + .rev() + .for_each(|segment| buffer.push(&segment)); return true; } None => { buffer.push(&self.crate_name(cur_def.krate).as_str()); - cur_path.iter().rev().for_each(|segment| buffer.push(&segment)); + cur_path + .iter() + .rev() + .for_each(|segment| buffer.push(&segment)); return true; } - _ => {}, + _ => {} } } @@ -202,7 +213,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { if let DefPathData::StructCtor = cur_def_key.disambiguated_data.data { let parent = DefId { krate: cur_def.krate, - index: cur_def_key.parent.expect("DefPathData::StructCtor missing a parent"), + index: cur_def_key + .parent + .expect("DefPathData::StructCtor missing a parent"), }; cur_def_key = self.def_key(parent); @@ -258,7 +271,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } name.map(|n| n.as_str()).unwrap_or(module_name.as_str()) - }, + } _ => { data.get_opt_name().map(|n| n.as_str()).unwrap_or_else(|| { // Re-exported `extern crate` (#43189). @@ -268,7 +281,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { Symbol::intern("").as_str() } }) - }, + } }; debug!("try_push_visible_item_path: symbol={:?}", symbol); cur_path.push(symbol); @@ -281,15 +294,19 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } pub fn push_item_path(self, buffer: &mut T, def_id: DefId, pushed_prelude_crate: bool) - where T: ItemPathBuffer + Debug + where + T: ItemPathBuffer + Debug, { debug!( "push_item_path: buffer={:?} def_id={:?} pushed_prelude_crate={:?}", buffer, def_id, pushed_prelude_crate ); match *buffer.root_mode() { - RootMode::Local if !def_id.is_local() => - if self.try_push_visible_item_path(buffer, def_id, pushed_prelude_crate) { return }, + RootMode::Local if !def_id.is_local() => { + if self.try_push_visible_item_path(buffer, def_id, pushed_prelude_crate) { + return; + } + } _ => {} } @@ -308,23 +325,23 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // Unclear if there is any value in distinguishing these. // Probably eventually (and maybe we would even want // finer-grained distinctions, e.g., between enum/struct). - data @ DefPathData::Misc | - data @ DefPathData::TypeNs(..) | - data @ DefPathData::Trait(..) | - data @ DefPathData::AssocTypeInTrait(..) | - data @ DefPathData::AssocTypeInImpl(..) | - data @ DefPathData::AssocExistentialInImpl(..) | - data @ DefPathData::ValueNs(..) | - data @ DefPathData::Module(..) | - data @ DefPathData::TypeParam(..) | - data @ DefPathData::LifetimeParam(..) | - data @ DefPathData::EnumVariant(..) | - data @ DefPathData::Field(..) | - data @ DefPathData::AnonConst | - data @ DefPathData::MacroDef(..) | - data @ DefPathData::ClosureExpr | - data @ DefPathData::ImplTrait | - data @ DefPathData::GlobalMetaData(..) => { + data @ DefPathData::Misc + | data @ DefPathData::TypeNs(..) 
+ | data @ DefPathData::Trait(..) + | data @ DefPathData::AssocTypeInTrait(..) + | data @ DefPathData::AssocTypeInImpl(..) + | data @ DefPathData::AssocExistentialInImpl(..) + | data @ DefPathData::ValueNs(..) + | data @ DefPathData::Module(..) + | data @ DefPathData::TypeParam(..) + | data @ DefPathData::LifetimeParam(..) + | data @ DefPathData::EnumVariant(..) + | data @ DefPathData::Field(..) + | data @ DefPathData::AnonConst + | data @ DefPathData::MacroDef(..) + | data @ DefPathData::ClosureExpr + | data @ DefPathData::ImplTrait + | data @ DefPathData::GlobalMetaData(..) => { let parent_did = self.parent_def_id(def_id).unwrap(); // Keep track of whether we are one recursion away from the `CrateRoot` and @@ -332,33 +349,34 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // printing the `CrateRoot` so we don't prepend a `crate::` to paths. let mut is_prelude_crate = false; if let DefPathData::CrateRoot = self.def_key(parent_did).disambiguated_data.data { - if self.extern_prelude.contains_key(&data.as_interned_str().as_symbol()) { + if self + .extern_prelude + .contains_key(&data.as_interned_str().as_symbol()) + { is_prelude_crate = true; } } - self.push_item_path( - buffer, parent_did, pushed_prelude_crate || is_prelude_crate - ); + self.push_item_path(buffer, parent_did, pushed_prelude_crate || is_prelude_crate); buffer.push(&data.as_interned_str().as_symbol().as_str()); - }, + } - DefPathData::StructCtor => { // present `X` instead of `X::{{constructor}}` + DefPathData::StructCtor => { + // present `X` instead of `X::{{constructor}}` let parent_def_id = self.parent_def_id(def_id).unwrap(); self.push_item_path(buffer, parent_def_id, pushed_prelude_crate); } } } - fn push_impl_path( - self, - buffer: &mut T, - impl_def_id: DefId, - pushed_prelude_crate: bool, - ) - where T: ItemPathBuffer + Debug + fn push_impl_path(self, buffer: &mut T, impl_def_id: DefId, pushed_prelude_crate: bool) + where + T: ItemPathBuffer + Debug, { - debug!("push_impl_path: buffer={:?} impl_def_id={:?}", buffer, impl_def_id); + debug!( + "push_impl_path: buffer={:?} impl_def_id={:?}", + buffer, impl_def_id + ); let parent_def_id = self.parent_def_id(impl_def_id).unwrap(); // Always use types for non-local impls, where types are always @@ -417,7 +435,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // anything other than a simple path. match self_ty.sty { ty::Adt(adt_def, substs) => { - if substs.types().next().is_none() { // ignore regions + if substs.types().next().is_none() { + // ignore regions self.push_item_path(buffer, adt_def.did, pushed_prelude_crate); } else { buffer.push(&format!("<{}>", self_ty)); @@ -426,12 +445,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { ty::Foreign(did) => self.push_item_path(buffer, did, pushed_prelude_crate), - ty::Bool | - ty::Char | - ty::Int(_) | - ty::Uint(_) | - ty::Float(_) | - ty::Str => { + ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Str => { buffer.push(&self_ty.to_string()); } @@ -446,8 +460,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { buffer: &mut T, impl_def_id: DefId, pushed_prelude_crate: bool, - ) - where T: ItemPathBuffer + Debug + ) where + T: ItemPathBuffer + Debug, { // If no type info is available, fall back to // pretty printing some span information. This should @@ -465,7 +479,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// inlined root. 
pub fn parent_def_id(self, def_id: DefId) -> Option { let key = self.def_key(def_id); - key.parent.map(|index| DefId { krate: def_id.krate, index: index }) + key.parent.map(|index| DefId { + krate: def_id.krate, + index: index, + }) } } @@ -481,39 +498,39 @@ pub fn characteristic_def_id_of_type(ty: Ty<'_>) -> Option { ty::Dynamic(data, ..) => Some(data.principal().def_id()), - ty::Array(subty, _) | - ty::Slice(subty) => characteristic_def_id_of_type(subty), + ty::Array(subty, _) | ty::Slice(subty) => characteristic_def_id_of_type(subty), ty::RawPtr(mt) => characteristic_def_id_of_type(mt.ty), ty::Ref(_, ty, _) => characteristic_def_id_of_type(ty), - ty::Tuple(ref tys) => tys.iter() - .filter_map(|ty| characteristic_def_id_of_type(ty)) - .next(), - - ty::FnDef(def_id, _) | - ty::Closure(def_id, _) | - ty::Generator(def_id, _, _) | - ty::Foreign(def_id) => Some(def_id), - - ty::Bool | - ty::Char | - ty::Int(_) | - ty::Uint(_) | - ty::Str | - ty::FnPtr(_) | - ty::Projection(_) | - ty::Placeholder(..) | - ty::UnnormalizedProjection(..) | - ty::Param(_) | - ty::Opaque(..) | - ty::Infer(_) | - ty::Bound(..) | - ty::Error | - ty::GeneratorWitness(..) | - ty::Never | - ty::Float(_) => None, + ty::Tuple(ref tys) => tys + .iter() + .filter_map(|ty| characteristic_def_id_of_type(ty)) + .next(), + + ty::FnDef(def_id, _) + | ty::Closure(def_id, _) + | ty::Generator(def_id, _, _) + | ty::Foreign(def_id) => Some(def_id), + + ty::Bool + | ty::Char + | ty::Int(_) + | ty::Uint(_) + | ty::Str + | ty::FnPtr(_) + | ty::Projection(_) + | ty::Placeholder(..) + | ty::UnnormalizedProjection(..) + | ty::Param(_) + | ty::Opaque(..) + | ty::Infer(_) + | ty::Bound(..) + | ty::Error + | ty::GeneratorWitness(..) + | ty::Never + | ty::Float(_) => None, } } diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs index 171c53b7b20ba..b6717611b0af7 100644 --- a/src/librustc/ty/layout.rs +++ b/src/librustc/ty/layout.rs @@ -1,5 +1,5 @@ use session::{self, DataTypeKind}; -use ty::{self, Ty, TyCtxt, TypeFoldable, ReprOptions}; +use ty::{self, ReprOptions, Ty, TyCtxt, TypeFoldable}; use syntax::ast::{self, Ident, IntTy, UintTy}; use syntax::attr; @@ -13,21 +13,21 @@ use std::mem; use std::ops::Bound; use ich::StableHashingContext; -use rustc_data_structures::indexed_vec::{IndexVec, Idx}; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, - StableHasherResult}; +use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; pub use rustc_target::abi::*; pub trait IntegerExt { fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, signed: bool) -> Ty<'tcx>; fn from_attr(cx: &C, ity: attr::IntType) -> Integer; - fn repr_discr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - ty: Ty<'tcx>, - repr: &ReprOptions, - min: i128, - max: i128) - -> (Integer, bool); + fn repr_discr<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + ty: Ty<'tcx>, + repr: &ReprOptions, + min: i128, + max: i128, + ) -> (Integer, bool); } impl IntegerExt for Integer { @@ -66,12 +66,13 @@ impl IntegerExt for Integer { /// signed discriminant range and #[repr] attribute. /// N.B.: u128 values above i128::MAX will be treated as signed, but /// that shouldn't affect anything, other than maybe debuginfo. 
- fn repr_discr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - ty: Ty<'tcx>, - repr: &ReprOptions, - min: i128, - max: i128) - -> (Integer, bool) { + fn repr_discr<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + ty: Ty<'tcx>, + repr: &ReprOptions, + min: i128, + max: i128, + ) -> (Integer, bool) { // Theoretically, negative values could be larger in unsigned representation // than the unsigned representation of the signed minimum. However, if there // are any negative values, the only valid unsigned representation is u128 @@ -84,10 +85,17 @@ impl IntegerExt for Integer { if let Some(ity) = repr.int { let discr = Integer::from_attr(&tcx, ity); - let fit = if ity.is_signed() { signed_fit } else { unsigned_fit }; + let fit = if ity.is_signed() { + signed_fit + } else { + unsigned_fit + }; if discr < fit { - bug!("Integer::repr_discr: `#[repr]` hint too small for \ - discriminant range of enum `{}", ty) + bug!( + "Integer::repr_discr: `#[repr]` hint too small for \ + discriminant range of enum `{}", + ty + ) } return (discr, ity.is_signed()); } @@ -144,33 +152,33 @@ pub const FAT_PTR_EXTRA: usize = 1; #[derive(Copy, Clone, Debug, RustcEncodable, RustcDecodable)] pub enum LayoutError<'tcx> { Unknown(Ty<'tcx>), - SizeOverflow(Ty<'tcx>) + SizeOverflow(Ty<'tcx>), } impl<'tcx> fmt::Display for LayoutError<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { - LayoutError::Unknown(ty) => { - write!(f, "the type `{:?}` has an unknown layout", ty) - } - LayoutError::SizeOverflow(ty) => { - write!(f, "the type `{:?}` is too big for the current architecture", ty) - } + LayoutError::Unknown(ty) => write!(f, "the type `{:?}` has an unknown layout", ty), + LayoutError::SizeOverflow(ty) => write!( + f, + "the type `{:?}` is too big for the current architecture", + ty + ), } } } -fn layout_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) - -> Result<&'tcx LayoutDetails, LayoutError<'tcx>> -{ +fn layout_raw<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>, +) -> Result<&'tcx LayoutDetails, LayoutError<'tcx>> { ty::tls::with_related_context(tcx, move |icx| { let rec_limit = *tcx.sess.recursion_limit.get(); let (param_env, ty) = query.into_parts(); if icx.layout_depth > rec_limit { - tcx.sess.fatal( - &format!("overflow representing the type `{}`", ty)); + tcx.sess + .fatal(&format!("overflow representing the type `{}`", ty)); } // Update the ImplicitCtxt to increase the layout_depth @@ -215,26 +223,27 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { assert!(bits <= 128); Scalar { value, - valid_range: 0..=(!0 >> (128 - bits)) + valid_range: 0..=(!0 >> (128 - bits)), } }; - let scalar = |value: Primitive| { - tcx.intern_layout(LayoutDetails::scalar(self, scalar_unit(value))) - }; + let scalar = + |value: Primitive| tcx.intern_layout(LayoutDetails::scalar(self, scalar_unit(value))); let scalar_pair = |a: Scalar, b: Scalar| { let b_align = b.value.align(dl); let align = a.value.align(dl).max(b_align).max(dl.aggregate_align); let b_offset = a.value.size(dl).align_to(b_align.abi); let size = (b_offset + b.value.size(dl)).align_to(align.abi); LayoutDetails { - variants: Variants::Single { index: VariantIdx::new(0) }, + variants: Variants::Single { + index: VariantIdx::new(0), + }, fields: FieldPlacement::Arbitrary { offsets: vec![Size::ZERO, b_offset], - memory_index: vec![0, 1] + memory_index: vec![0, 1], }, abi: Abi::ScalarPair(a, b), align, - size + size, } }; @@ -279,11 +288,14 @@ impl<'a, 'tcx> LayoutCx<'tcx, 
TyCtxt<'a, 'tcx, 'tcx>> { }; let optimizing = &mut inverse_memory_index[..end]; let field_align = |f: &TyLayout<'_>| { - if packed { f.align.abi.min(pack) } else { f.align.abi } + if packed { + f.align.abi.min(pack) + } else { + f.align.abi + } }; match kind { - StructKind::AlwaysSized | - StructKind::MaybeUnsized => { + StructKind::AlwaysSized | StructKind::MaybeUnsized => { optimizing.sort_by_key(|&x| { // Place ZSTs first to avoid "interesting offsets", // especially with only one or two non-ZST fields. @@ -318,8 +330,11 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { for &i in &inverse_memory_index { let field = fields[i as usize]; if !sized { - bug!("univariant: field #{} of `{}` comes after unsized field", - offsets.len(), ty); + bug!( + "univariant: field #{} of `{}` comes after unsized field", + offsets.len(), + ty + ); } if field.is_unsized() { @@ -338,7 +353,8 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { debug!("univariant offset: {:?} field: {:#?}", offset, field); offsets[i as usize] = offset; - offset = offset.checked_add(field.size, dl) + offset = offset + .checked_add(field.size, dl) .ok_or(LayoutError::SizeOverflow(ty))?; } @@ -363,7 +379,7 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { memory_index = vec![0; inverse_memory_index.len()]; for i in 0..inverse_memory_index.len() { - memory_index[inverse_memory_index[i] as usize] = i as u32; + memory_index[inverse_memory_index[i] as usize] = i as u32; } } else { memory_index = inverse_memory_index; @@ -375,19 +391,26 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { // Unpack newtype ABIs and find scalar pairs. if sized && size.bytes() > 0 { // All other fields must be ZSTs, and we need them to all start at 0. - let mut zst_offsets = - offsets.iter().enumerate().filter(|&(i, _)| fields[i].is_zst()); + let mut zst_offsets = offsets + .iter() + .enumerate() + .filter(|&(i, _)| fields[i].is_zst()); if zst_offsets.all(|(_, o)| o.bytes() == 0) { let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.is_zst()); - match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) { + match ( + non_zst_fields.next(), + non_zst_fields.next(), + non_zst_fields.next(), + ) { // We have exactly one non-ZST field. (Some((i, field)), None, None) => { // Field fills the struct and it has a scalar or scalar pair ABI. - if offsets[i].bytes() == 0 && - align.abi == field.align.abi && - size == field.size { + if offsets[i].bytes() == 0 + && align.abi == field.align.abi + && size == field.size + { match field.abi { // For plain scalars, or vectors of them, we can't unpack // newtypes for `#[repr(C)]`, as that affects C ABIs. @@ -405,11 +428,31 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { } // Two non-ZST fields, and they're both scalars. - (Some((i, &TyLayout { - details: &LayoutDetails { abi: Abi::Scalar(ref a), .. }, .. - })), Some((j, &TyLayout { - details: &LayoutDetails { abi: Abi::Scalar(ref b), .. }, .. - })), None) => { + ( + Some(( + i, + &TyLayout { + details: + &LayoutDetails { + abi: Abi::Scalar(ref a), + .. + }, + .. + }, + )), + Some(( + j, + &TyLayout { + details: + &LayoutDetails { + abi: Abi::Scalar(ref b), + .. + }, + .. + }, + )), + None, + ) => { // Order by the memory placement, not source order. 
let ((i, a), (j, b)) = if offsets[i] < offsets[j] { ((i, a), (j, b)) @@ -420,17 +463,18 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { let pair_offsets = match pair.fields { FieldPlacement::Arbitrary { ref offsets, - ref memory_index + ref memory_index, } => { assert_eq!(memory_index, &[0, 1]); offsets } - _ => bug!() + _ => bug!(), }; - if offsets[i] == pair_offsets[0] && - offsets[j] == pair_offsets[1] && - align == pair.align && - size == pair.size { + if offsets[i] == pair_offsets[0] + && offsets[j] == pair_offsets[1] + && align == pair.align + && size == pair.size + { // We can use `ScalarPair` only when it matches our // already computed layout (including `#[repr(C)]`). abi = pair.abi; @@ -447,14 +491,16 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { } Ok(LayoutDetails { - variants: Variants::Single { index: VariantIdx::new(0) }, + variants: Variants::Single { + index: VariantIdx::new(0), + }, fields: FieldPlacement::Arbitrary { offsets, - memory_index + memory_index, }, abi, align, - size + size, }) }; let univariant = |fields: &[TyLayout<'_>], repr: &ReprOptions, kind| { @@ -464,24 +510,22 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { Ok(match ty.sty { // Basic scalars. - ty::Bool => { - tcx.intern_layout(LayoutDetails::scalar(self, Scalar { + ty::Bool => tcx.intern_layout(LayoutDetails::scalar( + self, + Scalar { value: Int(I8, false), - valid_range: 0..=1 - })) - } - ty::Char => { - tcx.intern_layout(LayoutDetails::scalar(self, Scalar { + valid_range: 0..=1, + }, + )), + ty::Char => tcx.intern_layout(LayoutDetails::scalar( + self, + Scalar { value: Int(I32, false), - valid_range: 0..=0x10FFFF - })) - } - ty::Int(ity) => { - scalar(Int(Integer::from_attr(dl, attr::SignedInt(ity)), true)) - } - ty::Uint(ity) => { - scalar(Int(Integer::from_attr(dl, attr::UnsignedInt(ity)), false)) - } + valid_range: 0..=0x10FFFF, + }, + )), + ty::Int(ity) => scalar(Int(Integer::from_attr(dl, attr::SignedInt(ity)), true)), + ty::Uint(ity) => scalar(Int(Integer::from_attr(dl, attr::UnsignedInt(ity)), false)), ty::Float(fty) => scalar(Float(fty)), ty::FnPtr(_) => { let mut ptr = scalar_unit(Pointer); @@ -490,19 +534,18 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { } // The never type. - ty::Never => { - tcx.intern_layout(LayoutDetails { - variants: Variants::Single { index: VariantIdx::new(0) }, - fields: FieldPlacement::Union(0), - abi: Abi::Uninhabited, - align: dl.i8_align, - size: Size::ZERO - }) - } + ty::Never => tcx.intern_layout(LayoutDetails { + variants: Variants::Single { + index: VariantIdx::new(0), + }, + fields: FieldPlacement::Union(0), + abi: Abi::Uninhabited, + align: dl.i8_align, + size: Size::ZERO, + }), // Potentially-fat pointers. - ty::Ref(_, pointee, _) | - ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => { + ty::Ref(_, pointee, _) | ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => { let mut data_ptr = scalar_unit(Pointer); if !ty.is_unsafe_ptr() { data_ptr.valid_range = 1..=*data_ptr.valid_range.end(); @@ -518,15 +561,13 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { ty::Foreign(..) => { return Ok(tcx.intern_layout(LayoutDetails::scalar(self, data_ptr))); } - ty::Slice(_) | ty::Str => { - scalar_unit(Int(dl.ptr_sized_integer(), false)) - } + ty::Slice(_) | ty::Str => scalar_unit(Int(dl.ptr_sized_integer(), false)), ty::Dynamic(..) 
=> { let mut vtable = scalar_unit(Pointer); vtable.valid_range = 1..=*vtable.valid_range.end(); vtable } - _ => return Err(LayoutError::Unknown(unsized_part)) + _ => return Err(LayoutError::Unknown(unsized_part)), }; // Effectively a (ptr, meta) tuple. @@ -544,7 +585,9 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { let element = self.layout_of(element)?; let count = count.unwrap_usize(tcx); - let size = element.size.checked_mul(count, dl) + let size = element + .size + .checked_mul(count, dl) .ok_or(LayoutError::SizeOverflow(ty))?; let abi = if count != 0 && ty.conservative_is_privately_uninhabited(tcx) { @@ -554,52 +597,54 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { }; tcx.intern_layout(LayoutDetails { - variants: Variants::Single { index: VariantIdx::new(0) }, + variants: Variants::Single { + index: VariantIdx::new(0), + }, fields: FieldPlacement::Array { stride: element.size, - count + count, }, abi, align: element.align, - size + size, }) } ty::Slice(element) => { let element = self.layout_of(element)?; tcx.intern_layout(LayoutDetails { - variants: Variants::Single { index: VariantIdx::new(0) }, + variants: Variants::Single { + index: VariantIdx::new(0), + }, fields: FieldPlacement::Array { stride: element.size, - count: 0 + count: 0, }, abi: Abi::Aggregate { sized: false }, align: element.align, - size: Size::ZERO - }) - } - ty::Str => { - tcx.intern_layout(LayoutDetails { - variants: Variants::Single { index: VariantIdx::new(0) }, - fields: FieldPlacement::Array { - stride: Size::from_bytes(1), - count: 0 - }, - abi: Abi::Aggregate { sized: false }, - align: dl.i8_align, - size: Size::ZERO + size: Size::ZERO, }) } + ty::Str => tcx.intern_layout(LayoutDetails { + variants: Variants::Single { + index: VariantIdx::new(0), + }, + fields: FieldPlacement::Array { + stride: Size::from_bytes(1), + count: 0, + }, + abi: Abi::Aggregate { sized: false }, + align: dl.i8_align, + size: Size::ZERO, + }), // Odd unit types. - ty::FnDef(..) => { - univariant(&[], &ReprOptions::default(), StructKind::AlwaysSized)? - } + ty::FnDef(..) => univariant(&[], &ReprOptions::default(), StructKind::AlwaysSized)?, ty::Dynamic(..) | ty::Foreign(..) => { - let mut unit = univariant_uninterned(&[], &ReprOptions::default(), - StructKind::AlwaysSized)?; + let mut unit = + univariant_uninterned(&[], &ReprOptions::default(), StructKind::AlwaysSized)?; match unit.abi { Abi::Aggregate { ref mut sized } => *sized = false, - _ => bug!() + _ => bug!(), } tcx.intern_layout(unit) } @@ -607,16 +652,22 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { // Tuples, generators and closures. ty::Generator(def_id, ref substs, _) => { let tys = substs.field_tys(def_id, tcx); - univariant(&tys.map(|ty| self.layout_of(ty)).collect::, _>>()?, + univariant( + &tys.map(|ty| self.layout_of(ty)) + .collect::, _>>()?, &ReprOptions::default(), - StructKind::AlwaysSized)? + StructKind::AlwaysSized, + )? } ty::Closure(def_id, ref substs) => { let tys = substs.upvar_tys(def_id, tcx); - univariant(&tys.map(|ty| self.layout_of(ty)).collect::, _>>()?, + univariant( + &tys.map(|ty| self.layout_of(ty)) + .collect::, _>>()?, &ReprOptions::default(), - StructKind::AlwaysSized)? + StructKind::AlwaysSized, + )? } ty::Tuple(tys) => { @@ -626,8 +677,13 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { StructKind::MaybeUnsized }; - univariant(&tys.iter().map(|ty| self.layout_of(ty)).collect::, _>>()?, - &ReprOptions::default(), kind)? 
+ univariant( + &tys.iter() + .map(|ty| self.layout_of(ty)) + .collect::, _>>()?, + &ReprOptions::default(), + kind, + )? } // SIMD vector types. @@ -638,25 +694,31 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { let scalar = match element.abi { Abi::Scalar(ref scalar) => scalar.clone(), _ => { - tcx.sess.fatal(&format!("monomorphising SIMD type `{}` with \ - a non-machine element type `{}`", - ty, element.ty)); + tcx.sess.fatal(&format!( + "monomorphising SIMD type `{}` with \ + a non-machine element type `{}`", + ty, element.ty + )); } }; - let size = element.size.checked_mul(count, dl) + let size = element + .size + .checked_mul(count, dl) .ok_or(LayoutError::SizeOverflow(ty))?; let align = dl.vector_align(size); let size = size.align_to(align.abi); tcx.intern_layout(LayoutDetails { - variants: Variants::Single { index: VariantIdx::new(0) }, + variants: Variants::Single { + index: VariantIdx::new(0), + }, fields: FieldPlacement::Array { stride: element.size, - count + count, }, abi: Abi::Vector { element: scalar, - count + count, }, size, align, @@ -666,11 +728,16 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { // ADTs. ty::Adt(def, substs) => { // Cache the field layouts. - let variants = def.variants.iter().map(|v| { - v.fields.iter().map(|field| { - self.layout_of(field.ty(tcx, substs)) - }).collect::, _>>() - }).collect::, _>>()?; + let variants = def + .variants + .iter() + .map(|v| { + v.fields + .iter() + .map(|field| self.layout_of(field.ty(tcx, substs))) + .collect::, _>>() + }) + .collect::, _>>()?; if def.is_union() { let packed = def.repr.packed(); @@ -688,8 +755,8 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { if def.repr.align > 0 { let repr_align = def.repr.align as u64; - align = align.max( - AbiAndPrefAlign::new(Align::from_bytes(repr_align).unwrap())); + align = + align.max(AbiAndPrefAlign::new(Align::from_bytes(repr_align).unwrap())); } let optimize = !def.repr.inhibit_union_abi_opt(); @@ -712,25 +779,21 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { let field_abi = match &field.abi { Abi::Scalar(x) => Abi::Scalar(scalar_unit(x.value)), Abi::ScalarPair(x, y) => { - Abi::ScalarPair( - scalar_unit(x.value), - scalar_unit(y.value), - ) + Abi::ScalarPair(scalar_unit(x.value), scalar_unit(y.value)) } - Abi::Vector { element: x, count } => { - Abi::Vector { - element: scalar_unit(x.value), - count: *count, - } + Abi::Vector { element: x, count } => Abi::Vector { + element: scalar_unit(x.value), + count: *count, + }, + Abi::Uninhabited | Abi::Aggregate { .. } => { + Abi::Aggregate { sized: true } } - Abi::Uninhabited | - Abi::Aggregate { .. 
} => Abi::Aggregate { sized: true }, }; if size == Size::ZERO { // first non ZST: initialize 'abi' abi = field_abi; - } else if abi != field_abi { + } else if abi != field_abi { // different fields have different ABI: reset to Aggregate abi = Abi::Aggregate { sized: true }; } @@ -744,7 +807,7 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { fields: FieldPlacement::Union(variants[index].len()), abi, align, - size: size.align_to(align.abi) + size: size.align_to(align.abi), })); } @@ -788,18 +851,21 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { } else { let param_env = tcx.param_env(def.did); let last_field = def.variants[v].fields.last().unwrap(); - let always_sized = tcx.type_of(last_field.did) - .is_sized(tcx.at(DUMMY_SP), param_env); - if !always_sized { StructKind::MaybeUnsized } - else { StructKind::AlwaysSized } + let always_sized = tcx + .type_of(last_field.did) + .is_sized(tcx.at(DUMMY_SP), param_env); + if !always_sized { + StructKind::MaybeUnsized + } else { + StructKind::AlwaysSized + } }; let mut st = univariant_uninterned(&variants[v], &def.repr, kind)?; st.variants = Variants::Single { index: v }; let (start, end) = self.tcx.layout_scalar_valid_range(def.did); match st.abi { - Abi::Scalar(ref mut scalar) | - Abi::ScalarPair(ref mut scalar, _) => { + Abi::Scalar(ref mut scalar) | Abi::ScalarPair(ref mut scalar, _) => { // the asserts ensure that we are not using the // `#[rustc_layout_scalar_valid_range(n)]` // attribute to widen the range of anything as that would probably @@ -826,7 +892,9 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { // The current code for niche-filling relies on variant indices // instead of actual discriminants, so dataful enums with // explicit discriminants (RFC #2363) would misbehave. - let no_explicit_discriminants = def.variants.iter_enumerated() + let no_explicit_discriminants = def + .variants + .iter_enumerated() .all(|(i, v)| v.discr == ty::VariantDiscr::Relative(i.as_u32())); // Niche-filling enum optimization. @@ -858,9 +926,9 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { } if let Some(i) = dataful_variant { - let count = ( - niche_variants.end().as_u32() - niche_variants.start().as_u32() + 1 - ) as u128; + let count = (niche_variants.end().as_u32() + - niche_variants.start().as_u32() + + 1) as u128; for (field_index, &field) in variants[i].iter().enumerate() { let niche = match self.find_niche(field)? 
{ Some(niche) => niche, @@ -872,15 +940,21 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { }; let mut align = dl.aggregate_align; - let st = variants.iter_enumerated().map(|(j, v)| { - let mut st = univariant_uninterned(v, - &def.repr, StructKind::AlwaysSized)?; - st.variants = Variants::Single { index: j }; - - align = align.max(st.align); - - Ok(st) - }).collect::, _>>()?; + let st = variants + .iter_enumerated() + .map(|(j, v)| { + let mut st = univariant_uninterned( + v, + &def.repr, + StructKind::AlwaysSized, + )?; + st.variants = Variants::Single { index: j }; + + align = align.max(st.align); + + Ok(st) + }) + .collect::, _>>()?; let offset = st[i].fields.offset(field_index) + niche.offset; let size = st[i].size; @@ -922,7 +996,7 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { }, fields: FieldPlacement::Arbitrary { offsets: vec![offset], - memory_index: vec![0] + memory_index: vec![0], }, abi, size, @@ -944,8 +1018,12 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { // sign extend the raw representation to be an i128 x = (x << (128 - bits)) >> (128 - bits); } - if x < min { min = x; } - if x > max { max = x; } + if x < min { + min = x; + } + if x > max { + max = x; + } } // We might have no inhabited variants, so pretend there's at least one. if (min, max) == (i128::max_value(), i128::min_value()) { @@ -977,22 +1055,32 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { } // Create the set of structs that represent each variant. - let mut layout_variants = variants.iter_enumerated().map(|(i, field_layouts)| { - let mut st = univariant_uninterned(&field_layouts, - &def.repr, StructKind::Prefixed(min_ity.size(), prefix_align))?; - st.variants = Variants::Single { index: i }; - // Find the first field we can't move later - // to make room for a larger discriminant. - for field in st.fields.index_by_increasing_offset().map(|j| field_layouts[j]) { - if !field.is_zst() || field.align.abi.bytes() != 1 { - start_align = start_align.min(field.align.abi); - break; + let mut layout_variants = variants + .iter_enumerated() + .map(|(i, field_layouts)| { + let mut st = univariant_uninterned( + &field_layouts, + &def.repr, + StructKind::Prefixed(min_ity.size(), prefix_align), + )?; + st.variants = Variants::Single { index: i }; + // Find the first field we can't move later + // to make room for a larger discriminant. + for field in st + .fields + .index_by_increasing_offset() + .map(|j| field_layouts[j]) + { + if !field.is_zst() || field.align.abi.bytes() != 1 { + start_align = start_align.min(field.align.abi); + break; + } } - } - size = cmp::max(size, st.size); - align = align.max(st.align); - Ok(st) - }).collect::, _>>()?; + size = cmp::max(size, st.size); + align = align.max(st.align); + Ok(st) + }) + .collect::, _>>()?; // Align the maximum variant size to the largest alignment. size = size.align_to(align.abi); @@ -1012,8 +1100,11 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { // to store this 16-bit discriminant into 8-bit sized temporary some of the // space necessary to represent would have to be discarded (or layout is wrong // on thinking it needs 16 bits) - bug!("layout decided on a larger discriminant type ({:?}) than typeck ({:?})", - min_ity, typeck_ity); + bug!( + "layout decided on a larger discriminant type ({:?}) than typeck ({:?})", + min_ity, + typeck_ity + ); // However, it is fine to make discr type however large (as an optimisation) // after this point – we’ll just truncate the value we load in codegen. 
} @@ -1044,7 +1135,9 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { let new_ity_size = ity.size(); for variant in &mut layout_variants { match variant.fields { - FieldPlacement::Arbitrary { ref mut offsets, .. } => { + FieldPlacement::Arbitrary { + ref mut offsets, .. + } => { for i in offsets { if *i <= old_ity_size { assert_eq!(*i, old_ity_size); @@ -1056,7 +1149,7 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { variant.size = new_ity_size; } } - _ => bug!() + _ => bug!(), } } } @@ -1077,10 +1170,8 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { FieldPlacement::Arbitrary { ref offsets, .. } => offsets, _ => bug!(), }; - let mut fields = field_layouts - .iter() - .zip(offsets) - .filter(|p| !p.0.is_zst()); + let mut fields = + field_layouts.iter().zip(offsets).filter(|p| !p.0.is_zst()); let (field, offset) = match (fields.next(), fields.next()) { (None, None) => continue, (Some(pair), None) => pair, @@ -1114,17 +1205,18 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { let pair_offsets = match pair.fields { FieldPlacement::Arbitrary { ref offsets, - ref memory_index + ref memory_index, } => { assert_eq!(memory_index, &[0, 1]); offsets } - _ => bug!() + _ => bug!(), }; - if pair_offsets[0] == Size::ZERO && - pair_offsets[1] == *offset && - align == pair.align && - size == pair.size { + if pair_offsets[0] == Size::ZERO + && pair_offsets[1] == *offset + && align == pair.align + && size == pair.size + { // We can use `ScalarPair` only when it matches our // already computed layout (including `#[repr(C)]`). abi = pair.abi; @@ -1143,11 +1235,11 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { }, fields: FieldPlacement::Arbitrary { offsets: vec![Size::ZERO], - memory_index: vec![0] + memory_index: vec![0], }, abi, align, - size + size, }) } @@ -1160,13 +1252,11 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { tcx.layout_raw(param_env.and(normalized))? } - ty::Bound(..) | - ty::Placeholder(..) | - ty::UnnormalizedProjection(..) | - ty::GeneratorWitness(..) | - ty::Infer(_) => { - bug!("LayoutDetails::compute: unexpected type `{}`", ty) - } + ty::Bound(..) + | ty::Placeholder(..) + | ty::UnnormalizedProjection(..) + | ty::GeneratorWitness(..) + | ty::Infer(_) => bug!("LayoutDetails::compute: unexpected type `{}`", ty), ty::Param(_) | ty::Error => { return Err(LayoutError::Unknown(ty)); @@ -1182,11 +1272,10 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { // dumping later. Ignore layouts that are done with non-empty // environments or non-monomorphic layouts, as the user only wants // to see the stuff resulting from the final codegen session. 
- if - !self.tcx.sess.opts.debugging_opts.print_type_sizes || - layout.ty.has_param_types() || - layout.ty.has_self_ty() || - !self.param_env.caller_bounds.is_empty() + if !self.tcx.sess.opts.debugging_opts.print_type_sizes + || layout.ty.has_param_types() + || layout.ty.has_self_ty() + || !self.param_env.caller_bounds.is_empty() { return; } @@ -1198,13 +1287,15 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { // (delay format until we actually need it) let record = |kind, packed, opt_discr_size, variants| { let type_desc = format!("{:?}", layout.ty); - self.tcx.sess.code_stats.borrow_mut().record_type_size(kind, - type_desc, - layout.align.abi, - layout.size, - packed, - opt_discr_size, - variants); + self.tcx.sess.code_stats.borrow_mut().record_type_size( + kind, + type_desc, + layout.align.abi, + layout.size, + packed, + opt_discr_size, + variants, + ); }; let adt_def = match layout.ty.sty { @@ -1228,12 +1319,12 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { let adt_kind = adt_def.adt_kind(); let adt_packed = adt_def.repr.packed(); - let build_variant_info = |n: Option, - flds: &[ast::Name], - layout: TyLayout<'tcx>| { + let build_variant_info = |n: Option, flds: &[ast::Name], layout: TyLayout<'tcx>| { let mut min_size = Size::ZERO; - let field_info: Vec<_> = flds.iter().enumerate().map(|(i, &name)| { - match layout.field(self, i) { + let field_info: Vec<_> = flds + .iter() + .enumerate() + .map(|(i, &name)| match layout.field(self, i) { Err(err) => { bug!("no layout found for field {}: `{:?}`", name, err); } @@ -1250,8 +1341,8 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { align: field_layout.align.abi.bytes(), } } - } - }).collect(); + }) + .collect(); session::VariantInfo { name: n.map(|n| n.to_string()), @@ -1272,18 +1363,19 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { match layout.variants { Variants::Single { index } => { - debug!("print-type-size `{:#?}` variant {}", - layout, adt_def.variants[index].ident); + debug!( + "print-type-size `{:#?}` variant {}", + layout, adt_def.variants[index].ident + ); if !adt_def.variants.is_empty() { let variant_def = &adt_def.variants[index]; - let fields: Vec<_> = - variant_def.fields.iter().map(|f| f.ident.name).collect(); - record(adt_kind.into(), - adt_packed, - None, - vec![build_variant_info(Some(variant_def.ident), - &fields, - layout)]); + let fields: Vec<_> = variant_def.fields.iter().map(|f| f.ident.name).collect(); + record( + adt_kind.into(), + adt_packed, + None, + vec![build_variant_info(Some(variant_def.ident), &fields, layout)], + ); } else { // (This case arises for *empty* enums; so give it // zero variants.) @@ -1291,23 +1383,34 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { } } - Variants::NicheFilling { .. } | - Variants::Tagged { .. } => { - debug!("print-type-size `{:#?}` adt general variants def {}", - layout.ty, adt_def.variants.len()); - let variant_infos: Vec<_> = - adt_def.variants.iter_enumerated().map(|(i, variant_def)| { + Variants::NicheFilling { .. } | Variants::Tagged { .. 
} => { + debug!( + "print-type-size `{:#?}` adt general variants def {}", + layout.ty, + adt_def.variants.len() + ); + let variant_infos: Vec<_> = adt_def + .variants + .iter_enumerated() + .map(|(i, variant_def)| { let fields: Vec<_> = variant_def.fields.iter().map(|f| f.ident.name).collect(); - build_variant_info(Some(variant_def.ident), - &fields, - layout.for_variant(self, i)) + build_variant_info( + Some(variant_def.ident), + &fields, + layout.for_variant(self, i), + ) }) .collect(); - record(adt_kind.into(), adt_packed, match layout.variants { - Variants::Tagged { ref tag, .. } => Some(tag.value.size(self)), - _ => None - }, variant_infos); + record( + adt_kind.into(), + adt_packed, + match layout.variants { + Variants::Tagged { ref tag, .. } => Some(tag.value.size(self)), + _ => None, + }, + variant_infos, + ); } } } @@ -1329,15 +1432,16 @@ pub enum SizeSkeleton<'tcx> { /// The type which determines the unsized metadata, if any, /// of this pointer. Either a type parameter or a projection /// depending on one, with regions erased. - tail: Ty<'tcx> - } + tail: Ty<'tcx>, + }, } impl<'a, 'tcx> SizeSkeleton<'tcx> { - pub fn compute(ty: Ty<'tcx>, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>) - -> Result, LayoutError<'tcx>> { + pub fn compute( + ty: Ty<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + ) -> Result, LayoutError<'tcx>> { debug_assert!(!ty.has_infer_types()); // First try computing a static layout. @@ -1345,12 +1449,11 @@ impl<'a, 'tcx> SizeSkeleton<'tcx> { Ok(layout) => { return Ok(SizeSkeleton::Known(layout.size)); } - Err(err) => err + Err(err) => err, }; match ty.sty { - ty::Ref(_, pointee, _) | - ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => { + ty::Ref(_, pointee, _) | ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => { let non_zero = !ty.is_unsafe_ptr(); let tail = tcx.struct_tail(pointee); match tail.sty { @@ -1358,14 +1461,16 @@ impl<'a, 'tcx> SizeSkeleton<'tcx> { debug_assert!(tail.has_param_types() || tail.has_self_ty()); Ok(SizeSkeleton::Pointer { non_zero, - tail: tcx.erase_regions(&tail) + tail: tcx.erase_regions(&tail), }) } - _ => { - bug!("SizeSkeleton::compute({}): layout errored ({}), yet \ - tail `{}` is not a type parameter or a projection", - ty, err, tail) - } + _ => bug!( + "SizeSkeleton::compute({}): layout errored ({}), yet \ + tail `{}` is not a type parameter or a projection", + ty, + err, + tail + ), } } @@ -1378,9 +1483,10 @@ impl<'a, 'tcx> SizeSkeleton<'tcx> { // Get a zero-sized variant or a pointer newtype. let zero_or_ptr_variant = |i| { let i = VariantIdx::new(i); - let fields = def.variants[i].fields.iter().map(|field| { - SizeSkeleton::compute(field.ty(tcx, substs), tcx, param_env) - }); + let fields = def.variants[i] + .fields + .iter() + .map(|field| SizeSkeleton::compute(field.ty(tcx, substs), tcx, param_env)); let mut ptr = None; for field in fields { let field = field?; @@ -1390,7 +1496,7 @@ impl<'a, 'tcx> SizeSkeleton<'tcx> { return Err(err); } } - SizeSkeleton::Pointer {..} => { + SizeSkeleton::Pointer { .. 
} => { if ptr.is_some() { return Err(err); } @@ -1406,12 +1512,14 @@ impl<'a, 'tcx> SizeSkeleton<'tcx> { if def.variants.len() == 1 { if let Some(SizeSkeleton::Pointer { non_zero, tail }) = v0 { return Ok(SizeSkeleton::Pointer { - non_zero: non_zero || match tcx.layout_scalar_valid_range(def.did) { - (Bound::Included(start), Bound::Unbounded) => start > 0, - (Bound::Included(start), Bound::Included(end)) => - 0 < start && start < end, - _ => false, - }, + non_zero: non_zero + || match tcx.layout_scalar_valid_range(def.did) { + (Bound::Included(start), Bound::Unbounded) => start > 0, + (Bound::Included(start), Bound::Included(end)) => { + 0 < start && start < end + } + _ => false, + }, tail, }); } else { @@ -1422,14 +1530,24 @@ impl<'a, 'tcx> SizeSkeleton<'tcx> { let v1 = zero_or_ptr_variant(1)?; // Nullable pointer enum optimization. match (v0, v1) { - (Some(SizeSkeleton::Pointer { non_zero: true, tail }), None) | - (None, Some(SizeSkeleton::Pointer { non_zero: true, tail })) => { - Ok(SizeSkeleton::Pointer { - non_zero: false, + ( + Some(SizeSkeleton::Pointer { + non_zero: true, tail, - }) - } - _ => Err(err) + }), + None, + ) + | ( + None, + Some(SizeSkeleton::Pointer { + non_zero: true, + tail, + }), + ) => Ok(SizeSkeleton::Pointer { + non_zero: false, + tail, + }), + _ => Err(err), } } @@ -1442,16 +1560,17 @@ impl<'a, 'tcx> SizeSkeleton<'tcx> { } } - _ => Err(err) + _ => Err(err), } } pub fn same_size(self, other: SizeSkeleton<'_>) -> bool { match (self, other) { (SizeSkeleton::Known(a), SizeSkeleton::Known(b)) => a == b, - (SizeSkeleton::Pointer { tail: a, .. }, - SizeSkeleton::Pointer { tail: b, .. }) => a == b, - _ => false + (SizeSkeleton::Pointer { tail: a, .. }, SizeSkeleton::Pointer { tail: b, .. }) => { + a == b + } + _ => false, } } } @@ -1519,10 +1638,7 @@ impl<'a, 'tcx> LayoutOf for LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { let param_env = self.param_env.with_reveal_all(); let ty = self.tcx.normalize_erasing_regions(param_env, ty); let details = self.tcx.layout_raw(param_env.and(ty))?; - let layout = TyLayout { - ty, - details - }; + let layout = TyLayout { ty, details }; // N.B., this recording is normally disabled; when enabled, it // can however trigger recursive invocations of `layout_of`. @@ -1546,10 +1662,7 @@ impl<'a, 'tcx> LayoutOf for LayoutCx<'tcx, ty::query::TyCtxtAt<'a, 'tcx, 'tcx>> let param_env = self.param_env.with_reveal_all(); let ty = self.tcx.normalize_erasing_regions(param_env, ty); let details = self.tcx.layout_raw(param_env.and(ty))?; - let layout = TyLayout { - ty, - details - }; + let layout = TyLayout { ty, details }; // N.B., this recording is normally disabled; when enabled, it // can however trigger recursive invocations of `layout_of`. @@ -1559,7 +1672,7 @@ impl<'a, 'tcx> LayoutOf for LayoutCx<'tcx, ty::query::TyCtxtAt<'a, 'tcx, 'tcx>> // here, but it seems like the right thing to do. -nmatsakis) let cx = LayoutCx { tcx: *self.tcx, - param_env: self.param_env + param_env: self.param_env, }; cx.record_layout_for_printing(layout); @@ -1572,11 +1685,13 @@ impl TyCtxt<'a, 'tcx, '_> { /// Computes the layout of a type. Note that this implicitly /// executes in "reveal all" mode. 
     #[inline]
-    pub fn layout_of(self, param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
-                     -> Result<TyLayout<'tcx>, LayoutError<'tcx>> {
+    pub fn layout_of(
+        self,
+        param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
+    ) -> Result<TyLayout<'tcx>, LayoutError<'tcx>> {
         let cx = LayoutCx {
             tcx: self.global_tcx(),
-            param_env: param_env_and_ty.param_env
+            param_env: param_env_and_ty.param_env,
         };
         cx.layout_of(param_env_and_ty.value)
     }
@@ -1586,19 +1701,22 @@ impl ty::query::TyCtxtAt<'a, 'tcx, '_> {
     /// Computes the layout of a type. Note that this implicitly
     /// executes in "reveal all" mode.
     #[inline]
-    pub fn layout_of(self, param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
-                     -> Result<TyLayout<'tcx>, LayoutError<'tcx>> {
+    pub fn layout_of(
+        self,
+        param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
+    ) -> Result<TyLayout<'tcx>, LayoutError<'tcx>> {
         let cx = LayoutCx {
             tcx: self.global_tcx().at(self.span),
-            param_env: param_env_and_ty.param_env
+            param_env: param_env_and_ty.param_env,
         };
         cx.layout_of(param_env_and_ty.value)
     }
 }

 impl<'a, 'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx>
-    where C: LayoutOf<Ty = Ty<'tcx>> + HasTyCtxt<'tcx>,
-          C::TyLayout: MaybeResult<TyLayout<'tcx>>
+where
+    C: LayoutOf<Ty = Ty<'tcx>> + HasTyCtxt<'tcx>,
+    C::TyLayout: MaybeResult<TyLayout<'tcx>>,
 {
     fn for_variant(this: TyLayout<'tcx>, cx: &C, variant_index: VariantIdx) -> TyLayout<'tcx> {
         let details = match this.variants {
@@ -1613,52 +1731,55 @@ impl<'a, 'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx>

                 let fields = match this.ty.sty {
                     ty::Adt(def, _) => def.variants[variant_index].fields.len(),
-                    _ => bug!()
+                    _ => bug!(),
                 };
                 let tcx = cx.tcx();
                 tcx.intern_layout(LayoutDetails {
-                    variants: Variants::Single { index: variant_index },
+                    variants: Variants::Single {
+                        index: variant_index,
+                    },
                     fields: FieldPlacement::Union(fields),
                     abi: Abi::Uninhabited,
                     align: tcx.data_layout.i8_align,
-                    size: Size::ZERO
+                    size: Size::ZERO,
                 })
             }

-            Variants::NicheFilling { ref variants, .. } |
-            Variants::Tagged { ref variants, .. } => {
+            Variants::NicheFilling { ref variants, .. } | Variants::Tagged { ref variants, .. } => {
                 &variants[variant_index]
             }
         };

-        assert_eq!(details.variants, Variants::Single { index: variant_index });
+        assert_eq!(
+            details.variants,
+            Variants::Single {
+                index: variant_index
+            }
+        );

         TyLayout {
             ty: this.ty,
-            details
+            details,
         }
     }

     fn field(this: TyLayout<'tcx>, cx: &C, i: usize) -> C::TyLayout {
         let tcx = cx.tcx();
         cx.layout_of(match this.ty.sty {
-            ty::Bool |
-            ty::Char |
-            ty::Int(_) |
-            ty::Uint(_) |
-            ty::Float(_) |
-            ty::FnPtr(_) |
-            ty::Never |
-            ty::FnDef(..) |
-            ty::GeneratorWitness(..) |
-            ty::Foreign(..) |
-            ty::Dynamic(..) => {
-                bug!("TyLayout::field_type({:?}): not applicable", this)
-            }
+            ty::Bool
+            | ty::Char
+            | ty::Int(_)
+            | ty::Uint(_)
+            | ty::Float(_)
+            | ty::FnPtr(_)
+            | ty::Never
+            | ty::FnDef(..)
+            | ty::GeneratorWitness(..)
+            | ty::Foreign(..)
+            | ty::Dynamic(..) => bug!("TyLayout::field_type({:?}): not applicable", this),

             // Potentially-fat pointers.
-            ty::Ref(_, pointee, _) |
-            ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
+            ty::Ref(_, pointee, _) | ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
                 assert!(i < this.fields.count());

                 // Reuse the fat *T type as its own thin pointer data field.
@@ -1679,13 +1800,9 @@ impl<'a, 'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx> } match tcx.struct_tail(pointee).sty { - ty::Slice(_) | - ty::Str => tcx.types.usize, + ty::Slice(_) | ty::Str => tcx.types.usize, ty::Dynamic(_, _) => { - tcx.mk_imm_ref( - tcx.types.re_static, - tcx.mk_array(tcx.types.usize, 3), - ) + tcx.mk_imm_ref(tcx.types.re_static, tcx.mk_array(tcx.types.usize, 3)) /* FIXME use actual fn pointers Warning: naively computing the number of entries in the vtable by counting the methods on the trait + methods on @@ -1700,56 +1817,52 @@ impl<'a, 'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx> ]) */ } - _ => bug!("TyLayout::field_type({:?}): not applicable", this) + _ => bug!("TyLayout::field_type({:?}): not applicable", this), } } // Arrays and slices. - ty::Array(element, _) | - ty::Slice(element) => element, + ty::Array(element, _) | ty::Slice(element) => element, ty::Str => tcx.types.u8, // Tuples, generators and closures. - ty::Closure(def_id, ref substs) => { - substs.upvar_tys(def_id, tcx).nth(i).unwrap() - } + ty::Closure(def_id, ref substs) => substs.upvar_tys(def_id, tcx).nth(i).unwrap(), - ty::Generator(def_id, ref substs, _) => { - substs.field_tys(def_id, tcx).nth(i).unwrap() - } + ty::Generator(def_id, ref substs, _) => substs.field_tys(def_id, tcx).nth(i).unwrap(), ty::Tuple(tys) => tys[i], // SIMD vector types. - ty::Adt(def, ..) if def.repr.simd() => { - this.ty.simd_type(tcx) - } + ty::Adt(def, ..) if def.repr.simd() => this.ty.simd_type(tcx), // ADTs. ty::Adt(def, substs) => { match this.variants { - Variants::Single { index } => { - def.variants[index].fields[i].ty(tcx, substs) - } + Variants::Single { index } => def.variants[index].fields[i].ty(tcx, substs), // Discriminant field for enums (where applicable). - Variants::Tagged { tag: ref discr, .. } | - Variants::NicheFilling { niche: ref discr, .. } => { + Variants::Tagged { tag: ref discr, .. } + | Variants::NicheFilling { + niche: ref discr, .. + } => { assert_eq!(i, 0); let layout = LayoutDetails::scalar(cx, discr.clone()); return MaybeResult::from_ok(TyLayout { details: tcx.intern_layout(layout), - ty: discr.value.to_ty(tcx) + ty: discr.value.to_ty(tcx), }); } } } - ty::Projection(_) | ty::UnnormalizedProjection(..) | ty::Bound(..) | - ty::Placeholder(..) | ty::Opaque(..) | ty::Param(_) | ty::Infer(_) | - ty::Error => { - bug!("TyLayout::field_type: unexpected type `{}`", this.ty) - } + ty::Projection(_) + | ty::UnnormalizedProjection(..) + | ty::Bound(..) + | ty::Placeholder(..) + | ty::Opaque(..) + | ty::Param(_) + | ty::Infer(_) + | ty::Error => bug!("TyLayout::field_type: unexpected type `{}`", this.ty), }) } } @@ -1769,13 +1882,22 @@ impl Niche { if count > self.available { return None; } - let Scalar { value, valid_range: ref v } = self.scalar; + let Scalar { + value, + valid_range: ref v, + } = self.scalar; let bits = value.size(cx).bits(); assert!(bits <= 128); let max_value = !0u128 >> (128 - bits); let start = v.end().wrapping_add(1) & max_value; let end = v.end().wrapping_add(count) & max_value; - Some((start, Scalar { value, valid_range: *v.start()..=end })) + Some(( + start, + Scalar { + value, + valid_range: *v.start()..=end, + }, + )) } } @@ -1785,7 +1907,10 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { // FIXME(eddyb) traverse already optimized enums. 
fn find_niche(&self, layout: TyLayout<'tcx>) -> Result, LayoutError<'tcx>> { let scalar_niche = |scalar: &Scalar, offset| { - let Scalar { value, valid_range: ref v } = *scalar; + let Scalar { + value, + valid_range: ref v, + } = *scalar; let bits = value.size(self).bits(); assert!(bits <= 128); @@ -1803,7 +1928,11 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { return None; } - Some(Niche { offset, scalar: scalar.clone(), available }) + Some(Niche { + offset, + scalar: scalar.clone(), + available, + }) }; // Locals variables which live across yields are stored @@ -1820,9 +1949,7 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { Abi::ScalarPair(ref a, ref b) => { // HACK(nox): We iter on `b` and then `a` because `max_by_key` // returns the last maximum. - let niche = iter::once( - (b, a.value.size(self).align_to(b.value.align(self).abi)) - ) + let niche = iter::once((b, a.value.size(self).align_to(b.value.align(self).abi))) .chain(iter::once((a, Size::ZERO))) .filter_map(|(scalar, offset)| scalar_niche(scalar, offset)) .max_by_key(|niche| niche.available); @@ -1865,9 +1992,11 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { } impl<'a> HashStable> for Variants { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { use ty::layout::Variants::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -1901,9 +2030,11 @@ impl<'a> HashStable> for Variants { } impl<'a> HashStable> for FieldPlacement { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { use ty::layout::FieldPlacement::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -1915,7 +2046,10 @@ impl<'a> HashStable> for FieldPlacement { count.hash_stable(hcx, hasher); stride.hash_stable(hcx, hasher); } - Arbitrary { ref offsets, ref memory_index } => { + Arbitrary { + ref offsets, + ref memory_index, + } => { offsets.hash_stable(hcx, hasher); memory_index.hash_stable(hcx, hasher); } @@ -1934,9 +2068,11 @@ impl<'a> HashStable> for VariantIdx { } impl<'a> HashStable> for Abi { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { use ty::layout::Abi::*; mem::discriminant(self).hash_stable(hcx, hasher); @@ -1961,10 +2097,15 @@ impl<'a> HashStable> for Abi { } impl<'a> HashStable> for Scalar { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { - let Scalar { value, ref valid_range } = *self; + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { + let Scalar { + value, + ref valid_range, + } = *self; value.hash_stable(hcx, hasher); valid_range.start().hash_stable(hcx, hasher); valid_range.end().hash_stable(hcx, hasher); @@ -1999,32 +2140,36 @@ impl_stable_hash_for!(struct ::ty::layout::AbiAndPrefAlign { }); impl<'gcx> HashStable> for Align { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'gcx>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'gcx>, + hasher: &mut StableHasher, + ) { self.bytes().hash_stable(hcx, hasher); } } impl<'gcx> HashStable> for Size { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'gcx>, - hasher: &mut StableHasher) { + fn 
hash_stable( + &self, + hcx: &mut StableHashingContext<'gcx>, + hasher: &mut StableHasher, + ) { self.bytes().hash_stable(hcx, hasher); } } -impl<'a, 'gcx> HashStable> for LayoutError<'gcx> -{ - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { +impl<'a, 'gcx> HashStable> for LayoutError<'gcx> { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { use ty::layout::LayoutError::*; mem::discriminant(self).hash_stable(hcx, hasher); match *self { - Unknown(t) | - SizeOverflow(t) => t.hash_stable(hcx, hasher) + Unknown(t) | SizeOverflow(t) => t.hash_stable(hcx, hasher), } } } diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index d40dd830e9fb9..6716b1e68bc7b 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -1,82 +1,81 @@ -pub use self::Variance::*; +pub use self::fold::TypeFoldable; pub use self::AssociatedItemContainer::*; pub use self::BorrowKind::*; pub use self::IntVarValue::*; -pub use self::fold::TypeFoldable; +pub use self::Variance::*; -use hir::{map as hir_map, FreevarMap, TraitMap}; -use hir::Node; -use hir::def::{Def, CtorKind, ExportMap}; +use arena::SyncDroplessArena; +use hir::def::{CtorKind, Def, ExportMap}; use hir::def_id::{CrateNum, DefId, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use hir::map::DefPathData; -use rustc_data_structures::svh::Svh; +use hir::Node; +use hir::{map as hir_map, FreevarMap, TraitMap}; use ich::Fingerprint; use ich::StableHashingContext; use infer::canonical::Canonical; -use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem}; +use middle::lang_items::{FnMutTraitLangItem, FnOnceTraitLangItem, FnTraitLangItem}; use middle::privacy::AccessLevels; use middle::resolve_lifetime::ObjectLifetimeDefault; -use mir::Mir; -use mir::interpret::{GlobalId, ErrorHandled}; +use mir::interpret::{ErrorHandled, GlobalId}; use mir::GeneratorLayout; +use mir::Mir; +use rustc_data_structures::svh::Svh; use session::CrateDisambiguator; +use session::DataTypeKind; use traits::{self, Reveal}; use ty; use ty::layout::VariantIdx; use ty::subst::{Subst, Substs}; -use ty::util::{IntTypeExt, Discr}; +use ty::util::{Discr, IntTypeExt}; use ty::walk::TypeWalker; use util::captures::Captures; -use util::nodemap::{NodeSet, DefIdMap, FxHashMap}; -use arena::SyncDroplessArena; -use session::DataTypeKind; +use util::nodemap::{DefIdMap, FxHashMap, NodeSet}; +use rustc_data_structures::sync::{self, par_iter, Lrc, ParallelIterator}; use serialize::{self, Encodable, Encoder}; use std::cell::RefCell; use std::cmp::{self, Ordering}; use std::fmt; use std::hash::{Hash, Hasher}; use std::ops::Deref; -use rustc_data_structures::sync::{self, Lrc, ParallelIterator, par_iter}; use std::slice; use std::{mem, ptr}; -use syntax::ast::{self, DUMMY_NODE_ID, Name, Ident, NodeId}; +use syntax::ast::{self, Ident, Name, NodeId, DUMMY_NODE_ID}; use syntax::attr; use syntax::ext::hygiene::Mark; -use syntax::symbol::{keywords, Symbol, LocalInternedString, InternedString}; -use syntax_pos::{DUMMY_SP, Span}; +use syntax::symbol::{keywords, InternedString, LocalInternedString, Symbol}; +use syntax_pos::{Span, DUMMY_SP}; -use smallvec; use rustc_data_structures::indexed_vec::{Idx, IndexVec}; -use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult, - HashStable}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; +use smallvec; use hir; -pub use self::sty::{Binder, BoundTy, BoundTyKind, BoundVar, DebruijnIndex, 
INNERMOST}; -pub use self::sty::{FnSig, GenSig, CanonicalPolyFnSig, PolyFnSig, PolyGenSig}; -pub use self::sty::{InferTy, ParamTy, ProjectionTy, ExistentialPredicate}; -pub use self::sty::{ClosureSubsts, GeneratorSubsts, UpvarSubsts, TypeAndMut}; -pub use self::sty::{TraitRef, TyKind, PolyTraitRef}; -pub use self::sty::{ExistentialTraitRef, PolyExistentialTraitRef}; -pub use self::sty::{ExistentialProjection, PolyExistentialProjection, Const}; -pub use self::sty::{BoundRegion, EarlyBoundRegion, FreeRegion, Region}; -pub use self::sty::RegionKind; -pub use self::sty::{TyVid, IntVid, FloatVid, RegionVid}; pub use self::sty::BoundRegion::*; pub use self::sty::InferTy::*; +pub use self::sty::RegionKind; pub use self::sty::RegionKind::*; pub use self::sty::TyKind::*; +pub use self::sty::{Binder, BoundTy, BoundTyKind, BoundVar, DebruijnIndex, INNERMOST}; +pub use self::sty::{BoundRegion, EarlyBoundRegion, FreeRegion, Region}; +pub use self::sty::{CanonicalPolyFnSig, FnSig, GenSig, PolyFnSig, PolyGenSig}; +pub use self::sty::{ClosureSubsts, GeneratorSubsts, TypeAndMut, UpvarSubsts}; +pub use self::sty::{Const, ExistentialProjection, PolyExistentialProjection}; +pub use self::sty::{ExistentialPredicate, InferTy, ParamTy, ProjectionTy}; +pub use self::sty::{ExistentialTraitRef, PolyExistentialTraitRef}; +pub use self::sty::{FloatVid, IntVid, RegionVid, TyVid}; +pub use self::sty::{PolyTraitRef, TraitRef, TyKind}; pub use self::binding::BindingMode; pub use self::binding::BindingMode::*; -pub use self::context::{TyCtxt, FreeRegionInfo, GlobalArenas, AllArenas, tls, keep_local}; -pub use self::context::{Lift, TypeckTables, CtxtInterners}; +pub use self::context::{keep_local, tls, AllArenas, FreeRegionInfo, GlobalArenas, TyCtxt}; pub use self::context::{ - UserTypeAnnotationIndex, UserTypeAnnotation, CanonicalUserTypeAnnotation, - CanonicalUserTypeAnnotations, + CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations, UserTypeAnnotation, + UserTypeAnnotationIndex, }; +pub use self::context::{CtxtInterners, Lift, TypeckTables}; pub use self::instance::{Instance, InstanceDef}; @@ -89,24 +88,24 @@ pub mod binding; pub mod cast; #[macro_use] pub mod codec; +pub mod _match; mod constness; -pub mod error; mod erase_regions; +pub mod error; pub mod fast_reject; pub mod fold; pub mod inhabitedness; pub mod item_path; pub mod layout; -pub mod _match; pub mod outlives; pub mod query; pub mod relate; pub mod steal; pub mod subst; pub mod trait_def; +pub mod util; pub mod walk; pub mod wf; -pub mod util; mod context; mod flags; @@ -152,7 +151,7 @@ impl AssociatedItemContainer { pub fn assert_trait(&self) -> DefId { match *self { TraitContainer(id) => id, - _ => bug!("associated item has wrong container type: {:?}", self) + _ => bug!("associated item has wrong container type: {:?}", self), } } @@ -194,7 +193,7 @@ pub enum AssociatedKind { Const, Method, Existential, - Type + Type, } impl AssociatedItem { @@ -211,9 +210,7 @@ impl AssociatedItem { /// for ! pub fn relevant_for_never<'tcx>(&self) -> bool { match self.kind { - AssociatedKind::Existential | - AssociatedKind::Const | - AssociatedKind::Type => true, + AssociatedKind::Existential | AssociatedKind::Const | AssociatedKind::Type => true, // FIXME(canndrew): Be more thorough here, check if any argument is uninhabited. 
AssociatedKind::Method => !self.method_has_self_argument, } @@ -267,7 +264,9 @@ pub trait DefIdTree: Copy { impl<'a, 'gcx, 'tcx> DefIdTree for TyCtxt<'a, 'gcx, 'tcx> { fn parent(self, id: DefId) -> Option { - self.def_key(id).parent.map(|index| DefId { index: index, ..id }) + self.def_key(id) + .parent + .map(|index| DefId { index: index, ..id }) } } @@ -326,10 +325,10 @@ impl Visibility { #[derive(Copy, Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Hash)] pub enum Variance { - Covariant, // T <: T iff A <: B -- e.g., function return type - Invariant, // T <: T iff B == A -- e.g., type of mutable cell - Contravariant, // T <: T iff B <: A -- e.g., function param type - Bivariant, // T <: T -- e.g., unused type parameter + Covariant, // T <: T iff A <: B -- e.g., function return type + Invariant, // T <: T iff B == A -- e.g., type of mutable cell + Contravariant, // T <: T iff B <: A -- e.g., function param type + Bivariant, // T <: T -- e.g., unused type parameter } /// The crate variances map is computed during typeck and contains the @@ -544,15 +543,15 @@ impl<'tcx> Hash for TyS<'tcx> { impl<'tcx> TyS<'tcx> { pub fn is_primitive_ty(&self) -> bool { match self.sty { - TyKind::Bool | - TyKind::Char | - TyKind::Int(_) | - TyKind::Uint(_) | - TyKind::Float(_) | - TyKind::Infer(InferTy::IntVar(_)) | - TyKind::Infer(InferTy::FloatVar(_)) | - TyKind::Infer(InferTy::FreshIntTy(_)) | - TyKind::Infer(InferTy::FreshFloatTy(_)) => true, + TyKind::Bool + | TyKind::Char + | TyKind::Int(_) + | TyKind::Uint(_) + | TyKind::Float(_) + | TyKind::Infer(InferTy::IntVar(_)) + | TyKind::Infer(InferTy::FloatVar(_)) + | TyKind::Infer(InferTy::FreshIntTy(_)) + | TyKind::Infer(InferTy::FreshFloatTy(_)) => true, TyKind::Ref(_, x, _) => x.is_primitive_ty(), _ => false, } @@ -560,22 +559,24 @@ impl<'tcx> TyS<'tcx> { pub fn is_suggestable(&self) -> bool { match self.sty { - TyKind::Opaque(..) | - TyKind::FnDef(..) | - TyKind::FnPtr(..) | - TyKind::Dynamic(..) | - TyKind::Closure(..) | - TyKind::Infer(..) | - TyKind::Projection(..) => false, + TyKind::Opaque(..) + | TyKind::FnDef(..) + | TyKind::FnPtr(..) + | TyKind::Dynamic(..) + | TyKind::Closure(..) + | TyKind::Infer(..) + | TyKind::Projection(..) 
=> false, _ => true, } } } impl<'a, 'gcx> HashStable> for ty::TyS<'gcx> { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let ty::TyS { ref sty, @@ -597,7 +598,7 @@ impl<'tcx> serialize::UseSpecializedDecodable for Ty<'tcx> {} pub type CanonicalTy<'gcx> = Canonical<'gcx, Ty<'gcx>>; -extern { +extern "C" { /// A dummy type used to force List to by unsized without requiring fat pointers type OpaqueListContents; } @@ -634,7 +635,8 @@ impl List { let mem = arena.alloc_raw( size, - cmp::max(mem::align_of::(), mem::align_of::())); + cmp::max(mem::align_of::(), mem::align_of::()), + ); unsafe { let result = &mut *(mem.as_mut_ptr() as *mut List); // Write the length @@ -662,17 +664,27 @@ impl Encodable for List { } } -impl Ord for List where T: Ord { +impl Ord for List +where + T: Ord, +{ fn cmp(&self, other: &List) -> Ordering { - if self == other { Ordering::Equal } else { + if self == other { + Ordering::Equal + } else { <[T] as Ord>::cmp(&**self, &**other) } } } -impl PartialOrd for List where T: PartialOrd { +impl PartialOrd for List +where + T: PartialOrd, +{ fn partial_cmp(&self, other: &List) -> Option { - if self == other { Some(Ordering::Equal) } else { + if self == other { + Some(Ordering::Equal) + } else { <[T] as PartialOrd>::partial_cmp(&**self, &**other) } } @@ -697,9 +709,7 @@ impl Deref for List { type Target = [T]; #[inline(always)] fn deref(&self) -> &[T] { - unsafe { - slice::from_raw_parts(self.data.as_ptr(), self.len) - } + unsafe { slice::from_raw_parts(self.data.as_ptr(), self.len) } } } @@ -721,9 +731,7 @@ impl List { struct EmptySlice([u8; 64]); static EMPTY_SLICE: EmptySlice = EmptySlice([0; 64]); assert!(mem::align_of::() <= 64); - unsafe { - &*(&EMPTY_SLICE as *const _ as *const List) - } + unsafe { &*(&EMPTY_SLICE as *const _ as *const List) } } } @@ -784,7 +792,7 @@ pub enum BorrowKind { UniqueImmBorrow, /// Data is mutable and not aliasable. - MutBorrow + MutBorrow, } /// Information describing the capture of an upvar. This is computed @@ -848,7 +856,7 @@ pub enum GenericParamDefKind { has_default: bool, object_lifetime_default: ObjectLifetimeDefault, synthetic: Option, - } + }, } #[derive(Clone, RustcEncodable, RustcDecodable)] @@ -947,37 +955,38 @@ impl<'a, 'gcx, 'tcx> Generics { } } - pub fn region_param(&'tcx self, - param: &EarlyBoundRegion, - tcx: TyCtxt<'a, 'gcx, 'tcx>) - -> &'tcx GenericParamDef - { + pub fn region_param( + &'tcx self, + param: &EarlyBoundRegion, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + ) -> &'tcx GenericParamDef { if let Some(index) = param.index.checked_sub(self.parent_count as u32) { let param = &self.params[index as usize]; match param.kind { ty::GenericParamDefKind::Lifetime => param, - _ => bug!("expected lifetime parameter, but found another generic parameter") + _ => bug!("expected lifetime parameter, but found another generic parameter"), } } else { tcx.generics_of(self.parent.expect("parent_count > 0 but no parent?")) - .region_param(param, tcx) + .region_param(param, tcx) } } /// Returns the `GenericParamDef` associated with this `ParamTy`. 
- pub fn type_param(&'tcx self, - param: &ParamTy, - tcx: TyCtxt<'a, 'gcx, 'tcx>) - -> &'tcx GenericParamDef { + pub fn type_param( + &'tcx self, + param: &ParamTy, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + ) -> &'tcx GenericParamDef { if let Some(index) = param.idx.checked_sub(self.parent_count as u32) { let param = &self.params[index as usize]; match param.kind { - ty::GenericParamDefKind::Type {..} => param, - _ => bug!("expected type parameter, but found another generic parameter") + ty::GenericParamDefKind::Type { .. } => param, + _ => bug!("expected type parameter, but found another generic parameter"), } } else { tcx.generics_of(self.parent.expect("parent_count > 0 but no parent?")) - .type_param(param, tcx) + .type_param(param, tcx) } } } @@ -993,55 +1002,80 @@ impl<'tcx> serialize::UseSpecializedEncodable for GenericPredicates<'tcx> {} impl<'tcx> serialize::UseSpecializedDecodable for GenericPredicates<'tcx> {} impl<'a, 'gcx, 'tcx> GenericPredicates<'tcx> { - pub fn instantiate(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, substs: &Substs<'tcx>) - -> InstantiatedPredicates<'tcx> { + pub fn instantiate( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + substs: &Substs<'tcx>, + ) -> InstantiatedPredicates<'tcx> { let mut instantiated = InstantiatedPredicates::empty(); self.instantiate_into(tcx, &mut instantiated, substs); instantiated } - pub fn instantiate_own(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, substs: &Substs<'tcx>) - -> InstantiatedPredicates<'tcx> { + pub fn instantiate_own( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + substs: &Substs<'tcx>, + ) -> InstantiatedPredicates<'tcx> { InstantiatedPredicates { - predicates: self.predicates.iter().map(|(p, _)| p.subst(tcx, substs)).collect(), + predicates: self + .predicates + .iter() + .map(|(p, _)| p.subst(tcx, substs)) + .collect(), } } - fn instantiate_into(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - instantiated: &mut InstantiatedPredicates<'tcx>, - substs: &Substs<'tcx>) { + fn instantiate_into( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + instantiated: &mut InstantiatedPredicates<'tcx>, + substs: &Substs<'tcx>, + ) { if let Some(def_id) = self.parent { - tcx.predicates_of(def_id).instantiate_into(tcx, instantiated, substs); + tcx.predicates_of(def_id) + .instantiate_into(tcx, instantiated, substs); } - instantiated.predicates.extend( - self.predicates.iter().map(|(p, _)| p.subst(tcx, substs)), - ); + instantiated + .predicates + .extend(self.predicates.iter().map(|(p, _)| p.subst(tcx, substs))); } - pub fn instantiate_identity(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) - -> InstantiatedPredicates<'tcx> { + pub fn instantiate_identity( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + ) -> InstantiatedPredicates<'tcx> { let mut instantiated = InstantiatedPredicates::empty(); self.instantiate_identity_into(tcx, &mut instantiated); instantiated } - fn instantiate_identity_into(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - instantiated: &mut InstantiatedPredicates<'tcx>) { + fn instantiate_identity_into( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + instantiated: &mut InstantiatedPredicates<'tcx>, + ) { if let Some(def_id) = self.parent { - tcx.predicates_of(def_id).instantiate_identity_into(tcx, instantiated); + tcx.predicates_of(def_id) + .instantiate_identity_into(tcx, instantiated); } - instantiated.predicates.extend(self.predicates.iter().map(|&(p, _)| p)) + instantiated + .predicates + .extend(self.predicates.iter().map(|&(p, _)| p)) } - pub fn instantiate_supertrait(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - poly_trait_ref: &ty::PolyTraitRef<'tcx>) - -> InstantiatedPredicates<'tcx> - { + pub fn 
instantiate_supertrait( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + poly_trait_ref: &ty::PolyTraitRef<'tcx>, + ) -> InstantiatedPredicates<'tcx> { assert_eq!(self.parent, None); InstantiatedPredicates { - predicates: self.predicates.iter().map(|(pred, _)| { - pred.subst_supertrait(tcx, poly_trait_ref) - }).collect() + predicates: self + .predicates + .iter() + .map(|(pred, _)| pred.subst_supertrait(tcx, poly_trait_ref)) + .collect(), } } } @@ -1109,10 +1143,11 @@ impl<'a, 'gcx, 'tcx> Predicate<'tcx> { /// poly-trait-ref holds. This is slightly different from a normal /// substitution in terms of what happens with bound regions. See /// lengthy comment below for details. - pub fn subst_supertrait(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - trait_ref: &ty::PolyTraitRef<'tcx>) - -> ty::Predicate<'tcx> - { + pub fn subst_supertrait( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_ref: &ty::PolyTraitRef<'tcx>, + ) -> ty::Predicate<'tcx> { // The interaction between HRTB and supertraits is not entirely // obvious. Let me walk you (and myself) through an example. // @@ -1175,31 +1210,36 @@ impl<'a, 'gcx, 'tcx> Predicate<'tcx> { let substs = &trait_ref.skip_binder().substs; match *self { - Predicate::Trait(ref binder) => - Predicate::Trait(binder.map_bound(|data| data.subst(tcx, substs))), - Predicate::Subtype(ref binder) => - Predicate::Subtype(binder.map_bound(|data| data.subst(tcx, substs))), - Predicate::RegionOutlives(ref binder) => - Predicate::RegionOutlives(binder.map_bound(|data| data.subst(tcx, substs))), - Predicate::TypeOutlives(ref binder) => - Predicate::TypeOutlives(binder.map_bound(|data| data.subst(tcx, substs))), - Predicate::Projection(ref binder) => - Predicate::Projection(binder.map_bound(|data| data.subst(tcx, substs))), - Predicate::WellFormed(data) => - Predicate::WellFormed(data.subst(tcx, substs)), - Predicate::ObjectSafe(trait_def_id) => - Predicate::ObjectSafe(trait_def_id), - Predicate::ClosureKind(closure_def_id, closure_substs, kind) => - Predicate::ClosureKind(closure_def_id, closure_substs.subst(tcx, substs), kind), - Predicate::ConstEvaluatable(def_id, const_substs) => - Predicate::ConstEvaluatable(def_id, const_substs.subst(tcx, substs)), + Predicate::Trait(ref binder) => { + Predicate::Trait(binder.map_bound(|data| data.subst(tcx, substs))) + } + Predicate::Subtype(ref binder) => { + Predicate::Subtype(binder.map_bound(|data| data.subst(tcx, substs))) + } + Predicate::RegionOutlives(ref binder) => { + Predicate::RegionOutlives(binder.map_bound(|data| data.subst(tcx, substs))) + } + Predicate::TypeOutlives(ref binder) => { + Predicate::TypeOutlives(binder.map_bound(|data| data.subst(tcx, substs))) + } + Predicate::Projection(ref binder) => { + Predicate::Projection(binder.map_bound(|data| data.subst(tcx, substs))) + } + Predicate::WellFormed(data) => Predicate::WellFormed(data.subst(tcx, substs)), + Predicate::ObjectSafe(trait_def_id) => Predicate::ObjectSafe(trait_def_id), + Predicate::ClosureKind(closure_def_id, closure_substs, kind) => { + Predicate::ClosureKind(closure_def_id, closure_substs.subst(tcx, substs), kind) + } + Predicate::ConstEvaluatable(def_id, const_substs) => { + Predicate::ConstEvaluatable(def_id, const_substs.subst(tcx, substs)) + } } } } #[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct TraitPredicate<'tcx> { - pub trait_ref: TraitRef<'tcx> + pub trait_ref: TraitRef<'tcx>, } pub type PolyTraitPredicate<'tcx> = ty::Binder>; @@ -1209,7 +1249,7 @@ impl<'tcx> TraitPredicate<'tcx> { self.trait_ref.def_id } - pub fn 
input_types<'a>(&'a self) -> impl DoubleEndedIterator> + 'a { + pub fn input_types<'a>(&'a self) -> impl DoubleEndedIterator> + 'a { self.trait_ref.input_types() } @@ -1225,13 +1265,13 @@ impl<'tcx> PolyTraitPredicate<'tcx> { } } -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)] -pub struct OutlivesPredicate(pub A, pub B); // `A: B` -pub type PolyOutlivesPredicate = ty::Binder>; -pub type RegionOutlivesPredicate<'tcx> = OutlivesPredicate, - ty::Region<'tcx>>; -pub type TypeOutlivesPredicate<'tcx> = OutlivesPredicate, - ty::Region<'tcx>>; +#[derive( + Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable, +)] +pub struct OutlivesPredicate(pub A, pub B); // `A: B` +pub type PolyOutlivesPredicate = ty::Binder>; +pub type RegionOutlivesPredicate<'tcx> = OutlivesPredicate, ty::Region<'tcx>>; +pub type TypeOutlivesPredicate<'tcx> = OutlivesPredicate, ty::Region<'tcx>>; pub type PolyRegionOutlivesPredicate<'tcx> = ty::Binder>; pub type PolyTypeOutlivesPredicate<'tcx> = ty::Binder>; @@ -1239,7 +1279,7 @@ pub type PolyTypeOutlivesPredicate<'tcx> = ty::Binder { pub a_is_expected: bool, pub a: Ty<'tcx>, - pub b: Ty<'tcx> + pub b: Ty<'tcx>, } pub type PolySubtypePredicate<'tcx> = ty::Binder>; @@ -1316,7 +1356,7 @@ pub trait ToPredicate<'tcx> { impl<'tcx> ToPredicate<'tcx> for TraitRef<'tcx> { fn to_predicate(&self) -> Predicate<'tcx> { ty::Predicate::Trait(ty::Binder::dummy(ty::TraitPredicate { - trait_ref: self.clone() + trait_ref: self.clone(), })) } } @@ -1347,22 +1387,24 @@ impl<'tcx> ToPredicate<'tcx> for PolyProjectionPredicate<'tcx> { // A custom iterator used by Predicate::walk_tys. enum WalkTysIter<'tcx, I, J, K> - where I: Iterator>, - J: Iterator>, - K: Iterator> +where + I: Iterator>, + J: Iterator>, + K: Iterator>, { None, One(Ty<'tcx>), Two(Ty<'tcx>, Ty<'tcx>), Types(I), InputTypes(J), - ProjectionTypes(K) + ProjectionTypes(K), } impl<'tcx, I, J, K> Iterator for WalkTysIter<'tcx, I, J, K> - where I: Iterator>, - J: Iterator>, - K: Iterator> +where + I: Iterator>, + J: Iterator>, + K: Iterator>, { type Item = Ty<'tcx>; @@ -1372,20 +1414,14 @@ impl<'tcx, I, J, K> Iterator for WalkTysIter<'tcx, I, J, K> WalkTysIter::One(item) => { *self = WalkTysIter::None; Some(item) - }, + } WalkTysIter::Two(item1, item2) => { *self = WalkTysIter::One(item2); Some(item1) - }, - WalkTysIter::Types(ref mut iter) => { - iter.next() - }, - WalkTysIter::InputTypes(ref mut iter) => { - iter.next() - }, - WalkTysIter::ProjectionTypes(ref mut iter) => { - iter.next() } + WalkTysIter::Types(ref mut iter) => iter.next(), + WalkTysIter::InputTypes(ref mut iter) => iter.next(), + WalkTysIter::ProjectionTypes(ref mut iter) => iter.next(), } } } @@ -1400,68 +1436,55 @@ impl<'tcx> Predicate<'tcx> { WalkTysIter::InputTypes(data.skip_binder().input_types()) } ty::Predicate::Subtype(binder) => { - let SubtypePredicate { a, b, a_is_expected: _ } = binder.skip_binder(); + let SubtypePredicate { + a, + b, + a_is_expected: _, + } = binder.skip_binder(); WalkTysIter::Two(a, b) } - ty::Predicate::TypeOutlives(binder) => { - WalkTysIter::One(binder.skip_binder().0) - } - ty::Predicate::RegionOutlives(..) => { - WalkTysIter::None - } + ty::Predicate::TypeOutlives(binder) => WalkTysIter::One(binder.skip_binder().0), + ty::Predicate::RegionOutlives(..) 
=> WalkTysIter::None, ty::Predicate::Projection(ref data) => { let inner = data.skip_binder(); WalkTysIter::ProjectionTypes( - inner.projection_ty.substs.types().chain(Some(inner.ty))) - } - ty::Predicate::WellFormed(data) => { - WalkTysIter::One(data) - } - ty::Predicate::ObjectSafe(_trait_def_id) => { - WalkTysIter::None + inner.projection_ty.substs.types().chain(Some(inner.ty)), + ) } + ty::Predicate::WellFormed(data) => WalkTysIter::One(data), + ty::Predicate::ObjectSafe(_trait_def_id) => WalkTysIter::None, ty::Predicate::ClosureKind(_closure_def_id, closure_substs, _kind) => { WalkTysIter::Types(closure_substs.substs.types()) } - ty::Predicate::ConstEvaluatable(_, substs) => { - WalkTysIter::Types(substs.types()) - } + ty::Predicate::ConstEvaluatable(_, substs) => WalkTysIter::Types(substs.types()), } } pub fn to_opt_poly_trait_ref(&self) -> Option> { match *self { - Predicate::Trait(ref t) => { - Some(t.to_poly_trait_ref()) - } - Predicate::Projection(..) | - Predicate::Subtype(..) | - Predicate::RegionOutlives(..) | - Predicate::WellFormed(..) | - Predicate::ObjectSafe(..) | - Predicate::ClosureKind(..) | - Predicate::TypeOutlives(..) | - Predicate::ConstEvaluatable(..) => { - None - } + Predicate::Trait(ref t) => Some(t.to_poly_trait_ref()), + Predicate::Projection(..) + | Predicate::Subtype(..) + | Predicate::RegionOutlives(..) + | Predicate::WellFormed(..) + | Predicate::ObjectSafe(..) + | Predicate::ClosureKind(..) + | Predicate::TypeOutlives(..) + | Predicate::ConstEvaluatable(..) => None, } } pub fn to_opt_type_outlives(&self) -> Option> { match *self { - Predicate::TypeOutlives(data) => { - Some(data) - } - Predicate::Trait(..) | - Predicate::Projection(..) | - Predicate::Subtype(..) | - Predicate::RegionOutlives(..) | - Predicate::WellFormed(..) | - Predicate::ObjectSafe(..) | - Predicate::ClosureKind(..) | - Predicate::ConstEvaluatable(..) => { - None - } + Predicate::TypeOutlives(data) => Some(data), + Predicate::Trait(..) + | Predicate::Projection(..) + | Predicate::Subtype(..) + | Predicate::RegionOutlives(..) + | Predicate::WellFormed(..) + | Predicate::ObjectSafe(..) + | Predicate::ClosureKind(..) + | Predicate::ConstEvaluatable(..) => None, } } } @@ -1584,19 +1607,22 @@ impl UniverseIndex { /// basically a name -- distinct bound regions within the same /// universe are just two regions with an unknown relationship to one /// another. -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, PartialOrd, Ord)] +#[derive( + Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, PartialOrd, Ord, +)] pub struct Placeholder { pub universe: UniverseIndex, pub name: T, } impl<'a, 'gcx, T> HashStable> for Placeholder - where T: HashStable> +where + T: HashStable>, { fn hash_stable( &self, hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher + hasher: &mut StableHasher, ) { self.universe.hash_stable(hcx, hasher); self.name.hash_stable(hcx, hasher); @@ -1655,9 +1681,13 @@ impl<'tcx> ParamEnv<'tcx> { pub fn new( caller_bounds: &'tcx List>, reveal: Reveal, - def_id: Option + def_id: Option, ) -> Self { - ty::ParamEnv { caller_bounds, reveal, def_id } + ty::ParamEnv { + caller_bounds, + reveal, + def_id, + } } /// Returns a new parameter environment with the same clauses, but @@ -1667,12 +1697,18 @@ impl<'tcx> ParamEnv<'tcx> { /// contexts; normally though we want to use `Reveal::UserFacing`, /// which is the default. 
pub fn with_reveal_all(self) -> Self { - ty::ParamEnv { reveal: Reveal::All, ..self } + ty::ParamEnv { + reveal: Reveal::All, + ..self + } } /// Returns this same environment but with no caller bounds. pub fn without_caller_bounds(self) -> Self { - ty::ParamEnv { caller_bounds: List::empty(), ..self } + ty::ParamEnv { + caller_bounds: List::empty(), + ..self + } } /// Creates a suitable environment in which to perform trait @@ -1689,12 +1725,10 @@ impl<'tcx> ParamEnv<'tcx> { /// although the surrounding function is never reachable. pub fn and>(self, value: T) -> ParamEnvAnd<'tcx, T> { match self.reveal { - Reveal::UserFacing => { - ParamEnvAnd { - param_env: self, - value, - } - } + Reveal::UserFacing => ParamEnvAnd { + param_env: self, + value, + }, Reveal::All => { if value.has_placeholders() @@ -1730,14 +1764,17 @@ impl<'tcx, T> ParamEnvAnd<'tcx, T> { } impl<'a, 'gcx, T> HashStable> for ParamEnvAnd<'gcx, T> - where T: HashStable> +where + T: HashStable>, { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let ParamEnvAnd { ref param_env, - ref value + ref value, } = *self; param_env.hash_stable(hcx, hasher); @@ -1806,18 +1843,20 @@ impl<'a, 'gcx, 'tcx> VariantDef { /// /// If someone speeds up attribute loading to not be a performance concern, they can /// remove this hack and use the constructor DefId everywhere. - pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, - did: DefId, - ident: Ident, - discr: VariantDiscr, - fields: Vec, - adt_kind: AdtKind, - ctor_kind: CtorKind, - attribute_def_id: DefId) - -> Self - { - debug!("VariantDef::new({:?}, {:?}, {:?}, {:?}, {:?}, {:?}, {:?})", did, ident, discr, - fields, adt_kind, ctor_kind, attribute_def_id); + pub fn new( + tcx: TyCtxt<'a, 'gcx, 'tcx>, + did: DefId, + ident: Ident, + discr: VariantDiscr, + fields: Vec, + adt_kind: AdtKind, + ctor_kind: CtorKind, + attribute_def_id: DefId, + ) -> Self { + debug!( + "VariantDef::new({:?}, {:?}, {:?}, {:?}, {:?}, {:?}, {:?})", + did, ident, discr, fields, adt_kind, ctor_kind, attribute_def_id + ); let mut flags = VariantFlags::NO_VARIANT_FLAGS; if adt_kind == AdtKind::Struct && tcx.has_attr(attribute_def_id, "non_exhaustive") { debug!("found non-exhaustive field list for {:?}", did); @@ -1829,13 +1868,14 @@ impl<'a, 'gcx, 'tcx> VariantDef { discr, fields, ctor_kind, - flags + flags, } } #[inline] pub fn is_field_list_non_exhaustive(&self) -> bool { - self.flags.intersects(VariantFlags::IS_FIELD_LIST_NON_EXHAUSTIVE) + self.flags + .intersects(VariantFlags::IS_FIELD_LIST_NON_EXHAUSTIVE) } } @@ -1896,7 +1936,9 @@ impl Ord for AdtDef { impl PartialEq for AdtDef { // AdtDef are always interned and this is part of TyS equality #[inline] - fn eq(&self, other: &Self) -> bool { ptr::eq(self, other) } + fn eq(&self, other: &Self) -> bool { + ptr::eq(self, other) + } } impl Eq for AdtDef {} @@ -1916,11 +1958,12 @@ impl<'tcx> serialize::UseSpecializedEncodable for &'tcx AdtDef { impl<'tcx> serialize::UseSpecializedDecodable for &'tcx AdtDef {} - impl<'a> HashStable> for AdtDef { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { thread_local! 
{ static CACHE: RefCell> = Default::default(); } @@ -1942,7 +1985,7 @@ impl<'a> HashStable> for AdtDef { repr.hash_stable(hcx, &mut hasher); hasher.finish() - }) + }) }); hash.hash_stable(hcx, hasher); @@ -1950,7 +1993,11 @@ impl<'a> HashStable> for AdtDef { } #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] -pub enum AdtKind { Struct, Union, Enum } +pub enum AdtKind { + Struct, + Union, + Enum, +} impl Into for AdtKind { fn into(self) -> DataTypeKind { @@ -2015,17 +2062,17 @@ impl ReprOptions { pack }; ReprFlags::empty() - }, + } attr::ReprTransparent => ReprFlags::IS_TRANSPARENT, attr::ReprSimd => ReprFlags::IS_SIMD, attr::ReprInt(i) => { size = Some(i); ReprFlags::empty() - }, + } attr::ReprAlign(align) => { max_align = cmp::max(align, max_align); ReprFlags::empty() - }, + } }); } } @@ -2034,19 +2081,34 @@ impl ReprOptions { if !tcx.consider_optimizing(|| format!("Reorder fields of {:?}", tcx.item_path_str(did))) { flags.insert(ReprFlags::IS_LINEAR); } - ReprOptions { int: size, align: max_align, pack: min_pack, flags: flags } + ReprOptions { + int: size, + align: max_align, + pack: min_pack, + flags: flags, + } } #[inline] - pub fn simd(&self) -> bool { self.flags.contains(ReprFlags::IS_SIMD) } + pub fn simd(&self) -> bool { + self.flags.contains(ReprFlags::IS_SIMD) + } #[inline] - pub fn c(&self) -> bool { self.flags.contains(ReprFlags::IS_C) } + pub fn c(&self) -> bool { + self.flags.contains(ReprFlags::IS_C) + } #[inline] - pub fn packed(&self) -> bool { self.pack > 0 } + pub fn packed(&self) -> bool { + self.pack > 0 + } #[inline] - pub fn transparent(&self) -> bool { self.flags.contains(ReprFlags::IS_TRANSPARENT) } + pub fn transparent(&self) -> bool { + self.flags.contains(ReprFlags::IS_TRANSPARENT) + } #[inline] - pub fn linear(&self) -> bool { self.flags.contains(ReprFlags::IS_LINEAR) } + pub fn linear(&self) -> bool { + self.flags.contains(ReprFlags::IS_LINEAR) + } pub fn discr_type(&self) -> attr::IntType { self.int.unwrap_or(attr::SignedInt(ast::IntTy::Isize)) @@ -2062,24 +2124,27 @@ impl ReprOptions { /// Returns `true` if this `#[repr()]` should inhibit struct field reordering /// optimizations, such as with repr(C), repr(packed(1)), or repr(). pub fn inhibit_struct_field_reordering_opt(&self) -> bool { - self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.pack == 1 || - self.int.is_some() + self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.pack == 1 || self.int.is_some() } /// Returns true if this `#[repr()]` should inhibit union abi optimisations pub fn inhibit_union_abi_opt(&self) -> bool { self.c() } - } impl<'a, 'gcx, 'tcx> AdtDef { - fn new(tcx: TyCtxt<'_, '_, '_>, - did: DefId, - kind: AdtKind, - variants: IndexVec, - repr: ReprOptions) -> Self { - debug!("AdtDef::new({:?}, {:?}, {:?}, {:?})", did, kind, variants, repr); + fn new( + tcx: TyCtxt<'_, '_, '_>, + did: DefId, + kind: AdtKind, + variants: IndexVec, + repr: ReprOptions, + ) -> Self { + debug!( + "AdtDef::new({:?}, {:?}, {:?}, {:?})", + did, kind, variants, repr + ); let mut flags = AdtFlags::NO_ADT_FLAGS; if kind == AdtKind::Enum && tcx.has_attr(did, "non_exhaustive") { @@ -2143,7 +2208,8 @@ impl<'a, 'gcx, 'tcx> AdtDef { #[inline] pub fn is_variant_list_non_exhaustive(&self) -> bool { - self.flags.contains(AdtFlags::IS_VARIANT_LIST_NON_EXHAUSTIVE) + self.flags + .contains(AdtFlags::IS_VARIANT_LIST_NON_EXHAUSTIVE) } /// Returns the kind of the ADT. 
@@ -2234,8 +2300,7 @@ impl<'a, 'gcx, 'tcx> AdtDef { } pub fn is_payloadfree(&self) -> bool { - !self.variants.is_empty() && - self.variants.iter().all(|v| v.fields.is_empty()) + !self.variants.is_empty() && self.variants.iter().all(|v| v.fields.is_empty()) } pub fn variant_with_id(&self, vid: DefId) -> &VariantDef { @@ -2256,10 +2321,14 @@ impl<'a, 'gcx, 'tcx> AdtDef { pub fn variant_of_def(&self, def: Def) -> &VariantDef { match def { Def::Variant(vid) | Def::VariantCtor(vid, ..) => self.variant_with_id(vid), - Def::Struct(..) | Def::StructCtor(..) | Def::Union(..) | - Def::TyAlias(..) | Def::AssociatedTy(..) | Def::SelfTy(..) | - Def::SelfCtor(..) => self.non_enum_variant(), - _ => bug!("unexpected def {:?} in variant_of_def", def) + Def::Struct(..) + | Def::StructCtor(..) + | Def::Union(..) + | Def::TyAlias(..) + | Def::AssociatedTy(..) + | Def::SelfTy(..) + | Def::SelfCtor(..) => self.non_enum_variant(), + _ => bug!("unexpected def {:?} in variant_of_def", def), } } @@ -2275,31 +2344,31 @@ impl<'a, 'gcx, 'tcx> AdtDef { let instance = ty::Instance::new(expr_did, substs); let cid = GlobalId { instance, - promoted: None + promoted: None, }; match tcx.const_eval(param_env.and(cid)) { Ok(val) => { // FIXME: Find the right type and use it instead of `val.ty` here if let Some(b) = val.assert_bits(tcx.global_tcx(), param_env.and(val.ty)) { trace!("discriminants: {} ({:?})", b, repr_type); - Some(Discr { - val: b, - ty: val.ty, - }) + Some(Discr { val: b, ty: val.ty }) } else { info!("invalid enum discriminant: {:#?}", val); ::mir::interpret::struct_error( tcx.at(tcx.def_span(expr_did)), "constant evaluation of enum discriminant resulted in non-integer", - ).emit(); + ) + .emit(); None } } Err(ErrorHandled::Reported) => { if !expr_did.is_local() { - span_bug!(tcx.def_span(expr_did), + span_bug!( + tcx.def_span(expr_did), "variant discriminant evaluation succeeded \ - in its crate but failed locally"); + in its crate but failed locally" + ); } None } @@ -2314,7 +2383,7 @@ impl<'a, 'gcx, 'tcx> AdtDef { pub fn discriminants( &'a self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - ) -> impl Iterator)> + Captures<'gcx> + 'a { + ) -> impl Iterator)> + Captures<'gcx> + 'a { let repr_type = self.repr.discr_type(); let initial = repr_type.initial_discriminant(tcx.global_tcx()); let mut prev_discr = None::>; @@ -2336,24 +2405,26 @@ impl<'a, 'gcx, 'tcx> AdtDef { /// only doing at most one query for evaluating an explicit /// discriminant (the last one before the requested variant), /// assuming there are no constant-evaluation errors there. 
- pub fn discriminant_for_variant(&self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - variant_index: VariantIdx) - -> Discr<'tcx> { + pub fn discriminant_for_variant( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + variant_index: VariantIdx, + ) -> Discr<'tcx> { let (val, offset) = self.discriminant_def_for_variant(variant_index); let explicit_value = val .and_then(|expr_did| self.eval_explicit_discr(tcx, expr_did)) - .unwrap_or_else(|| self.repr.discr_type().initial_discriminant(tcx.global_tcx())); + .unwrap_or_else(|| { + self.repr + .discr_type() + .initial_discriminant(tcx.global_tcx()) + }); explicit_value.checked_add(tcx, offset as u128).0 } /// Yields a DefId for the discriminant and an offset to add to it /// Alternatively, if there is no explicit discriminant, returns the /// inferred discriminant directly - pub fn discriminant_def_for_variant( - &self, - variant_index: VariantIdx, - ) -> (Option, u32) { + pub fn discriminant_def_for_variant(&self, variant_index: VariantIdx) -> (Option, u32) { let mut explicit_index = variant_index.as_u32(); let expr_did; loop { @@ -2361,7 +2432,7 @@ impl<'a, 'gcx, 'tcx> AdtDef { ty::VariantDiscr::Relative(0) => { expr_did = None; break; - }, + } ty::VariantDiscr::Relative(distance) => { explicit_index -= distance; } @@ -2405,43 +2476,33 @@ impl<'a, 'gcx, 'tcx> AdtDef { } } - fn sized_constraint_for_ty(&self, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - ty: Ty<'tcx>) - -> Vec> { + fn sized_constraint_for_ty(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> Vec> { let result = match ty.sty { - Bool | Char | Int(..) | Uint(..) | Float(..) | - RawPtr(..) | Ref(..) | FnDef(..) | FnPtr(_) | - Array(..) | Closure(..) | Generator(..) | Never => { - vec![] - } + Bool | Char | Int(..) | Uint(..) | Float(..) | RawPtr(..) | Ref(..) | FnDef(..) + | FnPtr(_) | Array(..) | Closure(..) | Generator(..) | Never => vec![], - Str | - Dynamic(..) | - Slice(_) | - Foreign(..) | - Error | - GeneratorWitness(..) => { + Str | Dynamic(..) | Slice(_) | Foreign(..) | Error | GeneratorWitness(..) => { // these are never sized - return the target type vec![ty] } - Tuple(ref tys) => { - match tys.last() { - None => vec![], - Some(ty) => self.sized_constraint_for_ty(tcx, ty) - } - } + Tuple(ref tys) => match tys.last() { + None => vec![], + Some(ty) => self.sized_constraint_for_ty(tcx, ty), + }, Adt(adt, substs) => { // recursive case let adt_tys = adt.sized_constraint(tcx); - debug!("sized_constraint_for_ty({:?}) intermediate = {:?}", - ty, adt_tys); - adt_tys.iter() - .map(|ty| ty.subst(tcx, substs)) - .flat_map(|ty| self.sized_constraint_for_ty(tcx, ty)) - .collect() + debug!( + "sized_constraint_for_ty({:?}) intermediate = {:?}", + ty, adt_tys + ); + adt_tys + .iter() + .map(|ty| ty.subst(tcx, substs)) + .flat_map(|ty| self.sized_constraint_for_ty(tcx, ty)) + .collect() } Projection(..) | Opaque(..) => { @@ -2459,12 +2520,13 @@ impl<'a, 'gcx, 'tcx> AdtDef { let sized_trait = match tcx.lang_items().sized_trait() { Some(x) => x, - _ => return vec![ty] + _ => return vec![ty], }; let sized_predicate = Binder::dummy(TraitRef { def_id: sized_trait, - substs: tcx.mk_substs_trait(ty, &[]) - }).to_predicate(); + substs: tcx.mk_substs_trait(ty, &[]), + }) + .to_predicate(); let predicates = &tcx.predicates_of(self.did).predicates; if predicates.iter().any(|(p, _)| *p == sized_predicate) { vec![] @@ -2473,11 +2535,8 @@ impl<'a, 'gcx, 'tcx> AdtDef { } } - Placeholder(..) | - Bound(..) | - Infer(..) => { - bug!("unexpected type `{:?}` in sized_constraint_for_ty", - ty) + Placeholder(..) | Bound(..) | Infer(..) 
=> { + bug!("unexpected type `{:?}` in sized_constraint_for_ty", ty) } }; debug!("sized_constraint_for_ty({:?}) = {:?}", ty, result); @@ -2497,7 +2556,9 @@ impl<'a, 'gcx, 'tcx> FieldDef { /// /// You can get the environment type of a closure using /// `tcx.closure_env_ty()`. -#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] +#[derive( + Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable, +)] pub enum ClosureKind { // Warning: Ordering is significant here! The ordering is chosen // because the trait Fn is a subtrait of FnMut and so in turn, and @@ -2514,12 +2575,8 @@ impl<'a, 'tcx> ClosureKind { pub fn trait_did(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> DefId { match *self { ClosureKind::Fn => tcx.require_lang_item(FnTraitLangItem), - ClosureKind::FnMut => { - tcx.require_lang_item(FnMutTraitLangItem) - } - ClosureKind::FnOnce => { - tcx.require_lang_item(FnOnceTraitLangItem) - } + ClosureKind::FnMut => tcx.require_lang_item(FnMutTraitLangItem), + ClosureKind::FnOnce => tcx.require_lang_item(FnOnceTraitLangItem), } } @@ -2576,7 +2633,8 @@ impl<'tcx> TyS<'tcx> { /// /// Note: prefer `ty.walk()` where possible. pub fn maybe_walk(&'tcx self, mut f: F) - where F: FnMut(Ty<'tcx>) -> bool + where + F: FnMut(Ty<'tcx>) -> bool, { let mut walker = self.walk(); while let Some(ty) = walker.next() { @@ -2623,7 +2681,7 @@ impl BorrowKind { #[derive(Debug, Clone)] pub enum Attributes<'gcx> { Owned(Lrc<[ast::Attribute]>), - Borrowed(&'gcx [ast::Attribute]) + Borrowed(&'gcx [ast::Attribute]), } impl<'gcx> ::std::ops::Deref for Attributes<'gcx> { @@ -2632,7 +2690,7 @@ impl<'gcx> ::std::ops::Deref for Attributes<'gcx> { fn deref(&self) -> &[ast::Attribute] { match self { &Attributes::Owned(ref data) => &data, - &Attributes::Borrowed(data) => data + &Attributes::Borrowed(data) => data, } } } @@ -2645,26 +2703,22 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Returns an iterator of the def-ids for all body-owners in this /// crate. If you would prefer to iterate over the bodies /// themselves, you can do `self.hir().krate().body_ids.iter()`. 
- pub fn body_owners( - self, - ) -> impl Iterator + Captures<'tcx> + Captures<'gcx> + 'a { - self.hir().krate() - .body_ids - .iter() - .map(move |&body_id| self.hir().body_owner_def_id(body_id)) + pub fn body_owners(self) -> impl Iterator + Captures<'tcx> + Captures<'gcx> + 'a { + self.hir() + .krate() + .body_ids + .iter() + .map(move |&body_id| self.hir().body_owner_def_id(body_id)) } pub fn par_body_owners(self, f: F) { - par_iter(&self.hir().krate().body_ids).for_each(|&body_id| { - f(self.hir().body_owner_def_id(body_id)) - }); + par_iter(&self.hir().krate().body_ids) + .for_each(|&body_id| f(self.hir().body_owner_def_id(body_id))); } pub fn expr_span(self, id: NodeId) -> Span { match self.hir().find(id) { - Some(Node::Expr(e)) => { - e.span - } + Some(Node::Expr(e)) => e.span, Some(f) => { bug!("Node id {} is not an expr: {:?}", id, f); } @@ -2681,9 +2735,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } pub fn trait_relevant_for_never(self, did: DefId) -> bool { - self.associated_items(did).any(|item| { - item.relevant_for_never() - }) + self.associated_items(did) + .any(|item| item.relevant_for_never()) } pub fn opt_associated_item(self, def_id: DefId) -> Option { @@ -2706,17 +2759,16 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - fn associated_item_from_trait_item_ref(self, - parent_def_id: DefId, - parent_vis: &hir::Visibility, - trait_item_ref: &hir::TraitItemRef) - -> AssociatedItem { + fn associated_item_from_trait_item_ref( + self, + parent_def_id: DefId, + parent_vis: &hir::Visibility, + trait_item_ref: &hir::TraitItemRef, + ) -> AssociatedItem { let def_id = self.hir().local_def_id(trait_item_ref.id.node_id); let (kind, has_self) = match trait_item_ref.kind { hir::AssociatedItemKind::Const => (ty::AssociatedKind::Const, false), - hir::AssociatedItemKind::Method { has_self } => { - (ty::AssociatedKind::Method, has_self) - } + hir::AssociatedItemKind::Method { has_self } => (ty::AssociatedKind::Method, has_self), hir::AssociatedItemKind::Type => (ty::AssociatedKind::Type, false), hir::AssociatedItemKind::Existential => bug!("only impls can have existentials"), }; @@ -2729,20 +2781,19 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { defaultness: trait_item_ref.defaultness, def_id, container: TraitContainer(parent_def_id), - method_has_self_argument: has_self + method_has_self_argument: has_self, } } - fn associated_item_from_impl_item_ref(self, - parent_def_id: DefId, - impl_item_ref: &hir::ImplItemRef) - -> AssociatedItem { + fn associated_item_from_impl_item_ref( + self, + parent_def_id: DefId, + impl_item_ref: &hir::ImplItemRef, + ) -> AssociatedItem { let def_id = self.hir().local_def_id(impl_item_ref.id.node_id); let (kind, has_self) = match impl_item_ref.kind { hir::AssociatedItemKind::Const => (ty::AssociatedKind::Const, false), - hir::AssociatedItemKind::Method { has_self } => { - (ty::AssociatedKind::Method, has_self) - } + hir::AssociatedItemKind::Method { has_self } => (ty::AssociatedKind::Method, has_self), hir::AssociatedItemKind::Type => (ty::AssociatedKind::Type, false), hir::AssociatedItemKind::Existential => (ty::AssociatedKind::Existential, false), }; @@ -2755,13 +2806,17 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { defaultness: impl_item_ref.defaultness, def_id, container: ImplContainer(parent_def_id), - method_has_self_argument: has_self + method_has_self_argument: has_self, } } pub fn field_index(self, node_id: NodeId, tables: &TypeckTables<'_>) -> usize { let hir_id = self.hir().node_to_hir_id(node_id); - 
tables.field_indices().get(hir_id).cloned().expect("no index for a field") + tables + .field_indices() + .get(hir_id) + .cloned() + .expect("no index for a field") } pub fn find_field_index(self, ident: Ident, variant: &VariantDef) -> Option { @@ -2770,10 +2825,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { }) } - pub fn associated_items( - self, - def_id: DefId, - ) -> AssociatedItemsIterator<'a, 'gcx, 'tcx> { + pub fn associated_items(self, def_id: DefId) -> AssociatedItemsIterator<'a, 'gcx, 'tcx> { // Ideally, we would use `-> impl Iterator` here, but it falls // afoul of the conservative "capture [restrictions]" we put // in place, so we use a hand-written iterator. @@ -2790,14 +2842,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// has no items or is annotated #[marker] and prevents item overrides. pub fn impls_are_allowed_to_overlap(self, def_id1: DefId, def_id2: DefId) -> bool { if self.features().overlapping_marker_traits { - let trait1_is_empty = self.impl_trait_ref(def_id1) - .map_or(false, |trait_ref| { - self.associated_item_def_ids(trait_ref.def_id).is_empty() - }); - let trait2_is_empty = self.impl_trait_ref(def_id2) - .map_or(false, |trait_ref| { - self.associated_item_def_ids(trait_ref.def_id).is_empty() - }); + let trait1_is_empty = self.impl_trait_ref(def_id1).map_or(false, |trait_ref| { + self.associated_item_def_ids(trait_ref.def_id).is_empty() + }); + let trait2_is_empty = self.impl_trait_ref(def_id2).map_or(false, |trait_ref| { + self.associated_item_def_ids(trait_ref.def_id).is_empty() + }); self.impl_polarity(def_id1) == self.impl_polarity(def_id2) && trait1_is_empty && trait2_is_empty @@ -2822,14 +2872,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { let enum_did = self.parent_def_id(did).unwrap(); self.adt_def(enum_did).variant_with_id(did) } - Def::Struct(did) | Def::Union(did) => { - self.adt_def(did).non_enum_variant() - } + Def::Struct(did) | Def::Union(did) => self.adt_def(did).non_enum_variant(), Def::StructCtor(ctor_did, ..) => { - let did = self.parent_def_id(ctor_did).expect("struct ctor has no parent"); + let did = self + .parent_def_id(ctor_did) + .expect("struct ctor has no parent"); self.adt_def(did).non_enum_variant() } - _ => bug!("expect_variant_def used with unexpected def {:?}", def) + _ => bug!("expect_variant_def used with unexpected def {:?}", def), } } @@ -2839,8 +2889,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { match def_key.disambiguated_data.data { // for enum variants and tuple structs, the def-id of the ADT itself // is the *parent* of the variant - DefPathData::EnumVariant(..) | DefPathData::StructCtor => - DefId { krate: variant_def.did.krate, index: def_key.parent.unwrap() }, + DefPathData::EnumVariant(..) | DefPathData::StructCtor => DefId { + krate: variant_def.did.krate, + index: def_key.parent.unwrap(), + }, // otherwise, for structs and unions, they share a def-id _ => variant_def.did, @@ -2856,33 +2908,31 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { if let hir_map::DefPathData::StructCtor = def_key.disambiguated_data.data { self.item_name(DefId { krate: id.krate, - index: def_key.parent.unwrap() + index: def_key.parent.unwrap(), }) } else { - def_key.disambiguated_data.data.get_opt_name().unwrap_or_else(|| { - bug!("item_name: no name for {:?}", self.def_path(id)); - }) + def_key + .disambiguated_data + .data + .get_opt_name() + .unwrap_or_else(|| { + bug!("item_name: no name for {:?}", self.def_path(id)); + }) } } } /// Return the possibly-auto-generated MIR of a (DefId, Subst) pair. 
- pub fn instance_mir(self, instance: ty::InstanceDef<'gcx>) - -> &'gcx Mir<'gcx> - { + pub fn instance_mir(self, instance: ty::InstanceDef<'gcx>) -> &'gcx Mir<'gcx> { match instance { - ty::InstanceDef::Item(did) => { - self.optimized_mir(did) - } - ty::InstanceDef::VtableShim(..) | - ty::InstanceDef::Intrinsic(..) | - ty::InstanceDef::FnPtrShim(..) | - ty::InstanceDef::Virtual(..) | - ty::InstanceDef::ClosureOnceShim { .. } | - ty::InstanceDef::DropGlue(..) | - ty::InstanceDef::CloneShim(..) => { - self.mir_shims(instance) - } + ty::InstanceDef::Item(did) => self.optimized_mir(did), + ty::InstanceDef::VtableShim(..) + | ty::InstanceDef::Intrinsic(..) + | ty::InstanceDef::FnPtrShim(..) + | ty::InstanceDef::Virtual(..) + | ty::InstanceDef::ClosureOnceShim { .. } + | ty::InstanceDef::DropGlue(..) + | ty::InstanceDef::CloneShim(..) => self.mir_shims(instance), } } @@ -2916,7 +2966,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } pub fn generator_layout(self, def_id: DefId) -> &'tcx GeneratorLayout<'tcx> { - self.optimized_mir(def_id).generator_layout.as_ref().unwrap() + self.optimized_mir(def_id) + .generator_layout + .as_ref() + .unwrap() } /// Given the def-id of an impl, return the def_id of the trait it implements. @@ -2938,12 +2991,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.opt_associated_item(def_id) }; - item.and_then(|trait_item| - match trait_item.container { - TraitContainer(_) => None, - ImplContainer(def_id) => Some(def_id), - } - ) + item.and_then(|trait_item| match trait_item.container { + TraitContainer(_) => None, + ImplContainer(def_id) => Some(def_id), + }) } /// Looks up the span of `impl_did` if the impl is local; otherwise returns `Err` @@ -2961,7 +3012,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // supposed definition name (`def_name`). The method also needs `DefId` of the supposed // definition's parent/scope to perform comparison. 
pub fn hygienic_eq(self, use_name: Ident, def_name: Ident, def_parent_def_id: DefId) -> bool { - self.adjust_ident(use_name, def_parent_def_id, DUMMY_NODE_ID).0 == def_name.modern() + self.adjust_ident(use_name, def_parent_def_id, DUMMY_NODE_ID) + .0 + == def_name.modern() } pub fn adjust_ident(self, mut ident: Ident, scope: DefId, block: NodeId) -> (Ident, DefId) { @@ -2971,8 +3024,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { _ => Mark::root(), }; let scope = match ident.span.adjust(target_expansion) { - Some(actual_expansion) => - self.hir().definitions().parent_module_of_macro_def(actual_expansion), + Some(actual_expansion) => self + .hir() + .definitions() + .parent_module_of_macro_def(actual_expansion), None if block == DUMMY_NODE_ID => DefId::local(CRATE_DEF_INDEX), // Dummy DefId None => self.hir().get_module_parent(block), }; @@ -2997,7 +3052,8 @@ impl Iterator for AssociatedItemsIterator<'_, '_, '_> { } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - pub fn with_freevars(self, fid: NodeId, f: F) -> T where + pub fn with_freevars(self, fid: NodeId, f: F) -> T + where F: FnOnce(&[hir::Freevar]) -> T, { let def_id = self.hir().local_def_id(fid); @@ -3016,8 +3072,8 @@ fn associated_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Asso match parent_item.node { hir::ItemKind::Impl(.., ref impl_item_refs) => { if let Some(impl_item_ref) = impl_item_refs.iter().find(|i| i.id.node_id == id) { - let assoc_item = tcx.associated_item_from_impl_item_ref(parent_def_id, - impl_item_ref); + let assoc_item = + tcx.associated_item_from_impl_item_ref(parent_def_id, impl_item_ref); debug_assert_eq!(assoc_item.def_id, def_id); return assoc_item; } @@ -3025,20 +3081,24 @@ fn associated_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Asso hir::ItemKind::Trait(.., ref trait_item_refs) => { if let Some(trait_item_ref) = trait_item_refs.iter().find(|i| i.id.node_id == id) { - let assoc_item = tcx.associated_item_from_trait_item_ref(parent_def_id, - &parent_item.vis, - trait_item_ref); + let assoc_item = tcx.associated_item_from_trait_item_ref( + parent_def_id, + &parent_item.vis, + trait_item_ref, + ); debug_assert_eq!(assoc_item.def_id, def_id); return assoc_item; } } - _ => { } + _ => {} } - span_bug!(parent_item.span, - "unexpected parent of trait or impl item or item not found: {:?}", - parent_item.node) + span_bug!( + parent_item.span, + "unexpected parent of trait or impl item or item not found: {:?}", + parent_item.node + ) } /// Calculates the Sized-constraint. @@ -3050,42 +3110,40 @@ fn associated_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Asso /// such. /// - a Error, if a type contained itself. The representability /// check should catch this case. 
-fn adt_sized_constraint<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                  def_id: DefId)
-                                  -> &'tcx [Ty<'tcx>] {
+fn adt_sized_constraint<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx [Ty<'tcx>] {
     let def = tcx.adt_def(def_id);
 
-    let result = tcx.mk_type_list(def.variants.iter().flat_map(|v| {
-        v.fields.last()
-    }).flat_map(|f| {
-        def.sized_constraint_for_ty(tcx, tcx.type_of(f.did))
-    }));
+    let result = tcx.mk_type_list(
+        def.variants
+            .iter()
+            .flat_map(|v| v.fields.last())
+            .flat_map(|f| def.sized_constraint_for_ty(tcx, tcx.type_of(f.did))),
+    );
 
     debug!("adt_sized_constraint: {:?} => {:?}", def, result);
 
     result
 }
 
-fn associated_item_def_ids<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                     def_id: DefId)
-                                     -> Lrc<Vec<DefId>> {
+fn associated_item_def_ids<'a, 'tcx>(
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    def_id: DefId,
+) -> Lrc<Vec<DefId>> {
     let id = tcx.hir().as_local_node_id(def_id).unwrap();
     let item = tcx.hir().expect_item(id);
     let vec: Vec<_> = match item.node {
-        hir::ItemKind::Trait(.., ref trait_item_refs) => {
-            trait_item_refs.iter()
-                           .map(|trait_item_ref| trait_item_ref.id)
-                           .map(|id| tcx.hir().local_def_id(id.node_id))
-                           .collect()
-        }
-        hir::ItemKind::Impl(.., ref impl_item_refs) => {
-            impl_item_refs.iter()
-                          .map(|impl_item_ref| impl_item_ref.id)
-                          .map(|id| tcx.hir().local_def_id(id.node_id))
-                          .collect()
-        }
+        hir::ItemKind::Trait(.., ref trait_item_refs) => trait_item_refs
+            .iter()
+            .map(|trait_item_ref| trait_item_ref.id)
+            .map(|id| tcx.hir().local_def_id(id.node_id))
+            .collect(),
+        hir::ItemKind::Impl(.., ref impl_item_refs) => impl_item_refs
+            .iter()
+            .map(|impl_item_ref| impl_item_ref.id)
+            .map(|id| tcx.hir().local_def_id(id.node_id))
+            .collect(),
         hir::ItemKind::TraitAlias(..) => vec![],
-        _ => span_bug!(item.span, "associated_item_def_ids: not impl or trait")
+        _ => span_bug!(item.span, "associated_item_def_ids: not impl or trait"),
     };
     Lrc::new(vec)
 }
@@ -3099,11 +3157,9 @@ fn def_span<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Span {
 /// Otherwise, return `None`.
 fn trait_of_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option<DefId> {
     tcx.opt_associated_item(def_id)
-        .and_then(|associated_item| {
-            match associated_item.container {
-                TraitContainer(def_id) => Some(def_id),
-                ImplContainer(_) => None
-            }
+        .and_then(|associated_item| match associated_item.container {
+            TraitContainer(def_id) => Some(def_id),
+            ImplContainer(_) => None,
         })
 }
 
@@ -3132,18 +3188,14 @@ pub fn is_trait_alias(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> bool {
 }
 
 /// See `ParamEnv` struct definition for details.
-fn param_env<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                       def_id: DefId)
-                       -> ParamEnv<'tcx>
-{
+fn param_env<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> ParamEnv<'tcx> {
     // The param_env of an impl Trait type is its defining function's param_env
     if let Some(parent) = is_impl_trait_defn(tcx, def_id) {
         return param_env(tcx, parent);
     }
     // Compute the bounds on Self and the type parameters.
-    let InstantiatedPredicates { predicates } =
-        tcx.predicates_of(def_id).instantiate_identity(tcx);
+    let InstantiatedPredicates { predicates } = tcx.predicates_of(def_id).instantiate_identity(tcx);
 
     // Finally, we have to normalize the bounds in the environment, in
     // case they contain any associated type projections. This process
@@ -3160,46 +3212,57 @@ fn param_env<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let unnormalized_env = ty::ParamEnv::new(
         tcx.intern_predicates(&predicates),
         traits::Reveal::UserFacing,
-        if tcx.sess.opts.debugging_opts.chalk { Some(def_id) } else { None }
+        if tcx.sess.opts.debugging_opts.chalk {
+            Some(def_id)
+        } else {
+            None
+        },
     );
 
-    let body_id = tcx.hir().as_local_node_id(def_id).map_or(DUMMY_NODE_ID, |id| {
-        tcx.hir().maybe_body_owned_by(id).map_or(id, |body| body.node_id)
-    });
+    let body_id = tcx
+        .hir()
+        .as_local_node_id(def_id)
+        .map_or(DUMMY_NODE_ID, |id| {
+            tcx.hir()
+                .maybe_body_owned_by(id)
+                .map_or(id, |body| body.node_id)
+        });
     let cause = traits::ObligationCause::misc(tcx.def_span(def_id), body_id);
     traits::normalize_param_env_or_error(tcx, def_id, unnormalized_env, cause)
 }
 
-fn crate_disambiguator<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                 crate_num: CrateNum) -> CrateDisambiguator {
+fn crate_disambiguator<'a, 'tcx>(
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    crate_num: CrateNum,
+) -> CrateDisambiguator {
     assert_eq!(crate_num, LOCAL_CRATE);
     tcx.sess.local_crate_disambiguator()
 }
 
-fn original_crate_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                 crate_num: CrateNum) -> Symbol {
+fn original_crate_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> Symbol {
     assert_eq!(crate_num, LOCAL_CRATE);
     tcx.crate_name.clone()
 }
 
-fn crate_hash<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                        crate_num: CrateNum)
-                        -> Svh {
+fn crate_hash<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> Svh {
     assert_eq!(crate_num, LOCAL_CRATE);
     tcx.hir().crate_hash
 }
 
-fn instance_def_size_estimate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                        instance_def: InstanceDef<'tcx>)
-                                        -> usize {
+fn instance_def_size_estimate<'a, 'tcx>(
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    instance_def: InstanceDef<'tcx>,
+) -> usize {
     match instance_def {
-        InstanceDef::Item(..) |
-        InstanceDef::DropGlue(..) => {
+        InstanceDef::Item(..) | InstanceDef::DropGlue(..) => {
             let mir = tcx.instance_mir(instance_def);
-            mir.basic_blocks().iter().map(|bb| bb.statements.len()).sum()
-        },
+            mir.basic_blocks()
+                .iter()
+                .map(|bb| bb.statements.len())
+                .sum()
+        }
         // Estimate the size of other compiler-generated shims to be 1.
-        _ => 1
+        _ => 1,
     }
 }
 
@@ -3239,7 +3302,7 @@ pub struct CrateInherentImpls {
 pub struct SymbolName {
     // FIXME: we don't rely on interning or equality here - better have
     // this be a `&'tcx str`.
-    pub name: InternedString
+    pub name: InternedString,
 }
 
 impl_stable_hash_for!(struct self::SymbolName {
@@ -3249,7 +3312,7 @@ impl_stable_hash_for!(struct self::SymbolName {
 impl SymbolName {
     pub fn new(name: &str) -> SymbolName {
         SymbolName {
-            name: Symbol::intern(name).as_interned_str()
+            name: Symbol::intern(name).as_interned_str(),
         }
     }
diff --git a/src/librustc/ty/outlives.rs b/src/librustc/ty/outlives.rs
index ca2d5cd718c64..a9943444deb61 100644
--- a/src/librustc/ty/outlives.rs
+++ b/src/librustc/ty/outlives.rs
@@ -48,8 +48,11 @@ pub enum Component<'tcx> {
 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
     /// Push onto `out` all the things that must outlive `'a` for the condition
     /// `ty0: 'a` to hold. Note that `ty0` must be a **fully resolved type**.
- pub fn push_outlives_components(&self, ty0: Ty<'tcx>, - out: &mut SmallVec<[Component<'tcx>; 4]>) { + pub fn push_outlives_components( + &self, + ty0: Ty<'tcx>, + out: &mut SmallVec<[Component<'tcx>; 4]>, + ) { self.compute_components(ty0, out); debug!("components({:?}) = {:?}", ty0, out); } @@ -175,5 +178,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { fn push_region_constraints<'tcx>(ty: Ty<'tcx>, out: &mut SmallVec<[Component<'tcx>; 4]>) { let mut regions = smallvec![]; ty.push_regions(&mut regions); - out.extend(regions.iter().filter(|&r| !r.is_late_bound()).map(|r| Component::Region(r))); + out.extend( + regions + .iter() + .filter(|&r| !r.is_late_bound()) + .map(|r| Component::Region(r)), + ); } diff --git a/src/librustc/ty/query/config.rs b/src/librustc/ty/query/config.rs index 3464464aa229c..0ff1998773f02 100644 --- a/src/librustc/ty/query/config.rs +++ b/src/librustc/ty/query/config.rs @@ -1,5 +1,5 @@ -use dep_graph::SerializedDepNodeIndex; use dep_graph::DepNode; +use dep_graph::SerializedDepNodeIndex; use hir::def_id::{CrateNum, DefId, DefIndex}; use mir::interpret::GlobalId; use traits; @@ -8,20 +8,20 @@ use traits::query::{ CanonicalTypeOpAscribeUserTypeGoal, CanonicalTypeOpEqGoal, CanonicalTypeOpNormalizeGoal, CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpSubtypeGoal, }; -use ty::{self, ParamEnvAnd, Ty, TyCtxt}; -use ty::subst::Substs; use ty::query::queries; use ty::query::Query; use ty::query::QueryCache; +use ty::subst::Substs; +use ty::{self, ParamEnvAnd, Ty, TyCtxt}; use util::profiling::ProfileCategory; +use ich::StableHashingContext; +use rustc_data_structures::stable_hasher::HashStable; +use rustc_data_structures::sync::Lock; use std::borrow::Cow; -use std::hash::Hash; use std::fmt::Debug; +use std::hash::Hash; use syntax_pos::symbol::InternedString; -use rustc_data_structures::sync::Lock; -use rustc_data_structures::stable_hasher::HashStable; -use ich::StableHashingContext; // Query configuration and description traits. 
@@ -55,14 +55,15 @@ pub(super) trait QueryDescription<'tcx>: QueryAccessors<'tcx> { false } - fn try_load_from_disk(_: TyCtxt<'_, 'tcx, 'tcx>, - _: SerializedDepNodeIndex) - -> Option { + fn try_load_from_disk( + _: TyCtxt<'_, 'tcx, 'tcx>, + _: SerializedDepNodeIndex, + ) -> Option { bug!("QueryDescription::load_from_disk() called for an unsupported query.") } } -impl<'tcx, M: QueryAccessors<'tcx, Key=DefId>> QueryDescription<'tcx> for M { +impl<'tcx, M: QueryAccessors<'tcx, Key = DefId>> QueryDescription<'tcx> for M { default fn describe(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> Cow<'static, str> { if !tcx.sess.verbose() { format!("processing `{}`", tcx.item_path_str(def_id)).into() @@ -102,16 +103,24 @@ impl<'tcx> QueryDescription<'tcx> for queries::normalize_ty_after_erasing_region impl<'tcx> QueryDescription<'tcx> for queries::evaluate_obligation<'tcx> { fn describe(_tcx: TyCtxt<'_, '_, '_>, goal: CanonicalPredicateGoal<'tcx>) -> Cow<'static, str> { - format!("evaluating trait selection obligation `{}`", goal.value.value).into() + format!( + "evaluating trait selection obligation `{}`", + goal.value.value + ) + .into() } } impl<'tcx> QueryDescription<'tcx> for queries::evaluate_goal<'tcx> { fn describe( _tcx: TyCtxt<'_, '_, '_>, - goal: traits::ChalkCanonicalGoal<'tcx> + goal: traits::ChalkCanonicalGoal<'tcx>, ) -> Cow<'static, str> { - format!("evaluating trait selection obligation `{}`", goal.value.goal).into() + format!( + "evaluating trait selection obligation `{}`", + goal.value.goal + ) + .into() } } @@ -131,22 +140,28 @@ impl<'tcx> QueryDescription<'tcx> for queries::type_op_eq<'tcx> { } impl<'tcx> QueryDescription<'tcx> for queries::type_op_subtype<'tcx> { - fn describe(_tcx: TyCtxt<'_, '_, '_>, goal: CanonicalTypeOpSubtypeGoal<'tcx>) - -> Cow<'static, str> { + fn describe( + _tcx: TyCtxt<'_, '_, '_>, + goal: CanonicalTypeOpSubtypeGoal<'tcx>, + ) -> Cow<'static, str> { format!("evaluating `type_op_subtype` `{:?}`", goal).into() } } impl<'tcx> QueryDescription<'tcx> for queries::type_op_prove_predicate<'tcx> { - fn describe(_tcx: TyCtxt<'_, '_, '_>, goal: CanonicalTypeOpProvePredicateGoal<'tcx>) - -> Cow<'static, str> { + fn describe( + _tcx: TyCtxt<'_, '_, '_>, + goal: CanonicalTypeOpProvePredicateGoal<'tcx>, + ) -> Cow<'static, str> { format!("evaluating `type_op_prove_predicate` `{:?}`", goal).into() } } impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_ty<'tcx> { - fn describe(_tcx: TyCtxt<'_, '_, '_>, - goal: CanonicalTypeOpNormalizeGoal<'tcx, Ty<'tcx>>) -> Cow<'static, str> { + fn describe( + _tcx: TyCtxt<'_, '_, '_>, + goal: CanonicalTypeOpNormalizeGoal<'tcx, Ty<'tcx>>, + ) -> Cow<'static, str> { format!("normalizing `{:?}`", goal).into() } } @@ -170,51 +185,66 @@ impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_poly_fn_sig<'tc } impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_fn_sig<'tcx> { - fn describe(_tcx: TyCtxt<'_, '_, '_>, - goal: CanonicalTypeOpNormalizeGoal<'tcx, ty::FnSig<'tcx>>) -> Cow<'static, str> { + fn describe( + _tcx: TyCtxt<'_, '_, '_>, + goal: CanonicalTypeOpNormalizeGoal<'tcx, ty::FnSig<'tcx>>, + ) -> Cow<'static, str> { format!("normalizing `{:?}`", goal).into() } } impl<'tcx> QueryDescription<'tcx> for queries::is_copy_raw<'tcx> { - fn describe(_tcx: TyCtxt<'_, '_, '_>, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) - -> Cow<'static, str> { + fn describe( + _tcx: TyCtxt<'_, '_, '_>, + env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>, + ) -> Cow<'static, str> { format!("computing whether `{}` is `Copy`", env.value).into() 
} } impl<'tcx> QueryDescription<'tcx> for queries::is_sized_raw<'tcx> { - fn describe(_tcx: TyCtxt<'_, '_, '_>, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) - -> Cow<'static, str> { + fn describe( + _tcx: TyCtxt<'_, '_, '_>, + env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>, + ) -> Cow<'static, str> { format!("computing whether `{}` is `Sized`", env.value).into() } } impl<'tcx> QueryDescription<'tcx> for queries::is_freeze_raw<'tcx> { - fn describe(_tcx: TyCtxt<'_, '_, '_>, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) - -> Cow<'static, str> { + fn describe( + _tcx: TyCtxt<'_, '_, '_>, + env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>, + ) -> Cow<'static, str> { format!("computing whether `{}` is freeze", env.value).into() } } impl<'tcx> QueryDescription<'tcx> for queries::needs_drop_raw<'tcx> { - fn describe(_tcx: TyCtxt<'_, '_, '_>, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) - -> Cow<'static, str> { + fn describe( + _tcx: TyCtxt<'_, '_, '_>, + env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>, + ) -> Cow<'static, str> { format!("computing whether `{}` needs drop", env.value).into() } } impl<'tcx> QueryDescription<'tcx> for queries::layout_raw<'tcx> { - fn describe(_tcx: TyCtxt<'_, '_, '_>, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) - -> Cow<'static, str> { + fn describe( + _tcx: TyCtxt<'_, '_, '_>, + env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>, + ) -> Cow<'static, str> { format!("computing layout of `{}`", env.value).into() } } impl<'tcx> QueryDescription<'tcx> for queries::super_predicates_of<'tcx> { fn describe(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> Cow<'static, str> { - format!("computing the supertraits of `{}`", - tcx.item_path_str(def_id)).into() + format!( + "computing the supertraits of `{}`", + tcx.item_path_str(def_id) + ) + .into() } } @@ -227,15 +257,21 @@ impl<'tcx> QueryDescription<'tcx> for queries::erase_regions_ty<'tcx> { impl<'tcx> QueryDescription<'tcx> for queries::type_param_predicates<'tcx> { fn describe(tcx: TyCtxt<'_, '_, '_>, (_, def_id): (DefId, DefId)) -> Cow<'static, str> { let id = tcx.hir().as_local_node_id(def_id).unwrap(); - format!("computing the bounds for type parameter `{}`", - tcx.hir().ty_param_name(id)).into() + format!( + "computing the bounds for type parameter `{}`", + tcx.hir().ty_param_name(id) + ) + .into() } } impl<'tcx> QueryDescription<'tcx> for queries::coherent_trait<'tcx> { fn describe(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> Cow<'static, str> { - format!("coherence checking all impls of trait `{}`", - tcx.item_path_str(def_id)).into() + format!( + "coherence checking all impls of trait `{}`", + tcx.item_path_str(def_id) + ) + .into() } } @@ -271,8 +307,11 @@ impl<'tcx> QueryDescription<'tcx> for queries::inferred_outlives_crate<'tcx> { impl<'tcx> QueryDescription<'tcx> for queries::mir_shims<'tcx> { fn describe(tcx: TyCtxt<'_, '_, '_>, def: ty::InstanceDef<'tcx>) -> Cow<'static, str> { - format!("generating MIR shim for `{}`", - tcx.item_path_str(def.def_id())).into() + format!( + "generating MIR shim for `{}`", + tcx.item_path_str(def.def_id()) + ) + .into() } } @@ -302,7 +341,8 @@ impl<'tcx> QueryDescription<'tcx> for queries::const_eval<'tcx> { format!( "const-evaluating + checking `{}`", tcx.item_path_str(key.value.instance.def.def_id()), - ).into() + ) + .into() } #[inline] @@ -311,18 +351,27 @@ impl<'tcx> QueryDescription<'tcx> for queries::const_eval<'tcx> { } #[inline] - fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - id: SerializedDepNodeIndex) - -> Option { - tcx.queries.on_disk_cache.try_load_query_result(tcx, id).map(Ok) + fn try_load_from_disk<'a>( + tcx: TyCtxt<'a, 'tcx, 
'tcx>, + id: SerializedDepNodeIndex, + ) -> Option { + tcx.queries + .on_disk_cache + .try_load_query_result(tcx, id) + .map(Ok) } } impl<'tcx> QueryDescription<'tcx> for queries::const_eval_raw<'tcx> { - fn describe(tcx: TyCtxt<'_, '_, '_>, key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>) - -> Cow<'static, str> - { - format!("const-evaluating `{}`", tcx.item_path_str(key.value.instance.def.def_id())).into() + fn describe( + tcx: TyCtxt<'_, '_, '_>, + key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>, + ) -> Cow<'static, str> { + format!( + "const-evaluating `{}`", + tcx.item_path_str(key.value.instance.def.def_id()) + ) + .into() } #[inline] @@ -331,10 +380,14 @@ impl<'tcx> QueryDescription<'tcx> for queries::const_eval_raw<'tcx> { } #[inline] - fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - id: SerializedDepNodeIndex) - -> Option { - tcx.queries.on_disk_cache.try_load_query_result(tcx, id).map(Ok) + fn try_load_from_disk<'a>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: SerializedDepNodeIndex, + ) -> Option { + tcx.queries + .on_disk_cache + .try_load_query_result(tcx, id) + .map(Ok) } } @@ -355,9 +408,10 @@ impl<'tcx> QueryDescription<'tcx> for queries::symbol_name<'tcx> { } #[inline] - fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - id: SerializedDepNodeIndex) - -> Option { + fn try_load_from_disk<'a>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: SerializedDepNodeIndex, + ) -> Option { tcx.queries.on_disk_cache.try_load_query_result(tcx, id) } } @@ -374,7 +428,6 @@ impl<'tcx> QueryDescription<'tcx> for queries::def_span<'tcx> { } } - impl<'tcx> QueryDescription<'tcx> for queries::lookup_stability<'tcx> { fn describe(_: TyCtxt<'_, '_, '_>, _: DefId) -> Cow<'static, str> { bug!("stability") @@ -419,8 +472,11 @@ impl<'tcx> QueryDescription<'tcx> for queries::trait_of_item<'tcx> { impl<'tcx> QueryDescription<'tcx> for queries::const_is_rvalue_promotable_to_static<'tcx> { fn describe(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> Cow<'static, str> { - format!("const checking if rvalue is promotable to static `{}`", - tcx.item_path_str(def_id)).into() + format!( + "const checking if rvalue is promotable to static `{}`", + tcx.item_path_str(def_id) + ) + .into() } #[inline] @@ -429,32 +485,44 @@ impl<'tcx> QueryDescription<'tcx> for queries::const_is_rvalue_promotable_to_sta } #[inline] - fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - id: SerializedDepNodeIndex) - -> Option { + fn try_load_from_disk<'a>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: SerializedDepNodeIndex, + ) -> Option { tcx.queries.on_disk_cache.try_load_query_result(tcx, id) } } impl<'tcx> QueryDescription<'tcx> for queries::rvalue_promotable_map<'tcx> { fn describe(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> Cow<'static, str> { - format!("checking which parts of `{}` are promotable to static", - tcx.item_path_str(def_id)).into() + format!( + "checking which parts of `{}` are promotable to static", + tcx.item_path_str(def_id) + ) + .into() } } impl<'tcx> QueryDescription<'tcx> for queries::is_mir_available<'tcx> { fn describe(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> Cow<'static, str> { - format!("checking if item is mir available: `{}`", - tcx.item_path_str(def_id)).into() + format!( + "checking if item is mir available: `{}`", + tcx.item_path_str(def_id) + ) + .into() } } impl<'tcx> QueryDescription<'tcx> for queries::codegen_fulfill_obligation<'tcx> { - fn describe(tcx: TyCtxt<'_, '_, '_>, - key: (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>)) -> Cow<'static, str> { - format!("checking if `{}` fulfills its obligations", 
tcx.item_path_str(key.1.def_id())) - .into() + fn describe( + tcx: TyCtxt<'_, '_, '_>, + key: (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>), + ) -> Cow<'static, str> { + format!( + "checking if `{}` fulfills its obligations", + tcx.item_path_str(key.1.def_id()) + ) + .into() } #[inline] @@ -463,9 +531,10 @@ impl<'tcx> QueryDescription<'tcx> for queries::codegen_fulfill_obligation<'tcx> } #[inline] - fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - id: SerializedDepNodeIndex) - -> Option { + fn try_load_from_disk<'a>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: SerializedDepNodeIndex, + ) -> Option { tcx.queries.on_disk_cache.try_load_query_result(tcx, id) } } @@ -478,13 +547,21 @@ impl<'tcx> QueryDescription<'tcx> for queries::trait_impls_of<'tcx> { impl<'tcx> QueryDescription<'tcx> for queries::is_object_safe<'tcx> { fn describe(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> Cow<'static, str> { - format!("determine object safety of trait `{}`", tcx.item_path_str(def_id)).into() + format!( + "determine object safety of trait `{}`", + tcx.item_path_str(def_id) + ) + .into() } } impl<'tcx> QueryDescription<'tcx> for queries::is_const_fn_raw<'tcx> { fn describe(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> Cow<'static, str> { - format!("checking if item is const fn: `{}`", tcx.item_path_str(def_id)).into() + format!( + "checking if item is const fn: `{}`", + tcx.item_path_str(def_id) + ) + .into() } } @@ -777,8 +854,12 @@ impl<'tcx> QueryDescription<'tcx> for queries::output_filenames<'tcx> { } impl<'tcx> QueryDescription<'tcx> for queries::vtable_methods<'tcx> { - fn describe(tcx: TyCtxt<'_, '_, '_>, key: ty::PolyTraitRef<'tcx> ) -> Cow<'static, str> { - format!("finding all methods for trait {}", tcx.item_path_str(key.def_id())).into() + fn describe(tcx: TyCtxt<'_, '_, '_>, key: ty::PolyTraitRef<'tcx>) -> Cow<'static, str> { + format!( + "finding all methods for trait {}", + tcx.item_path_str(key.def_id()) + ) + .into() } } @@ -794,12 +875,12 @@ impl<'tcx> QueryDescription<'tcx> for queries::typeck_tables_of<'tcx> { def_id.is_local() } - fn try_load_from_disk(tcx: TyCtxt<'_, 'tcx, 'tcx>, - id: SerializedDepNodeIndex) - -> Option { - let typeck_tables: Option> = tcx - .queries.on_disk_cache - .try_load_query_result(tcx, id); + fn try_load_from_disk( + tcx: TyCtxt<'_, 'tcx, 'tcx>, + id: SerializedDepNodeIndex, + ) -> Option { + let typeck_tables: Option> = + tcx.queries.on_disk_cache.try_load_query_result(tcx, id); typeck_tables.map(|tables| tcx.alloc_tables(tables)) } @@ -811,18 +892,23 @@ impl<'tcx> QueryDescription<'tcx> for queries::optimized_mir<'tcx> { def_id.is_local() } - fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - id: SerializedDepNodeIndex) - -> Option { - let mir: Option<::mir::Mir<'tcx>> = tcx.queries.on_disk_cache - .try_load_query_result(tcx, id); + fn try_load_from_disk<'a>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: SerializedDepNodeIndex, + ) -> Option { + let mir: Option<::mir::Mir<'tcx>> = + tcx.queries.on_disk_cache.try_load_query_result(tcx, id); mir.map(|x| tcx.alloc_mir(x)) } } impl<'tcx> QueryDescription<'tcx> for queries::substitute_normalize_and_test_predicates<'tcx> { fn describe(tcx: TyCtxt<'_, '_, '_>, key: (DefId, &'tcx Substs<'tcx>)) -> Cow<'static, str> { - format!("testing substituted normalized predicates:`{}`", tcx.item_path_str(key.0)).into() + format!( + "testing substituted normalized predicates:`{}`", + tcx.item_path_str(key.0) + ) + .into() } } @@ -850,11 +936,12 @@ impl<'tcx> QueryDescription<'tcx> for queries::generics_of<'tcx> { def_id.is_local() } - 
fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - id: SerializedDepNodeIndex) - -> Option { - let generics: Option = tcx.queries.on_disk_cache - .try_load_query_result(tcx, id); + fn try_load_from_disk<'a>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: SerializedDepNodeIndex, + ) -> Option { + let generics: Option = + tcx.queries.on_disk_cache.try_load_query_result(tcx, id); generics.map(|x| tcx.alloc_generics(x)) } } diff --git a/src/librustc/ty/query/job.rs b/src/librustc/ty/query/job.rs index 0063794727fd2..bd962e0584942 100644 --- a/src/librustc/ty/query/job.rs +++ b/src/librustc/ty/query/job.rs @@ -1,33 +1,30 @@ #![allow(warnings)] -use std::mem; +use errors::Diagnostic; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::sync::{Lock, LockGuard, Lrc, Weak}; use rustc_data_structures::OnDrop; +use std::mem; +use std::process; +use std::{fmt, ptr}; use syntax_pos::Span; -use ty::tls; -use ty::query::Query; +use ty::context::TyCtxt; use ty::query::plumbing::CycleError; +use ty::query::Query; #[cfg(not(parallel_queries))] -use ty::query::{ - plumbing::TryGetJob, - config::QueryDescription, -}; -use ty::context::TyCtxt; -use errors::Diagnostic; -use std::process; -use std::{fmt, ptr}; +use ty::query::{config::QueryDescription, plumbing::TryGetJob}; +use ty::tls; #[cfg(parallel_queries)] use { + parking_lot::{Condvar, Mutex}, rayon_core, - parking_lot::{Mutex, Condvar}, - std::sync::atomic::Ordering, - std::thread, + rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}, std::iter, std::iter::FromIterator, + std::sync::atomic::Ordering, + std::thread, syntax_pos::DUMMY_SP, - rustc_data_structures::stable_hasher::{StableHasherResult, StableHasher, HashStable}, }; /// Indicates the state of a query for a given key in a query map @@ -113,7 +110,7 @@ impl<'tcx> QueryJob<'tcx> { let mut cycle = waiter.cycle.lock(); match cycle.take() { None => Ok(()), - Some(cycle) => Err(Box::new(cycle)) + Some(cycle) => Err(Box::new(cycle)), } }) } @@ -140,9 +137,10 @@ impl<'tcx> QueryJob<'tcx> { // Replace it with the span which caused the cycle to form cycle[0].span = span; // Find out why the cycle itself was used - let usage = job.parent.as_ref().map(|parent| { - (job.info.span, parent.info.query.clone()) - }); + let usage = job + .parent + .as_ref() + .map(|parent| (job.info.span, parent.info.query.clone())); return CycleError { usage, cycle }; } @@ -235,10 +233,7 @@ impl<'tcx> QueryLatch<'tcx> { /// Remove a single waiter from the list of waiters. /// This is used to break query cycles. - fn extract_waiter( - &self, - waiter: usize, - ) -> Lrc> { + fn extract_waiter(&self, waiter: usize) -> Lrc> { let mut info = self.info.lock(); debug_assert!(!info.complete); // Remove the waiter from the list of waiters @@ -262,7 +257,7 @@ type Waiter<'tcx> = (Lrc>, usize); #[cfg(parallel_queries)] fn visit_waiters<'tcx, F>(query: Lrc>, mut visit: F) -> Option>> where - F: FnMut(Span, Lrc>) -> Option>> + F: FnMut(Span, Lrc>) -> Option>>, { // Visit the parent query which is a non-resumable waiter since it's on the same stack if let Some(ref parent) = query.parent { @@ -288,10 +283,11 @@ where /// If a cycle is detected, this initial value is replaced with the span causing /// the cycle. 
#[cfg(parallel_queries)] -fn cycle_check<'tcx>(query: Lrc>, - span: Span, - stack: &mut Vec<(Span, Lrc>)>, - visited: &mut FxHashSet<*const QueryJob<'tcx>> +fn cycle_check<'tcx>( + query: Lrc>, + span: Span, + stack: &mut Vec<(Span, Lrc>)>, + visited: &mut FxHashSet<*const QueryJob<'tcx>>, ) -> Option>> { if !visited.insert(query.as_ptr()) { return if let Some(p) = stack.iter().position(|q| q.1.as_ptr() == query.as_ptr()) { @@ -304,7 +300,7 @@ fn cycle_check<'tcx>(query: Lrc>, Some(None) } else { None - } + }; } // Query marked as visited is added it to the stack @@ -329,7 +325,7 @@ fn cycle_check<'tcx>(query: Lrc>, #[cfg(parallel_queries)] fn connected_to_root<'tcx>( query: Lrc>, - visited: &mut FxHashSet<*const QueryJob<'tcx>> + visited: &mut FxHashSet<*const QueryJob<'tcx>>, ) -> bool { // We already visited this or we're deliberately ignoring it if !visited.insert(query.as_ptr()) { @@ -347,7 +343,8 @@ fn connected_to_root<'tcx>( } else { None } - }).is_some() + }) + .is_some() } // Deterministically pick an query from a list @@ -355,21 +352,24 @@ fn connected_to_root<'tcx>( fn pick_query<'a, 'tcx, T, F: Fn(&T) -> (Span, Lrc>)>( tcx: TyCtxt<'_, 'tcx, '_>, queries: &'a [T], - f: F + f: F, ) -> &'a T { // Deterministically pick an entry point // FIXME: Sort this instead let mut hcx = tcx.create_stable_hashing_context(); - queries.iter().min_by_key(|v| { - let (span, query) = f(v); - let mut stable_hasher = StableHasher::::new(); - query.info.query.hash_stable(&mut hcx, &mut stable_hasher); - // Prefer entry points which have valid spans for nicer error messages - // We add an integer to the tuple ensuring that entry points - // with valid spans are picked first - let span_cmp = if span == DUMMY_SP { 1 } else { 0 }; - (span_cmp, stable_hasher.finish()) - }).unwrap() + queries + .iter() + .min_by_key(|v| { + let (span, query) = f(v); + let mut stable_hasher = StableHasher::::new(); + query.info.query.hash_stable(&mut hcx, &mut stable_hasher); + // Prefer entry points which have valid spans for nicer error messages + // We add an integer to the tuple ensuring that entry points + // with valid spans are picked first + let span_cmp = if span == DUMMY_SP { 1 } else { 0 }; + (span_cmp, stable_hasher.finish()) + }) + .unwrap() } /// Looks for query cycles starting from the last query in `jobs`. 
@@ -381,15 +381,12 @@ fn pick_query<'a, 'tcx, T, F: Fn(&T) -> (Span, Lrc>)>( fn remove_cycle<'tcx>( jobs: &mut Vec>>, wakelist: &mut Vec>>, - tcx: TyCtxt<'_, 'tcx, '_> + tcx: TyCtxt<'_, 'tcx, '_>, ) -> bool { let mut visited = FxHashSet::default(); let mut stack = Vec::new(); // Look for a cycle starting with the last query in `jobs` - if let Some(waiter) = cycle_check(jobs.pop().unwrap(), - DUMMY_SP, - &mut stack, - &mut visited) { + if let Some(waiter) = cycle_check(jobs.pop().unwrap(), DUMMY_SP, &mut stack, &mut visited) { // The stack is a vector of pairs of spans and queries; reverse it so that // the earlier entries require later entries let (mut spans, queries): (Vec<_>, Vec<_>) = stack.into_iter().rev().unzip(); @@ -409,53 +406,65 @@ fn remove_cycle<'tcx>( // Find the queries in the cycle which are // connected to queries outside the cycle - let entry_points = stack.iter().filter_map(|(span, query)| { - if query.parent.is_none() { - // This query is connected to the root (it has no query parent) - Some((*span, query.clone(), None)) - } else { - let mut waiters = Vec::new(); - // Find all the direct waiters who lead to the root - visit_waiters(query.clone(), |span, waiter| { - // Mark all the other queries in the cycle as already visited - let mut visited = FxHashSet::from_iter(stack.iter().map(|q| q.1.as_ptr())); - - if connected_to_root(waiter.clone(), &mut visited) { - waiters.push((span, waiter)); - } - - None - }); - if waiters.is_empty() { - None + let entry_points = stack + .iter() + .filter_map(|(span, query)| { + if query.parent.is_none() { + // This query is connected to the root (it has no query parent) + Some((*span, query.clone(), None)) } else { - // Deterministically pick one of the waiters to show to the user - let waiter = pick_query(tcx, &waiters, |s| s.clone()).clone(); - Some((*span, query.clone(), Some(waiter))) + let mut waiters = Vec::new(); + // Find all the direct waiters who lead to the root + visit_waiters(query.clone(), |span, waiter| { + // Mark all the other queries in the cycle as already visited + let mut visited = FxHashSet::from_iter(stack.iter().map(|q| q.1.as_ptr())); + + if connected_to_root(waiter.clone(), &mut visited) { + waiters.push((span, waiter)); + } + + None + }); + if waiters.is_empty() { + None + } else { + // Deterministically pick one of the waiters to show to the user + let waiter = pick_query(tcx, &waiters, |s| s.clone()).clone(); + Some((*span, query.clone(), Some(waiter))) + } } - } - }).collect::>, Option<(Span, Lrc>)>)>>(); + }) + .collect::>, + Option<(Span, Lrc>)>, + )>>(); // Deterministically pick an entry point let (_, entry_point, usage) = pick_query(tcx, &entry_points, |e| (e.0, e.1.clone())); // Shift the stack so that our entry point is first - let entry_point_pos = stack.iter().position(|(_, query)| { - query.as_ptr() == entry_point.as_ptr() - }); + let entry_point_pos = stack + .iter() + .position(|(_, query)| query.as_ptr() == entry_point.as_ptr()); if let Some(pos) = entry_point_pos { stack.rotate_left(pos); } - let usage = usage.as_ref().map(|(span, query)| (*span, query.info.query.clone())); + let usage = usage + .as_ref() + .map(|(span, query)| (*span, query.info.query.clone())); // Create the cycle error let mut error = CycleError { usage, - cycle: stack.iter().map(|&(s, ref q)| QueryInfo { - span: s, - query: q.info.query.clone(), - } ).collect(), + cycle: stack + .iter() + .map(|&(s, ref q)| QueryInfo { + span: s, + query: q.info.query.clone(), + }) + .collect(), }; // We unwrap `waiter` here since there must 
always be one @@ -487,27 +496,20 @@ pub unsafe fn handle_deadlock() { let registry = rayon_core::Registry::current(); - let gcx_ptr = tls::GCX_PTR.with(|gcx_ptr| { - gcx_ptr as *const _ - }); + let gcx_ptr = tls::GCX_PTR.with(|gcx_ptr| gcx_ptr as *const _); let gcx_ptr = &*gcx_ptr; - let syntax_globals = syntax::GLOBALS.with(|syntax_globals| { - syntax_globals as *const _ - }); + let syntax_globals = syntax::GLOBALS.with(|syntax_globals| syntax_globals as *const _); let syntax_globals = &*syntax_globals; - let syntax_pos_globals = syntax_pos::GLOBALS.with(|syntax_pos_globals| { - syntax_pos_globals as *const _ - }); + let syntax_pos_globals = + syntax_pos::GLOBALS.with(|syntax_pos_globals| syntax_pos_globals as *const _); let syntax_pos_globals = &*syntax_pos_globals; thread::spawn(move || { tls::GCX_PTR.set(gcx_ptr, || { syntax_pos::GLOBALS.set(syntax_pos_globals, || { syntax_pos::GLOBALS.set(syntax_pos_globals, || { - tls::with_thread_locals(|| { - tls::with_global(|tcx| deadlock(tcx, ®istry)) - }) + tls::with_thread_locals(|| tls::with_global(|tcx| deadlock(tcx, ®istry))) }) }) }) diff --git a/src/librustc/ty/query/keys.rs b/src/librustc/ty/query/keys.rs index 959a5ff5767a3..c9ceb012b8da3 100644 --- a/src/librustc/ty/query/keys.rs +++ b/src/librustc/ty/query/keys.rs @@ -1,17 +1,17 @@ //! Defines the set of legal keys that can be used in queries. +use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE}; use infer::canonical::Canonical; -use hir::def_id::{CrateNum, DefId, LOCAL_CRATE, DefIndex}; +use mir; use traits; -use ty::{self, Ty, TyCtxt}; -use ty::subst::Substs; use ty::fast_reject::SimplifiedType; -use mir; +use ty::subst::Substs; +use ty::{self, Ty, TyCtxt}; use std::fmt::Debug; use std::hash::Hash; -use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::symbol::InternedString; +use syntax_pos::{Span, DUMMY_SP}; /// The `Key` trait controls what types can legally be used as the key /// for a query. 
@@ -127,7 +127,7 @@ impl<'tcx> Key for (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>) {
     }
 }
 
-impl<'tcx> Key for ty::PolyTraitRef<'tcx>{
+impl<'tcx> Key for ty::PolyTraitRef<'tcx> {
     fn query_crate(&self) -> CrateNum {
         self.def_id().krate
     }
diff --git a/src/librustc/ty/query/mod.rs b/src/librustc/ty/query/mod.rs
index 4d026b97233ee..450f67b71cf94 100644
--- a/src/librustc/ty/query/mod.rs
+++ b/src/librustc/ty/query/mod.rs
@@ -1,65 +1,64 @@
 use dep_graph::{DepConstructor, DepNode};
 use errors::DiagnosticBuilder;
-use hir::def_id::{CrateNum, DefId, DefIndex};
 use hir::def::{Def, Export};
-use hir::{self, TraitCandidate, ItemLocalId, CodegenFnAttrs};
-use rustc_data_structures::svh::Svh;
+use hir::def_id::{CrateNum, DefId, DefIndex};
+use hir::{self, CodegenFnAttrs, ItemLocalId, TraitCandidate};
 use infer::canonical::{self, Canonical};
 use lint;
 use middle::borrowck::BorrowCheckResult;
-use middle::cstore::{ExternCrate, LinkagePreference, NativeLibrary, ForeignModule};
-use middle::cstore::{NativeLibraryKind, DepKind, CrateSource};
+use middle::cstore::{CrateSource, DepKind, NativeLibraryKind};
+use middle::cstore::{ExternCrate, ForeignModule, LinkagePreference, NativeLibrary};
+use middle::exported_symbols::{ExportedSymbol, SymbolExportLevel};
+use middle::lang_items::{LangItem, LanguageItems};
+use middle::lib_features::LibFeatures;
 use middle::privacy::AccessLevels;
 use middle::reachable::ReachableSet;
 use middle::region;
-use middle::resolve_lifetime::{ResolveLifetimes, Region, ObjectLifetimeDefault};
+use middle::resolve_lifetime::{ObjectLifetimeDefault, Region, ResolveLifetimes};
 use middle::stability::{self, DeprecationEntry};
-use middle::lib_features::LibFeatures;
-use middle::lang_items::{LanguageItems, LangItem};
-use middle::exported_symbols::{SymbolExportLevel, ExportedSymbol};
-use mir::interpret::{ConstEvalRawResult, ConstEvalResult};
-use mir::mono::CodegenUnit;
 use mir;
 use mir::interpret::GlobalId;
-use session::{CompileResult, CrateDisambiguator};
+use mir::interpret::{ConstEvalRawResult, ConstEvalResult};
+use mir::mono::CodegenUnit;
+use rustc_data_structures::svh::Svh;
 use session::config::OutputFilenames;
-use traits::{self, Vtable};
-use traits::query::{
-    CanonicalPredicateGoal, CanonicalProjectionGoal,
-    CanonicalTyGoal, CanonicalTypeOpAscribeUserTypeGoal,
-    CanonicalTypeOpEqGoal, CanonicalTypeOpSubtypeGoal, CanonicalTypeOpProvePredicateGoal,
-    CanonicalTypeOpNormalizeGoal, NoSolution,
-};
+use session::{CompileResult, CrateDisambiguator};
+use traits::query::dropck_outlives::{DropckOutlivesResult, DtorckConstraint};
 use traits::query::method_autoderef::MethodAutoderefStepsResult;
-use traits::query::dropck_outlives::{DtorckConstraint, DropckOutlivesResult};
 use traits::query::normalize::NormalizationResult;
 use traits::query::outlives_bounds::OutlivesBound;
+use traits::query::{
+    CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal,
+    CanonicalTypeOpAscribeUserTypeGoal, CanonicalTypeOpEqGoal, CanonicalTypeOpNormalizeGoal,
+    CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpSubtypeGoal, NoSolution,
+};
 use traits::specialization_graph;
 use traits::Clauses;
-use ty::{self, CrateInherentImpls, ParamEnvAnd, Ty, TyCtxt};
+use traits::{self, Vtable};
 use ty::steal::Steal;
 use ty::subst::Substs;
-use util::nodemap::{DefIdSet, DefIdMap, ItemLocalSet};
-use util::common::{ErrorReported};
+use ty::{self, CrateInherentImpls, ParamEnvAnd, Ty, TyCtxt};
+use util::common::ErrorReported;
+use util::nodemap::{DefIdMap, DefIdSet, ItemLocalSet};
 use util::profiling::ProfileCategory::*;
 
 use rustc_data_structures::bit_set::BitSet;
-use rustc_data_structures::indexed_vec::IndexVec;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::indexed_vec::IndexVec;
 use rustc_data_structures::stable_hasher::StableVec;
 use rustc_data_structures::sync::Lrc;
 use rustc_target::spec::PanicStrategy;
 
 use std::borrow::Cow;
+use std::intrinsics::type_name;
 use std::ops::Deref;
 use std::sync::Arc;
-use std::intrinsics::type_name;
-use syntax_pos::{Span, DUMMY_SP};
-use syntax_pos::symbol::InternedString;
-use syntax::attr;
 use syntax::ast;
+use syntax::attr;
 use syntax::feature_gate;
 use syntax::symbol::Symbol;
+use syntax_pos::symbol::InternedString;
+use syntax_pos::{Span, DUMMY_SP};
 
 #[macro_use]
 mod plumbing;
@@ -67,9 +66,9 @@ use self::plumbing::*;
 pub use self::plumbing::{force_from_dep_node, CycleError};
 
 mod job;
-pub use self::job::{QueryJob, QueryInfo};
 #[cfg(parallel_queries)]
 pub use self::job::handle_deadlock;
+pub use self::job::{QueryInfo, QueryJob};
 
 mod keys;
 use self::keys::Key;
@@ -731,7 +730,6 @@ impl<'a, 'tcx, 'lcx> TyCtxt<'a, 'tcx, 'lcx> {
 
 // These functions are little shims used to find the dep-node for a
 // given query when there is not a *direct* mapping:
-
 fn features_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
     DepConstructor::Features
 }
@@ -745,17 +743,15 @@ fn erase_regions_ty<'tcx>(ty: Ty<'tcx>) -> DepConstructor<'tcx> {
 }
 
 fn type_param_predicates<'tcx>((item_id, param_id): (DefId, DefId)) -> DepConstructor<'tcx> {
-    DepConstructor::TypeParamPredicates {
-        item_id,
-        param_id
-    }
+    DepConstructor::TypeParamPredicates { item_id, param_id }
 }
 
-fn fulfill_obligation_dep_node<'tcx>((param_env, trait_ref):
-    (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>)) -> DepConstructor<'tcx> {
+fn fulfill_obligation_dep_node<'tcx>(
+    (param_env, trait_ref): (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>),
+) -> DepConstructor<'tcx> {
     DepConstructor::FulfillObligation {
         param_env,
-        trait_ref
+        trait_ref,
     }
 }
 
@@ -772,9 +768,7 @@ fn reachability_dep_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
 }
 
 fn mir_shim_dep_node<'tcx>(instance_def: ty::InstanceDef<'tcx>) -> DepConstructor<'tcx> {
-    DepConstructor::MirShim {
-        instance_def
-    }
+    DepConstructor::MirShim { instance_def }
 }
 
 fn symbol_name_dep_node<'tcx>(instance: ty::Instance<'tcx>) -> DepConstructor<'tcx> {
@@ -785,12 +779,14 @@ fn typeck_item_bodies_dep_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
     DepConstructor::TypeckBodiesKrate
 }
 
-fn const_eval_dep_node<'tcx>(param_env: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>)
-    -> DepConstructor<'tcx> {
+fn const_eval_dep_node<'tcx>(
+    param_env: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
+) -> DepConstructor<'tcx> {
     DepConstructor::ConstEval { param_env }
 }
 
-fn const_eval_raw_dep_node<'tcx>(param_env: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>)
-    -> DepConstructor<'tcx> {
+fn const_eval_raw_dep_node<'tcx>(
+    param_env: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
+) -> DepConstructor<'tcx> {
     DepConstructor::ConstEvalRaw { param_env }
 }
 
@@ -830,9 +826,9 @@ fn specializes_node<'tcx>((a, b): (DefId, DefId)) -> DepConstructor<'tcx> {
     DepConstructor::Specializes { impl1: a, impl2: b }
 }
 
-fn implementations_of_trait_node<'tcx>((krate, trait_id): (CrateNum, DefId))
-    -> DepConstructor<'tcx>
-{
+fn implementations_of_trait_node<'tcx>(
+    (krate, trait_id): (CrateNum, DefId),
+) -> DepConstructor<'tcx> {
     DepConstructor::ImplementationsOfTrait { krate, trait_id }
 }
 
@@ -881,11 +877,12 @@ fn output_filenames_node<'tcx>(_: CrateNum) ->
DepConstructor<'tcx> { } fn vtable_methods_node<'tcx>(trait_ref: ty::PolyTraitRef<'tcx>) -> DepConstructor<'tcx> { - DepConstructor::VtableMethods{ trait_ref } + DepConstructor::VtableMethods { trait_ref } } -fn substitute_normalize_and_test_predicates_node<'tcx>(key: (DefId, &'tcx Substs<'tcx>)) - -> DepConstructor<'tcx> { +fn substitute_normalize_and_test_predicates_node<'tcx>( + key: (DefId, &'tcx Substs<'tcx>), +) -> DepConstructor<'tcx> { DepConstructor::SubstituteNormalizeAndTestPredicates { key } } @@ -893,9 +890,8 @@ fn target_features_whitelist_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> { DepConstructor::TargetFeaturesWhitelist } -fn instance_def_size_estimate_dep_node<'tcx>(instance_def: ty::InstanceDef<'tcx>) - -> DepConstructor<'tcx> { - DepConstructor::InstanceDefSizeEstimate { - instance_def - } +fn instance_def_size_estimate_dep_node<'tcx>( + instance_def: ty::InstanceDef<'tcx>, +) -> DepConstructor<'tcx> { + DepConstructor::InstanceDefSizeEstimate { instance_def } } diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs index 3432aba7ee0d0..1a7068821db37 100644 --- a/src/librustc/ty/query/on_disk_cache.rs +++ b/src/librustc/ty/query/on_disk_cache.rs @@ -1,23 +1,24 @@ use dep_graph::{DepNodeIndex, SerializedDepNodeIndex}; use errors::Diagnostic; use hir; -use hir::def_id::{CrateNum, DefIndex, DefId, LocalDefId, LOCAL_CRATE}; +use hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, LOCAL_CRATE}; use hir::map::definitions::DefPathHash; use ich::{CachingSourceMapView, Fingerprint}; -use mir::{self, interpret}; use mir::interpret::{AllocDecodingSession, AllocDecodingState}; +use mir::{self, interpret}; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::sync::{Lrc, Lock, HashMapExt, Once}; -use rustc_data_structures::indexed_vec::{IndexVec, Idx}; -use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque, - SpecializedDecoder, SpecializedEncoder, - UseSpecializedDecodable, UseSpecializedEncodable}; +use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_data_structures::sync::{HashMapExt, Lock, Lrc, Once}; +use rustc_serialize::{ + opaque, Decodable, Decoder, Encodable, Encoder, SpecializedDecoder, SpecializedEncoder, + UseSpecializedDecodable, UseSpecializedEncodable, +}; use session::{CrateDisambiguator, Session}; use std::mem; use syntax::ast::NodeId; use syntax::source_map::{SourceMap, StableSourceFileId}; -use syntax_pos::{BytePos, Span, DUMMY_SP, SourceFile}; -use syntax_pos::hygiene::{Mark, SyntaxContext, ExpnInfo}; +use syntax_pos::hygiene::{ExpnInfo, Mark, SyntaxContext}; +use syntax_pos::{BytePos, SourceFile, Span, DUMMY_SP}; use ty; use ty::codec::{self as ty_codec, TyDecoder, TyEncoder}; use ty::context::TyCtxt; @@ -40,7 +41,6 @@ const TAG_INVALID_SPAN: u8 = 1; /// of a few selected queries (like `typeck_tables_of` and `mir_optimized`) and /// any diagnostics that have been emitted during a query. pub struct OnDiskCache<'sess> { - // The complete cache data in serialized form. 
serialized_data: Vec, @@ -154,21 +154,23 @@ impl<'sess> OnDiskCache<'sess> { } } - pub fn serialize<'a, 'tcx, E>(&self, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - encoder: &mut E) - -> Result<(), E::Error> - where E: ty_codec::TyEncoder - { + pub fn serialize<'a, 'tcx, E>( + &self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + encoder: &mut E, + ) -> Result<(), E::Error> + where + E: ty_codec::TyEncoder, + { // Serializing the DepGraph should not modify it: tcx.dep_graph.with_ignore(|| { // Allocate SourceFileIndices let (file_to_file_index, file_index_to_stable_id) = { let files = tcx.sess.source_map().files(); - let mut file_to_file_index = FxHashMap::with_capacity_and_hasher( - files.len(), Default::default()); - let mut file_index_to_stable_id = FxHashMap::with_capacity_and_hasher( - files.len(), Default::default()); + let mut file_to_file_index = + FxHashMap::with_capacity_and_hasher(files.len(), Default::default()); + let mut file_index_to_stable_id = + FxHashMap::with_capacity_and_hasher(files.len(), Default::default()); for (index, file) in files.iter().enumerate() { let index = SourceFileIndex(index as u32); @@ -247,19 +249,20 @@ impl<'sess> OnDiskCache<'sess> { })?; // Encode diagnostics - let diagnostics_index: EncodedDiagnosticsIndex = self.current_diagnostics.borrow() + let diagnostics_index: EncodedDiagnosticsIndex = self + .current_diagnostics + .borrow() .iter() - .map(|(dep_node_index, diagnostics)| - { - let pos = AbsoluteBytePos::new(encoder.position()); - // Let's make sure we get the expected type here: - let diagnostics: &EncodedDiagnostics = diagnostics; - let dep_node_index = SerializedDepNodeIndex::new(dep_node_index.index()); - encoder.encode_tagged(dep_node_index, diagnostics)?; - - Ok((dep_node_index, pos)) - }) - .collect::>()?; + .map(|(dep_node_index, diagnostics)| { + let pos = AbsoluteBytePos::new(encoder.position()); + // Let's make sure we get the expected type here: + let diagnostics: &EncodedDiagnostics = diagnostics; + let dep_node_index = SerializedDepNodeIndex::new(dep_node_index.index()); + encoder.encode_tagged(dep_node_index, diagnostics)?; + + Ok((dep_node_index, pos)) + }) + .collect::>()?; let interpret_alloc_index = { let mut interpret_alloc_index = Vec::new(); @@ -276,11 +279,7 @@ impl<'sess> OnDiskCache<'sess> { let id = encoder.interpret_allocs_inverse[idx]; let pos = encoder.position() as u32; interpret_alloc_index.push(pos); - interpret::specialized_encode_alloc_id( - &mut encoder, - tcx, - id, - )?; + interpret::specialized_encode_alloc_id(&mut encoder, tcx, id)?; } n = new_n; } @@ -288,21 +287,27 @@ impl<'sess> OnDiskCache<'sess> { }; let sorted_cnums = sorted_cnums_including_local_crate(tcx); - let prev_cnums: Vec<_> = sorted_cnums.iter().map(|&cnum| { - let crate_name = tcx.original_crate_name(cnum).as_str().to_string(); - let crate_disambiguator = tcx.crate_disambiguator(cnum); - (cnum.as_u32(), crate_name, crate_disambiguator) - }).collect(); + let prev_cnums: Vec<_> = sorted_cnums + .iter() + .map(|&cnum| { + let crate_name = tcx.original_crate_name(cnum).as_str().to_string(); + let crate_disambiguator = tcx.crate_disambiguator(cnum); + (cnum.as_u32(), crate_name, crate_disambiguator) + }) + .collect(); // Encode the file footer let footer_pos = encoder.position() as u64; - encoder.encode_tagged(TAG_FILE_FOOTER, &Footer { - file_index_to_stable_id, - prev_cnums, - query_result_index, - diagnostics_index, - interpret_alloc_index, - })?; + encoder.encode_tagged( + TAG_FILE_FOOTER, + &Footer { + file_index_to_stable_id, + prev_cnums, + query_result_index, + 
diagnostics_index, + interpret_alloc_index, + }, + )?; // Encode the position of the footer as the last 8 bytes of the // file so we know where to look for it. @@ -325,15 +330,17 @@ impl<'sess> OnDiskCache<'sess> { } /// Load a diagnostic emitted during the previous compilation session. - pub fn load_diagnostics<'a, 'tcx>(&self, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - dep_node_index: SerializedDepNodeIndex) - -> Vec { + pub fn load_diagnostics<'a, 'tcx>( + &self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + dep_node_index: SerializedDepNodeIndex, + ) -> Vec { let diagnostics: Option = self.load_indexed( tcx, dep_node_index, &self.prev_diagnostics_index, - "diagnostics"); + "diagnostics", + ); diagnostics.unwrap_or_default() } @@ -341,9 +348,7 @@ impl<'sess> OnDiskCache<'sess> { /// Store a diagnostic emitted during the current compilation session. /// Anything stored like this will be available via `load_diagnostics` in /// the next compilation session. - pub fn store_diagnostics(&self, - dep_node_index: DepNodeIndex, - diagnostics: Vec) { + pub fn store_diagnostics(&self, dep_node_index: DepNodeIndex, diagnostics: Vec) { let mut current_diagnostics = self.current_diagnostics.borrow_mut(); let prev = current_diagnostics.insert(dep_node_index, diagnostics); debug_assert!(prev.is_none()); @@ -351,49 +356,55 @@ impl<'sess> OnDiskCache<'sess> { /// Returns the cached query result if there is something in the cache for /// the given SerializedDepNodeIndex. Otherwise returns None. - pub fn try_load_query_result<'tcx, T>(&self, - tcx: TyCtxt<'_, 'tcx, 'tcx>, - dep_node_index: SerializedDepNodeIndex) - -> Option - where T: Decodable + pub fn try_load_query_result<'tcx, T>( + &self, + tcx: TyCtxt<'_, 'tcx, 'tcx>, + dep_node_index: SerializedDepNodeIndex, + ) -> Option + where + T: Decodable, { - self.load_indexed(tcx, - dep_node_index, - &self.query_result_index, - "query result") + self.load_indexed( + tcx, + dep_node_index, + &self.query_result_index, + "query result", + ) } /// Store a diagnostic emitted during computation of an anonymous query. /// Since many anonymous queries can share the same `DepNode`, we aggregate /// them -- as opposed to regular queries where we assume that there is a /// 1:1 relationship between query-key and `DepNode`. 
- pub fn store_diagnostics_for_anon_node(&self, - dep_node_index: DepNodeIndex, - mut diagnostics: Vec) { + pub fn store_diagnostics_for_anon_node( + &self, + dep_node_index: DepNodeIndex, + mut diagnostics: Vec, + ) { let mut current_diagnostics = self.current_diagnostics.borrow_mut(); - let x = current_diagnostics.entry(dep_node_index).or_insert_with(|| { - mem::replace(&mut diagnostics, Vec::new()) - }); + let x = current_diagnostics + .entry(dep_node_index) + .or_insert_with(|| mem::replace(&mut diagnostics, Vec::new())); x.extend(diagnostics.into_iter()); } - fn load_indexed<'tcx, T>(&self, - tcx: TyCtxt<'_, 'tcx, 'tcx>, - dep_node_index: SerializedDepNodeIndex, - index: &FxHashMap, - debug_tag: &'static str) - -> Option - where T: Decodable + fn load_indexed<'tcx, T>( + &self, + tcx: TyCtxt<'_, 'tcx, 'tcx>, + dep_node_index: SerializedDepNodeIndex, + index: &FxHashMap, + debug_tag: &'static str, + ) -> Option + where + T: Decodable, { let pos = index.get(&dep_node_index).cloned()?; // Initialize the cnum_map using the value from the thread which finishes the closure first - self.cnum_map.init_nonlocking_same(|| { - Self::compute_cnum_map(tcx, &self.prev_cnums[..]) - }); + self.cnum_map + .init_nonlocking_same(|| Self::compute_cnum_map(tcx, &self.prev_cnums[..])); let mut decoder = CacheDecoder { tcx, @@ -407,12 +418,8 @@ impl<'sess> OnDiskCache<'sess> { }; match decode_tagged(&mut decoder, dep_node_index) { - Ok(value) => { - Some(value) - } - Err(e) => { - bug!("Could not decode cached {}: {}", debug_tag, e) - } + Ok(value) => Some(value), + Err(e) => bug!("Could not decode cached {}: {}", debug_tag, e), } } @@ -420,22 +427,22 @@ impl<'sess> OnDiskCache<'sess> { // current-session-CrateNum. There might be CrateNums from the previous // Session that don't occur in the current one. For these, the mapping // maps to None. - fn compute_cnum_map(tcx: TyCtxt<'_, '_, '_>, - prev_cnums: &[(u32, String, CrateDisambiguator)]) - -> IndexVec> - { + fn compute_cnum_map( + tcx: TyCtxt<'_, '_, '_>, + prev_cnums: &[(u32, String, CrateDisambiguator)], + ) -> IndexVec> { tcx.dep_graph.with_ignore(|| { - let current_cnums = tcx.all_crate_nums(LOCAL_CRATE).iter().map(|&cnum| { - let crate_name = tcx.original_crate_name(cnum) - .to_string(); - let crate_disambiguator = tcx.crate_disambiguator(cnum); - ((crate_name, crate_disambiguator), cnum) - }).collect::>(); - - let map_size = prev_cnums.iter() - .map(|&(cnum, ..)| cnum) - .max() - .unwrap_or(0) + 1; + let current_cnums = tcx + .all_crate_nums(LOCAL_CRATE) + .iter() + .map(|&cnum| { + let crate_name = tcx.original_crate_name(cnum).to_string(); + let crate_disambiguator = tcx.crate_disambiguator(cnum); + ((crate_name, crate_disambiguator), cnum) + }) + .collect::>(); + + let map_size = prev_cnums.iter().map(|&(cnum, ..)| cnum).max().unwrap_or(0) + 1; let mut map = IndexVec::from_elem_n(None, map_size as usize); for &(prev_cnum, ref crate_name, crate_disambiguator) in prev_cnums { @@ -474,11 +481,16 @@ impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> { .. 
} = *self; - file_index_to_file.borrow_mut().entry(index).or_insert_with(|| { - let stable_id = file_index_to_stable_id[&index]; - source_map.source_file_by_stable_id(stable_id) - .expect("Failed to lookup SourceFile in new context.") - }).clone() + file_index_to_file + .borrow_mut() + .entry(index) + .or_insert_with(|| { + let stable_id = file_index_to_stable_id[&index]; + source_map + .source_file_by_stable_id(stable_id) + .expect("Failed to lookup SourceFile in new context.") + }) + .clone() } } @@ -500,13 +512,12 @@ impl<'a, 'tcx, 'x> DecoderWithPosition for CacheDecoder<'a, 'tcx, 'x> { // Decode something that was encoded with encode_tagged() and verify that the // tag matches and the correct amount of bytes was read. -fn decode_tagged<'a, 'tcx, D, T, V>(decoder: &mut D, - expected_tag: T) - -> Result - where T: Decodable + Eq + ::std::fmt::Debug, - V: Decodable, - D: DecoderWithPosition, - 'tcx: 'a, +fn decode_tagged<'a, 'tcx, D, T, V>(decoder: &mut D, expected_tag: T) -> Result +where + T: Decodable + Eq + ::std::fmt::Debug, + V: Decodable, + D: DecoderWithPosition, + 'tcx: 'a, { let start_pos = decoder.position(); @@ -521,9 +532,7 @@ fn decode_tagged<'a, 'tcx, D, T, V>(decoder: &mut D, Ok(value) } - impl<'a, 'tcx: 'a, 'x> ty_codec::TyDecoder<'a, 'tcx> for CacheDecoder<'a, 'tcx, 'x> { - #[inline] fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.tcx @@ -539,11 +548,13 @@ impl<'a, 'tcx: 'a, 'x> ty_codec::TyDecoder<'a, 'tcx> for CacheDecoder<'a, 'tcx, self.opaque.data[self.opaque.position()] } - fn cached_ty_for_shorthand(&mut self, - shorthand: usize, - or_insert_with: F) - -> Result, Self::Error> - where F: FnOnce(&mut Self) -> Result, Self::Error> + fn cached_ty_for_shorthand( + &mut self, + shorthand: usize, + or_insert_with: F, + ) -> Result, Self::Error> + where + F: FnOnce(&mut Self) -> Result, Self::Error>, { let tcx = self.tcx(); @@ -563,7 +574,8 @@ impl<'a, 'tcx: 'a, 'x> ty_codec::TyDecoder<'a, 'tcx> for CacheDecoder<'a, 'tcx, } fn with_position(&mut self, pos: usize, f: F) -> R - where F: FnOnce(&mut Self) -> R + where + F: FnOnce(&mut Self) -> R, { debug_assert!(pos < self.opaque.data.len()); @@ -575,13 +587,11 @@ impl<'a, 'tcx: 'a, 'x> ty_codec::TyDecoder<'a, 'tcx> for CacheDecoder<'a, 'tcx, } fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum { - self.cnum_map[cnum].unwrap_or_else(|| { - bug!("Could not find new CrateNum for {:?}", cnum) - }) + self.cnum_map[cnum].unwrap_or_else(|| bug!("Could not find new CrateNum for {:?}", cnum)) } } -implement_ty_decoder!( CacheDecoder<'a, 'tcx, 'x> ); +implement_ty_decoder!(CacheDecoder<'a, 'tcx, 'x>); impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { fn specialized_decode(&mut self) -> Result { @@ -612,37 +622,33 @@ impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { let expn_info_tag = u8::decode(self)?; let ctxt = match expn_info_tag { - TAG_NO_EXPANSION_INFO => { - SyntaxContext::empty() - } + TAG_NO_EXPANSION_INFO => SyntaxContext::empty(), TAG_EXPANSION_INFO_INLINE => { let pos = AbsoluteBytePos::new(self.opaque.position()); let expn_info: ExpnInfo = Decodable::decode(self)?; let ctxt = SyntaxContext::allocate_directly(expn_info); - self.synthetic_expansion_infos.borrow_mut().insert(pos, ctxt); + self.synthetic_expansion_infos + .borrow_mut() + .insert(pos, ctxt); ctxt } TAG_EXPANSION_INFO_SHORTHAND => { let pos = AbsoluteBytePos::decode(self)?; - let cached_ctxt = self.synthetic_expansion_infos - .borrow() - .get(&pos) - .cloned(); + let cached_ctxt = 
self.synthetic_expansion_infos.borrow().get(&pos).cloned(); if let Some(ctxt) = cached_ctxt { ctxt } else { - let expn_info = self.with_position(pos.to_usize(), |this| { - ExpnInfo::decode(this) - })?; + let expn_info = + self.with_position(pos.to_usize(), |this| ExpnInfo::decode(this))?; let ctxt = SyntaxContext::allocate_directly(expn_info); - self.synthetic_expansion_infos.borrow_mut().insert(pos, ctxt); + self.synthetic_expansion_infos + .borrow_mut() + .insert(pos, ctxt); ctxt } } - _ => { - unreachable!() - } + _ => unreachable!(), }; Ok(Span::new(lo, hi, ctxt)) @@ -686,10 +692,7 @@ impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> let def_path_hash = DefPathHash::decode(self)?; // Use the DefPathHash to map to the current DefId. - let def_id = self.tcx() - .def_path_hash_to_def_id - .as_ref() - .unwrap()[&def_path_hash]; + let def_id = self.tcx().def_path_hash_to_def_id.as_ref().unwrap()[&def_path_hash]; debug_assert!(def_id.is_local()); @@ -700,7 +703,7 @@ impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> // context of the current session. Ok(hir::HirId { owner: def_id.index, - local_id + local_id, }) } } @@ -722,7 +725,8 @@ impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x } impl<'a, 'tcx, 'x, T: Decodable> SpecializedDecoder> -for CacheDecoder<'a, 'tcx, 'x> { + for CacheDecoder<'a, 'tcx, 'x> +{ #[inline] fn specialized_decode(&mut self) -> Result, Self::Error> { let discr = u8::decode(self)?; @@ -733,9 +737,7 @@ for CacheDecoder<'a, 'tcx, 'x> { let val = T::decode(self)?; Ok(mir::ClearCrossCrate::Set(val)) } - _ => { - unreachable!() - } + _ => unreachable!(), } } } @@ -743,8 +745,9 @@ for CacheDecoder<'a, 'tcx, 'x> { //- ENCODING ------------------------------------------------------------------- struct CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder, - 'tcx: 'a, +where + E: 'enc + ty_codec::TyEncoder, + 'tcx: 'a, { tcx: TyCtxt<'a, 'tcx, 'tcx>, encoder: &'enc mut E, @@ -758,7 +761,8 @@ struct CacheEncoder<'enc, 'a, 'tcx, E> } impl<'enc, 'a, 'tcx, E> CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder +where + E: 'enc + ty_codec::TyEncoder, { fn source_file_index(&mut self, source_file: Lrc) -> SourceFileIndex { self.file_to_file_index[&(&*source_file as *const SourceFile)] @@ -769,11 +773,11 @@ impl<'enc, 'a, 'tcx, E> CacheEncoder<'enc, 'a, 'tcx, E> /// encode the specified tag, then the given value, then the number of /// bytes taken up by tag and value. On decoding, we can then verify that /// we get the expected tag and read the expected number of bytes. 
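// --- editor's sketch (not part of the diff) ---
// A minimal, standalone illustration of the tagged-encoding scheme the doc
// comment above describes, using plain `u32` tags and values and a `Vec<u8>`
// buffer instead of rustc's encoder types (all names below are invented for
// the sketch): write the tag, then the value, then how many bytes the two
// took, so the decoder can verify both the tag and the byte count.
use std::convert::TryInto;

fn encode_tagged(buf: &mut Vec<u8>, tag: u32, value: u32) {
    let start = buf.len();
    buf.extend_from_slice(&tag.to_le_bytes());
    buf.extend_from_slice(&value.to_le_bytes());
    let len = (buf.len() - start) as u64;
    buf.extend_from_slice(&len.to_le_bytes());
}

fn decode_tagged(buf: &[u8], expected_tag: u32) -> Option<u32> {
    let tag = u32::from_le_bytes(buf.get(0..4)?.try_into().ok()?);
    if tag != expected_tag {
        return None; // tag mismatch: refuse to decode
    }
    let value = u32::from_le_bytes(buf.get(4..8)?.try_into().ok()?);
    let len = u64::from_le_bytes(buf.get(8..16)?.try_into().ok()?);
    if len != 8 {
        return None; // wrong number of bytes consumed for tag + value
    }
    Some(value)
}

fn main() {
    let mut buf = Vec::new();
    encode_tagged(&mut buf, 42, 7);
    assert_eq!(decode_tagged(&buf, 42), Some(7));
    assert_eq!(decode_tagged(&buf, 99), None);
}
// --- end sketch ---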
- fn encode_tagged(&mut self, - tag: T, - value: &V) - -> Result<(), E::Error> - { + fn encode_tagged( + &mut self, + tag: T, + value: &V, + ) -> Result<(), E::Error> { use ty::codec::TyEncoder; let start_pos = self.position(); @@ -786,7 +790,8 @@ impl<'enc, 'a, 'tcx, E> CacheEncoder<'enc, 'a, 'tcx, E> } impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder +where + E: 'enc + ty_codec::TyEncoder, { fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> { use std::collections::hash_map::Entry; @@ -797,7 +802,7 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder< self.interpret_allocs_inverse.push(*alloc_id); e.insert(idx); idx - }, + } }; index.encode(self) @@ -805,10 +810,10 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder< } impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder +where + E: 'enc + ty_codec::TyEncoder, { fn specialized_encode(&mut self, span: &Span) -> Result<(), Self::Error> { - if *span == DUMMY_SP { return TAG_INVALID_SPAN.encode(self); } @@ -819,11 +824,11 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx return TAG_INVALID_SPAN.encode(self); } - let (file_lo, line_lo, col_lo) = match self.source_map - .byte_pos_to_line_and_col(span_data.lo) { - Some(pos) => pos, - None => return TAG_INVALID_SPAN.encode(self) - }; + let (file_lo, line_lo, col_lo) = + match self.source_map.byte_pos_to_line_and_col(span_data.lo) { + Some(pos) => pos, + None => return TAG_INVALID_SPAN.encode(self), + }; if !file_lo.contains(span_data.hi) { return TAG_INVALID_SPAN.encode(self); @@ -862,7 +867,8 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx } impl<'enc, 'a, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder +where + E: 'enc + ty_codec::TyEncoder, { #[inline] fn position(&self) -> usize { @@ -871,7 +877,8 @@ impl<'enc, 'a, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'enc, 'a, 'tcx, E> } impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder +where + E: 'enc + ty_codec::TyEncoder, { #[inline] fn specialized_encode(&mut self, cnum: &CrateNum) -> Result<(), Self::Error> { @@ -880,37 +887,38 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, } impl<'enc, 'a, 'tcx, E> SpecializedEncoder> for CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder +where + E: 'enc + ty_codec::TyEncoder, { #[inline] fn specialized_encode(&mut self, ty: &ty::Ty<'tcx>) -> Result<(), Self::Error> { - ty_codec::encode_with_shorthand(self, ty, - |encoder| &mut encoder.type_shorthands) + ty_codec::encode_with_shorthand(self, ty, |encoder| &mut encoder.type_shorthands) } } impl<'enc, 'a, 'tcx, E> SpecializedEncoder> for CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder +where + E: 'enc + ty_codec::TyEncoder, { #[inline] - fn specialized_encode(&mut self, - predicates: &ty::GenericPredicates<'tcx>) - -> Result<(), Self::Error> { - ty_codec::encode_predicates(self, predicates, - |encoder| &mut encoder.predicate_shorthands) + fn specialized_encode( + &mut self, + predicates: &ty::GenericPredicates<'tcx>, + ) -> Result<(), Self::Error> { + ty_codec::encode_predicates(self, predicates, |encoder| { + &mut encoder.predicate_shorthands + }) } } impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, 
E> - where E: 'enc + ty_codec::TyEncoder +where + E: 'enc + ty_codec::TyEncoder, { #[inline] fn specialized_encode(&mut self, id: &hir::HirId) -> Result<(), Self::Error> { - let hir::HirId { - owner, - local_id, - } = *id; + let hir::HirId { owner, local_id } = *id; let def_path_hash = self.tcx.hir().definitions().def_path_hash(owner); @@ -919,9 +927,9 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a } } - impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder +where + E: 'enc + ty_codec::TyEncoder, { #[inline] fn specialized_encode(&mut self, id: &DefId) -> Result<(), Self::Error> { @@ -931,7 +939,8 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tc } impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder +where + E: 'enc + ty_codec::TyEncoder, { #[inline] fn specialized_encode(&mut self, id: &LocalDefId) -> Result<(), Self::Error> { @@ -940,7 +949,8 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a } impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder +where + E: 'enc + ty_codec::TyEncoder, { fn specialized_encode(&mut self, _: &DefIndex) -> Result<(), Self::Error> { bug!("Encoding DefIndex without context.") @@ -950,7 +960,8 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, // NodeIds are not stable across compilation sessions, so we store them in their // HirId representation. This allows use to map them to the current NodeId. impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder +where + E: 'enc + ty_codec::TyEncoder, { #[inline] fn specialized_encode(&mut self, node_id: &NodeId) -> Result<(), Self::Error> { @@ -960,7 +971,7 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 't } impl<'enc, 'a, 'tcx> SpecializedEncoder -for CacheEncoder<'enc, 'a, 'tcx, opaque::Encoder> + for CacheEncoder<'enc, 'a, 'tcx, opaque::Encoder> { fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> { f.encode_opaque(&mut self.encoder) @@ -968,18 +979,15 @@ for CacheEncoder<'enc, 'a, 'tcx, opaque::Encoder> } impl<'enc, 'a, 'tcx, E, T> SpecializedEncoder> -for CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder, - T: Encodable, + for CacheEncoder<'enc, 'a, 'tcx, E> +where + E: 'enc + ty_codec::TyEncoder, + T: Encodable, { #[inline] - fn specialized_encode(&mut self, - val: &mir::ClearCrossCrate) - -> Result<(), Self::Error> { + fn specialized_encode(&mut self, val: &mir::ClearCrossCrate) -> Result<(), Self::Error> { match *val { - mir::ClearCrossCrate::Clear => { - TAG_CLEAR_CROSS_CRATE_CLEAR.encode(self) - } + mir::ClearCrossCrate::Clear => TAG_CLEAR_CROSS_CRATE_CLEAR.encode(self), mir::ClearCrossCrate::Set(ref val) => { TAG_CLEAR_CROSS_CRATE_SET.encode(self)?; val.encode(self) @@ -997,7 +1005,8 @@ macro_rules! encoder_methods { } impl<'enc, 'a, 'tcx, E> Encoder for CacheEncoder<'enc, 'a, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder +where + E: 'enc + ty_codec::TyEncoder, { type Error = E::Error; @@ -1041,7 +1050,7 @@ impl UseSpecializedDecodable for IntEncodedWithFixedSize {} impl SpecializedEncoder for opaque::Encoder { fn specialized_encode(&mut self, x: &IntEncodedWithFixedSize) -> Result<(), Self::Error> { let start_pos = self.position(); - for i in 0 .. 
IntEncodedWithFixedSize::ENCODED_SIZE { + for i in 0..IntEncodedWithFixedSize::ENCODED_SIZE { ((x.0 >> i * 8) as u8).encode(self)?; } let end_pos = self.position(); @@ -1055,7 +1064,7 @@ impl<'enc> SpecializedDecoder for opaque::Decoder<'enc> let mut value: u64 = 0; let start_pos = self.position(); - for i in 0 .. IntEncodedWithFixedSize::ENCODED_SIZE { + for i in 0..IntEncodedWithFixedSize::ENCODED_SIZE { let byte: u8 = Decodable::decode(self)?; value |= (byte as u64) << (i * 8); } @@ -1067,34 +1076,36 @@ impl<'enc> SpecializedDecoder for opaque::Decoder<'enc> } } -fn encode_query_results<'enc, 'a, 'tcx, Q, E>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - encoder: &mut CacheEncoder<'enc, 'a, 'tcx, E>, - query_result_index: &mut EncodedQueryResultIndex) - -> Result<(), E::Error> - where Q: super::config::QueryDescription<'tcx>, - E: 'enc + TyEncoder, - Q::Value: Encodable, +fn encode_query_results<'enc, 'a, 'tcx, Q, E>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + encoder: &mut CacheEncoder<'enc, 'a, 'tcx, E>, + query_result_index: &mut EncodedQueryResultIndex, +) -> Result<(), E::Error> +where + Q: super::config::QueryDescription<'tcx>, + E: 'enc + TyEncoder, + Q::Value: Encodable, { - let desc = &format!("encode_query_results for {}", - unsafe { ::std::intrinsics::type_name::() }); + let desc = &format!("encode_query_results for {}", unsafe { + ::std::intrinsics::type_name::() + }); time(tcx.sess, desc, || { - - let map = Q::query_cache(tcx).borrow(); - assert!(map.active.is_empty()); - for (key, entry) in map.results.iter() { - if Q::cache_on_disk(key.clone()) { - let dep_node = SerializedDepNodeIndex::new(entry.index.index()); - - // Record position of the cache entry - query_result_index.push((dep_node, AbsoluteBytePos::new(encoder.position()))); - - // Encode the type check tables with the SerializedDepNodeIndex - // as tag. - encoder.encode_tagged(dep_node, &entry.value)?; + let map = Q::query_cache(tcx).borrow(); + assert!(map.active.is_empty()); + for (key, entry) in map.results.iter() { + if Q::cache_on_disk(key.clone()) { + let dep_node = SerializedDepNodeIndex::new(entry.index.index()); + + // Record position of the cache entry + query_result_index.push((dep_node, AbsoluteBytePos::new(encoder.position()))); + + // Encode the type check tables with the SerializedDepNodeIndex + // as tag. + encoder.encode_tagged(dep_node, &entry.value)?; + } } - } - Ok(()) + Ok(()) }) } diff --git a/src/librustc/ty/query/plumbing.rs b/src/librustc/ty/query/plumbing.rs index 6887f480f72e0..e97a7f038032f 100644 --- a/src/librustc/ty/query/plumbing.rs +++ b/src/librustc/ty/query/plumbing.rs @@ -2,27 +2,27 @@ //! that generate the actual methods on tcx which find and execute the //! provider, manage the caches, and so forth. 
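// --- editor's sketch (not part of the diff) ---
// The module docs above describe machinery that finds a provider, runs it,
// and manages per-query caches. This standalone sketch (invented names,
// `String` keys and `u64` values instead of real query keys and results)
// shows the basic get-or-compute shape the generated query methods follow.
use std::collections::HashMap;

#[derive(Default)]
struct QueryCacheSketch {
    results: HashMap<String, u64>,
}

impl QueryCacheSketch {
    fn get_or_compute(&mut self, key: &str, provider: impl FnOnce(&str) -> u64) -> u64 {
        if let Some(&cached) = self.results.get(key) {
            return cached; // cache hit: never rerun the provider
        }
        let value = provider(key); // cache miss: execute the provider once
        self.results.insert(key.to_owned(), value);
        value
    }
}

fn main() {
    let mut cache = QueryCacheSketch::default();
    let a = cache.get_or_compute("len", |k| k.len() as u64);
    let b = cache.get_or_compute("len", |_| unreachable!("must be served from cache"));
    assert_eq!((a, b), (3, 3));
}
// --- end sketch ---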
-use dep_graph::{DepNodeIndex, DepNode, DepKind, DepNodeColor}; -use errors::DiagnosticBuilder; -use errors::Level; +use dep_graph::{DepKind, DepNode, DepNodeColor, DepNodeIndex}; use errors::Diagnostic; +use errors::DiagnosticBuilder; use errors::FatalError; -use ty::tls; -use ty::{TyCtxt}; -use ty::query::Query; -use ty::query::config::{QueryConfig, QueryDescription}; -use ty::query::job::{QueryJob, QueryResult, QueryInfo}; +use errors::Level; use ty::item_path; +use ty::query::config::{QueryConfig, QueryDescription}; +use ty::query::job::{QueryInfo, QueryJob, QueryResult}; +use ty::query::Query; +use ty::tls; +use ty::TyCtxt; use util::common::{profq_msg, ProfileQueriesMsg, QueryMsg}; -use rustc_data_structures::fx::{FxHashMap}; -use rustc_data_structures::sync::{Lrc, Lock}; +use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::sync::{Lock, Lrc}; +use std::collections::hash_map::Entry; use std::mem; use std::ptr; -use std::collections::hash_map::Entry; -use syntax_pos::Span; use syntax::source_map::DUMMY_SP; +use syntax_pos::Span; pub struct QueryCache<'tcx, D: QueryConfig<'tcx> + ?Sized> { pub(super) results: FxHashMap>, @@ -37,9 +37,7 @@ pub(super) struct QueryValue { } impl QueryValue { - pub(super) fn new(value: T, - dep_node_index: DepNodeIndex) - -> QueryValue { + pub(super) fn new(value: T, dep_node_index: DepNodeIndex) -> QueryValue { QueryValue { value, index: dep_node_index, @@ -66,7 +64,7 @@ macro_rules! profq_msg { profq_msg($tcx.sess, $msg) } } - } + }; } // If enabled, format a key using its debug string, which can be @@ -76,13 +74,14 @@ macro_rules! profq_query_msg { let msg = if cfg!(debug_assertions) { if $tcx.sess.profile_queries_and_keys() { Some(format!("{:?}", $key)) - } else { None } - } else { None }; - QueryMsg { - query: $query, - msg, - } - }} + } else { + None + } + } else { + None + }; + QueryMsg { query: $query, msg } + }}; } /// A type representing the responsibility to execute the job in the `job` field. @@ -125,12 +124,10 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> { return TryGetJob::JobCompleted(result); } let job = match lock.active.entry((*key).clone()) { - Entry::Occupied(entry) => { - match *entry.get() { - QueryResult::Started(ref job) => job.clone(), - QueryResult::Poisoned => FatalError.raise(), - } - } + Entry::Occupied(entry) => match *entry.get() { + QueryResult::Started(ref job) => job.clone(), + QueryResult::Poisoned => FatalError.raise(), + }, Entry::Vacant(entry) => { // No job entry for this query. 
Return a new one to be started later return tls::with_related_context(tcx, |icx| { @@ -149,7 +146,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> { }; entry.insert(QueryResult::Started(job)); TryGetJob::NotYetStarted(owner) - }) + }); } }; mem::drop(lock); @@ -198,10 +195,10 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> { pub(super) fn start<'lcx, F, R>( &self, tcx: TyCtxt<'_, 'tcx, 'lcx>, - compute: F) - -> (R, Vec) + compute: F, + ) -> (R, Vec) where - F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'lcx>) -> R + F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'lcx>) -> R, { // The TyCtxt stored in TLS has the same global interner lifetime // as `tcx`, so we use `with_related_context` to relate the 'gcx lifetimes @@ -216,9 +213,7 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> { }; // Use the ImplicitCtxt while we execute the query - tls::enter_context(&new_icx, |_| { - compute(tcx) - }) + tls::enter_context(&new_icx, |_| compute(tcx)) }); // Extract the diagnostic from the job @@ -233,7 +228,10 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> Drop for JobOwner<'a, 'tcx, Q> { #[cold] fn drop(&mut self) { // Poison the query so jobs waiting on it panic - self.cache.borrow_mut().active.insert(self.key.clone(), QueryResult::Poisoned); + self.cache + .borrow_mut() + .active + .insert(self.key.clone(), QueryResult::Poisoned); // Also signal the completion of the job, so waiters // will continue execution self.job.signal_complete(); @@ -263,13 +261,17 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { #[cold] pub(super) fn report_cycle( self, - box CycleError { usage, cycle: stack }: Box> - ) -> Box> - { + box CycleError { + usage, + cycle: stack, + }: Box>, + ) -> Box> { assert!(!stack.is_empty()); let fix_span = |span: Span, query: &Query<'gcx>| { - self.sess.source_map().def_span(query.default_span(self, span)) + self.sess + .source_map() + .def_span(query.default_span(self, span)) }; // Disable naming impls with types in this path, since that @@ -278,27 +280,36 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // collect/coherence phases anyhow.) 
item_path::with_forced_impl_filename_line(|| { let span = fix_span(stack[1 % stack.len()].span, &stack[0].query); - let mut err = struct_span_err!(self.sess, - span, - E0391, - "cycle detected when {}", - stack[0].query.describe(self)); + let mut err = struct_span_err!( + self.sess, + span, + E0391, + "cycle detected when {}", + stack[0].query.describe(self) + ); for i in 1..stack.len() { let query = &stack[i].query; let span = fix_span(stack[(i + 1) % stack.len()].span, query); - err.span_note(span, &format!("...which requires {}...", query.describe(self))); + err.span_note( + span, + &format!("...which requires {}...", query.describe(self)), + ); } - err.note(&format!("...which again requires {}, completing the cycle", - stack[0].query.describe(self))); + err.note(&format!( + "...which again requires {}, completing the cycle", + stack[0].query.describe(self) + )); if let Some((span, query)) = usage { - err.span_note(fix_span(span, &query), - &format!("cycle used when {}", query.describe(self))); + err.span_note( + fix_span(span, &query), + &format!("cycle used when {}", query.describe(self)), + ); } - return Box::new(err) + return Box::new(err); }) } @@ -311,12 +322,16 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { let mut i = 0; while let Some(query) = current_query { - let mut db = DiagnosticBuilder::new(icx.tcx.sess.diagnostic(), + let mut db = DiagnosticBuilder::new( + icx.tcx.sess.diagnostic(), Level::FailureNote, - &format!("#{} [{}] {}", - i, - query.info.query.name(), - query.info.query.describe(icx.tcx))); + &format!( + "#{} [{}] {}", + i, + query.info.query.name(), + query.info.query.describe(icx.tcx) + ), + ); db.set_span(icx.tcx.sess.source_map().def_span(query.info.span)); icx.tcx.sess.diagnostic().force_print_db(db); @@ -339,9 +354,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.dep_graph.read_index(dep_node_index); Some(dep_node_index) } - Some(DepNodeColor::Red) => { - None - } + Some(DepNodeColor::Red) => None, None => { // try_mark_green (called below) will panic when full incremental // compilation is disabled. If that's the case, we can't try to mark nodes @@ -355,9 +368,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.dep_graph.read_index(dep_node_index); Some(dep_node_index) } - None => { - None - } + None => None, } } } @@ -367,19 +378,18 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { fn try_get_with>( self, span: Span, - key: Q::Key) - -> Result>> - { - debug!("ty::queries::{}::try_get_with(key={:?}, span={:?})", - Q::NAME, - key, - span); - - profq_msg!(self, - ProfileQueriesMsg::QueryBegin( - span.data(), - profq_query_msg!(Q::NAME, self, key), - ) + key: Q::Key, + ) -> Result>> { + debug!( + "ty::queries::{}::try_get_with(key={:?}, span={:?})", + Q::NAME, + key, + span + ); + + profq_msg!( + self, + ProfileQueriesMsg::QueryBegin(span.data(), profq_query_msg!(Q::NAME, self, key),) ); self.sess.profiler(|p| p.record_query(Q::CATEGORY)); @@ -399,7 +409,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // expensive for some DepKinds. 
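// --- editor's sketch (not part of the diff) ---
// A plain-strings rendition of the cycle report assembled by `report_cycle`
// in the hunk above: name what the first stack entry was doing, note every
// intermediate requirement, then close the loop back to the first entry.
// (The real code attaches these as spanned diagnostics; this sketch only
// builds the message lines.)
fn cycle_report(stack: &[&str]) -> Vec<String> {
    assert!(!stack.is_empty());
    let mut lines = vec![format!("cycle detected when {}", stack[0])];
    for step in &stack[1..] {
        lines.push(format!("...which requires {}...", step));
    }
    lines.push(format!(
        "...which again requires {}, completing the cycle",
        stack[0]
    ));
    lines
}

fn main() {
    let report = cycle_report(&["computing layout of `A`", "computing layout of `B`"]);
    assert_eq!(report.len(), 3);
    assert!(report[2].ends_with("completing the cycle"));
}
// --- end sketch ---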
if !self.dep_graph.is_fully_enabled() { let null_dep_node = DepNode::new_no_params(::dep_graph::DepKind::Null); - return self.force_query_with_job::(key, job, null_dep_node).map(|(v, _)| v); + return self + .force_query_with_job::(key, job, null_dep_node) + .map(|(v, _)| v); } let dep_node = Q::to_dep_node(self, &key); @@ -409,9 +421,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.sess.profiler(|p| p.start_activity(Q::CATEGORY)); let res = job.start(self, |tcx| { - tcx.dep_graph.with_anon_task(dep_node.kind, || { - Q::compute(tcx.global_tcx(), key) - }) + tcx.dep_graph + .with_anon_task(dep_node.kind, || Q::compute(tcx.global_tcx(), key)) }); self.sess.profiler(|p| p.end_activity(Q::CATEGORY)); @@ -420,7 +431,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.dep_graph.read_index(dep_node_index); - self.queries.on_disk_cache + self.queries + .on_disk_cache .store_diagnostics_for_anon_node(dep_node_index, diagnostics); job.complete(&result, dep_node_index); @@ -433,10 +445,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { profq_msg!(self, ProfileQueriesMsg::CacheHit); self.sess.profiler(|p| p.record_query_hit(Q::CATEGORY)); - return self.load_from_disk_and_cache_in_memory::(key, - job, - dep_node_index, - &dep_node) + return self.load_from_disk_and_cache_in_memory::( + key, + job, + dep_node_index, + &dep_node, + ); } } @@ -445,7 +459,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.dep_graph.read_index(dep_node_index); Ok(result) } - Err(e) => Err(e) + Err(e) => Err(e), } } @@ -454,33 +468,31 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { key: Q::Key, job: JobOwner<'a, 'gcx, Q>, dep_node_index: DepNodeIndex, - dep_node: &DepNode - ) -> Result>> - { + dep_node: &DepNode, + ) -> Result>> { // Note this function can be called concurrently from the same query // We must ensure that this is handled correctly debug_assert!(self.dep_graph.is_green(dep_node)); // First we try to load the result from the on-disk cache - let result = if Q::cache_on_disk(key.clone()) && - self.sess.opts.debugging_opts.incremental_queries { - let prev_dep_node_index = - self.dep_graph.prev_dep_node_index_of(dep_node); - let result = Q::try_load_from_disk(self.global_tcx(), - prev_dep_node_index); - - // We always expect to find a cached result for things that - // can be forced from DepNode. - debug_assert!(!dep_node.kind.can_reconstruct_query_key() || - result.is_some(), - "Missing on-disk cache entry for {:?}", - dep_node); - result - } else { - // Some things are never cached on disk. - None - }; + let result = + if Q::cache_on_disk(key.clone()) && self.sess.opts.debugging_opts.incremental_queries { + let prev_dep_node_index = self.dep_graph.prev_dep_node_index_of(dep_node); + let result = Q::try_load_from_disk(self.global_tcx(), prev_dep_node_index); + + // We always expect to find a cached result for things that + // can be forced from DepNode. + debug_assert!( + !dep_node.kind.can_reconstruct_query_key() || result.is_some(), + "Missing on-disk cache entry for {:?}", + dep_node + ); + result + } else { + // Some things are never cached on disk. 
+ None + }; let result = if let Some(result) = result { result @@ -494,9 +506,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { let (result, _) = job.start(self, |tcx| { // The dep-graph for this computation is already in // place - tcx.dep_graph.with_ignore(|| { - Q::compute(tcx, key) - }) + tcx.dep_graph.with_ignore(|| Q::compute(tcx, key)) }); result }; @@ -524,13 +534,16 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { dep_node: &DepNode, dep_node_index: DepNodeIndex, ) { - use rustc_data_structures::stable_hasher::{StableHasher, HashStable}; use ich::Fingerprint; - - assert!(Some(self.dep_graph.fingerprint_of(dep_node_index)) == - self.dep_graph.prev_fingerprint_of(dep_node), - "Fingerprint for green query instance not loaded \ - from cache: {:?}", dep_node); + use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; + + assert!( + Some(self.dep_graph.fingerprint_of(dep_node_index)) + == self.dep_graph.prev_fingerprint_of(dep_node), + "Fingerprint for green query instance not loaded \ + from cache: {:?}", + dep_node + ); debug!("BEGIN verify_ich({:?})", dep_node); let mut hcx = self.create_stable_hashing_context(); @@ -543,26 +556,33 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { let old_hash = self.dep_graph.fingerprint_of(dep_node_index); - assert!(new_hash == old_hash, "Found unstable fingerprints \ - for {:?}", dep_node); + assert!( + new_hash == old_hash, + "Found unstable fingerprints \ + for {:?}", + dep_node + ); } fn force_query_with_job>( self, key: Q::Key, job: JobOwner<'_, 'gcx, Q>, - dep_node: DepNode) - -> Result<(Q::Value, DepNodeIndex), Box>> { + dep_node: DepNode, + ) -> Result<(Q::Value, DepNodeIndex), Box>> { // If the following assertion triggers, it can have two reasons: // 1. Something is wrong with DepNode creation, either here or // in DepGraph::try_mark_green() // 2. 
Two distinct query keys get mapped to the same DepNode // (see for example #48923) - assert!(!self.dep_graph.dep_node_exists(&dep_node), - "Forcing query with already existing DepNode.\n\ - - query-key: {:?}\n\ - - dep-node: {:?}", - key, dep_node); + assert!( + !self.dep_graph.dep_node_exists(&dep_node), + "Forcing query with already existing DepNode.\n\ + - query-key: {:?}\n\ + - dep-node: {:?}", + key, + dep_node + ); profq_msg!(self, ProfileQueriesMsg::ProviderBegin); self.sess.profiler(|p| { @@ -572,15 +592,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { let res = job.start(self, |tcx| { if dep_node.kind.is_eval_always() { - tcx.dep_graph.with_eval_always_task(dep_node, - tcx, - key, - Q::compute) + tcx.dep_graph + .with_eval_always_task(dep_node, tcx, key, Q::compute) } else { - tcx.dep_graph.with_task(dep_node, - tcx, - key, - Q::compute) + tcx.dep_graph.with_task(dep_node, tcx, key, Q::compute) } }); @@ -594,7 +609,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } if dep_node.kind != ::dep_graph::DepKind::Null { - self.queries.on_disk_cache + self.queries + .on_disk_cache .store_diagnostics(dep_node_index, diagnostics); } @@ -636,12 +652,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } #[allow(dead_code)] - fn force_query>( - self, - key: Q::Key, - span: Span, - dep_node: DepNode - ) { + fn force_query>(self, key: Q::Key, span: Span, dep_node: DepNode) { profq_msg!( self, ProfileQueriesMsg::QueryBegin(span.data(), profq_query_msg!(Q::NAME, self, key)) @@ -671,22 +682,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // FIXME: Try uninlining this #[inline(always)] - pub(super) fn get_query>( - self, - span: Span, - key: Q::Key, - ) -> Q::Value { - self.try_get_with::(span, key).unwrap_or_else(|e| { - self.emit_error::(e) - }) + pub(super) fn get_query>(self, span: Span, key: Q::Key) -> Q::Value { + self.try_get_with::(span, key) + .unwrap_or_else(|e| self.emit_error::(e)) } #[inline(never)] #[cold] - fn emit_error>( - self, - e: Box>, - ) -> Q::Value { + fn emit_error>(self, e: Box>) -> Q::Value { self.report_cycle(e).emit(); Q::handle_cycle_error(self) } @@ -1087,7 +1090,6 @@ macro_rules! define_provider_struct { }; } - /// The red/green evaluation system will try to mark a specific DepNode in the /// dependency graph as green by recursively trying to mark the dependencies of /// that DepNode as green. While doing so, it will sometimes encounter a DepNode @@ -1130,9 +1132,10 @@ macro_rules! define_provider_struct { /// then `force_from_dep_node()` should not fail for it. Otherwise, you can just /// add it to the "We don't have enough information to reconstruct..." group in /// the match below. -pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>, - dep_node: &DepNode) - -> bool { +pub fn force_from_dep_node<'a, 'gcx, 'lcx>( + tcx: TyCtxt<'a, 'gcx, 'lcx>, + dep_node: &DepNode, +) -> bool { use hir::def_id::LOCAL_CRATE; // We must avoid ever having to call force_from_dep_node() for a @@ -1148,11 +1151,13 @@ pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>, // each CGU, right after partitioning. This way `try_mark_green` will always // hit the cache instead of having to go through `force_from_dep_node`. // This assertion makes sure, we actually keep applying the solution above. 
- debug_assert!(dep_node.kind != DepKind::CodegenUnit, - "calling force_from_dep_node() on DepKind::CodegenUnit"); + debug_assert!( + dep_node.kind != DepKind::CodegenUnit, + "calling force_from_dep_node() on DepKind::CodegenUnit" + ); if !dep_node.kind.can_reconstruct_query_key() { - return false + return false; } macro_rules! def_id { @@ -1161,21 +1166,21 @@ pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>, def_id } else { // return from the whole function - return false + return false; } - } + }; }; macro_rules! krate { - () => { (def_id!()).krate } + () => { + (def_id!()).krate + }; }; macro_rules! force { - ($query:ident, $key:expr) => { - { - tcx.force_query::<::ty::query::queries::$query<'_>>($key, DUMMY_SP, *dep_node); - } - } + ($query:ident, $key:expr) => {{ + tcx.force_query::<::ty::query::queries::$query<'_>>($key, DUMMY_SP, *dep_node); + }}; }; // FIXME(#45015): We should try move this boilerplate code into a macro @@ -1419,7 +1424,6 @@ pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>, true } - // FIXME(#45015): Another piece of boilerplate code that could be generated in // a combined define_dep_nodes!()/define_queries!() macro. macro_rules! impl_load_from_cache { diff --git a/src/librustc/ty/query/values.rs b/src/librustc/ty/query/values.rs index 3f84f1bc78972..3b62c1e977e6d 100644 --- a/src/librustc/ty/query/values.rs +++ b/src/librustc/ty/query/values.rs @@ -27,7 +27,8 @@ impl<'tcx> Value<'tcx> for Ty<'tcx> { impl<'tcx> Value<'tcx> for ty::SymbolName { fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> Self { - ty::SymbolName { name: Symbol::intern("").as_interned_str() } + ty::SymbolName { + name: Symbol::intern("").as_interned_str(), + } } } - diff --git a/src/librustc/ty/relate.rs b/src/librustc/ty/relate.rs index 84e15a751353e..9f9eab2f7b4d2 100644 --- a/src/librustc/ty/relate.rs +++ b/src/librustc/ty/relate.rs @@ -4,19 +4,19 @@ //! types or regions but can be other things. Examples of type relations are //! subtyping, type equality, etc. +use hir as ast; use hir::def_id::DefId; use mir::interpret::ConstValue; -use ty::subst::{Kind, UnpackedKind, Substs}; -use ty::{self, Ty, TyCtxt, TypeFoldable}; -use ty::error::{ExpectedFound, TypeError}; use mir::interpret::GlobalId; -use util::common::ErrorReported; -use syntax_pos::DUMMY_SP; -use std::rc::Rc; -use std::iter; use rustc_target::spec::abi; -use hir as ast; +use std::iter; +use std::rc::Rc; +use syntax_pos::DUMMY_SP; use traits; +use ty::error::{ExpectedFound, TypeError}; +use ty::subst::{Kind, Substs, UnpackedKind}; +use ty::{self, Ty, TyCtxt, TypeFoldable}; +use util::common::ErrorReported; pub type RelateResult<'tcx, T> = Result>; @@ -30,10 +30,10 @@ pub enum TraitObjectMode { NoSquash, /// A temporary mode to treat `Send + Sync = Sync + Send`, should be /// used only in coherence. - SquashAutoTraitsIssue33140 + SquashAutoTraitsIssue33140, } -pub trait TypeRelation<'a, 'gcx: 'a+'tcx, 'tcx: 'a> : Sized { +pub trait TypeRelation<'a, 'gcx: 'a + 'tcx, 'tcx: 'a>: Sized { fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx>; /// Return the trait object mode to be used. @@ -46,8 +46,9 @@ pub trait TypeRelation<'a, 'gcx: 'a+'tcx, 'tcx: 'a> : Sized { /// relation. Just affects error messages. 
fn a_is_expected(&self) -> bool; - fn with_cause(&mut self, _cause: Cause, f: F) -> R - where F: FnOnce(&mut Self) -> R + fn with_cause(&mut self, _cause: Cause, f: F) -> R + where + F: FnOnce(&mut Self) -> R, { f(self) } @@ -60,27 +61,28 @@ pub trait TypeRelation<'a, 'gcx: 'a+'tcx, 'tcx: 'a> : Sized { /// Relate the two substitutions for the given item. The default /// is to look up the variance for the item and proceed /// accordingly. - fn relate_item_substs(&mut self, - item_def_id: DefId, - a_subst: &'tcx Substs<'tcx>, - b_subst: &'tcx Substs<'tcx>) - -> RelateResult<'tcx, &'tcx Substs<'tcx>> - { - debug!("relate_item_substs(item_def_id={:?}, a_subst={:?}, b_subst={:?})", - item_def_id, - a_subst, - b_subst); + fn relate_item_substs( + &mut self, + item_def_id: DefId, + a_subst: &'tcx Substs<'tcx>, + b_subst: &'tcx Substs<'tcx>, + ) -> RelateResult<'tcx, &'tcx Substs<'tcx>> { + debug!( + "relate_item_substs(item_def_id={:?}, a_subst={:?}, b_subst={:?})", + item_def_id, a_subst, b_subst + ); let opt_variances = self.tcx().variances_of(item_def_id); relate_substs(self, Some(&opt_variances), a_subst, b_subst) } /// Switch variance for the purpose of relating `a` and `b`. - fn relate_with_variance>(&mut self, - variance: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T>; + fn relate_with_variance>( + &mut self, + variance: ty::Variance, + a: &T, + b: &T, + ) -> RelateResult<'tcx, T>; // Overrideable relations. You shouldn't typically call these // directly, instead call `relate()`, which in turn calls @@ -88,37 +90,46 @@ pub trait TypeRelation<'a, 'gcx: 'a+'tcx, 'tcx: 'a> : Sized { // additional hooks for other types in the future if needed // without making older code, which called `relate`, obsolete. - fn tys(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) - -> RelateResult<'tcx, Ty<'tcx>>; + fn tys(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>>; - fn regions(&mut self, a: ty::Region<'tcx>, b: ty::Region<'tcx>) - -> RelateResult<'tcx, ty::Region<'tcx>>; + fn regions( + &mut self, + a: ty::Region<'tcx>, + b: ty::Region<'tcx>, + ) -> RelateResult<'tcx, ty::Region<'tcx>>; - fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) - -> RelateResult<'tcx, ty::Binder> - where T: Relate<'tcx>; + fn binders( + &mut self, + a: &ty::Binder, + b: &ty::Binder, + ) -> RelateResult<'tcx, ty::Binder> + where + T: Relate<'tcx>; } pub trait Relate<'tcx>: TypeFoldable<'tcx> { - fn relate<'a, 'gcx, R>(relation: &mut R, a: &Self, b: &Self) - -> RelateResult<'tcx, Self> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a; + fn relate<'a, 'gcx, R>(relation: &mut R, a: &Self, b: &Self) -> RelateResult<'tcx, Self> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a; } /////////////////////////////////////////////////////////////////////////// // Relate impls impl<'tcx> Relate<'tcx> for ty::TypeAndMut<'tcx> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &ty::TypeAndMut<'tcx>, - b: &ty::TypeAndMut<'tcx>) - -> RelateResult<'tcx, ty::TypeAndMut<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &ty::TypeAndMut<'tcx>, + b: &ty::TypeAndMut<'tcx>, + ) -> RelateResult<'tcx, ty::TypeAndMut<'tcx>> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { - debug!("{}.mts({:?}, {:?})", - relation.tag(), - a, - b); + debug!("{}.mts({:?}, {:?})", relation.tag(), a, b); if a.mutbl != b.mutbl { Err(TypeError::Mutability) } else { @@ -128,17 +139,24 @@ impl<'tcx> Relate<'tcx> for 
ty::TypeAndMut<'tcx> { ast::Mutability::MutMutable => ty::Invariant, }; let ty = relation.relate_with_variance(variance, &a.ty, &b.ty)?; - Ok(ty::TypeAndMut {ty: ty, mutbl: mutbl}) + Ok(ty::TypeAndMut { + ty: ty, + mutbl: mutbl, + }) } } } -pub fn relate_substs<'a, 'gcx, 'tcx, R>(relation: &mut R, - variances: Option<&Vec>, - a_subst: &'tcx Substs<'tcx>, - b_subst: &'tcx Substs<'tcx>) - -> RelateResult<'tcx, &'tcx Substs<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a +pub fn relate_substs<'a, 'gcx, 'tcx, R>( + relation: &mut R, + variances: Option<&Vec>, + a_subst: &'tcx Substs<'tcx>, + b_subst: &'tcx Substs<'tcx>, +) -> RelateResult<'tcx, &'tcx Substs<'tcx>> +where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { let tcx = relation.tcx(); @@ -151,17 +169,24 @@ pub fn relate_substs<'a, 'gcx, 'tcx, R>(relation: &mut R, } impl<'tcx> Relate<'tcx> for ty::FnSig<'tcx> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &ty::FnSig<'tcx>, - b: &ty::FnSig<'tcx>) - -> RelateResult<'tcx, ty::FnSig<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &ty::FnSig<'tcx>, + b: &ty::FnSig<'tcx>, + ) -> RelateResult<'tcx, ty::FnSig<'tcx>> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { let tcx = relation.tcx(); if a.variadic != b.variadic { - return Err(TypeError::VariadicMismatch( - expected_found(relation, &a.variadic, &b.variadic))); + return Err(TypeError::VariadicMismatch(expected_found( + relation, + &a.variadic, + &b.variadic, + ))); } let unsafety = relation.relate(&a.unsafety, &b.unsafety)?; let abi = relation.relate(&a.abi, &b.abi)?; @@ -170,7 +195,10 @@ impl<'tcx> Relate<'tcx> for ty::FnSig<'tcx> { return Err(TypeError::ArgCount); } - let inputs_and_output = a.inputs().iter().cloned() + let inputs_and_output = a + .inputs() + .iter() + .cloned() .zip(b.inputs().iter().cloned()) .map(|x| (x, false)) .chain(iter::once(((a.output(), b.output()), true))) @@ -191,11 +219,15 @@ impl<'tcx> Relate<'tcx> for ty::FnSig<'tcx> { } impl<'tcx> Relate<'tcx> for ast::Unsafety { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &ast::Unsafety, - b: &ast::Unsafety) - -> RelateResult<'tcx, ast::Unsafety> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &ast::Unsafety, + b: &ast::Unsafety, + ) -> RelateResult<'tcx, ast::Unsafety> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { if a != b { Err(TypeError::UnsafetyMismatch(expected_found(relation, a, b))) @@ -206,11 +238,15 @@ impl<'tcx> Relate<'tcx> for ast::Unsafety { } impl<'tcx> Relate<'tcx> for abi::Abi { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &abi::Abi, - b: &abi::Abi) - -> RelateResult<'tcx, abi::Abi> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &abi::Abi, + b: &abi::Abi, + ) -> RelateResult<'tcx, abi::Abi> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { if a == b { Ok(*a) @@ -221,15 +257,22 @@ impl<'tcx> Relate<'tcx> for abi::Abi { } impl<'tcx> Relate<'tcx> for ty::ProjectionTy<'tcx> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &ty::ProjectionTy<'tcx>, - b: &ty::ProjectionTy<'tcx>) - -> RelateResult<'tcx, ty::ProjectionTy<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &ty::ProjectionTy<'tcx>, + b: &ty::ProjectionTy<'tcx>, + ) -> 
RelateResult<'tcx, ty::ProjectionTy<'tcx>> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { if a.item_def_id != b.item_def_id { - Err(TypeError::ProjectionMismatched( - expected_found(relation, &a.item_def_id, &b.item_def_id))) + Err(TypeError::ProjectionMismatched(expected_found( + relation, + &a.item_def_id, + &b.item_def_id, + ))) } else { let substs = relation.relate(&a.substs, &b.substs)?; Ok(ty::ProjectionTy { @@ -241,15 +284,22 @@ impl<'tcx> Relate<'tcx> for ty::ProjectionTy<'tcx> { } impl<'tcx> Relate<'tcx> for ty::ExistentialProjection<'tcx> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &ty::ExistentialProjection<'tcx>, - b: &ty::ExistentialProjection<'tcx>) - -> RelateResult<'tcx, ty::ExistentialProjection<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &ty::ExistentialProjection<'tcx>, + b: &ty::ExistentialProjection<'tcx>, + ) -> RelateResult<'tcx, ty::ExistentialProjection<'tcx>> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { if a.item_def_id != b.item_def_id { - Err(TypeError::ProjectionMismatched( - expected_found(relation, &a.item_def_id, &b.item_def_id))) + Err(TypeError::ProjectionMismatched(expected_found( + relation, + &a.item_def_id, + &b.item_def_id, + ))) } else { let ty = relation.relate(&a.ty, &b.ty)?; let substs = relation.relate(&a.substs, &b.substs)?; @@ -263,11 +313,15 @@ impl<'tcx> Relate<'tcx> for ty::ExistentialProjection<'tcx> { } impl<'tcx> Relate<'tcx> for Vec> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &Vec>, - b: &Vec>) - -> RelateResult<'tcx, Vec>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &Vec>, + b: &Vec>, + ) -> RelateResult<'tcx, Vec>> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { // To be compatible, `a` and `b` must be for precisely the // same set of traits and item names. We always require that @@ -275,46 +329,68 @@ impl<'tcx> Relate<'tcx> for Vec> { // so we can just iterate through the lists pairwise, so long as they are the // same length. 
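// --- editor's sketch (not part of the diff) ---
// The comment just above notes that two bound lists are related by pairing
// their elements, provided the lengths agree. This standalone toy (invented
// `ToyTy`; not rustc's `TypeRelation` machinery) shows that shape: a
// structural equality relation that recurses component-wise and reports a
// mismatch otherwise.
#[derive(Clone, Debug, PartialEq)]
enum ToyTy {
    Bool,
    Tuple(Vec<ToyTy>),
}

fn equate(a: &ToyTy, b: &ToyTy) -> Result<ToyTy, String> {
    match (a, b) {
        (ToyTy::Bool, ToyTy::Bool) => Ok(ToyTy::Bool),
        (ToyTy::Tuple(xs), ToyTy::Tuple(ys)) if xs.len() == ys.len() => {
            // Relate the components pairwise, propagating the first error.
            let parts: Result<Vec<_>, _> =
                xs.iter().zip(ys).map(|(x, y)| equate(x, y)).collect();
            Ok(ToyTy::Tuple(parts?))
        }
        _ => Err(format!("cannot relate {:?} and {:?}", a, b)),
    }
}

fn main() {
    let a = ToyTy::Tuple(vec![ToyTy::Bool, ToyTy::Bool]);
    let b = ToyTy::Tuple(vec![ToyTy::Bool, ToyTy::Bool]);
    assert_eq!(equate(&a, &b), Ok(a));
    assert!(equate(&ToyTy::Bool, &ToyTy::Tuple(vec![])).is_err());
}
// --- end sketch ---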
if a.len() != b.len() { - Err(TypeError::ProjectionBoundsLength(expected_found(relation, &a.len(), &b.len()))) + Err(TypeError::ProjectionBoundsLength(expected_found( + relation, + &a.len(), + &b.len(), + ))) } else { a.iter() - .zip(b) - .map(|(a, b)| relation.relate(a, b)) - .collect() + .zip(b) + .map(|(a, b)| relation.relate(a, b)) + .collect() } } } impl<'tcx> Relate<'tcx> for ty::TraitRef<'tcx> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &ty::TraitRef<'tcx>, - b: &ty::TraitRef<'tcx>) - -> RelateResult<'tcx, ty::TraitRef<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &ty::TraitRef<'tcx>, + b: &ty::TraitRef<'tcx>, + ) -> RelateResult<'tcx, ty::TraitRef<'tcx>> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { // Different traits cannot be related if a.def_id != b.def_id { - Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id))) + Err(TypeError::Traits(expected_found( + relation, &a.def_id, &b.def_id, + ))) } else { let substs = relate_substs(relation, None, a.substs, b.substs)?; - Ok(ty::TraitRef { def_id: a.def_id, substs: substs }) + Ok(ty::TraitRef { + def_id: a.def_id, + substs: substs, + }) } } } impl<'tcx> Relate<'tcx> for ty::ExistentialTraitRef<'tcx> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &ty::ExistentialTraitRef<'tcx>, - b: &ty::ExistentialTraitRef<'tcx>) - -> RelateResult<'tcx, ty::ExistentialTraitRef<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &ty::ExistentialTraitRef<'tcx>, + b: &ty::ExistentialTraitRef<'tcx>, + ) -> RelateResult<'tcx, ty::ExistentialTraitRef<'tcx>> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { // Different traits cannot be related if a.def_id != b.def_id { - Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id))) + Err(TypeError::Traits(expected_found( + relation, &a.def_id, &b.def_id, + ))) } else { let substs = relate_substs(relation, None, a.substs, b.substs)?; - Ok(ty::ExistentialTraitRef { def_id: a.def_id, substs: substs }) + Ok(ty::ExistentialTraitRef { + def_id: a.def_id, + substs: substs, + }) } } } @@ -329,11 +405,15 @@ TupleStructTypeFoldableImpl! { } impl<'tcx> Relate<'tcx> for GeneratorWitness<'tcx> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &GeneratorWitness<'tcx>, - b: &GeneratorWitness<'tcx>) - -> RelateResult<'tcx, GeneratorWitness<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &GeneratorWitness<'tcx>, + b: &GeneratorWitness<'tcx>, + ) -> RelateResult<'tcx, GeneratorWitness<'tcx>> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { assert_eq!(a.0.len(), b.0.len()); let tcx = relation.tcx(); @@ -343,11 +423,15 @@ impl<'tcx> Relate<'tcx> for GeneratorWitness<'tcx> { } impl<'tcx> Relate<'tcx> for Ty<'tcx> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &Ty<'tcx>, - b: &Ty<'tcx>) - -> RelateResult<'tcx, Ty<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &Ty<'tcx>, + b: &Ty<'tcx>, + ) -> RelateResult<'tcx, Ty<'tcx>> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { relation.tys(a, b) } @@ -356,20 +440,22 @@ impl<'tcx> Relate<'tcx> for Ty<'tcx> { /// The main "type relation" routine. 
Note that this does not handle /// inference artifacts, so you should filter those out before calling /// it. -pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R, - a: Ty<'tcx>, - b: Ty<'tcx>) - -> RelateResult<'tcx, Ty<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a +pub fn super_relate_tys<'a, 'gcx, 'tcx, R>( + relation: &mut R, + a: Ty<'tcx>, + b: Ty<'tcx>, +) -> RelateResult<'tcx, Ty<'tcx>> +where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { let tcx = relation.tcx(); let a_sty = &a.sty; let b_sty = &b.sty; debug!("super_relate_tys: a_sty={:?} b_sty={:?}", a_sty, b_sty); match (a_sty, b_sty) { - (&ty::Infer(_), _) | - (_, &ty::Infer(_)) => - { + (&ty::Infer(_), _) | (_, &ty::Infer(_)) => { // The caller should handle these cases! bug!("var types encountered in super_relate_tys") } @@ -378,59 +464,39 @@ pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R, bug!("bound types encountered in super_relate_tys") } - (&ty::Error, _) | (_, &ty::Error) => - { - Ok(tcx.types.err) - } + (&ty::Error, _) | (_, &ty::Error) => Ok(tcx.types.err), - (&ty::Never, _) | - (&ty::Char, _) | - (&ty::Bool, _) | - (&ty::Int(_), _) | - (&ty::Uint(_), _) | - (&ty::Float(_), _) | - (&ty::Str, _) + (&ty::Never, _) + | (&ty::Char, _) + | (&ty::Bool, _) + | (&ty::Int(_), _) + | (&ty::Uint(_), _) + | (&ty::Float(_), _) + | (&ty::Str, _) if a == b => { Ok(a) } - (&ty::Param(ref a_p), &ty::Param(ref b_p)) - if a_p.idx == b_p.idx => - { - Ok(a) - } + (&ty::Param(ref a_p), &ty::Param(ref b_p)) if a_p.idx == b_p.idx => Ok(a), - (ty::Placeholder(p1), ty::Placeholder(p2)) if p1 == p2 => { - Ok(a) - } + (ty::Placeholder(p1), ty::Placeholder(p2)) if p1 == p2 => Ok(a), - (&ty::Adt(a_def, a_substs), &ty::Adt(b_def, b_substs)) - if a_def == b_def => - { + (&ty::Adt(a_def, a_substs), &ty::Adt(b_def, b_substs)) if a_def == b_def => { let substs = relation.relate_item_substs(a_def.did, a_substs, b_substs)?; Ok(tcx.mk_adt(a_def, substs)) } - (&ty::Foreign(a_id), &ty::Foreign(b_id)) - if a_id == b_id => - { - Ok(tcx.mk_foreign(a_id)) - } + (&ty::Foreign(a_id), &ty::Foreign(b_id)) if a_id == b_id => Ok(tcx.mk_foreign(a_id)), (&ty::Dynamic(ref a_obj, ref a_region), &ty::Dynamic(ref b_obj, ref b_region)) => { - let region_bound = relation.with_cause(Cause::ExistentialRegionBound, - |relation| { - relation.relate_with_variance( - ty::Contravariant, - a_region, - b_region) - })?; + let region_bound = relation.with_cause(Cause::ExistentialRegionBound, |relation| { + relation.relate_with_variance(ty::Contravariant, a_region, b_region) + })?; Ok(tcx.mk_dynamic(relation.relate(a_obj, b_obj)?, region_bound)) } - (&ty::Generator(a_id, a_substs, movability), - &ty::Generator(b_id, b_substs, _)) + (&ty::Generator(a_id, a_substs, movability), &ty::Generator(b_id, b_substs, _)) if a_id == b_id => { // All Generator types with the same id represent @@ -440,21 +506,19 @@ pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R, Ok(tcx.mk_generator(a_id, substs, movability)) } - (&ty::GeneratorWitness(a_types), &ty::GeneratorWitness(b_types)) => - { + (&ty::GeneratorWitness(a_types), &ty::GeneratorWitness(b_types)) => { // Wrap our types with a temporary GeneratorWitness struct // inside the binder so we can related them let a_types = a_types.map_bound(GeneratorWitness); let b_types = b_types.map_bound(GeneratorWitness); // Then remove the GeneratorWitness for the result - let types = relation.relate(&a_types, &b_types)?.map_bound(|witness| witness.0); + let types = relation + 
.relate(&a_types, &b_types)? + .map_bound(|witness| witness.0); Ok(tcx.mk_generator_witness(types)) } - (&ty::Closure(a_id, a_substs), - &ty::Closure(b_id, b_substs)) - if a_id == b_id => - { + (&ty::Closure(a_id, a_substs), &ty::Closure(b_id, b_substs)) if a_id == b_id => { // All Closure types with the same id represent // the (anonymous) type of the same closure expression. So // all of their regions should be equated. @@ -462,23 +526,26 @@ pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R, Ok(tcx.mk_closure(a_id, substs)) } - (&ty::RawPtr(ref a_mt), &ty::RawPtr(ref b_mt)) => - { + (&ty::RawPtr(ref a_mt), &ty::RawPtr(ref b_mt)) => { let mt = relation.relate(a_mt, b_mt)?; Ok(tcx.mk_ptr(mt)) } - (&ty::Ref(a_r, a_ty, a_mutbl), &ty::Ref(b_r, b_ty, b_mutbl)) => - { + (&ty::Ref(a_r, a_ty, a_mutbl), &ty::Ref(b_r, b_ty, b_mutbl)) => { let r = relation.relate_with_variance(ty::Contravariant, &a_r, &b_r)?; - let a_mt = ty::TypeAndMut { ty: a_ty, mutbl: a_mutbl }; - let b_mt = ty::TypeAndMut { ty: b_ty, mutbl: b_mutbl }; + let a_mt = ty::TypeAndMut { + ty: a_ty, + mutbl: a_mutbl, + }; + let b_mt = ty::TypeAndMut { + ty: b_ty, + mutbl: b_mutbl, + }; let mt = relation.relate(&a_mt, &b_mt)?; Ok(tcx.mk_ref(r, mt)) } - (&ty::Array(a_t, sz_a), &ty::Array(b_t, sz_b)) => - { + (&ty::Array(a_t, sz_a), &ty::Array(b_t, sz_b)) => { let t = relation.relate(&a_t, &b_t)?; assert_eq!(sz_a.ty, tcx.types.usize); assert_eq!(sz_b.ty, tcx.types.usize); @@ -491,31 +558,33 @@ pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R, // FIXME(eddyb) get the right param_env. let param_env = ty::ParamEnv::empty(); if let Some(substs) = tcx.lift_to_global(&substs) { - let instance = ty::Instance::resolve( - tcx.global_tcx(), - param_env, - def_id, - substs, - ); + let instance = + ty::Instance::resolve(tcx.global_tcx(), param_env, def_id, substs); if let Some(instance) = instance { let cid = GlobalId { instance, - promoted: None + promoted: None, }; - if let Some(s) = tcx.const_eval(param_env.and(cid)) - .ok() - .map(|c| c.unwrap_usize(tcx)) { - return Ok(s) + if let Some(s) = tcx + .const_eval(param_env.and(cid)) + .ok() + .map(|c| c.unwrap_usize(tcx)) + { + return Ok(s); } } } - tcx.sess.delay_span_bug(tcx.def_span(def_id), - "array length could not be evaluated"); + tcx.sess.delay_span_bug( + tcx.def_span(def_id), + "array length could not be evaluated", + ); Err(ErrorReported) } _ => { - tcx.sess.delay_span_bug(DUMMY_SP, - &format!("arrays should not have {:?} as length", x)); + tcx.sess.delay_span_bug( + DUMMY_SP, + &format!("arrays should not have {:?} as length", x), + ); Err(ErrorReported) } } @@ -525,30 +594,30 @@ pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R, if sz_a_u64 == sz_b_u64 { Ok(tcx.mk_ty(ty::Array(t, sz_a))) } else { - Err(TypeError::FixedArraySize( - expected_found(relation, &sz_a_u64, &sz_b_u64))) + Err(TypeError::FixedArraySize(expected_found( + relation, &sz_a_u64, &sz_b_u64, + ))) } } // We reported an error or will ICE, so we can return Error. - (Err(ErrorReported), _) | (_, Err(ErrorReported)) => { - Ok(tcx.types.err) - } + (Err(ErrorReported), _) | (_, Err(ErrorReported)) => Ok(tcx.types.err), } } - (&ty::Slice(a_t), &ty::Slice(b_t)) => - { + (&ty::Slice(a_t), &ty::Slice(b_t)) => { let t = relation.relate(&a_t, &b_t)?; Ok(tcx.mk_slice(t)) } - (&ty::Tuple(as_), &ty::Tuple(bs)) => - { + (&ty::Tuple(as_), &ty::Tuple(bs)) => { if as_.len() == bs.len() { Ok(tcx.mk_tup(as_.iter().zip(bs).map(|(a, b)| relation.relate(a, b)))?) 
} else if !(as_.is_empty() || bs.is_empty()) { - Err(TypeError::TupleSize( - expected_found(relation, &as_.len(), &bs.len()))) + Err(TypeError::TupleSize(expected_found( + relation, + &as_.len(), + &bs.len(), + ))) } else { Err(TypeError::Sorts(expected_found(relation, &a, &b))) } @@ -561,8 +630,7 @@ pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R, Ok(tcx.mk_fn_def(a_def_id, substs)) } - (&ty::FnPtr(a_fty), &ty::FnPtr(b_fty)) => - { + (&ty::FnPtr(a_fty), &ty::FnPtr(b_fty)) => { let fty = relation.relate(&a_fty, &b_fty)?; Ok(tcx.mk_fn_ptr(fty)) } @@ -585,38 +653,35 @@ pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R, Ok(tcx.mk_opaque(a_def_id, substs)) } - _ => - { - Err(TypeError::Sorts(expected_found(relation, &a, &b))) - } + _ => Err(TypeError::Sorts(expected_found(relation, &a, &b))), } } impl<'tcx> Relate<'tcx> for &'tcx ty::List> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &Self, - b: &Self) - -> RelateResult<'tcx, Self> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { + fn relate<'a, 'gcx, R>(relation: &mut R, a: &Self, b: &Self) -> RelateResult<'tcx, Self> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, + { use ty::ExistentialPredicate::*; let tcx = relation.tcx(); let (a_buf, b_buf); let (a_norm, b_norm): (&[_], &[_]) = match relation.trait_object_mode() { - TraitObjectMode::NoSquash => { - (a, b) - } + TraitObjectMode::NoSquash => (a, b), TraitObjectMode::SquashAutoTraitsIssue33140 => { // Treat auto-trait "principal" components as equal // to the non-principal components, to make // `dyn Send+Sync = dyn Sync+Send`. let normalize = |d: &[ty::ExistentialPredicate<'tcx>]| { - let mut result: Vec<_> = d.iter().map(|pi| match pi { - Trait(ref a) if tcx.trait_is_auto(a.def_id) => { - AutoTrait(a.def_id) - }, - other => *other - }).collect(); + let mut result: Vec<_> = d + .iter() + .map(|pi| match pi { + Trait(ref a) if tcx.trait_is_auto(a.def_id) => AutoTrait(a.def_id), + other => *other, + }) + .collect(); result.sort_by(|a, b| a.stable_cmp(tcx, b)); result.dedup(); @@ -631,7 +696,9 @@ impl<'tcx> Relate<'tcx> for &'tcx ty::List> { }; if a_norm.len() != b_norm.len() { - return Err(TypeError::ExistentialMismatch(expected_found(relation, a, b))); + return Err(TypeError::ExistentialMismatch(expected_found( + relation, a, b, + ))); } let v = a_norm.iter().zip(b_norm.iter()).map(|(ep_a, ep_b)| { @@ -640,7 +707,9 @@ impl<'tcx> Relate<'tcx> for &'tcx ty::List> { (Trait(ref a), Trait(ref b)) => Ok(Trait(relation.relate(a, b)?)), (Projection(ref a), Projection(ref b)) => Ok(Projection(relation.relate(a, b)?)), (AutoTrait(ref a), AutoTrait(ref b)) if a == b => Ok(AutoTrait(*a)), - _ => Err(TypeError::ExistentialMismatch(expected_found(relation, a, b))) + _ => Err(TypeError::ExistentialMismatch(expected_found( + relation, a, b, + ))), } }); Ok(tcx.mk_existential_predicates(v)?) 
@@ -648,11 +717,15 @@ impl<'tcx> Relate<'tcx> for &'tcx ty::List> { } impl<'tcx> Relate<'tcx> for ty::ClosureSubsts<'tcx> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &ty::ClosureSubsts<'tcx>, - b: &ty::ClosureSubsts<'tcx>) - -> RelateResult<'tcx, ty::ClosureSubsts<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &ty::ClosureSubsts<'tcx>, + b: &ty::ClosureSubsts<'tcx>, + ) -> RelateResult<'tcx, ty::ClosureSubsts<'tcx>> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { let substs = relate_substs(relation, None, a.substs, b.substs)?; Ok(ty::ClosureSubsts { substs }) @@ -660,11 +733,15 @@ impl<'tcx> Relate<'tcx> for ty::ClosureSubsts<'tcx> { } impl<'tcx> Relate<'tcx> for ty::GeneratorSubsts<'tcx> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &ty::GeneratorSubsts<'tcx>, - b: &ty::GeneratorSubsts<'tcx>) - -> RelateResult<'tcx, ty::GeneratorSubsts<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &ty::GeneratorSubsts<'tcx>, + b: &ty::GeneratorSubsts<'tcx>, + ) -> RelateResult<'tcx, ty::GeneratorSubsts<'tcx>> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { let substs = relate_substs(relation, None, a.substs, b.substs)?; Ok(ty::GeneratorSubsts { substs }) @@ -672,44 +749,56 @@ impl<'tcx> Relate<'tcx> for ty::GeneratorSubsts<'tcx> { } impl<'tcx> Relate<'tcx> for &'tcx Substs<'tcx> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &&'tcx Substs<'tcx>, - b: &&'tcx Substs<'tcx>) - -> RelateResult<'tcx, &'tcx Substs<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &&'tcx Substs<'tcx>, + b: &&'tcx Substs<'tcx>, + ) -> RelateResult<'tcx, &'tcx Substs<'tcx>> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { relate_substs(relation, None, a, b) } } impl<'tcx> Relate<'tcx> for ty::Region<'tcx> { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &ty::Region<'tcx>, - b: &ty::Region<'tcx>) - -> RelateResult<'tcx, ty::Region<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &ty::Region<'tcx>, + b: &ty::Region<'tcx>, + ) -> RelateResult<'tcx, ty::Region<'tcx>> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { relation.regions(*a, *b) } } impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for ty::Binder { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &ty::Binder, - b: &ty::Binder) - -> RelateResult<'tcx, ty::Binder> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>( + relation: &mut R, + a: &ty::Binder, + b: &ty::Binder, + ) -> RelateResult<'tcx, ty::Binder> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { relation.binders(a, b) } } impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for Rc { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &Rc, - b: &Rc) - -> RelateResult<'tcx, Rc> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>(relation: &mut R, a: &Rc, b: &Rc) -> RelateResult<'tcx, Rc> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { let a: &T = a; let b: &T = b; @@ -718,11 +807,11 @@ impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for Rc { } impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for Box { - fn relate<'a, 'gcx, R>(relation: &mut R, - a: &Box, - b: &Box) - -> RelateResult<'tcx, Box> - where R: 
TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a + fn relate<'a, 'gcx, R>(relation: &mut R, a: &Box, b: &Box) -> RelateResult<'tcx, Box> + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { let a: &T = a; let b: &T = b; @@ -734,10 +823,12 @@ impl<'tcx> Relate<'tcx> for Kind<'tcx> { fn relate<'a, 'gcx, R>( relation: &mut R, a: &Kind<'tcx>, - b: &Kind<'tcx> + b: &Kind<'tcx>, ) -> RelateResult<'tcx, Kind<'tcx>> where - R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a, + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { match (a.unpack(), b.unpack()) { (UnpackedKind::Lifetime(a_lt), UnpackedKind::Lifetime(b_lt)) => { @@ -746,12 +837,16 @@ impl<'tcx> Relate<'tcx> for Kind<'tcx> { (UnpackedKind::Type(a_ty), UnpackedKind::Type(b_ty)) => { Ok(relation.relate(&a_ty, &b_ty)?.into()) } - (UnpackedKind::Lifetime(unpacked), x) => { - bug!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x) - } - (UnpackedKind::Type(unpacked), x) => { - bug!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x) - } + (UnpackedKind::Lifetime(unpacked), x) => bug!( + "impossible case reached: can't relate: {:?} with {:?}", + unpacked, + x + ), + (UnpackedKind::Type(unpacked), x) => bug!( + "impossible case reached: can't relate: {:?} with {:?}", + unpacked, + x + ), } } } @@ -760,9 +855,12 @@ impl<'tcx> Relate<'tcx> for ty::TraitPredicate<'tcx> { fn relate<'a, 'gcx, R>( relation: &mut R, a: &ty::TraitPredicate<'tcx>, - b: &ty::TraitPredicate<'tcx> + b: &ty::TraitPredicate<'tcx>, ) -> RelateResult<'tcx, ty::TraitPredicate<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'tcx, + 'tcx: 'a, { Ok(ty::TraitPredicate { trait_ref: relation.relate(&a.trait_ref, &b.trait_ref)?, @@ -776,7 +874,10 @@ impl<'tcx> Relate<'tcx> for ty::ProjectionPredicate<'tcx> { a: &ty::ProjectionPredicate<'tcx>, b: &ty::ProjectionPredicate<'tcx>, ) -> RelateResult<'tcx, ty::ProjectionPredicate<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'tcx, + 'tcx: 'a, { Ok(ty::ProjectionPredicate { projection_ty: relation.relate(&a.projection_ty, &b.projection_ty)?, @@ -789,9 +890,12 @@ impl<'tcx> Relate<'tcx> for traits::WhereClause<'tcx> { fn relate<'a, 'gcx, R>( relation: &mut R, a: &traits::WhereClause<'tcx>, - b: &traits::WhereClause<'tcx> + b: &traits::WhereClause<'tcx>, ) -> RelateResult<'tcx, traits::WhereClause<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'tcx, + 'tcx: 'a, { use traits::WhereClause::*; match (a, b) { @@ -817,7 +921,7 @@ impl<'tcx> Relate<'tcx> for traits::WhereClause<'tcx> { ))) } - _ => Err(TypeError::Mismatch), + _ => Err(TypeError::Mismatch), } } } @@ -826,15 +930,18 @@ impl<'tcx> Relate<'tcx> for traits::WellFormed<'tcx> { fn relate<'a, 'gcx, R>( relation: &mut R, a: &traits::WellFormed<'tcx>, - b: &traits::WellFormed<'tcx> + b: &traits::WellFormed<'tcx>, ) -> RelateResult<'tcx, traits::WellFormed<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'tcx, + 'tcx: 'a, { use traits::WellFormed::*; match (a, b) { (Trait(a_pred), Trait(b_pred)) => Ok(Trait(relation.relate(a_pred, b_pred)?)), (Ty(a_ty), Ty(b_ty)) => Ok(Ty(relation.relate(a_ty, b_ty)?)), - _ => Err(TypeError::Mismatch), + _ => Err(TypeError::Mismatch), } } } @@ -843,15 +950,18 @@ impl<'tcx> Relate<'tcx> for 
traits::FromEnv<'tcx> { fn relate<'a, 'gcx, R>( relation: &mut R, a: &traits::FromEnv<'tcx>, - b: &traits::FromEnv<'tcx> + b: &traits::FromEnv<'tcx>, ) -> RelateResult<'tcx, traits::FromEnv<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'tcx, + 'tcx: 'a, { use traits::FromEnv::*; match (a, b) { (Trait(a_pred), Trait(b_pred)) => Ok(Trait(relation.relate(a_pred, b_pred)?)), (Ty(a_ty), Ty(b_ty)) => Ok(Ty(relation.relate(a_ty, b_ty)?)), - _ => Err(TypeError::Mismatch), + _ => Err(TypeError::Mismatch), } } } @@ -860,9 +970,12 @@ impl<'tcx> Relate<'tcx> for traits::DomainGoal<'tcx> { fn relate<'a, 'gcx, R>( relation: &mut R, a: &traits::DomainGoal<'tcx>, - b: &traits::DomainGoal<'tcx> + b: &traits::DomainGoal<'tcx>, ) -> RelateResult<'tcx, traits::DomainGoal<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'tcx, + 'tcx: 'a, { use traits::DomainGoal::*; match (a, b) { @@ -874,7 +987,7 @@ impl<'tcx> Relate<'tcx> for traits::DomainGoal<'tcx> { Ok(Normalize(relation.relate(a_pred, b_pred)?)) } - _ => Err(TypeError::Mismatch), + _ => Err(TypeError::Mismatch), } } } @@ -883,9 +996,12 @@ impl<'tcx> Relate<'tcx> for traits::Goal<'tcx> { fn relate<'a, 'gcx, R>( relation: &mut R, a: &traits::Goal<'tcx>, - b: &traits::Goal<'tcx> + b: &traits::Goal<'tcx>, ) -> RelateResult<'tcx, traits::Goal<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'tcx, + 'tcx: 'a, { use traits::GoalKind::*; match (a, b) { @@ -911,9 +1027,7 @@ impl<'tcx> Relate<'tcx> for traits::Goal<'tcx> { Ok(relation.tcx().mk_goal(DomainGoal(goal))) } - (Quantified(a_qkind, a_goal), Quantified(b_qkind, b_goal)) - if a_qkind == b_qkind => - { + (Quantified(a_qkind, a_goal), Quantified(b_qkind, b_goal)) if a_qkind == b_qkind => { let goal = relation.relate(a_goal, b_goal)?; Ok(relation.tcx().mk_goal(Quantified(*a_qkind, goal))) } @@ -929,9 +1043,12 @@ impl<'tcx> Relate<'tcx> for traits::Goals<'tcx> { fn relate<'a, 'gcx, R>( relation: &mut R, a: &traits::Goals<'tcx>, - b: &traits::Goals<'tcx> + b: &traits::Goals<'tcx>, ) -> RelateResult<'tcx, traits::Goals<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'tcx, + 'tcx: 'a, { if a.len() != b.len() { return Err(TypeError::Mismatch); @@ -947,9 +1064,12 @@ impl<'tcx> Relate<'tcx> for traits::Clause<'tcx> { fn relate<'a, 'gcx, R>( relation: &mut R, a: &traits::Clause<'tcx>, - b: &traits::Clause<'tcx> + b: &traits::Clause<'tcx>, ) -> RelateResult<'tcx, traits::Clause<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'tcx, + 'tcx: 'a, { use traits::Clause::*; match (a, b) { @@ -972,9 +1092,12 @@ impl<'tcx> Relate<'tcx> for traits::Clauses<'tcx> { fn relate<'a, 'gcx, R>( relation: &mut R, a: &traits::Clauses<'tcx>, - b: &traits::Clauses<'tcx> + b: &traits::Clauses<'tcx>, ) -> RelateResult<'tcx, traits::Clauses<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'tcx, + 'tcx: 'a, { if a.len() != b.len() { return Err(TypeError::Mismatch); @@ -990,9 +1113,12 @@ impl<'tcx> Relate<'tcx> for traits::ProgramClause<'tcx> { fn relate<'a, 'gcx, R>( relation: &mut R, a: &traits::ProgramClause<'tcx>, - b: &traits::ProgramClause<'tcx> + b: &traits::ProgramClause<'tcx>, ) -> RelateResult<'tcx, 
traits::ProgramClause<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'tcx, + 'tcx: 'a, { Ok(traits::ProgramClause { goal: relation.relate(&a.goal, &b.goal)?, @@ -1006,9 +1132,12 @@ impl<'tcx> Relate<'tcx> for traits::Environment<'tcx> { fn relate<'a, 'gcx, R>( relation: &mut R, a: &traits::Environment<'tcx>, - b: &traits::Environment<'tcx> + b: &traits::Environment<'tcx>, ) -> RelateResult<'tcx, traits::Environment<'tcx>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'tcx, + 'tcx: 'a, { Ok(traits::Environment { clauses: relation.relate(&a.clauses, &b.clauses)?, @@ -1017,14 +1146,18 @@ impl<'tcx> Relate<'tcx> for traits::Environment<'tcx> { } impl<'tcx, G> Relate<'tcx> for traits::InEnvironment<'tcx, G> - where G: Relate<'tcx> +where + G: Relate<'tcx>, { fn relate<'a, 'gcx, R>( relation: &mut R, a: &traits::InEnvironment<'tcx, G>, - b: &traits::InEnvironment<'tcx, G> + b: &traits::InEnvironment<'tcx, G>, ) -> RelateResult<'tcx, traits::InEnvironment<'tcx, G>> - where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a + where + R: TypeRelation<'a, 'gcx, 'tcx>, + 'gcx: 'tcx, + 'tcx: 'a, { Ok(traits::InEnvironment { environment: relation.relate(&a.environment, &b.environment)?, @@ -1036,26 +1169,31 @@ impl<'tcx, G> Relate<'tcx> for traits::InEnvironment<'tcx, G> /////////////////////////////////////////////////////////////////////////// // Error handling -pub fn expected_found<'a, 'gcx, 'tcx, R, T>(relation: &mut R, - a: &T, - b: &T) - -> ExpectedFound - where R: TypeRelation<'a, 'gcx, 'tcx>, T: Clone, 'gcx: 'a+'tcx, 'tcx: 'a +pub fn expected_found<'a, 'gcx, 'tcx, R, T>(relation: &mut R, a: &T, b: &T) -> ExpectedFound +where + R: TypeRelation<'a, 'gcx, 'tcx>, + T: Clone, + 'gcx: 'a + 'tcx, + 'tcx: 'a, { expected_found_bool(relation.a_is_expected(), a, b) } -pub fn expected_found_bool(a_is_expected: bool, - a: &T, - b: &T) - -> ExpectedFound - where T: Clone +pub fn expected_found_bool(a_is_expected: bool, a: &T, b: &T) -> ExpectedFound +where + T: Clone, { let a = a.clone(); let b = b.clone(); if a_is_expected { - ExpectedFound {expected: a, found: b} + ExpectedFound { + expected: a, + found: b, + } } else { - ExpectedFound {expected: b, found: a} + ExpectedFound { + expected: b, + found: a, + } } } diff --git a/src/librustc/ty/steal.rs b/src/librustc/ty/steal.rs index 336a4c3bf2279..0e1980d3b2abb 100644 --- a/src/librustc/ty/steal.rs +++ b/src/librustc/ty/steal.rs @@ -1,4 +1,4 @@ -use rustc_data_structures::sync::{RwLock, ReadGuard, MappedReadGuard}; +use rustc_data_structures::sync::{MappedReadGuard, ReadGuard, RwLock}; /// The `Steal` struct is intended to used as the value for a query. /// Specifically, we sometimes have queries (*cough* MIR *cough*) @@ -21,25 +21,28 @@ use rustc_data_structures::sync::{RwLock, ReadGuard, MappedReadGuard}; /// /// FIXME(#41710) -- what is the best way to model linear queries? 
 pub struct Steal<T> {
-    value: RwLock<Option<T>>
+    value: RwLock<Option<T>>,
 }
 
 impl<T> Steal<T> {
     pub fn new(value: T) -> Self {
         Steal {
-            value: RwLock::new(Some(value))
+            value: RwLock::new(Some(value)),
         }
     }
 
     pub fn borrow(&self) -> MappedReadGuard<'_, T> {
         ReadGuard::map(self.value.borrow(), |opt| match *opt {
             None => bug!("attempted to read from stolen value"),
-            Some(ref v) => v
+            Some(ref v) => v,
         })
     }
 
     pub fn steal(&self) -> T {
-        let value_ref = &mut *self.value.try_write().expect("stealing value which is locked");
+        let value_ref = &mut *self
+            .value
+            .try_write()
+            .expect("stealing value which is locked");
         let value = value_ref.take();
         value.expect("attempt to read from stolen value")
     }
diff --git a/src/librustc/ty/structural_impls.rs b/src/librustc/ty/structural_impls.rs
index f9b43f42d5298..499c96e4b3779 100644
--- a/src/librustc/ty/structural_impls.rs
+++ b/src/librustc/ty/structural_impls.rs
@@ -3,13 +3,13 @@
 //! hand, though we've recently added some macros (e.g.,
 //! `BraceStructLiftImpl!`) to help with the tedium.
 
-use mir::ProjectionKind;
+use mir::interpret;
 use mir::interpret::ConstValue;
-use ty::{self, Lift, Ty, TyCtxt};
-use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
-use rustc_data_structures::indexed_vec::{IndexVec, Idx};
+use mir::ProjectionKind;
+use rustc_data_structures::indexed_vec::{Idx, IndexVec};
 use smallvec::SmallVec;
-use mir::interpret;
+use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
+use ty::{self, Lift, Ty, TyCtxt};
 
 use std::rc::Rc;
 
@@ -61,7 +61,8 @@ CloneTypeFoldableAndLiftImpls! {
 
 impl<'tcx, A: Lift<'tcx>, B: Lift<'tcx>> Lift<'tcx> for (A, B) {
     type Lifted = (A::Lifted, B::Lifted);
     fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
-        tcx.lift(&self.0).and_then(|a| tcx.lift(&self.1).map(|b| (a, b)))
+        tcx.lift(&self.0)
+            .and_then(|a| tcx.lift(&self.1).map(|b| (a, b)))
     }
 }
@@ -69,7 +70,8 @@ impl<'tcx, A: Lift<'tcx>, B: Lift<'tcx>, C: Lift<'tcx>> Lift<'tcx> for (A, B, C)
     type Lifted = (A::Lifted, B::Lifted, C::Lifted);
     fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
         tcx.lift(&self.0).and_then(|a| {
-            tcx.lift(&self.1).and_then(|b| tcx.lift(&self.2).map(|c| (a, b, c)))
+            tcx.lift(&self.1)
+                .and_then(|b| tcx.lift(&self.2).map(|c| (a, b, c)))
         })
     }
 }
@@ -79,7 +81,7 @@ impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Option<T> {
     fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
         match *self {
             Some(ref x) => tcx.lift(x).map(Some),
-            None => Some(None)
+            None => Some(None),
         }
     }
 }
@@ -89,7 +91,7 @@ impl<'tcx, T: Lift<'tcx>, E: Lift<'tcx>> Lift<'tcx> for Result<T, E> {
     fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
         match *self {
             Ok(ref x) => tcx.lift(x).map(Ok),
-            Err(ref e) => tcx.lift(e).map(Err)
+            Err(ref e) => tcx.lift(e).map(Err),
         }
     }
 }
@@ -105,8 +107,7 @@ impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for [T] {
     type Lifted = Vec<T::Lifted>;
     fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
         // type annotation needed to inform `projection_must_outlive`
-        let mut result : Vec<<T as Lift<'tcx>>::Lifted>
-            = Vec::with_capacity(self.len());
+        let mut result: Vec<<T as Lift<'tcx>>::Lifted> = Vec::with_capacity(self.len());
         for x in self {
             if let Some(value) = tcx.lift(x) {
                 result.push(value);
@@ -128,9 +129,7 @@ impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Vec<T> {
 
 impl<'tcx, I: Idx, T: Lift<'tcx>> Lift<'tcx> for IndexVec<I, T> {
     type Lifted = IndexVec<I, T::Lifted>;
     fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
-        self.iter()
-            .map(|e| tcx.lift(e))
-            .collect()
+        self.iter().map(|e| tcx.lift(e)).collect()
     }
 }
@@ -147,78
+146,78 @@ impl<'a, 'tcx> Lift<'tcx> for ty::TraitRef<'a> { impl<'a, 'tcx> Lift<'tcx> for ty::ExistentialTraitRef<'a> { type Lifted = ty::ExistentialTraitRef<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { - tcx.lift(&self.substs).map(|substs| ty::ExistentialTraitRef { - def_id: self.def_id, - substs, - }) + tcx.lift(&self.substs) + .map(|substs| ty::ExistentialTraitRef { + def_id: self.def_id, + substs, + }) } } impl<'a, 'tcx> Lift<'tcx> for ty::TraitPredicate<'a> { type Lifted = ty::TraitPredicate<'tcx>; - fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) - -> Option> { - tcx.lift(&self.trait_ref).map(|trait_ref| ty::TraitPredicate { - trait_ref, - }) + fn lift_to_tcx<'b, 'gcx>( + &self, + tcx: TyCtxt<'b, 'gcx, 'tcx>, + ) -> Option> { + tcx.lift(&self.trait_ref) + .map(|trait_ref| ty::TraitPredicate { trait_ref }) } } impl<'a, 'tcx> Lift<'tcx> for ty::SubtypePredicate<'a> { type Lifted = ty::SubtypePredicate<'tcx>; - fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) - -> Option> { - tcx.lift(&(self.a, self.b)).map(|(a, b)| ty::SubtypePredicate { - a_is_expected: self.a_is_expected, - a, - b, - }) + fn lift_to_tcx<'b, 'gcx>( + &self, + tcx: TyCtxt<'b, 'gcx, 'tcx>, + ) -> Option> { + tcx.lift(&(self.a, self.b)) + .map(|(a, b)| ty::SubtypePredicate { + a_is_expected: self.a_is_expected, + a, + b, + }) } } -impl<'tcx, A: Copy+Lift<'tcx>, B: Copy+Lift<'tcx>> Lift<'tcx> for ty::OutlivesPredicate { +impl<'tcx, A: Copy + Lift<'tcx>, B: Copy + Lift<'tcx>> Lift<'tcx> for ty::OutlivesPredicate { type Lifted = ty::OutlivesPredicate; fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option { - tcx.lift(&(self.0, self.1)).map(|(a, b)| ty::OutlivesPredicate(a, b)) + tcx.lift(&(self.0, self.1)) + .map(|(a, b)| ty::OutlivesPredicate(a, b)) } } impl<'a, 'tcx> Lift<'tcx> for ty::ProjectionTy<'a> { type Lifted = ty::ProjectionTy<'tcx>; - fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) - -> Option> { - tcx.lift(&self.substs).map(|substs| { - ty::ProjectionTy { - item_def_id: self.item_def_id, - substs, - } + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option> { + tcx.lift(&self.substs).map(|substs| ty::ProjectionTy { + item_def_id: self.item_def_id, + substs, }) } } impl<'a, 'tcx> Lift<'tcx> for ty::ProjectionPredicate<'a> { type Lifted = ty::ProjectionPredicate<'tcx>; - fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) - -> Option> { - tcx.lift(&(self.projection_ty, self.ty)).map(|(projection_ty, ty)| { - ty::ProjectionPredicate { - projection_ty, - ty, - } - }) + fn lift_to_tcx<'b, 'gcx>( + &self, + tcx: TyCtxt<'b, 'gcx, 'tcx>, + ) -> Option> { + tcx.lift(&(self.projection_ty, self.ty)) + .map(|(projection_ty, ty)| ty::ProjectionPredicate { projection_ty, ty }) } } impl<'a, 'tcx> Lift<'tcx> for ty::ExistentialProjection<'a> { type Lifted = ty::ExistentialProjection<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { - tcx.lift(&self.substs).map(|substs| { - ty::ExistentialProjection { + tcx.lift(&self.substs) + .map(|substs| ty::ExistentialProjection { substs, ty: tcx.lift(&self.ty).expect("type must lift when substs do"), item_def_id: self.item_def_id, - } - }) + }) } } @@ -226,12 +225,8 @@ impl<'a, 'tcx> Lift<'tcx> for ty::Predicate<'a> { type Lifted = ty::Predicate<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { match *self { - ty::Predicate::Trait(ref binder) => { - tcx.lift(binder).map(ty::Predicate::Trait) - } - 
ty::Predicate::Subtype(ref binder) => { - tcx.lift(binder).map(ty::Predicate::Subtype) - } + ty::Predicate::Trait(ref binder) => tcx.lift(binder).map(ty::Predicate::Trait), + ty::Predicate::Subtype(ref binder) => tcx.lift(binder).map(ty::Predicate::Subtype), ty::Predicate::RegionOutlives(ref binder) => { tcx.lift(binder).map(ty::Predicate::RegionOutlives) } @@ -241,23 +236,18 @@ impl<'a, 'tcx> Lift<'tcx> for ty::Predicate<'a> { ty::Predicate::Projection(ref binder) => { tcx.lift(binder).map(ty::Predicate::Projection) } - ty::Predicate::WellFormed(ty) => { - tcx.lift(&ty).map(ty::Predicate::WellFormed) - } + ty::Predicate::WellFormed(ty) => tcx.lift(&ty).map(ty::Predicate::WellFormed), ty::Predicate::ClosureKind(closure_def_id, closure_substs, kind) => { - tcx.lift(&closure_substs) - .map(|closure_substs| ty::Predicate::ClosureKind(closure_def_id, - closure_substs, - kind)) + tcx.lift(&closure_substs).map(|closure_substs| { + ty::Predicate::ClosureKind(closure_def_id, closure_substs, kind) + }) } ty::Predicate::ObjectSafe(trait_def_id) => { Some(ty::Predicate::ObjectSafe(trait_def_id)) } - ty::Predicate::ConstEvaluatable(def_id, substs) => { - tcx.lift(&substs).map(|substs| { - ty::Predicate::ConstEvaluatable(def_id, substs) - }) - } + ty::Predicate::ConstEvaluatable(def_id, substs) => tcx + .lift(&substs) + .map(|substs| ty::Predicate::ConstEvaluatable(def_id, substs)), } } } @@ -272,13 +262,12 @@ impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::Binder { impl<'a, 'tcx> Lift<'tcx> for ty::ParamEnv<'a> { type Lifted = ty::ParamEnv<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { - tcx.lift(&self.caller_bounds).map(|caller_bounds| { - ty::ParamEnv { + tcx.lift(&self.caller_bounds) + .map(|caller_bounds| ty::ParamEnv { reveal: self.reveal, caller_bounds, def_id: self.def_id, - } - }) + }) } } @@ -286,12 +275,8 @@ impl<'a, 'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::ParamEnvAnd<'a, T> { type Lifted = ty::ParamEnvAnd<'tcx, T::Lifted>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { tcx.lift(&self.param_env).and_then(|param_env| { - tcx.lift(&self.value).map(|value| { - ty::ParamEnvAnd { - param_env, - value, - } - }) + tcx.lift(&self.value) + .map(|value| ty::ParamEnvAnd { param_env, value }) }) } } @@ -299,18 +284,16 @@ impl<'a, 'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::ParamEnvAnd<'a, T> { impl<'a, 'tcx> Lift<'tcx> for ty::ClosureSubsts<'a> { type Lifted = ty::ClosureSubsts<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { - tcx.lift(&self.substs).map(|substs| { - ty::ClosureSubsts { substs } - }) + tcx.lift(&self.substs) + .map(|substs| ty::ClosureSubsts { substs }) } } impl<'a, 'tcx> Lift<'tcx> for ty::GeneratorSubsts<'a> { type Lifted = ty::GeneratorSubsts<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { - tcx.lift(&self.substs).map(|substs| { - ty::GeneratorSubsts { substs } - }) + tcx.lift(&self.substs) + .map(|substs| ty::GeneratorSubsts { substs }) } } @@ -318,9 +301,8 @@ impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::Adjustment<'a> { type Lifted = ty::adjustment::Adjustment<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { tcx.lift(&self.kind).and_then(|kind| { - tcx.lift(&self.target).map(|target| { - ty::adjustment::Adjustment { kind, target } - }) + tcx.lift(&self.target) + .map(|target| ty::adjustment::Adjustment { kind, target }) }) } } @@ -329,18 +311,18 @@ impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::Adjust<'a> { type Lifted = 
ty::adjustment::Adjust<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { match *self { - ty::adjustment::Adjust::NeverToAny => - Some(ty::adjustment::Adjust::NeverToAny), - ty::adjustment::Adjust::ReifyFnPointer => - Some(ty::adjustment::Adjust::ReifyFnPointer), - ty::adjustment::Adjust::UnsafeFnPointer => - Some(ty::adjustment::Adjust::UnsafeFnPointer), - ty::adjustment::Adjust::ClosureFnPointer => - Some(ty::adjustment::Adjust::ClosureFnPointer), - ty::adjustment::Adjust::MutToConstPointer => - Some(ty::adjustment::Adjust::MutToConstPointer), - ty::adjustment::Adjust::Unsize => - Some(ty::adjustment::Adjust::Unsize), + ty::adjustment::Adjust::NeverToAny => Some(ty::adjustment::Adjust::NeverToAny), + ty::adjustment::Adjust::ReifyFnPointer => Some(ty::adjustment::Adjust::ReifyFnPointer), + ty::adjustment::Adjust::UnsafeFnPointer => { + Some(ty::adjustment::Adjust::UnsafeFnPointer) + } + ty::adjustment::Adjust::ClosureFnPointer => { + Some(ty::adjustment::Adjust::ClosureFnPointer) + } + ty::adjustment::Adjust::MutToConstPointer => { + Some(ty::adjustment::Adjust::MutToConstPointer) + } + ty::adjustment::Adjust::Unsize => Some(ty::adjustment::Adjust::Unsize), ty::adjustment::Adjust::Deref(ref overloaded) => { tcx.lift(overloaded).map(ty::adjustment::Adjust::Deref) } @@ -354,12 +336,11 @@ impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::Adjust<'a> { impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::OverloadedDeref<'a> { type Lifted = ty::adjustment::OverloadedDeref<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { - tcx.lift(&self.region).map(|region| { - ty::adjustment::OverloadedDeref { + tcx.lift(&self.region) + .map(|region| ty::adjustment::OverloadedDeref { region, mutbl: self.mutbl, - } - }) + }) } } @@ -370,9 +351,7 @@ impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::AutoBorrow<'a> { ty::adjustment::AutoBorrow::Ref(r, m) => { tcx.lift(&r).map(|r| ty::adjustment::AutoBorrow::Ref(r, m)) } - ty::adjustment::AutoBorrow::RawPtr(m) => { - Some(ty::adjustment::AutoBorrow::RawPtr(m)) - } + ty::adjustment::AutoBorrow::RawPtr(m) => Some(ty::adjustment::AutoBorrow::RawPtr(m)), } } } @@ -381,25 +360,21 @@ impl<'a, 'tcx> Lift<'tcx> for ty::GenSig<'a> { type Lifted = ty::GenSig<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { tcx.lift(&(self.yield_ty, self.return_ty)) - .map(|(yield_ty, return_ty)| { - ty::GenSig { - yield_ty, - return_ty, - } - }) + .map(|(yield_ty, return_ty)| ty::GenSig { + yield_ty, + return_ty, + }) } } impl<'a, 'tcx> Lift<'tcx> for ty::FnSig<'a> { type Lifted = ty::FnSig<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { - tcx.lift(&self.inputs_and_output).map(|x| { - ty::FnSig { - inputs_and_output: x, - variadic: self.variadic, - unsafety: self.unsafety, - abi: self.abi, - } + tcx.lift(&self.inputs_and_output).map(|x| ty::FnSig { + inputs_and_output: x, + variadic: self.variadic, + unsafety: self.unsafety, + abi: self.abi, }) } } @@ -408,12 +383,8 @@ impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::error::ExpectedFound { type Lifted = ty::error::ExpectedFound; fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option { tcx.lift(&self.expected).and_then(|expected| { - tcx.lift(&self.found).map(|found| { - ty::error::ExpectedFound { - expected, - found, - } - }) + tcx.lift(&self.found) + .map(|found| ty::error::ExpectedFound { expected, found }) }) } } @@ -443,7 +414,7 @@ impl<'a, 'tcx> Lift<'tcx> for ty::error::TypeError<'a> { ProjectionMismatched(x) => 
ProjectionMismatched(x), ProjectionBoundsLength(x) => ProjectionBoundsLength(x), Sorts(ref x) => return tcx.lift(x).map(Sorts), - ExistentialMismatch(ref x) => return tcx.lift(x).map(ExistentialMismatch) + ExistentialMismatch(ref x) => return tcx.lift(x).map(ExistentialMismatch), }) } } @@ -452,22 +423,22 @@ impl<'a, 'tcx> Lift<'tcx> for ty::InstanceDef<'a> { type Lifted = ty::InstanceDef<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { match *self { - ty::InstanceDef::Item(def_id) => - Some(ty::InstanceDef::Item(def_id)), - ty::InstanceDef::VtableShim(def_id) => - Some(ty::InstanceDef::VtableShim(def_id)), - ty::InstanceDef::Intrinsic(def_id) => - Some(ty::InstanceDef::Intrinsic(def_id)), - ty::InstanceDef::FnPtrShim(def_id, ref ty) => - Some(ty::InstanceDef::FnPtrShim(def_id, tcx.lift(ty)?)), - ty::InstanceDef::Virtual(def_id, n) => - Some(ty::InstanceDef::Virtual(def_id, n)), - ty::InstanceDef::ClosureOnceShim { call_once } => - Some(ty::InstanceDef::ClosureOnceShim { call_once }), - ty::InstanceDef::DropGlue(def_id, ref ty) => - Some(ty::InstanceDef::DropGlue(def_id, tcx.lift(ty)?)), - ty::InstanceDef::CloneShim(def_id, ref ty) => - Some(ty::InstanceDef::CloneShim(def_id, tcx.lift(ty)?)), + ty::InstanceDef::Item(def_id) => Some(ty::InstanceDef::Item(def_id)), + ty::InstanceDef::VtableShim(def_id) => Some(ty::InstanceDef::VtableShim(def_id)), + ty::InstanceDef::Intrinsic(def_id) => Some(ty::InstanceDef::Intrinsic(def_id)), + ty::InstanceDef::FnPtrShim(def_id, ref ty) => { + Some(ty::InstanceDef::FnPtrShim(def_id, tcx.lift(ty)?)) + } + ty::InstanceDef::Virtual(def_id, n) => Some(ty::InstanceDef::Virtual(def_id, n)), + ty::InstanceDef::ClosureOnceShim { call_once } => { + Some(ty::InstanceDef::ClosureOnceShim { call_once }) + } + ty::InstanceDef::DropGlue(def_id, ref ty) => { + Some(ty::InstanceDef::DropGlue(def_id, tcx.lift(ty)?)) + } + ty::InstanceDef::CloneShim(def_id, ref ty) => { + Some(ty::InstanceDef::CloneShim(def_id, tcx.lift(ty)?)) + } } } } @@ -508,7 +479,7 @@ impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::AdtDef { } } -impl<'tcx, T:TypeFoldable<'tcx>, U:TypeFoldable<'tcx>> TypeFoldable<'tcx> for (T, U) { +impl<'tcx, T: TypeFoldable<'tcx>, U: TypeFoldable<'tcx>> TypeFoldable<'tcx> for (T, U) { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> (T, U) { (self.0.fold_with(folder), self.1.fold_with(folder)) } @@ -558,7 +529,10 @@ impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Vec { impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Box<[T]> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { - self.iter().map(|t| t.fold_with(folder)).collect::>().into_boxed_slice() + self.iter() + .map(|t| t.fold_with(folder)) + .collect::>() + .into_boxed_slice() } fn super_visit_with>(&self, visitor: &mut V) -> bool { @@ -566,7 +540,7 @@ impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Box<[T]> { } } -impl<'tcx, T:TypeFoldable<'tcx>> TypeFoldable<'tcx> for ty::Binder { +impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for ty::Binder { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { self.map_bound_ref(|ty| ty.fold_with(folder)) } @@ -590,7 +564,10 @@ BraceStructTypeFoldableImpl! 
{ impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { - let v = self.iter().map(|p| p.fold_with(folder)).collect::>(); + let v = self + .iter() + .map(|p| p.fold_with(folder)) + .collect::>(); folder.tcx().intern_existential_predicates(&v) } @@ -609,7 +586,10 @@ EnumTypeFoldableImpl! { impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { - let v = self.iter().map(|t| t.fold_with(folder)).collect::>(); + let v = self + .iter() + .map(|t| t.fold_with(folder)) + .collect::>(); folder.tcx().intern_type_list(&v) } @@ -620,7 +600,10 @@ impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List> { impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { - let v = self.iter().map(|t| t.fold_with(folder)).collect::>(); + let v = self + .iter() + .map(|t| t.fold_with(folder)) + .collect::>(); folder.tcx().intern_projs(&v) } @@ -638,44 +621,30 @@ impl<'tcx> TypeFoldable<'tcx> for ty::instance::Instance<'tcx> { Item(did) => Item(did.fold_with(folder)), VtableShim(did) => VtableShim(did.fold_with(folder)), Intrinsic(did) => Intrinsic(did.fold_with(folder)), - FnPtrShim(did, ty) => FnPtrShim( - did.fold_with(folder), - ty.fold_with(folder), - ), - Virtual(did, i) => Virtual( - did.fold_with(folder), - i, - ), + FnPtrShim(did, ty) => FnPtrShim(did.fold_with(folder), ty.fold_with(folder)), + Virtual(did, i) => Virtual(did.fold_with(folder), i), ClosureOnceShim { call_once } => ClosureOnceShim { call_once: call_once.fold_with(folder), }, - DropGlue(did, ty) => DropGlue( - did.fold_with(folder), - ty.fold_with(folder), - ), - CloneShim(did, ty) => CloneShim( - did.fold_with(folder), - ty.fold_with(folder), - ), + DropGlue(did, ty) => DropGlue(did.fold_with(folder), ty.fold_with(folder)), + CloneShim(did, ty) => CloneShim(did.fold_with(folder), ty.fold_with(folder)), }, } } fn super_visit_with>(&self, visitor: &mut V) -> bool { use ty::InstanceDef::*; - self.substs.visit_with(visitor) || - match self.def { - Item(did) | VtableShim(did) | Intrinsic(did) | Virtual(did, _) => { - did.visit_with(visitor) - }, - FnPtrShim(did, ty) | CloneShim(did, ty) => { - did.visit_with(visitor) || ty.visit_with(visitor) - }, - DropGlue(did, ty) => { - did.visit_with(visitor) || ty.visit_with(visitor) - }, - ClosureOnceShim { call_once } => call_once.visit_with(visitor), - } + self.substs.visit_with(visitor) + || match self.def { + Item(did) | VtableShim(did) | Intrinsic(did) | Virtual(did, _) => { + did.visit_with(visitor) + } + FnPtrShim(did, ty) | CloneShim(did, ty) => { + did.visit_with(visitor) || ty.visit_with(visitor) + } + DropGlue(did, ty) => did.visit_with(visitor) || ty.visit_with(visitor), + ClosureOnceShim { call_once } => call_once.visit_with(visitor), + } } } @@ -683,7 +652,7 @@ impl<'tcx> TypeFoldable<'tcx> for interpret::GlobalId<'tcx> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { Self { instance: self.instance.fold_with(folder), - promoted: self.promoted + promoted: self.promoted, } } @@ -699,21 +668,15 @@ impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> { ty::Array(typ, sz) => ty::Array(typ.fold_with(folder), sz.fold_with(folder)), ty::Slice(typ) => ty::Slice(typ.fold_with(folder)), ty::Adt(tid, substs) => ty::Adt(tid, substs.fold_with(folder)), - ty::Dynamic(ref trait_ty, ref region) => - 
ty::Dynamic(trait_ty.fold_with(folder), region.fold_with(folder)), - ty::Tuple(ts) => ty::Tuple(ts.fold_with(folder)), - ty::FnDef(def_id, substs) => { - ty::FnDef(def_id, substs.fold_with(folder)) + ty::Dynamic(ref trait_ty, ref region) => { + ty::Dynamic(trait_ty.fold_with(folder), region.fold_with(folder)) } + ty::Tuple(ts) => ty::Tuple(ts.fold_with(folder)), + ty::FnDef(def_id, substs) => ty::FnDef(def_id, substs.fold_with(folder)), ty::FnPtr(f) => ty::FnPtr(f.fold_with(folder)), - ty::Ref(ref r, ty, mutbl) => { - ty::Ref(r.fold_with(folder), ty.fold_with(folder), mutbl) - } + ty::Ref(ref r, ty, mutbl) => ty::Ref(r.fold_with(folder), ty.fold_with(folder), mutbl), ty::Generator(did, substs, movability) => { - ty::Generator( - did, - substs.fold_with(folder), - movability) + ty::Generator(did, substs.fold_with(folder), movability) } ty::GeneratorWitness(types) => ty::GeneratorWitness(types.fold_with(folder)), ty::Closure(did, substs) => ty::Closure(did, substs.fold_with(folder)), @@ -723,19 +686,19 @@ impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> { } ty::Opaque(did, substs) => ty::Opaque(did, substs.fold_with(folder)), - ty::Bool | - ty::Char | - ty::Str | - ty::Int(_) | - ty::Uint(_) | - ty::Float(_) | - ty::Error | - ty::Infer(_) | - ty::Param(..) | - ty::Bound(..) | - ty::Placeholder(..) | - ty::Never | - ty::Foreign(..) => return self + ty::Bool + | ty::Char + | ty::Str + | ty::Int(_) + | ty::Uint(_) + | ty::Float(_) + | ty::Error + | ty::Infer(_) + | ty::Param(..) + | ty::Bound(..) + | ty::Placeholder(..) + | ty::Never + | ty::Foreign(..) => return self, }; if self.sty == sty { @@ -755,15 +718,14 @@ impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> { ty::Array(typ, sz) => typ.visit_with(visitor) || sz.visit_with(visitor), ty::Slice(typ) => typ.visit_with(visitor), ty::Adt(_, substs) => substs.visit_with(visitor), - ty::Dynamic(ref trait_ty, ref reg) => - trait_ty.visit_with(visitor) || reg.visit_with(visitor), + ty::Dynamic(ref trait_ty, ref reg) => { + trait_ty.visit_with(visitor) || reg.visit_with(visitor) + } ty::Tuple(ts) => ts.visit_with(visitor), ty::FnDef(_, substs) => substs.visit_with(visitor), ty::FnPtr(ref f) => f.visit_with(visitor), ty::Ref(r, ty, _) => r.visit_with(visitor) || ty.visit_with(visitor), - ty::Generator(_did, ref substs, _) => { - substs.visit_with(visitor) - } + ty::Generator(_did, ref substs, _) => substs.visit_with(visitor), ty::GeneratorWitness(ref types) => types.visit_with(visitor), ty::Closure(_did, ref substs) => substs.visit_with(visitor), ty::Projection(ref data) | ty::UnnormalizedProjection(ref data) => { @@ -771,19 +733,19 @@ impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> { } ty::Opaque(_, ref substs) => substs.visit_with(visitor), - ty::Bool | - ty::Char | - ty::Str | - ty::Int(_) | - ty::Uint(_) | - ty::Float(_) | - ty::Error | - ty::Infer(_) | - ty::Bound(..) | - ty::Placeholder(..) | - ty::Param(..) | - ty::Never | - ty::Foreign(..) => false, + ty::Bool + | ty::Char + | ty::Str + | ty::Int(_) + | ty::Uint(_) + | ty::Float(_) + | ty::Error + | ty::Infer(_) + | ty::Bound(..) + | ty::Placeholder(..) + | ty::Param(..) + | ty::Never + | ty::Foreign(..) => false, } } @@ -898,7 +860,10 @@ BraceStructTypeFoldableImpl! 
{ impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { - let v = self.iter().map(|p| p.fold_with(folder)).collect::>(); + let v = self + .iter() + .map(|p| p.fold_with(folder)) + .collect::>(); folder.tcx().intern_predicates(&v) } @@ -1028,9 +993,9 @@ impl<'tcx> TypeFoldable<'tcx> for ConstValue<'tcx> { fn super_visit_with>(&self, visitor: &mut V) -> bool { match *self { - ConstValue::Scalar(_) | - ConstValue::ScalarPair(_, _) | - ConstValue::ByRef(_, _, _) => false, + ConstValue::Scalar(_) | ConstValue::ScalarPair(_, _) | ConstValue::ByRef(_, _, _) => { + false + } ConstValue::Unevaluated(_, substs) => substs.visit_with(visitor), } } @@ -1040,10 +1005,7 @@ impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Const<'tcx> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { let ty = self.ty.fold_with(folder); let val = self.val.fold_with(folder); - folder.tcx().mk_const(ty::Const { - ty, - val - }) + folder.tcx().mk_const(ty::Const { ty, val }) } fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs index a2720bdf385cd..a70c922ac6528 100644 --- a/src/librustc/ty/sty.rs +++ b/src/librustc/ty/sty.rs @@ -3,35 +3,36 @@ use hir; use hir::def_id::DefId; use infer::canonical::Canonical; -use mir::interpret::ConstValue; use middle::region; +use mir::interpret::ConstValue; +use mir::interpret::{Pointer, Scalar}; use polonius_engine::Atom; use rustc_data_structures::indexed_vec::Idx; -use ty::subst::{Substs, Subst, Kind, UnpackedKind}; -use ty::{self, AdtDef, TypeFlags, Ty, TyCtxt, TypeFoldable}; -use ty::{List, TyS, ParamEnvAnd, ParamEnv}; +use ty::subst::{Kind, Subst, Substs, UnpackedKind}; +use ty::{self, AdtDef, Ty, TyCtxt, TypeFlags, TypeFoldable}; +use ty::{List, ParamEnv, ParamEnvAnd, TyS}; use util::captures::Captures; -use mir::interpret::{Scalar, Pointer}; +use rustc_target::spec::abi; use smallvec::SmallVec; -use std::iter; use std::cmp::Ordering; -use rustc_target::spec::abi; +use std::iter; use syntax::ast::{self, Ident}; use syntax::symbol::{keywords, InternedString}; -use serialize; use self::InferTy::*; use self::TyKind::*; +use serialize; -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)] +#[derive( + Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable, +)] pub struct TypeAndMut<'tcx> { pub ty: Ty<'tcx>, pub mutbl: hir::Mutability, } -#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, - RustcEncodable, RustcDecodable, Copy)] +#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable, RustcDecodable, Copy)] /// A "free" region `fr` can be interpreted as "some region /// at least as big as the scope `fr.scope`". pub struct FreeRegion { @@ -39,8 +40,7 @@ pub struct FreeRegion { pub bound_region: BoundRegion, } -#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, - RustcEncodable, RustcDecodable, Copy)] +#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable, RustcDecodable, Copy)] pub enum BoundRegion { /// An anonymous region parameter for a given fn (&T) BrAnon(u32), @@ -148,7 +148,10 @@ pub enum TyKind<'tcx> { FnPtr(PolyFnSig<'tcx>), /// A trait, defined with `trait`. - Dynamic(Binder<&'tcx List>>, ty::Region<'tcx>), + Dynamic( + Binder<&'tcx List>>, + ty::Region<'tcx>, + ), /// The anonymous type of a closure. Used to represent the type of /// `|a| a`. 
@@ -301,7 +304,9 @@ static_assert!(MEM_SIZE_OF_TY_KIND: ::std::mem::size_of::<TyKind<'_>>() == 24);
 ///
 /// It'd be nice to split this struct into ClosureSubsts and
 /// GeneratorSubsts, I believe. -nmatsakis
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
+#[derive(
+    Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable,
+)]
 pub struct ClosureSubsts<'tcx> {
     /// Lifetime and type parameters from the enclosing function,
     /// concatenated with the types of the upvars.
@@ -334,9 +339,11 @@ impl<'tcx> ClosureSubsts<'tcx> {
     }
 
     #[inline]
-    pub fn upvar_tys(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) ->
-        impl Iterator<Item=Ty<'tcx>> + 'tcx
-    {
+    pub fn upvar_tys(
+        self,
+        def_id: DefId,
+        tcx: TyCtxt<'_, '_, '_>,
+    ) -> impl Iterator<Item = Ty<'tcx>> + 'tcx {
         let SplitClosureSubsts { upvar_kinds, .. } = self.split(def_id, tcx);
         upvar_kinds.iter().map(|t| {
             if let UnpackedKind::Type(ty) = t.unpack() {
@@ -368,7 +375,10 @@ impl<'tcx> ClosureSubsts<'tcx> {
     ///
     /// If you have an inference context, use `infcx.closure_kind()`.
     pub fn closure_kind(self, def_id: DefId, tcx: TyCtxt<'_, 'tcx, 'tcx>) -> ty::ClosureKind {
-        self.split(def_id, tcx).closure_kind_ty.to_opt_closure_kind().unwrap()
+        self.split(def_id, tcx)
+            .closure_kind_ty
+            .to_opt_closure_kind()
+            .unwrap()
     }
 
     /// Extracts the signature from the closure; only usable outside
@@ -384,7 +394,9 @@ impl<'tcx> ClosureSubsts<'tcx> {
     }
 }
 
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
+#[derive(
+    Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable,
+)]
 pub struct GeneratorSubsts<'tcx> {
     pub substs: &'tcx Substs<'tcx>,
 }
@@ -418,9 +430,11 @@ impl<'tcx> GeneratorSubsts<'tcx> {
     }
 
     #[inline]
-    pub fn upvar_tys(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) ->
-        impl Iterator<Item=Ty<'tcx>> + 'tcx
-    {
+    pub fn upvar_tys(
+        self,
+        def_id: DefId,
+        tcx: TyCtxt<'_, '_, '_>,
+    ) -> impl Iterator<Item = Ty<'tcx>> + 'tcx {
         let SplitGeneratorSubsts { upvar_kinds, .. } = self.split(def_id, tcx);
         upvar_kinds.iter().map(|t| {
             if let UnpackedKind::Type(ty) = t.unpack() {
@@ -469,7 +483,7 @@ impl<'a, 'gcx, 'tcx> GeneratorSubsts<'tcx> {
         self,
         def_id: DefId,
         tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    ) -> impl Iterator<Item=Ty<'tcx>> + Captures<'gcx> + 'a {
+    ) -> impl Iterator<Item = Ty<'tcx>> + Captures<'gcx> + 'a {
         let state = tcx.generator_layout(def_id).fields.iter();
         state.map(move |d| d.ty.subst(tcx, self.substs))
     }
@@ -477,18 +491,23 @@ impl<'a, 'gcx, 'tcx> GeneratorSubsts<'tcx> {
     /// This is the types of the fields of a generate which
     /// is available before the generator transformation.
     /// It includes the upvars and the state discriminant which is u32.
-    pub fn pre_transforms_tys(self, def_id: DefId, tcx: TyCtxt<'a, 'gcx, 'tcx>) ->
-        impl Iterator<Item=Ty<'tcx>> + 'a
-    {
+    pub fn pre_transforms_tys(
+        self,
+        def_id: DefId,
+        tcx: TyCtxt<'a, 'gcx, 'tcx>,
+    ) -> impl Iterator<Item = Ty<'tcx>> + 'a {
         self.upvar_tys(def_id, tcx).chain(iter::once(tcx.types.u32))
     }
 
     /// This is the types of all the fields stored in a generator.
     /// It includes the upvars, state types and the state discriminant which is u32.
- pub fn field_tys(self, def_id: DefId, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> - impl Iterator> + Captures<'gcx> + 'a - { - self.pre_transforms_tys(def_id, tcx).chain(self.state_tys(def_id, tcx)) + pub fn field_tys( + self, + def_id: DefId, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + ) -> impl Iterator> + Captures<'gcx> + 'a { + self.pre_transforms_tys(def_id, tcx) + .chain(self.state_tys(def_id, tcx)) } } @@ -500,9 +519,11 @@ pub enum UpvarSubsts<'tcx> { impl<'tcx> UpvarSubsts<'tcx> { #[inline] - pub fn upvar_tys(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> - impl Iterator> + 'tcx - { + pub fn upvar_tys( + self, + def_id: DefId, + tcx: TyCtxt<'_, '_, '_>, + ) -> impl Iterator> + 'tcx { let upvar_kinds = match self { UpvarSubsts::Closure(substs) => substs.split(def_id, tcx).upvar_kinds, UpvarSubsts::Generator(substs) => substs.split(def_id, tcx).upvar_kinds, @@ -517,7 +538,9 @@ impl<'tcx> UpvarSubsts<'tcx> { } } -#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash, RustcEncodable, RustcDecodable)] +#[derive( + Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash, RustcEncodable, RustcDecodable, +)] pub enum ExistentialPredicate<'tcx> { /// e.g., Iterator Trait(ExistentialTraitRef<'tcx>), @@ -534,27 +557,33 @@ impl<'a, 'gcx, 'tcx> ExistentialPredicate<'tcx> { use self::ExistentialPredicate::*; match (*self, *other) { (Trait(_), Trait(_)) => Ordering::Equal, - (Projection(ref a), Projection(ref b)) => - tcx.def_path_hash(a.item_def_id).cmp(&tcx.def_path_hash(b.item_def_id)), - (AutoTrait(ref a), AutoTrait(ref b)) => - tcx.trait_def(*a).def_path_hash.cmp(&tcx.trait_def(*b).def_path_hash), + (Projection(ref a), Projection(ref b)) => tcx + .def_path_hash(a.item_def_id) + .cmp(&tcx.def_path_hash(b.item_def_id)), + (AutoTrait(ref a), AutoTrait(ref b)) => tcx + .trait_def(*a) + .def_path_hash + .cmp(&tcx.trait_def(*b).def_path_hash), (Trait(_), _) => Ordering::Less, (Projection(_), Trait(_)) => Ordering::Greater, (Projection(_), _) => Ordering::Less, (AutoTrait(_), _) => Ordering::Greater, } } - } impl<'a, 'gcx, 'tcx> Binder> { - pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>) - -> ty::Predicate<'tcx> { + pub fn with_self_ty( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + self_ty: Ty<'tcx>, + ) -> ty::Predicate<'tcx> { use ty::ToPredicate; match *self.skip_binder() { ExistentialPredicate::Trait(tr) => Binder(tr).with_self_ty(tcx, self_ty).to_predicate(), - ExistentialPredicate::Projection(p) => - ty::Predicate::Projection(Binder(p.with_self_ty(tcx, self_ty))), + ExistentialPredicate::Projection(p) => { + ty::Predicate::Projection(Binder(p.with_self_ty(tcx, self_ty))) + } ExistentialPredicate::AutoTrait(did) => { let trait_ref = Binder(ty::TraitRef { def_id: did, @@ -577,23 +606,20 @@ impl<'tcx> List> { } #[inline] - pub fn projection_bounds<'a>(&'a self) -> - impl Iterator> + 'a { - self.iter().filter_map(|predicate| { - match *predicate { - ExistentialPredicate::Projection(p) => Some(p), - _ => None, - } + pub fn projection_bounds<'a>( + &'a self, + ) -> impl Iterator> + 'a { + self.iter().filter_map(|predicate| match *predicate { + ExistentialPredicate::Projection(p) => Some(p), + _ => None, }) } #[inline] - pub fn auto_traits<'a>(&'a self) -> impl Iterator + 'a { - self.iter().filter_map(|predicate| { - match *predicate { - ExistentialPredicate::AutoTrait(d) => Some(d), - _ => None - } + pub fn auto_traits<'a>(&'a self) -> impl Iterator + 'a { + self.iter().filter_map(|predicate| match *predicate { + ExistentialPredicate::AutoTrait(d) => Some(d), + _ => None, }) } } @@ 
-604,18 +630,20 @@ impl<'tcx> Binder<&'tcx List>> { } #[inline] - pub fn projection_bounds<'a>(&'a self) -> - impl Iterator> + 'a { + pub fn projection_bounds<'a>( + &'a self, + ) -> impl Iterator> + 'a { self.skip_binder().projection_bounds().map(Binder::bind) } #[inline] - pub fn auto_traits<'a>(&'a self) -> impl Iterator + 'a { + pub fn auto_traits<'a>(&'a self) -> impl Iterator + 'a { self.skip_binder().auto_traits() } - pub fn iter<'a>(&'a self) - -> impl DoubleEndedIterator>> + 'tcx { + pub fn iter<'a>( + &'a self, + ) -> impl DoubleEndedIterator>> + 'tcx { self.skip_binder().iter().cloned().map(Binder::bind) } } @@ -643,7 +671,10 @@ pub struct TraitRef<'tcx> { impl<'tcx> TraitRef<'tcx> { pub fn new(def_id: DefId, substs: &'tcx Substs<'tcx>) -> TraitRef<'tcx> { - TraitRef { def_id: def_id, substs: substs } + TraitRef { + def_id: def_id, + substs: substs, + } } /// Returns a `TraitRef` of the form `P0: Foo` where `Pi` @@ -668,15 +699,16 @@ impl<'tcx> TraitRef<'tcx> { self.substs.types() } - pub fn from_method(tcx: TyCtxt<'_, '_, 'tcx>, - trait_id: DefId, - substs: &Substs<'tcx>) - -> ty::TraitRef<'tcx> { + pub fn from_method( + tcx: TyCtxt<'_, '_, 'tcx>, + trait_id: DefId, + substs: &Substs<'tcx>, + ) -> ty::TraitRef<'tcx> { let defs = tcx.generics_of(trait_id); ty::TraitRef { def_id: trait_id, - substs: tcx.intern_substs(&substs[..defs.params.len()]) + substs: tcx.intern_substs(&substs[..defs.params.len()]), } } } @@ -694,7 +726,9 @@ impl<'tcx> PolyTraitRef<'tcx> { pub fn to_poly_trait_predicate(&self) -> ty::PolyTraitPredicate<'tcx> { // Note that we preserve binding levels - Binder(ty::TraitPredicate { trait_ref: self.skip_binder().clone() }) + Binder(ty::TraitPredicate { + trait_ref: self.skip_binder().clone(), + }) } } @@ -712,7 +746,7 @@ pub struct ExistentialTraitRef<'tcx> { } impl<'a, 'gcx, 'tcx> ExistentialTraitRef<'tcx> { - pub fn input_types<'b>(&'b self) -> impl DoubleEndedIterator> + 'b { + pub fn input_types<'b>(&'b self) -> impl DoubleEndedIterator> + 'b { // Select only the "input types" from a trait-reference. For // now this is all the types that appear in the // trait-reference, but it should eventually exclude @@ -720,15 +754,16 @@ impl<'a, 'gcx, 'tcx> ExistentialTraitRef<'tcx> { self.substs.types() } - pub fn erase_self_ty(tcx: TyCtxt<'a, 'gcx, 'tcx>, - trait_ref: ty::TraitRef<'tcx>) - -> ty::ExistentialTraitRef<'tcx> { + pub fn erase_self_ty( + tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_ref: ty::TraitRef<'tcx>, + ) -> ty::ExistentialTraitRef<'tcx> { // Assert there is a Self. trait_ref.substs.type_at(0); ty::ExistentialTraitRef { def_id: trait_ref.def_id, - substs: tcx.intern_substs(&trait_ref.substs[1..]) + substs: tcx.intern_substs(&trait_ref.substs[1..]), } } @@ -736,14 +771,17 @@ impl<'a, 'gcx, 'tcx> ExistentialTraitRef<'tcx> { /// we convert the principal trait-ref into a normal trait-ref, /// you must give *some* self-type. A common choice is `mk_err()` /// or some placeholder type. 
-    pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>)
-        -> ty::TraitRef<'tcx> {
+    pub fn with_self_ty(
+        &self,
+        tcx: TyCtxt<'a, 'gcx, 'tcx>,
+        self_ty: Ty<'tcx>,
+    ) -> ty::TraitRef<'tcx> {
         // otherwise the escaping vars would be captured by the binder
         // debug_assert!(!self_ty.has_escaping_bound_vars());
 
         ty::TraitRef {
             def_id: self.def_id,
-            substs: tcx.mk_substs_trait(self_ty, self.substs)
+            substs: tcx.mk_substs_trait(self_ty, self.substs),
         }
     }
 }
@@ -759,9 +797,11 @@ impl<'tcx> PolyExistentialTraitRef<'tcx> {
     /// we convert the principal trait-ref into a normal trait-ref,
     /// you must give *some* self-type. A common choice is `mk_err()`
     /// or some placeholder type.
-    pub fn with_self_ty(&self, tcx: TyCtxt<'_, '_, 'tcx>,
-                        self_ty: Ty<'tcx>)
-                        -> ty::PolyTraitRef<'tcx> {
+    pub fn with_self_ty(
+        &self,
+        tcx: TyCtxt<'_, '_, 'tcx>,
+        self_ty: Ty<'tcx>,
+    ) -> ty::PolyTraitRef<'tcx> {
         self.map_bound(|trait_ref| trait_ref.with_self_ty(tcx, self_ty))
     }
 }
@@ -773,7 +813,9 @@ impl<'tcx> PolyExistentialTraitRef<'tcx> {
 /// erase, or otherwise "discharge" these bound vars, we change the
 /// type from `Binder<T>` to just `T` (see
 /// e.g., `liberate_late_bound_regions`).
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
+#[derive(
+    Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable,
+)]
 pub struct Binder<T>(T);
 
 impl<T> Binder<T> {
@@ -782,7 +824,8 @@ impl<T> Binder<T> {
     /// binder. This is commonly used to 'inject' a value T into a
     /// different binding level.
     pub fn dummy<'tcx>(value: T) -> Binder<T>
-        where T: TypeFoldable<'tcx>
+    where
+        T: TypeFoldable<'tcx>,
     {
         debug_assert!(!value.has_escaping_bound_vars());
         Binder(value)
@@ -818,13 +861,15 @@ impl<T> Binder<T> {
     }
 
     pub fn map_bound_ref<F, U>(&self, f: F) -> Binder<U>
-        where F: FnOnce(&T) -> U
+    where
+        F: FnOnce(&T) -> U,
     {
         self.as_ref().map_bound(f)
     }
 
     pub fn map_bound<F, U>(self, f: F) -> Binder<U>
-        where F: FnOnce(T) -> U
+    where
+        F: FnOnce(T) -> U,
     {
         Binder(f(self.0))
     }
@@ -840,7 +885,8 @@ impl<T> Binder<T> {
     /// indices, and given the shallow binding structure we often use,
     /// would not be that useful.)
     pub fn no_bound_vars<'tcx>(self) -> Option<T>
-        where T: TypeFoldable<'tcx>
+    where
+        T: TypeFoldable<'tcx>,
     {
         if self.skip_binder().has_escaping_bound_vars() {
             None
@@ -856,8 +902,9 @@ impl<T> Binder<T> {
     /// `f` should consider bound regions at depth 1 to be free, and
     /// anything it produces with bound regions at depth 1 will be
     /// bound in the resulting return value.
-    pub fn fuse<U,F,R>(self, u: Binder<U>, f: F) -> Binder<R>
-        where F: FnOnce(T, U) -> R
+    pub fn fuse<U, F, R>(self, u: Binder<U>, f: F) -> Binder<R>
+    where
+        F: FnOnce(T, U) -> R,
     {
         Binder(f(self.0, u.0))
     }
@@ -868,8 +915,9 @@ impl<T> Binder<T> {
     /// `f` should consider bound regions at depth 1 to be free, and
     /// anything it produces with bound regions at depth 1 will be
     /// bound in the resulting return values.
-    pub fn split<U,V,F>(self, f: F) -> (Binder<U>, Binder<V>)
-        where F: FnOnce(T) -> (U, V)
+    pub fn split<U, V, F>(self, f: F) -> (Binder<U>, Binder<V>)
+    where
+        F: FnOnce(T) -> (U, V),
     {
         let (u, v) = f(self.0);
         (Binder(u), Binder(v))
@@ -878,7 +926,9 @@ impl<T> Binder<T> {
 
 /// Represents the projection of an associated type. In explicit UFCS
 /// form this would be written `<T as Trait<..>>::N`.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
+#[derive(
+    Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable,
+)]
 pub struct ProjectionTy<'tcx> {
     /// The parameters of the associated item.
pub substs: &'tcx Substs<'tcx>, @@ -894,12 +944,18 @@ impl<'a, 'tcx> ProjectionTy<'tcx> { /// Construct a `ProjectionTy` by searching the trait from `trait_ref` for the /// associated item named `item_name`. pub fn from_ref_and_name( - tcx: TyCtxt<'_, '_, '_>, trait_ref: ty::TraitRef<'tcx>, item_name: Ident + tcx: TyCtxt<'_, '_, '_>, + trait_ref: ty::TraitRef<'tcx>, + item_name: Ident, ) -> ProjectionTy<'tcx> { - let item_def_id = tcx.associated_items(trait_ref.def_id).find(|item| { - item.kind == ty::AssociatedKind::Type && - tcx.hygienic_eq(item_name, item.ident, trait_ref.def_id) - }).unwrap().def_id; + let item_def_id = tcx + .associated_items(trait_ref.def_id) + .find(|item| { + item.kind == ty::AssociatedKind::Type + && tcx.hygienic_eq(item_name, item.ident, trait_ref.def_id) + }) + .unwrap() + .def_id; ProjectionTy { substs: trait_ref.substs, @@ -995,7 +1051,6 @@ impl<'tcx> PolyFnSig<'tcx> { pub type CanonicalPolyFnSig<'tcx> = Canonical<'tcx, Binder>>; - #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)] pub struct ParamTy { pub idx: u32, @@ -1004,7 +1059,10 @@ pub struct ParamTy { impl<'a, 'gcx, 'tcx> ParamTy { pub fn new(index: u32, name: InternedString) -> ParamTy { - ParamTy { idx: index, name: name } + ParamTy { + idx: index, + name: name, + } } pub fn for_self() -> ParamTy { @@ -1183,7 +1241,9 @@ pub enum RegionKind { impl<'tcx> serialize::UseSpecializedDecodable for Region<'tcx> {} -#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug, PartialOrd, Ord)] +#[derive( + Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug, PartialOrd, Ord, +)] pub struct EarlyBoundRegion { pub def_id: DefId, pub index: u32, @@ -1235,13 +1295,17 @@ newtype_index! { pub struct BoundVar { .. } } -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)] +#[derive( + Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable, +)] pub struct BoundTy { pub var: BoundVar, pub kind: BoundTyKind, } -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)] +#[derive( + Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable, +)] pub enum BoundTyKind { Anon, Param(InternedString), @@ -1260,7 +1324,9 @@ impl From for BoundTy { } /// A `ProjectionPredicate` for an `ExistentialTraitRef`. -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)] +#[derive( + Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable, +)] pub struct ExistentialProjection<'tcx> { pub item_def_id: DefId, pub substs: &'tcx Substs<'tcx>, @@ -1276,16 +1342,17 @@ impl<'a, 'tcx, 'gcx> ExistentialProjection<'tcx> { /// reference. 
pub fn trait_ref(&self, tcx: TyCtxt<'_, '_, '_>) -> ty::ExistentialTraitRef<'tcx> { let def_id = tcx.associated_item(self.item_def_id).container.id(); - ty::ExistentialTraitRef{ + ty::ExistentialTraitRef { def_id, substs: self.substs, } } - pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - self_ty: Ty<'tcx>) - -> ty::ProjectionPredicate<'tcx> - { + pub fn with_self_ty( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + self_ty: Ty<'tcx>, + ) -> ty::ProjectionPredicate<'tcx> { // otherwise the escaping regions would be captured by the binders debug_assert!(!self_ty.has_escaping_bound_vars()); @@ -1300,8 +1367,11 @@ impl<'a, 'tcx, 'gcx> ExistentialProjection<'tcx> { } impl<'a, 'tcx, 'gcx> PolyExistentialProjection<'tcx> { - pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>) - -> ty::PolyProjectionPredicate<'tcx> { + pub fn with_self_ty( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + self_ty: Ty<'tcx>, + ) -> ty::PolyProjectionPredicate<'tcx> { self.map_bound(|p| p.with_self_ty(tcx, self_ty)) } @@ -1432,11 +1502,10 @@ impl RegionKind { /// error (and should fail an assertion failure). pub fn shifted_out_to_binder(&self, to_binder: ty::DebruijnIndex) -> RegionKind { match *self { - ty::ReLateBound(debruijn, r) => ty::ReLateBound( - debruijn.shifted_out_to_binder(to_binder), - r, - ), - r => r + ty::ReLateBound(debruijn, r) => { + ty::ReLateBound(debruijn.shifted_out_to_binder(to_binder), r) + } + r => r, } } @@ -1471,14 +1540,10 @@ impl RegionKind { flags = flags | TypeFlags::HAS_FREE_REGIONS; flags = flags | TypeFlags::HAS_RE_EARLY_BOUND; } - ty::ReEmpty | - ty::ReStatic | - ty::ReFree { .. } | - ty::ReScope { .. } => { + ty::ReEmpty | ty::ReStatic | ty::ReFree { .. } | ty::ReScope { .. } => { flags = flags | TypeFlags::HAS_FREE_REGIONS; } - ty::ReErased => { - } + ty::ReErased => {} ty::ReClosureBound(..) => { flags = flags | TypeFlags::HAS_FREE_REGIONS; } @@ -1515,11 +1580,12 @@ impl RegionKind { /// function might return the def-id of a closure. pub fn free_region_binding_scope(&self, tcx: TyCtxt<'_, '_, '_>) -> DefId { match self { - ty::ReEarlyBound(br) => { - tcx.parent_def_id(br.def_id).unwrap() - } + ty::ReEarlyBound(br) => tcx.parent_def_id(br.def_id).unwrap(), ty::ReFree(fr) => fr.scope, - _ => bug!("free_region_binding_scope invoked on inappropriate region: {:?}", self), + _ => bug!( + "free_region_binding_scope invoked on inappropriate region: {:?}", + self + ), } } } @@ -1562,17 +1628,20 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { // one uninhabited field. def.variants.iter().all(|var| { var.fields.iter().any(|field| { - tcx.type_of(field.did).conservative_is_privately_uninhabited(tcx) + tcx.type_of(field.did) + .conservative_is_privately_uninhabited(tcx) }) }) } - ty::Tuple(tys) => tys.iter().any(|ty| ty.conservative_is_privately_uninhabited(tcx)), + ty::Tuple(tys) => tys + .iter() + .any(|ty| ty.conservative_is_privately_uninhabited(tcx)), ty::Array(ty, len) => { match len.assert_usize(tcx) { // If the array is definitely non-empty, it's uninhabited if // the type of its elements is uninhabited. Some(n) if n != 0 => ty.conservative_is_privately_uninhabited(tcx), - _ => false + _ => false, } } ty::Ref(..) 
=> { @@ -1615,7 +1684,9 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { } } - pub fn is_bool(&self) -> bool { self.sty == Bool } + pub fn is_bool(&self) -> bool { + self.sty == Bool + } pub fn is_param(&self, index: u32) -> bool { match self.sty { @@ -1637,7 +1708,7 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { Slice(_) | Str => true, _ => false, }, - _ => false + _ => false, } } @@ -1653,23 +1724,24 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { match self.sty { Array(ty, _) | Slice(ty) => ty, Str => tcx.mk_mach_uint(ast::UintTy::U8), - _ => bug!("sequence_element_type called on non-sequence value: {}", self), + _ => bug!( + "sequence_element_type called on non-sequence value: {}", + self + ), } } pub fn simd_type(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> { match self.sty { - Adt(def, substs) => { - def.non_enum_variant().fields[0].ty(tcx, substs) - } - _ => bug!("simd_type called on invalid type") + Adt(def, substs) => def.non_enum_variant().fields[0].ty(tcx, substs), + _ => bug!("simd_type called on invalid type"), } } pub fn simd_size(&self, _cx: TyCtxt<'_, '_, '_>) -> usize { match self.sty { Adt(def, _) => def.non_enum_variant().fields.len(), - _ => bug!("simd_size called on invalid type") + _ => bug!("simd_size called on invalid type"), } } @@ -1682,9 +1754,12 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { pub fn is_mutable_pointer(&self) -> bool { match self.sty { - RawPtr(TypeAndMut { mutbl: hir::Mutability::MutMutable, .. }) | - Ref(_, _, hir::Mutability::MutMutable) => true, - _ => false + RawPtr(TypeAndMut { + mutbl: hir::Mutability::MutMutable, + .. + }) + | Ref(_, _, hir::Mutability::MutMutable) => true, + _ => false, } } @@ -1731,18 +1806,16 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { /// contents are abstract to rustc.) pub fn is_scalar(&self) -> bool { match self.sty { - Bool | Char | Int(_) | Float(_) | Uint(_) | - Infer(IntVar(_)) | Infer(FloatVar(_)) | - FnDef(..) | FnPtr(_) | RawPtr(_) => true, - _ => false + Bool | Char | Int(_) | Float(_) | Uint(_) | Infer(IntVar(_)) | Infer(FloatVar(_)) + | FnDef(..) | FnPtr(_) | RawPtr(_) => true, + _ => false, } } /// Returns true if this type is a floating point type and false otherwise. pub fn is_floating_point(&self) -> bool { match self.sty { - Float(_) | - Infer(FloatVar(_)) => true, + Float(_) | Infer(FloatVar(_)) => true, _ => false, } } @@ -1756,9 +1829,7 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { pub fn is_enum(&self) -> bool { match self.sty { - Adt(adt_def, _) => { - adt_def.is_enum() - } + Adt(adt_def, _) => adt_def.is_enum(), _ => false, } } @@ -1781,7 +1852,7 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { pub fn is_integral(&self) -> bool { match self.sty { Infer(IntVar(_)) | Int(_) | Uint(_) => true, - _ => false + _ => false, } } @@ -1812,7 +1883,7 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { pub fn is_fp(&self) -> bool { match self.sty { Infer(FloatVar(_)) | Float(_) => true, - _ => false + _ => false, } } @@ -1855,12 +1926,10 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { /// Some types -- notably unsafe ptrs -- can only be dereferenced explicitly. 
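The `TyS` classification helpers reformatted in this file (`is_scalar`, `is_integral`, `is_mutable_pointer`, and friends) share one shape: match on the type kind, return `true` for a handful of variants, and fall through to `false`. A self-contained sketch of that shape, using an invented toy kind enum rather than the real `TyKind`, with the leading-pipe arm layout the diff adopts:

```rust
// Toy stand-in for the type-kind enum used by these predicates; the
// variants are invented and far smaller than rustc's `TyKind`.
#[allow(dead_code)]
enum Kind {
    Bool,
    Char,
    Int,
    Uint,
    Float,
    RawPtr { mutable: bool },
    Ref { mutable: bool },
}

struct Ty {
    kind: Kind,
}

impl Ty {
    // "Scalar"-style predicate: a few variants are true, the catch-all
    // arm is false -- the shape shared by `is_scalar`, `is_integral`, `is_fp`.
    fn is_scalar(&self) -> bool {
        match self.kind {
            Kind::Bool | Kind::Char | Kind::Int | Kind::Uint | Kind::Float => true,
            _ => false,
        }
    }

    // Predicate over struct-like variants, mirroring `is_mutable_pointer`.
    fn is_mutable_pointer(&self) -> bool {
        match self.kind {
            Kind::RawPtr { mutable: true } | Kind::Ref { mutable: true } => true,
            _ => false,
        }
    }
}

fn main() {
    let by_ref = Ty { kind: Kind::Ref { mutable: true } };
    assert!(by_ref.is_mutable_pointer());
    assert!(!by_ref.is_scalar());
    assert!(Ty { kind: Kind::Float }.is_scalar());
    assert!(!Ty { kind: Kind::RawPtr { mutable: false } }.is_mutable_pointer());
}
```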
pub fn builtin_deref(&self, explicit: bool) -> Option> { match self.sty { - Adt(def, _) if def.is_box() => { - Some(TypeAndMut { - ty: self.boxed_ty(), - mutbl: hir::MutImmutable, - }) - }, + Adt(def, _) if def.is_box() => Some(TypeAndMut { + ty: self.boxed_ty(), + mutbl: hir::MutImmutable, + }), Ref(_, ty, mutbl) => Some(TypeAndMut { ty, mutbl }), RawPtr(mt) if explicit => Some(mt), _ => None, @@ -1877,11 +1946,9 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { pub fn fn_sig(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PolyFnSig<'tcx> { match self.sty { - FnDef(def_id, substs) => { - tcx.fn_sig(def_id).subst(tcx, substs) - } + FnDef(def_id, substs) => tcx.fn_sig(def_id).subst(tcx, substs), FnPtr(f) => f, - _ => bug!("Ty::fn_sig() called on non-fn type: {:?}", self) + _ => bug!("Ty::fn_sig() called on non-fn type: {:?}", self), } } @@ -1919,36 +1986,15 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { out.push(region); out.extend(obj.principal().skip_binder().substs.regions()); } - Adt(_, substs) | Opaque(_, substs) => { - out.extend(substs.regions()) - } - Closure(_, ClosureSubsts { ref substs }) | - Generator(_, GeneratorSubsts { ref substs }, _) => { - out.extend(substs.regions()) - } + Adt(_, substs) | Opaque(_, substs) => out.extend(substs.regions()), + Closure(_, ClosureSubsts { ref substs }) + | Generator(_, GeneratorSubsts { ref substs }, _) => out.extend(substs.regions()), Projection(ref data) | UnnormalizedProjection(ref data) => { out.extend(data.substs.regions()) } - FnDef(..) | - FnPtr(_) | - GeneratorWitness(..) | - Bool | - Char | - Int(_) | - Uint(_) | - Float(_) | - Str | - Array(..) | - Slice(_) | - RawPtr(_) | - Never | - Tuple(..) | - Foreign(..) | - Param(_) | - Bound(..) | - Placeholder(..) | - Infer(_) | - Error => {} + FnDef(..) | FnPtr(_) | GeneratorWitness(..) | Bool | Char | Int(_) | Uint(_) + | Float(_) | Str | Array(..) | Slice(_) | RawPtr(_) | Never | Tuple(..) + | Foreign(..) | Param(_) | Bound(..) | Placeholder(..) | Infer(_) | Error => {} } } @@ -1987,22 +2033,29 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { /// `false` means nothing -- could be sized, might not be. pub fn is_trivially_sized(&self, tcx: TyCtxt<'_, '_, 'tcx>) -> bool { match self.sty { - ty::Infer(ty::IntVar(_)) | ty::Infer(ty::FloatVar(_)) | - ty::Uint(_) | ty::Int(_) | ty::Bool | ty::Float(_) | - ty::FnDef(..) | ty::FnPtr(_) | ty::RawPtr(..) | - ty::Char | ty::Ref(..) | ty::Generator(..) | - ty::GeneratorWitness(..) | ty::Array(..) | ty::Closure(..) | - ty::Never | ty::Error => - true, - - ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => - false, - - ty::Tuple(tys) => - tys.iter().all(|ty| ty.is_trivially_sized(tcx)), - - ty::Adt(def, _substs) => - def.sized_constraint(tcx).is_empty(), + ty::Infer(ty::IntVar(_)) + | ty::Infer(ty::FloatVar(_)) + | ty::Uint(_) + | ty::Int(_) + | ty::Bool + | ty::Float(_) + | ty::FnDef(..) + | ty::FnPtr(_) + | ty::RawPtr(..) + | ty::Char + | ty::Ref(..) + | ty::Generator(..) + | ty::GeneratorWitness(..) + | ty::Array(..) + | ty::Closure(..) + | ty::Never + | ty::Error => true, + + ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => false, + + ty::Tuple(tys) => tys.iter().all(|ty| ty.is_trivially_sized(tcx)), + + ty::Adt(def, _substs) => def.sized_constraint(tcx).is_empty(), ty::Projection(_) | ty::Param(_) | ty::Opaque(..) => false, @@ -2010,18 +2063,21 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { ty::Infer(ty::TyVar(_)) => false, - ty::Bound(..) | - ty::Placeholder(..) 
| - ty::Infer(ty::FreshTy(_)) | - ty::Infer(ty::FreshIntTy(_)) | - ty::Infer(ty::FreshFloatTy(_)) => - bug!("is_trivially_sized applied to unexpected type: {:?}", self), + ty::Bound(..) + | ty::Placeholder(..) + | ty::Infer(ty::FreshTy(_)) + | ty::Infer(ty::FreshIntTy(_)) + | ty::Infer(ty::FreshFloatTy(_)) => { + bug!("is_trivially_sized applied to unexpected type: {:?}", self) + } } } } /// Typed constant value. -#[derive(Copy, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Eq, PartialEq, Ord, PartialOrd)] +#[derive( + Copy, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Eq, PartialEq, Ord, PartialOrd, +)] pub struct Const<'tcx> { pub ty: Ty<'tcx>, @@ -2047,18 +2103,11 @@ impl<'tcx> Const<'tcx> { val: ConstValue<'tcx>, ty: Ty<'tcx>, ) -> &'tcx Self { - tcx.mk_const(Const { - val, - ty, - }) + tcx.mk_const(Const { val, ty }) } #[inline] - pub fn from_scalar( - tcx: TyCtxt<'_, '_, 'tcx>, - val: Scalar, - ty: Ty<'tcx>, - ) -> &'tcx Self { + pub fn from_scalar(tcx: TyCtxt<'_, '_, 'tcx>, val: Scalar, ty: Ty<'tcx>) -> &'tcx Self { Self::from_const_value(tcx, ConstValue::Scalar(val), ty) } @@ -2069,13 +2118,21 @@ impl<'tcx> Const<'tcx> { ty: ParamEnvAnd<'tcx, Ty<'tcx>>, ) -> &'tcx Self { let ty = tcx.lift_to_global(&ty).unwrap(); - let size = tcx.layout_of(ty).unwrap_or_else(|e| { - panic!("could not compute layout for {:?}: {:?}", ty, e) - }).size; + let size = tcx + .layout_of(ty) + .unwrap_or_else(|e| panic!("could not compute layout for {:?}: {:?}", ty, e)) + .size; let shift = 128 - size.bits(); let truncated = (bits << shift) >> shift; assert_eq!(truncated, bits, "from_bits called with untruncated value"); - Self::from_scalar(tcx, Scalar::Bits { bits, size: size.bytes() as u8 }, ty.value) + Self::from_scalar( + tcx, + Scalar::Bits { + bits, + size: size.bytes() as u8, + }, + ty.value, + ) } #[inline] @@ -2126,32 +2183,30 @@ impl<'tcx> Const<'tcx> { #[inline] pub fn assert_bool(&self, tcx: TyCtxt<'_, '_, '_>) -> Option { - self.assert_bits(tcx, ParamEnv::empty().and(tcx.types.bool)).and_then(|v| match v { - 0 => Some(false), - 1 => Some(true), - _ => None, - }) + self.assert_bits(tcx, ParamEnv::empty().and(tcx.types.bool)) + .and_then(|v| match v { + 0 => Some(false), + 1 => Some(true), + _ => None, + }) } #[inline] pub fn assert_usize(&self, tcx: TyCtxt<'_, '_, '_>) -> Option { - self.assert_bits(tcx, ParamEnv::empty().and(tcx.types.usize)).map(|v| v as u64) + self.assert_bits(tcx, ParamEnv::empty().and(tcx.types.usize)) + .map(|v| v as u64) } #[inline] - pub fn unwrap_bits( - &self, - tcx: TyCtxt<'_, '_, '_>, - ty: ParamEnvAnd<'tcx, Ty<'tcx>>, - ) -> u128 { - self.assert_bits(tcx, ty).unwrap_or_else(|| - bug!("expected bits of {}, got {:#?}", ty.value, self)) + pub fn unwrap_bits(&self, tcx: TyCtxt<'_, '_, '_>, ty: ParamEnvAnd<'tcx, Ty<'tcx>>) -> u128 { + self.assert_bits(tcx, ty) + .unwrap_or_else(|| bug!("expected bits of {}, got {:#?}", ty.value, self)) } #[inline] pub fn unwrap_usize(&self, tcx: TyCtxt<'_, '_, '_>) -> u64 { - self.assert_usize(tcx).unwrap_or_else(|| - bug!("expected constant usize, got {:#?}", self)) + self.assert_usize(tcx) + .unwrap_or_else(|| bug!("expected constant usize, got {:#?}", self)) } } diff --git a/src/librustc/ty/subst.rs b/src/librustc/ty/subst.rs index 64e7af815b4bf..babd24af92bca 100644 --- a/src/librustc/ty/subst.rs +++ b/src/librustc/ty/subst.rs @@ -2,12 +2,12 @@ use hir::def_id::DefId; use infer::canonical::Canonical; -use ty::{self, Lift, List, Ty, TyCtxt}; use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; +use ty::{self, Lift, 
List, Ty, TyCtxt}; -use serialize::{self, Encodable, Encoder, Decodable, Decoder}; -use syntax_pos::{Span, DUMMY_SP}; +use serialize::{self, Decodable, Decoder, Encodable, Encoder}; use smallvec::SmallVec; +use syntax_pos::{Span, DUMMY_SP}; use core::intrinsics; use std::cmp::Ordering; @@ -24,7 +24,7 @@ use std::num::NonZeroUsize; #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct Kind<'tcx> { ptr: NonZeroUsize, - marker: PhantomData<(Ty<'tcx>, ty::Region<'tcx>)> + marker: PhantomData<(Ty<'tcx>, ty::Region<'tcx>)>, } const TAG_MASK: usize = 0b11; @@ -53,10 +53,8 @@ impl<'tcx> UnpackedKind<'tcx> { }; Kind { - ptr: unsafe { - NonZeroUsize::new_unchecked(ptr | tag) - }, - marker: PhantomData + ptr: unsafe { NonZeroUsize::new_unchecked(ptr | tag) }, + marker: PhantomData, } } } @@ -93,7 +91,7 @@ impl<'tcx> Kind<'tcx> { match ptr & TAG_MASK { REGION_TAG => UnpackedKind::Lifetime(&*((ptr & !TAG_MASK) as *const _)), TYPE_TAG => UnpackedKind::Type(&*((ptr & !TAG_MASK) as *const _)), - _ => intrinsics::unreachable() + _ => intrinsics::unreachable(), } } } @@ -161,39 +159,32 @@ pub type Substs<'tcx> = List>; impl<'a, 'gcx, 'tcx> Substs<'tcx> { /// Creates a `Substs` that maps each generic parameter to itself. - pub fn identity_for_item(tcx: TyCtxt<'a, 'gcx, 'tcx>, def_id: DefId) - -> &'tcx Substs<'tcx> { - Substs::for_item(tcx, def_id, |param, _| { - tcx.mk_param_from_def(param) - }) + pub fn identity_for_item(tcx: TyCtxt<'a, 'gcx, 'tcx>, def_id: DefId) -> &'tcx Substs<'tcx> { + Substs::for_item(tcx, def_id, |param, _| tcx.mk_param_from_def(param)) } /// Creates a `Substs` that maps each generic parameter to a higher-ranked /// var bound at index `0`. For types, we use a `BoundVar` index equal to /// the type parameter index. For regions, we use the `BoundRegion::BrNamed` /// variant (which has a def-id). - pub fn bound_vars_for_item( - tcx: TyCtxt<'a, 'gcx, 'tcx>, - def_id: DefId - ) -> &'tcx Substs<'tcx> { - Substs::for_item(tcx, def_id, |param, _| { - match param.kind { - ty::GenericParamDefKind::Type { .. } => { - tcx.mk_ty( - ty::Bound(ty::INNERMOST, ty::BoundTy { - var: ty::BoundVar::from(param.index), - kind: ty::BoundTyKind::Param(param.name), - }) - ).into() - } - - ty::GenericParamDefKind::Lifetime => { - tcx.mk_region(ty::RegionKind::ReLateBound( - ty::INNERMOST, - ty::BoundRegion::BrNamed(param.def_id, param.name) - )).into() - } - } + pub fn bound_vars_for_item(tcx: TyCtxt<'a, 'gcx, 'tcx>, def_id: DefId) -> &'tcx Substs<'tcx> { + Substs::for_item(tcx, def_id, |param, _| match param.kind { + ty::GenericParamDefKind::Type { .. } => tcx + .mk_ty(ty::Bound( + ty::INNERMOST, + ty::BoundTy { + var: ty::BoundVar::from(param.index), + kind: ty::BoundTyKind::Param(param.name), + }, + )) + .into(), + + ty::GenericParamDefKind::Lifetime => tcx + .mk_region(ty::RegionKind::ReLateBound( + ty::INNERMOST, + ty::BoundRegion::BrNamed(param.def_id, param.name), + )) + .into(), }) } @@ -202,11 +193,13 @@ impl<'a, 'gcx, 'tcx> Substs<'tcx> { /// The closures get to observe the `Substs` as they're /// being built, which can be used to correctly /// substitute defaults of generic parameters. 
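The `for_item` builder that follows hands each generic parameter to a closure together with the substitution entries built so far, which is what lets defaults refer to earlier parameters. A rough standalone sketch of that pattern, with invented `Param` and `Entry` placeholders rather than rustc's `GenericParamDef` and `Kind`:

```rust
// Sketch of a `for_item`-style builder: each parameter is mapped to an
// entry by a closure that can also inspect the prefix already produced.
#[derive(Debug)]
struct Param {
    name: &'static str,
}

#[derive(Debug, Clone, PartialEq)]
struct Entry(String);

fn build_substs<F>(params: &[Param], mut mk: F) -> Vec<Entry>
where
    F: FnMut(&Param, &[Entry]) -> Entry,
{
    let mut out = Vec::with_capacity(params.len());
    for param in params {
        // The closure sees the entries built so far, like `mk_kind(param, substs)`.
        let entry = mk(param, &out);
        out.push(entry);
    }
    out
}

fn main() {
    let params = [Param { name: "T" }, Param { name: "U" }];

    // "Identity" substitution: every parameter maps to itself.
    let identity = build_substs(&params, |p, _| Entry(p.name.to_string()));
    assert_eq!(identity, vec![Entry("T".into()), Entry("U".into())]);

    // A default for `U` that refers back to whatever `T` was mapped to.
    let defaulted = build_substs(&params, |p, prefix| match p.name {
        "U" => Entry(format!("Vec<{}>", prefix[0].0)),
        _ => Entry(p.name.to_string()),
    });
    assert_eq!(defaulted[1], Entry("Vec<T>".into()));
}
```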
- pub fn for_item(tcx: TyCtxt<'a, 'gcx, 'tcx>, - def_id: DefId, - mut mk_kind: F) - -> &'tcx Substs<'tcx> - where F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx> + pub fn for_item( + tcx: TyCtxt<'a, 'gcx, 'tcx>, + def_id: DefId, + mut mk_kind: F, + ) -> &'tcx Substs<'tcx> + where + F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx>, { let defs = tcx.generics_of(def_id); let count = defs.count(); @@ -215,12 +208,14 @@ impl<'a, 'gcx, 'tcx> Substs<'tcx> { tcx.intern_substs(&substs) } - pub fn extend_to(&self, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - def_id: DefId, - mut mk_kind: F) - -> &'tcx Substs<'tcx> - where F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx> + pub fn extend_to( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + def_id: DefId, + mut mk_kind: F, + ) -> &'tcx Substs<'tcx> + where + F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx>, { Substs::for_item(tcx, def_id, |param, substs| { self.get(param.index as usize) @@ -229,11 +224,13 @@ impl<'a, 'gcx, 'tcx> Substs<'tcx> { }) } - fn fill_item(substs: &mut SmallVec<[Kind<'tcx>; 8]>, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - defs: &ty::Generics, - mk_kind: &mut F) - where F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx> + fn fill_item( + substs: &mut SmallVec<[Kind<'tcx>; 8]>, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + defs: &ty::Generics, + mk_kind: &mut F, + ) where + F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx>, { if let Some(def_id) = defs.parent { let parent_defs = tcx.generics_of(def_id); @@ -242,10 +239,9 @@ impl<'a, 'gcx, 'tcx> Substs<'tcx> { Substs::fill_single(substs, defs, mk_kind) } - fn fill_single(substs: &mut SmallVec<[Kind<'tcx>; 8]>, - defs: &ty::Generics, - mk_kind: &mut F) - where F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx> + fn fill_single(substs: &mut SmallVec<[Kind<'tcx>; 8]>, defs: &ty::Generics, mk_kind: &mut F) + where + F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx>, { substs.reserve(defs.params.len()); for param in &defs.params { @@ -309,16 +305,26 @@ impl<'a, 'gcx, 'tcx> Substs<'tcx> { /// in a different item, with `target_substs` as the base for /// the target impl/trait, with the source child-specific /// parameters (e.g., method parameters) on top of that base. - pub fn rebase_onto(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - source_ancestor: DefId, - target_substs: &Substs<'tcx>) - -> &'tcx Substs<'tcx> { + pub fn rebase_onto( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + source_ancestor: DefId, + target_substs: &Substs<'tcx>, + ) -> &'tcx Substs<'tcx> { let defs = tcx.generics_of(source_ancestor); - tcx.mk_substs(target_substs.iter().chain(&self[defs.params.len()..]).cloned()) + tcx.mk_substs( + target_substs + .iter() + .chain(&self[defs.params.len()..]) + .cloned(), + ) } - pub fn truncate_to(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, generics: &ty::Generics) - -> &'tcx Substs<'tcx> { + pub fn truncate_to( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + generics: &ty::Generics, + ) -> &'tcx Substs<'tcx> { tcx.mk_substs(self.iter().take(generics.count()).cloned()) } } @@ -351,29 +357,33 @@ impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Substs<'tcx> {} // there is more information available (for better errors). 
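The `rebase_onto`/`truncate_to` pair reformatted above splices the child-specific tail of one substitution list onto a different parent base. A sketch of that idea on plain vectors, with an invented `Entry` placeholder standing in for `Kind<'tcx>`:

```rust
// Illustration of rebasing substitutions onto a new parent base.
#[derive(Debug, Clone, PartialEq)]
struct Entry(&'static str);

/// `child[parent_len..]` are the child-specific parameters (e.g. method
/// parameters); everything before that belongs to the parent item.
fn rebase_onto(child: &[Entry], parent_len: usize, target_base: &[Entry]) -> Vec<Entry> {
    target_base
        .iter()
        .chain(&child[parent_len..])
        .cloned()
        .collect()
}

fn truncate_to(substs: &[Entry], count: usize) -> Vec<Entry> {
    substs.iter().take(count).cloned().collect()
}

fn main() {
    // Substs for a method as seen in the trait: [trait params..., method params...]
    let in_trait = vec![Entry("Self=Vec<i32>"), Entry("'a"), Entry("M=u8")];
    // Rebase the method-specific tail onto the impl's own substitutions.
    let impl_base = vec![Entry("Self=Vec<String>"), Entry("'static")];
    let in_impl = rebase_onto(&in_trait, 2, &impl_base);
    assert_eq!(
        in_impl,
        vec![Entry("Self=Vec<String>"), Entry("'static"), Entry("M=u8")]
    );
    assert_eq!(truncate_to(&in_impl, 2), impl_base);
}
```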
pub trait Subst<'tcx>: Sized { - fn subst<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - substs: &[Kind<'tcx>]) -> Self { + fn subst<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, substs: &[Kind<'tcx>]) -> Self { self.subst_spanned(tcx, substs, None) } - fn subst_spanned<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - substs: &[Kind<'tcx>], - span: Option) - -> Self; + fn subst_spanned<'a, 'gcx>( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + substs: &[Kind<'tcx>], + span: Option, + ) -> Self; } -impl<'tcx, T:TypeFoldable<'tcx>> Subst<'tcx> for T { - fn subst_spanned<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - substs: &[Kind<'tcx>], - span: Option) - -> T - { - let mut folder = SubstFolder { tcx, - substs, - span, - root_ty: None, - ty_stack_depth: 0, - binders_passed: 0 }; +impl<'tcx, T: TypeFoldable<'tcx>> Subst<'tcx> for T { + fn subst_spanned<'a, 'gcx>( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + substs: &[Kind<'tcx>], + span: Option, + ) -> T { + let mut folder = SubstFolder { + tcx, + substs, + span, + root_ty: None, + ty_stack_depth: 0, + binders_passed: 0, + }; (*self).fold_with(&mut folder) } } @@ -381,7 +391,7 @@ impl<'tcx, T:TypeFoldable<'tcx>> Subst<'tcx> for T { /////////////////////////////////////////////////////////////////////////// // The actual substitution engine itself is a type folder. -struct SubstFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +struct SubstFolder<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'tcx>, substs: &'a [Kind<'tcx>], @@ -399,7 +409,9 @@ struct SubstFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { } impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for SubstFolder<'a, 'gcx, 'tcx> { - fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx } + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { + self.tcx + } fn fold_binder>(&mut self, t: &ty::Binder) -> ty::Binder { self.binders_passed += 1; @@ -418,9 +430,7 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for SubstFolder<'a, 'gcx, 'tcx> { ty::ReEarlyBound(data) => { let r = self.substs.get(data.index as usize).map(|k| k.unpack()); match r { - Some(UnpackedKind::Lifetime(lt)) => { - self.shift_region_through_binders(lt) - } + Some(UnpackedKind::Lifetime(lt)) => self.shift_region_through_binders(lt), _ => { let span = self.span.unwrap_or(DUMMY_SP); span_bug!( @@ -430,11 +440,12 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for SubstFolder<'a, 'gcx, 'tcx> { (index={})", data.name, self.root_ty, - data.index); + data.index + ); } } } - _ => r + _ => r, } } @@ -451,12 +462,8 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for SubstFolder<'a, 'gcx, 'tcx> { self.ty_stack_depth += 1; let t1 = match t.sty { - ty::Param(p) => { - self.ty_for_param(p, t) - } - _ => { - t.super_fold_with(self) - } + ty::Param(p) => self.ty_for_param(p, t), + _ => t.super_fold_with(self), }; assert_eq!(depth + 1, self.ty_stack_depth); @@ -485,7 +492,8 @@ impl<'a, 'gcx, 'tcx> SubstFolder<'a, 'gcx, 'tcx> { source_ty, p.idx, self.root_ty, - self.substs); + self.substs + ); } }; @@ -535,8 +543,12 @@ impl<'a, 'gcx, 'tcx> SubstFolder<'a, 'gcx, 'tcx> { /// first case we do not increase the Debruijn index and in the second case we do. The reason /// is that only in the second case have we passed through a fn binder. 
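The shifting that `shift_vars_through_binders` performs (described in the comment just above) is ordinary De Bruijn index adjustment: a substituted value placed under extra binders must have its escaping bound variables bumped so they keep pointing at the same outer binder. A toy, self-contained sketch of that adjustment, using an invented `Term` type rather than rustc's types and regions:

```rust
// Standalone sketch of De Bruijn shifting when substituting under binders.
#[derive(Debug, Clone, PartialEq)]
enum Term {
    /// A variable bound `index` binders out from where it occurs.
    Bound(u32),
    Fn(Box<Term>), // one binder level
    Tuple(Vec<Term>),
}

fn shift(term: &Term, by: u32, depth: u32) -> Term {
    match term {
        // Only variables that escape the part of the term we've already
        // walked into (index >= depth) refer to outer binders and move.
        Term::Bound(i) if *i >= depth => Term::Bound(i + by),
        Term::Bound(i) => Term::Bound(*i),
        Term::Fn(body) => Term::Fn(Box::new(shift(body, by, depth + 1))),
        Term::Tuple(ts) => Term::Tuple(ts.iter().map(|t| shift(t, by, depth)).collect()),
    }
}

fn main() {
    // A value that mentions the binder enclosing the substitution site.
    let value = Term::Bound(0);
    // Substituted at the top level: nothing to shift.
    assert_eq!(shift(&value, 0, 0), Term::Bound(0));
    // Substituted underneath one extra `Fn` binder: shift by one so the
    // variable still refers to the outer binder, not the new one.
    assert_eq!(shift(&value, 1, 0), Term::Bound(1));
}
```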
fn shift_vars_through_binders(&self, ty: Ty<'tcx>) -> Ty<'tcx> { - debug!("shift_vars(ty={:?}, binders_passed={:?}, has_escaping_bound_vars={:?})", - ty, self.binders_passed, ty.has_escaping_bound_vars()); + debug!( + "shift_vars(ty={:?}, binders_passed={:?}, has_escaping_bound_vars={:?})", + ty, + self.binders_passed, + ty.has_escaping_bound_vars() + ); if self.binders_passed == 0 || !ty.has_escaping_bound_vars() { return ty; diff --git a/src/librustc/ty/trait_def.rs b/src/librustc/ty/trait_def.rs index 37ec560d6c19f..637b406dfc876 100644 --- a/src/librustc/ty/trait_def.rs +++ b/src/librustc/ty/trait_def.rs @@ -8,8 +8,7 @@ use ty::fold::TypeFoldable; use ty::{Ty, TyCtxt}; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher, - StableHasherResult}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; use rustc_data_structures::sync::Lrc; /// A trait's definition with type information. @@ -44,13 +43,14 @@ pub struct TraitImpls { } impl<'a, 'gcx, 'tcx> TraitDef { - pub fn new(def_id: DefId, - unsafety: hir::Unsafety, - paren_sugar: bool, - has_auto_impl: bool, - is_marker: bool, - def_path_hash: DefPathHash) - -> TraitDef { + pub fn new( + def_id: DefId, + unsafety: hir::Unsafety, + paren_sugar: bool, + has_auto_impl: bool, + is_marker: bool, + def_path_hash: DefPathHash, + ) -> TraitDef { TraitDef { def_id, unsafety, @@ -61,9 +61,11 @@ impl<'a, 'gcx, 'tcx> TraitDef { } } - pub fn ancestors(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - of_impl: DefId) - -> specialization_graph::Ancestors { + pub fn ancestors( + &self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + of_impl: DefId, + ) -> specialization_graph::Ancestors { specialization_graph::ancestors(tcx, self.def_id, of_impl) } } @@ -85,11 +87,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Iterate over every impl that could possibly match the /// self-type `self_ty`. - pub fn for_each_relevant_impl(self, - def_id: DefId, - self_ty: Ty<'tcx>, - mut f: F) - { + pub fn for_each_relevant_impl( + self, + def_id: DefId, + self_ty: Ty<'tcx>, + mut f: F, + ) { let impls = self.trait_impls_of(def_id); for &impl_def_id in impls.blanket_impls.iter() { @@ -138,16 +141,20 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn all_impls(self, def_id: DefId) -> Vec { let impls = self.trait_impls_of(def_id); - impls.blanket_impls.iter().chain( - impls.non_blanket_impls.values().flatten() - ).cloned().collect() + impls + .blanket_impls + .iter() + .chain(impls.non_blanket_impls.values().flatten()) + .cloned() + .collect() } } // Query provider for `trait_impls_of`. 
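The `trait_impls_of` provider that follows buckets impls into blanket impls and impls keyed by a simplified self type, and `for_each_relevant_impl` above consults exactly those buckets. A rough sketch of that lookup structure, with invented `SimplifiedType` and `ImplId` placeholders:

```rust
use std::collections::HashMap;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum SimplifiedType {
    Uint,
    Vec,
}

type ImplId = u32;

#[derive(Default)]
struct TraitImpls {
    /// Impls whose self type cannot be simplified (e.g. `impl<T> Trait for T`).
    blanket_impls: Vec<ImplId>,
    /// Impls keyed by a cheap approximation of their self type.
    non_blanket_impls: HashMap<SimplifiedType, Vec<ImplId>>,
}

impl TraitImpls {
    fn insert(&mut self, impl_id: ImplId, simplified: Option<SimplifiedType>) {
        match simplified {
            Some(key) => self.non_blanket_impls.entry(key).or_default().push(impl_id),
            None => self.blanket_impls.push(impl_id),
        }
    }

    /// Blanket impls are always relevant; non-blanket impls only if their
    /// simplified self type matches, or if the query type itself cannot be
    /// simplified, in which case every bucket must be considered.
    fn for_each_relevant_impl(&self, self_ty: Option<SimplifiedType>, mut f: impl FnMut(ImplId)) {
        for &id in &self.blanket_impls {
            f(id);
        }
        match self_ty {
            Some(key) => {
                if let Some(bucket) = self.non_blanket_impls.get(&key) {
                    bucket.iter().copied().for_each(&mut f);
                }
            }
            None => {
                for bucket in self.non_blanket_impls.values() {
                    bucket.iter().copied().for_each(&mut f);
                }
            }
        }
    }
}

fn main() {
    let mut impls = TraitImpls::default();
    impls.insert(1, None); // blanket impl
    impls.insert(2, Some(SimplifiedType::Vec));
    impls.insert(3, Some(SimplifiedType::Uint));

    let mut relevant = Vec::new();
    impls.for_each_relevant_impl(Some(SimplifiedType::Vec), |id| relevant.push(id));
    assert_eq!(relevant, vec![1, 2]);
}
```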
-pub(super) fn trait_impls_of_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - trait_id: DefId) - -> Lrc { +pub(super) fn trait_impls_of_provider<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + trait_id: DefId, +) -> Lrc { let mut impls = TraitImpls::default(); { @@ -157,13 +164,12 @@ pub(super) fn trait_impls_of_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, return; } - if let Some(simplified_self_ty) = - fast_reject::simplify_type(tcx, impl_self_ty, false) - { - impls.non_blanket_impls - .entry(simplified_self_ty) - .or_default() - .push(impl_def_id); + if let Some(simplified_self_ty) = fast_reject::simplify_type(tcx, impl_self_ty, false) { + impls + .non_blanket_impls + .entry(simplified_self_ty) + .or_default() + .push(impl_def_id); } else { impls.blanket_impls.push(impl_def_id); } @@ -188,9 +194,11 @@ pub(super) fn trait_impls_of_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } impl<'a> HashStable> for TraitImpls { - fn hash_stable(&self, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher) { + fn hash_stable( + &self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher, + ) { let TraitImpls { ref blanket_impls, ref non_blanket_impls, diff --git a/src/librustc/ty/util.rs b/src/librustc/ty/util.rs index ac062a2378611..24784e696a943 100644 --- a/src/librustc/ty/util.rs +++ b/src/librustc/ty/util.rs @@ -5,17 +5,17 @@ use hir::def_id::DefId; use hir::map::DefPathData; use hir::{self, Node}; use ich::NodeIdHashingMode; +use middle::lang_items; use traits::{self, ObligationCause}; -use ty::{self, Ty, TyCtxt, GenericParamDefKind, TypeFoldable}; -use ty::subst::{Substs, UnpackedKind}; +use ty::layout::{Integer, IntegerExt}; use ty::query::TyCtxtAt; +use ty::subst::{Substs, UnpackedKind}; use ty::TyKind::*; -use ty::layout::{Integer, IntegerExt}; +use ty::{self, GenericParamDefKind, Ty, TyCtxt, TypeFoldable}; use util::common::ErrorReported; -use middle::lang_items; -use rustc_data_structures::stable_hasher::{StableHasher, HashStable}; use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use std::{cmp, fmt}; use syntax::ast; use syntax::attr::{self, SignedInt, UnsignedInt}; @@ -25,21 +25,20 @@ use syntax_pos::{Span, DUMMY_SP}; pub struct Discr<'tcx> { /// bit representation of the discriminant, so `-128i8` is `0xFF_u128` pub val: u128, - pub ty: Ty<'tcx> + pub ty: Ty<'tcx>, } impl<'tcx> fmt::Display for Discr<'tcx> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { match self.ty.sty { ty::Int(ity) => { - let bits = ty::tls::with(|tcx| { - Integer::from_attr(&tcx, SignedInt(ity)).size().bits() - }); + let bits = + ty::tls::with(|tcx| Integer::from_attr(&tcx, SignedInt(ity)).size().bits()); let x = self.val as i128; // sign extend the raw representation to be an i128 let x = (x << (128 - bits)) >> (128 - bits); write!(fmt, "{}", x) - }, + } _ => write!(fmt, "{}", self.val), } } @@ -78,48 +77,53 @@ impl<'tcx> Discr<'tcx> { // zero the upper bits let val = val as u128; let val = (val << shift) >> shift; - (Self { - val: val as u128, - ty: self.ty, - }, oflo) + ( + Self { + val: val as u128, + ty: self.ty, + }, + oflo, + ) } else { let max = u128::max_value() >> shift; let val = self.val; let oflo = val > max - n; - let val = if oflo { - n - (max - val) - 1 - } else { - val + n - }; - (Self { - val: val, - ty: self.ty, - }, oflo) + let val = if oflo { n - (max - val) - 1 } else { val + n }; + ( + Self { + val: val, + ty: self.ty, + }, + oflo, + ) } } } pub trait IntTypeExt { fn to_ty<'a, 'gcx, 
'tcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx>; - fn disr_incr<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, val: Option>) - -> Option>; + fn disr_incr<'a, 'tcx>( + &self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + val: Option>, + ) -> Option>; fn initial_discriminant<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Discr<'tcx>; } impl IntTypeExt for attr::IntType { fn to_ty<'a, 'gcx, 'tcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> { match *self { - SignedInt(ast::IntTy::I8) => tcx.types.i8, - SignedInt(ast::IntTy::I16) => tcx.types.i16, - SignedInt(ast::IntTy::I32) => tcx.types.i32, - SignedInt(ast::IntTy::I64) => tcx.types.i64, - SignedInt(ast::IntTy::I128) => tcx.types.i128, - SignedInt(ast::IntTy::Isize) => tcx.types.isize, - UnsignedInt(ast::UintTy::U8) => tcx.types.u8, - UnsignedInt(ast::UintTy::U16) => tcx.types.u16, - UnsignedInt(ast::UintTy::U32) => tcx.types.u32, - UnsignedInt(ast::UintTy::U64) => tcx.types.u64, - UnsignedInt(ast::UintTy::U128) => tcx.types.u128, + SignedInt(ast::IntTy::I8) => tcx.types.i8, + SignedInt(ast::IntTy::I16) => tcx.types.i16, + SignedInt(ast::IntTy::I32) => tcx.types.i32, + SignedInt(ast::IntTy::I64) => tcx.types.i64, + SignedInt(ast::IntTy::I128) => tcx.types.i128, + SignedInt(ast::IntTy::Isize) => tcx.types.isize, + UnsignedInt(ast::UintTy::U8) => tcx.types.u8, + UnsignedInt(ast::UintTy::U16) => tcx.types.u16, + UnsignedInt(ast::UintTy::U32) => tcx.types.u32, + UnsignedInt(ast::UintTy::U64) => tcx.types.u64, + UnsignedInt(ast::UintTy::U128) => tcx.types.u128, UnsignedInt(ast::UintTy::Usize) => tcx.types.usize, } } @@ -127,7 +131,7 @@ impl IntTypeExt for attr::IntType { fn initial_discriminant<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Discr<'tcx> { Discr { val: 0, - ty: self.to_ty(tcx) + ty: self.to_ty(tcx), } } @@ -150,7 +154,6 @@ impl IntTypeExt for attr::IntType { } } - #[derive(Clone)] pub enum CopyImplementationError<'tcx> { InfrigingFields(Vec<&'tcx ty::FieldDef>), @@ -174,18 +177,24 @@ pub enum Representability { } impl<'tcx> ty::ParamEnv<'tcx> { - pub fn can_type_implement_copy<'a>(self, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - self_type: Ty<'tcx>) - -> Result<(), CopyImplementationError<'tcx>> { + pub fn can_type_implement_copy<'a>( + self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + self_type: Ty<'tcx>, + ) -> Result<(), CopyImplementationError<'tcx>> { // FIXME: (@jroesch) float this code up tcx.infer_ctxt().enter(|infcx| { let (adt, substs) = match self_type.sty { // These types used to have a builtin impl. // Now libcore provides that impl. - ty::Uint(_) | ty::Int(_) | ty::Bool | ty::Float(_) | - ty::Char | ty::RawPtr(..) | ty::Never | - ty::Ref(_, _, hir::MutImmutable) => return Ok(()), + ty::Uint(_) + | ty::Int(_) + | ty::Bool + | ty::Float(_) + | ty::Char + | ty::RawPtr(..) 
+ | ty::Never + | ty::Ref(_, _, hir::MutImmutable) => return Ok(()), ty::Adt(adt, substs) => (adt, substs), @@ -200,11 +209,16 @@ impl<'tcx> ty::ParamEnv<'tcx> { continue; } let span = tcx.def_span(field.did); - let cause = ObligationCause { span, ..ObligationCause::dummy() }; + let cause = ObligationCause { + span, + ..ObligationCause::dummy() + }; let ctx = traits::FulfillmentContext::new(); match traits::fully_normalize(&infcx, ctx, cause, self, &ty) { - Ok(ty) => if !infcx.type_is_copy_modulo_regions(self, ty, span) { - infringing.push(field); + Ok(ty) => { + if !infcx.type_is_copy_modulo_regions(self, ty, span) { + infringing.push(field); + } } Err(errors) => { infcx.report_fulfillment_errors(&errors, None, false); @@ -295,31 +309,28 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// structure definitions. /// For `(Foo>, Foo)`, the result will be `(Foo, Trait)`, /// whereas struct_tail produces `T`, and `Trait`, respectively. - pub fn struct_lockstep_tails(self, - source: Ty<'tcx>, - target: Ty<'tcx>) - -> (Ty<'tcx>, Ty<'tcx>) { + pub fn struct_lockstep_tails(self, source: Ty<'tcx>, target: Ty<'tcx>) -> (Ty<'tcx>, Ty<'tcx>) { let (mut a, mut b) = (source, target); loop { match (&a.sty, &b.sty) { (&Adt(a_def, a_substs), &Adt(b_def, b_substs)) - if a_def == b_def && a_def.is_struct() => { + if a_def == b_def && a_def.is_struct() => + { if let Some(f) = a_def.non_enum_variant().fields.last() { a = f.ty(self, a_substs); b = f.ty(self, b_substs); } else { break; } - }, - (&Tuple(a_tys), &Tuple(b_tys)) - if a_tys.len() == b_tys.len() => { + } + (&Tuple(a_tys), &Tuple(b_tys)) if a_tys.len() == b_tys.len() => { if let Some(a_last) = a_tys.last() { a = a_last; b = b_tys.last().unwrap(); } else { break; } - }, + } _ => break, } } @@ -345,29 +356,29 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// /// FIXME callers may only have a &[Predicate], not a Vec, so that's /// what this code should accept. - pub fn required_region_bounds(self, - erased_self_ty: Ty<'tcx>, - predicates: Vec>) - -> Vec> { - debug!("required_region_bounds(erased_self_ty={:?}, predicates={:?})", - erased_self_ty, - predicates); + pub fn required_region_bounds( + self, + erased_self_ty: Ty<'tcx>, + predicates: Vec>, + ) -> Vec> { + debug!( + "required_region_bounds(erased_self_ty={:?}, predicates={:?})", + erased_self_ty, predicates + ); assert!(!erased_self_ty.has_escaping_bound_vars()); traits::elaborate_predicates(self, predicates) .filter_map(|predicate| { match predicate { - ty::Predicate::Projection(..) | - ty::Predicate::Trait(..) | - ty::Predicate::Subtype(..) | - ty::Predicate::WellFormed(..) | - ty::Predicate::ObjectSafe(..) | - ty::Predicate::ClosureKind(..) | - ty::Predicate::RegionOutlives(..) | - ty::Predicate::ConstEvaluatable(..) => { - None - } + ty::Predicate::Projection(..) + | ty::Predicate::Trait(..) + | ty::Predicate::Subtype(..) + | ty::Predicate::WellFormed(..) + | ty::Predicate::ObjectSafe(..) + | ty::Predicate::ClosureKind(..) + | ty::Predicate::RegionOutlives(..) + | ty::Predicate::ConstEvaluatable(..) 
=> None, ty::Predicate::TypeOutlives(predicate) => { // Search for a bound of the form `erased_self_ty // : 'a`, but be wary of something like `for<'a> @@ -394,7 +405,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn calculate_dtor( self, adt_did: DefId, - validate: &mut dyn FnMut(Self, DefId) -> Result<(), ErrorReported> + validate: &mut dyn FnMut(Self, DefId) -> Result<(), ErrorReported>, ) -> Option { let drop_trait = if let Some(def_id) = self.lang_items().drop_trait() { def_id @@ -424,15 +435,13 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Note that this returns only the constraints for the /// destructor of `def` itself. For the destructors of the /// contents, you need `adt_dtorck_constraint`. - pub fn destructor_constraints(self, def: &'tcx ty::AdtDef) - -> Vec> - { + pub fn destructor_constraints(self, def: &'tcx ty::AdtDef) -> Vec> { let dtor = match def.destructor(self) { None => { debug!("destructor_constraints({:?}) - no dtor", def.did); - return vec![] + return vec![]; } - Some(dtor) => dtor.did + Some(dtor) => dtor.did, }; // RFC 1238: if the destructor method is tagged with the @@ -475,25 +484,26 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { let impl_substs = match self.type_of(impl_def_id).sty { ty::Adt(def_, substs) if def_ == def => substs, - _ => bug!() + _ => bug!(), }; let item_substs = match self.type_of(def.did).sty { ty::Adt(def_, substs) if def_ == def => substs, - _ => bug!() + _ => bug!(), }; - let result = item_substs.iter().zip(impl_substs.iter()) + let result = item_substs + .iter() + .zip(impl_substs.iter()) .filter(|&(_, &k)| { match k.unpack() { UnpackedKind::Lifetime(&ty::RegionKind::ReEarlyBound(ref ebr)) => { !impl_generics.region_param(ebr, self).pure_wrt_drop } UnpackedKind::Type(&ty::TyS { - sty: ty::Param(ref pt), .. - }) => { - !impl_generics.type_param(pt, self).pure_wrt_drop - } + sty: ty::Param(ref pt), + .. + }) => !impl_generics.type_param(pt, self).pure_wrt_drop, UnpackedKind::Lifetime(_) | UnpackedKind::Type(_) => { // not a type or region param - this should be reported // as an error. @@ -559,11 +569,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// /// Note that the return value is a late-bound region and hence /// wrapped in a binder. - pub fn closure_env_ty(self, - closure_def_id: DefId, - closure_substs: ty::ClosureSubsts<'tcx>) - -> Option>> - { + pub fn closure_env_ty( + self, + closure_def_id: DefId, + closure_substs: ty::ClosureSubsts<'tcx>, + ) -> Option>> { let closure_ty = self.mk_closure(closure_def_id, closure_substs); let env_region = ty::ReLateBound(ty::INNERMOST, ty::BrEnv); let closure_kind_ty = closure_substs.closure_kind_ty(closure_def_id, self); @@ -579,13 +589,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Given the def-id of some item that has no type parameters, make /// a suitable "empty substs" for it. pub fn empty_substs_for_def_id(self, item_def_id: DefId) -> &'tcx Substs<'tcx> { - Substs::for_item(self, item_def_id, |param, _| { - match param.kind { - GenericParamDefKind::Lifetime => self.types.re_erased.into(), - GenericParamDefKind::Type {..} => { - bug!("empty_substs_for_def_id: {:?} has type parameters", item_def_id) - } - } + Substs::for_item(self, item_def_id, |param, _| match param.kind { + GenericParamDefKind::Lifetime => self.types.re_erased.into(), + GenericParamDefKind::Type { .. 
} => bug!( + "empty_substs_for_def_id: {:?} has type parameters", + item_def_id + ), }) } @@ -594,27 +603,27 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { if let Some(node) = self.hir().get_if_local(def_id) { match node { Node::Item(&hir::Item { - node: hir::ItemKind::Static(_, mutbl, _), .. + node: hir::ItemKind::Static(_, mutbl, _), + .. }) => Some(mutbl), Node::ForeignItem(&hir::ForeignItem { - node: hir::ForeignItemKind::Static(_, is_mutbl), .. - }) => - Some(if is_mutbl { - hir::Mutability::MutMutable - } else { - hir::Mutability::MutImmutable - }), - _ => None + node: hir::ForeignItemKind::Static(_, is_mutbl), + .. + }) => Some(if is_mutbl { + hir::Mutability::MutMutable + } else { + hir::Mutability::MutImmutable + }), + _ => None, } } else { match self.describe_def(def_id) { - Some(Def::Static(_, is_mutbl)) => - Some(if is_mutbl { - hir::Mutability::MutMutable - } else { - hir::Mutability::MutImmutable - }), - _ => None + Some(Def::Static(_, is_mutbl)) => Some(if is_mutbl { + hir::Mutability::MutMutable + } else { + hir::Mutability::MutImmutable + }), + _ => None, } } } @@ -628,11 +637,12 @@ impl<'a, 'tcx> ty::TyS<'tcx> { /// does copies even when the type actually doesn't satisfy the /// full requirements for the `Copy` trait (cc #29149) -- this /// winds up being reported as an error during NLL borrow check. - pub fn is_copy_modulo_regions(&'tcx self, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - span: Span) - -> bool { + pub fn is_copy_modulo_regions( + &'tcx self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + span: Span, + ) -> bool { tcx.at(span).is_copy_raw(param_env.and(self)) } @@ -642,10 +652,11 @@ impl<'a, 'tcx> ty::TyS<'tcx> { /// over-approximation in generic contexts, where one can have /// strange rules like `>::Bar: Sized` that /// actually carry lifetime requirements. - pub fn is_sized(&'tcx self, - tcx_at: TyCtxtAt<'a, 'tcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>)-> bool - { + pub fn is_sized( + &'tcx self, + tcx_at: TyCtxtAt<'a, 'tcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + ) -> bool { tcx_at.is_sized_raw(param_env.and(self)) } @@ -656,11 +667,12 @@ impl<'a, 'tcx> ty::TyS<'tcx> { /// optimization as well as the rules around static values. Note /// that the `Freeze` trait is not exposed to end users and is /// effectively an implementation detail. - pub fn is_freeze(&'tcx self, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - span: Span)-> bool - { + pub fn is_freeze( + &'tcx self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + span: Span, + ) -> bool { tcx.at(span).is_freeze_raw(param_env.and(self)) } @@ -671,10 +683,11 @@ impl<'a, 'tcx> ty::TyS<'tcx> { /// (Note that this implies that if `ty` has a destructor attached, /// then `needs_drop` will definitely return `true` for `ty`.) #[inline] - pub fn needs_drop(&'tcx self, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>) - -> bool { + pub fn needs_drop( + &'tcx self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + ) -> bool { tcx.needs_drop_raw(param_env.and(self)) } @@ -685,7 +698,10 @@ impl<'a, 'tcx> ty::TyS<'tcx> { return false; } - substs_a.types().zip(substs_b.types()).all(|(a, b)| Self::same_type(a, b)) + substs_a + .types() + .zip(substs_b.types()) + .all(|(a, b)| Self::same_type(a, b)) } _ => a == b, } @@ -693,31 +709,24 @@ impl<'a, 'tcx> ty::TyS<'tcx> { /// Check whether a type is representable. This means it cannot contain unboxed /// structural recursion. 
This check is needed for structs and enums. - pub fn is_representable(&'tcx self, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - sp: Span) - -> Representability - { + pub fn is_representable(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span) -> Representability { // Iterate until something non-representable is found - fn fold_repr>(iter: It) -> Representability { - iter.fold(Representability::Representable, |r1, r2| { - match (r1, r2) { - (Representability::SelfRecursive(v1), - Representability::SelfRecursive(v2)) => { - Representability::SelfRecursive(v1.into_iter().chain(v2).collect()) - } - (r1, r2) => cmp::max(r1, r2) + fn fold_repr>(iter: It) -> Representability { + iter.fold(Representability::Representable, |r1, r2| match (r1, r2) { + (Representability::SelfRecursive(v1), Representability::SelfRecursive(v2)) => { + Representability::SelfRecursive(v1.into_iter().chain(v2).collect()) } + (r1, r2) => cmp::max(r1, r2), }) } fn are_inner_types_recursive<'a, 'tcx>( - tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + sp: Span, seen: &mut Vec>, representable_cache: &mut FxHashMap, Representability>, - ty: Ty<'tcx>) - -> Representability - { + ty: Ty<'tcx>, + ) -> Representability { match ty.sty { Tuple(ref ts) => { // Find non representable @@ -735,9 +744,13 @@ impl<'a, 'tcx> ty::TyS<'tcx> { fold_repr(def.all_fields().map(|field| { let ty = field.ty(tcx, substs); let span = tcx.hir().span_if_local(field.did).unwrap_or(sp); - match is_type_structurally_recursive(tcx, span, seen, - representable_cache, ty) - { + match is_type_structurally_recursive( + tcx, + span, + seen, + representable_cache, + ty, + ) { Representability::SelfRecursive(_) => { Representability::SelfRecursive(vec![span]) } @@ -756,10 +769,8 @@ impl<'a, 'tcx> ty::TyS<'tcx> { fn same_struct_or_enum<'tcx>(ty: Ty<'tcx>, def: &'tcx ty::AdtDef) -> bool { match ty.sty { - Adt(ty_def, _) => { - ty_def == def - } - _ => false + Adt(ty_def, _) => ty_def == def, + _ => false, } } @@ -770,17 +781,19 @@ impl<'a, 'tcx> ty::TyS<'tcx> { sp: Span, seen: &mut Vec>, representable_cache: &mut FxHashMap, Representability>, - ty: Ty<'tcx>) -> Representability - { + ty: Ty<'tcx>, + ) -> Representability { debug!("is_type_structurally_recursive: {:?} {:?}", ty, sp); if let Some(representability) = representable_cache.get(ty) { - debug!("is_type_structurally_recursive: {:?} {:?} - (cached) {:?}", - ty, sp, representability); + debug!( + "is_type_structurally_recursive: {:?} {:?} - (cached) {:?}", + ty, sp, representability + ); return representability.clone(); } - let representability = is_type_structurally_recursive_inner( - tcx, sp, seen, representable_cache, ty); + let representability = + is_type_structurally_recursive_inner(tcx, sp, seen, representable_cache, ty); representable_cache.insert(ty, representability.clone()); representability @@ -791,8 +804,8 @@ impl<'a, 'tcx> ty::TyS<'tcx> { sp: Span, seen: &mut Vec>, representable_cache: &mut FxHashMap, Representability>, - ty: Ty<'tcx>) -> Representability - { + ty: Ty<'tcx>, + ) -> Representability { match ty.sty { Adt(def, _) => { { @@ -809,9 +822,7 @@ impl<'a, 'tcx> ty::TyS<'tcx> { if let Some(&seen_type) = iter.next() { if same_struct_or_enum(seen_type, def) { - debug!("SelfRecursive: {:?} contains {:?}", - seen_type, - ty); + debug!("SelfRecursive: {:?} contains {:?}", seen_type, ty); return Representability::SelfRecursive(vec![sp]); } } @@ -828,9 +839,7 @@ impl<'a, 'tcx> ty::TyS<'tcx> { for &seen_type in iter { if ty::TyS::same_type(ty, seen_type) { - debug!("ContainsRecursive: {:?} 
contains {:?}", - seen_type, - ty); + debug!("ContainsRecursive: {:?} contains {:?}", seen_type, ty); return Representability::ContainsRecursive; } } @@ -857,89 +866,102 @@ impl<'a, 'tcx> ty::TyS<'tcx> { // of seen types and check recursion for each of them (issues #3008, #3779). let mut seen: Vec> = Vec::new(); let mut representable_cache = FxHashMap::default(); - let r = is_type_structurally_recursive( - tcx, sp, &mut seen, &mut representable_cache, self); + let r = is_type_structurally_recursive(tcx, sp, &mut seen, &mut representable_cache, self); debug!("is_type_representable: {:?} is {:?}", self, r); r } } -fn is_copy_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) - -> bool -{ +fn is_copy_raw<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>, +) -> bool { let (param_env, ty) = query.into_parts(); let trait_def_id = tcx.require_lang_item(lang_items::CopyTraitLangItem); - tcx.infer_ctxt() - .enter(|infcx| traits::type_known_to_meet_bound_modulo_regions( + tcx.infer_ctxt().enter(|infcx| { + traits::type_known_to_meet_bound_modulo_regions( &infcx, param_env, ty, trait_def_id, DUMMY_SP, - )) + ) + }) } -fn is_sized_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) - -> bool -{ +fn is_sized_raw<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>, +) -> bool { let (param_env, ty) = query.into_parts(); let trait_def_id = tcx.require_lang_item(lang_items::SizedTraitLangItem); - tcx.infer_ctxt() - .enter(|infcx| traits::type_known_to_meet_bound_modulo_regions( + tcx.infer_ctxt().enter(|infcx| { + traits::type_known_to_meet_bound_modulo_regions( &infcx, param_env, ty, trait_def_id, DUMMY_SP, - )) + ) + }) } -fn is_freeze_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) - -> bool -{ +fn is_freeze_raw<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>, +) -> bool { let (param_env, ty) = query.into_parts(); let trait_def_id = tcx.require_lang_item(lang_items::FreezeTraitLangItem); - tcx.infer_ctxt() - .enter(|infcx| traits::type_known_to_meet_bound_modulo_regions( + tcx.infer_ctxt().enter(|infcx| { + traits::type_known_to_meet_bound_modulo_regions( &infcx, param_env, ty, trait_def_id, DUMMY_SP, - )) + ) + }) } -fn needs_drop_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) - -> bool -{ +fn needs_drop_raw<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>, +) -> bool { let (param_env, ty) = query.into_parts(); let needs_drop = |ty: Ty<'tcx>| -> bool { - tcx.try_needs_drop_raw(DUMMY_SP, param_env.and(ty)).unwrap_or_else(|mut bug| { - // Cycles should be reported as an error by `check_representable`. - // - // Consider the type as not needing drop in the meanwhile to - // avoid further errors. - // - // In case we forgot to emit a bug elsewhere, delay our - // diagnostic to get emitted as a compiler bug. - bug.delay_as_bug(); - false - }) + tcx.try_needs_drop_raw(DUMMY_SP, param_env.and(ty)) + .unwrap_or_else(|mut bug| { + // Cycles should be reported as an error by `check_representable`. + // + // Consider the type as not needing drop in the meanwhile to + // avoid further errors. + // + // In case we forgot to emit a bug elsewhere, delay our + // diagnostic to get emitted as a compiler bug. 
+ bug.delay_as_bug(); + false + }) }; assert!(!ty.needs_infer()); match ty.sty { // Fast-path for primitive types - ty::Infer(ty::FreshIntTy(_)) | ty::Infer(ty::FreshFloatTy(_)) | - ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Never | - ty::FnDef(..) | ty::FnPtr(_) | ty::Char | ty::GeneratorWitness(..) | - ty::RawPtr(_) | ty::Ref(..) | ty::Str => false, + ty::Infer(ty::FreshIntTy(_)) + | ty::Infer(ty::FreshFloatTy(_)) + | ty::Bool + | ty::Int(_) + | ty::Uint(_) + | ty::Float(_) + | ty::Never + | ty::FnDef(..) + | ty::FnPtr(_) + | ty::Char + | ty::GeneratorWitness(..) + | ty::RawPtr(_) + | ty::Ref(..) + | ty::Str => false, // Foreign types can never have destructors ty::Foreign(..) => false, @@ -967,8 +989,14 @@ fn needs_drop_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // Can refer to a type which may drop. // FIXME(eddyb) check this against a ParamEnv. - ty::Dynamic(..) | ty::Projection(..) | ty::Param(_) | ty::Bound(..) | - ty::Placeholder(..) | ty::Opaque(..) | ty::Infer(_) | ty::Error => true, + ty::Dynamic(..) + | ty::Projection(..) + | ty::Param(_) + | ty::Bound(..) + | ty::Placeholder(..) + | ty::Opaque(..) + | ty::Infer(_) + | ty::Error => true, ty::UnnormalizedProjection(..) => bug!("only used with chalk-engine"), @@ -988,10 +1016,12 @@ fn needs_drop_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // only if they manually implement `Drop` (handled above). ty::Adt(def, _) if def.is_union() => false, - ty::Adt(def, substs) => - def.variants.iter().any( - |variant| variant.fields.iter().any( - |field| needs_drop(field.ty(tcx, substs)))), + ty::Adt(def, substs) => def.variants.iter().any(|variant| { + variant + .fields + .iter() + .any(|field| needs_drop(field.ty(tcx, substs))) + }), } } @@ -1000,7 +1030,7 @@ pub enum ExplicitSelf<'tcx> { ByReference(ty::Region<'tcx>, hir::Mutability), ByRawPointer(hir::Mutability), ByBox, - Other + Other, } impl<'tcx> ExplicitSelf<'tcx> { @@ -1028,27 +1058,18 @@ impl<'tcx> ExplicitSelf<'tcx> { /// } /// ``` /// - pub fn determine

<P>( - self_arg_ty: Ty<'tcx>, - is_self_ty: P - ) -> ExplicitSelf<'tcx> + pub fn determine<P>
(self_arg_ty: Ty<'tcx>, is_self_ty: P) -> ExplicitSelf<'tcx> where - P: Fn(Ty<'tcx>) -> bool + P: Fn(Ty<'tcx>) -> bool, { use self::ExplicitSelf::*; match self_arg_ty.sty { _ if is_self_ty(self_arg_ty) => ByValue, - ty::Ref(region, ty, mutbl) if is_self_ty(ty) => { - ByReference(region, mutbl) - } - ty::RawPtr(ty::TypeAndMut { ty, mutbl }) if is_self_ty(ty) => { - ByRawPointer(mutbl) - } - ty::Adt(def, _) if def.is_box() && is_self_ty(self_arg_ty.boxed_ty()) => { - ByBox - } - _ => Other + ty::Ref(region, ty, mutbl) if is_self_ty(ty) => ByReference(region, mutbl), + ty::RawPtr(ty::TypeAndMut { ty, mutbl }) if is_self_ty(ty) => ByRawPointer(mutbl), + ty::Adt(def, _) if def.is_box() && is_self_ty(self_arg_ty.boxed_ty()) => ByBox, + _ => Other, } } } diff --git a/src/librustc/ty/walk.rs b/src/librustc/ty/walk.rs index a2cc859e4a82d..cb10bc0c342aa 100644 --- a/src/librustc/ty/walk.rs +++ b/src/librustc/ty/walk.rs @@ -2,8 +2,8 @@ //! WARNING: this does not keep track of the region depth. use mir::interpret::ConstValue; -use ty::{self, Ty}; use smallvec::{self, SmallVec}; +use ty::{self, Ty}; // The TypeWalker's stack is hot enough that it's worth going to some effort to // avoid heap allocations. @@ -17,7 +17,10 @@ pub struct TypeWalker<'tcx> { impl<'tcx> TypeWalker<'tcx> { pub fn new(ty: Ty<'tcx>) -> TypeWalker<'tcx> { - TypeWalker { stack: smallvec![ty], last_subtree: 1, } + TypeWalker { + stack: smallvec![ty], + last_subtree: 1, + } } /// Skips the subtree of types corresponding to the last type @@ -43,9 +46,7 @@ impl<'tcx> Iterator for TypeWalker<'tcx> { fn next(&mut self) -> Option> { debug!("next(): stack={:?}", self.stack); match self.stack.pop() { - None => { - None - } + None => None, Some(ty) => { self.last_subtree = self.stack.len(); push_subtypes(&mut self.stack, ty); @@ -70,10 +71,19 @@ pub fn walk_shallow<'tcx>(ty: Ty<'tcx>) -> smallvec::IntoIter(stack: &mut TypeWalkerStack<'tcx>, parent_ty: Ty<'tcx>) { match parent_ty.sty { - ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | - ty::Str | ty::Infer(_) | ty::Param(_) | ty::Never | ty::Error | - ty::Placeholder(..) | ty::Bound(..) | ty::Foreign(..) => { - } + ty::Bool + | ty::Char + | ty::Int(_) + | ty::Uint(_) + | ty::Float(_) + | ty::Str + | ty::Infer(_) + | ty::Param(_) + | ty::Never + | ty::Error + | ty::Placeholder(..) + | ty::Bound(..) + | ty::Foreign(..) 
=> {} ty::Array(ty, len) => { push_const(stack, len); stack.push(ty); @@ -94,11 +104,12 @@ fn push_subtypes<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent_ty: Ty<'tcx>) { stack.extend(obj.iter().rev().flat_map(|predicate| { let (substs, opt_ty) = match *predicate.skip_binder() { ty::ExistentialPredicate::Trait(tr) => (tr.substs, None), - ty::ExistentialPredicate::Projection(p) => - (p.substs, Some(p.ty)), + ty::ExistentialPredicate::Projection(p) => (p.substs, Some(p.ty)), ty::ExistentialPredicate::AutoTrait(_) => - // Empty iterator - (ty::Substs::empty(), None), + // Empty iterator + { + (ty::Substs::empty(), None) + } }; substs.types().rev().chain(opt_ty) diff --git a/src/librustc/ty/wf.rs b/src/librustc/ty/wf.rs index aacc63c47de61..4455874974bc1 100644 --- a/src/librustc/ty/wf.rs +++ b/src/librustc/ty/wf.rs @@ -1,13 +1,13 @@ use hir::def_id::DefId; -use mir::interpret::ConstValue; use infer::InferCtxt; -use ty::subst::Substs; -use traits; -use ty::{self, ToPredicate, Ty, TyCtxt, TypeFoldable}; +use middle::lang_items; +use mir::interpret::ConstValue; use std::iter::once; use syntax::ast; use syntax_pos::Span; -use middle::lang_items; +use traits; +use ty::subst::Substs; +use ty::{self, ToPredicate, Ty, TyCtxt, TypeFoldable}; /// Returns the set of obligations needed to make `ty` well-formed. /// If `ty` contains unresolved inference variables, this may include @@ -15,22 +15,30 @@ use middle::lang_items; /// inference variable, returns `None`, because we are not able to /// make any progress at all. This is to prevent "livelock" where we /// say "$0 is WF if $0 is WF". -pub fn obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - body_id: ast::NodeId, - ty: Ty<'tcx>, - span: Span) - -> Option>> -{ - let mut wf = WfPredicates { infcx, - param_env, - body_id, - span, - out: vec![] }; +pub fn obligations<'a, 'gcx, 'tcx>( + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + body_id: ast::NodeId, + ty: Ty<'tcx>, + span: Span, +) -> Option>> { + let mut wf = WfPredicates { + infcx, + param_env, + body_id, + span, + out: vec![], + }; if wf.compute(ty) { - debug!("wf::obligations({:?}, body_id={:?}) = {:?}", ty, body_id, wf.out); + debug!( + "wf::obligations({:?}, body_id={:?}) = {:?}", + ty, body_id, wf.out + ); let result = wf.normalize(); - debug!("wf::obligations({:?}, body_id={:?}) ~~> {:?}", ty, body_id, result); + debug!( + "wf::obligations({:?}, body_id={:?}) ~~> {:?}", + ty, body_id, result + ); Some(result) } else { None // no progress made, return None @@ -41,34 +49,45 @@ pub fn obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, /// well-formed. For example, if there is a trait `Set` defined like /// `trait Set`, then the trait reference `Foo: Set` is WF /// if `Bar: Eq`. 
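The `obligations`/`trait_obligations` functions being reflowed below collect the conditions a type needs in order to be well-formed, and deliberately return `None` when the type is itself an unresolved inference variable so the caller cannot loop on "$0 is WF if $0 is WF". A toy illustration of that contract, with invented `Ty` and `Obligation` types rather than the real predicates:

```rust
// Toy sketch of the WF-obligation contract: walk a type, collect
// requirements, but bail out with `None` when the root is unresolved.
#[derive(Debug)]
enum Ty {
    Int,
    Slice(Box<Ty>),
    Ref(Box<Ty>),
    Infer(u32), // unresolved inference variable
}

#[derive(Debug, PartialEq)]
enum Obligation {
    Sized(String),
    WellFormed(String),
}

fn wf_obligations(ty: &Ty) -> Option<Vec<Obligation>> {
    // The root being an unresolved variable means no progress can be made.
    if let Ty::Infer(_) = ty {
        return None;
    }
    let mut out = Vec::new();
    collect(ty, &mut out);
    Some(out)
}

fn collect(ty: &Ty, out: &mut Vec<Obligation>) {
    match ty {
        Ty::Int => {}
        // A slice is only well-formed if its element type is `Sized`.
        Ty::Slice(elem) => {
            out.push(Obligation::Sized(format!("{:?}", elem)));
            collect(elem, out);
        }
        Ty::Ref(pointee) => collect(pointee, out),
        // Nested inference variables become deferred WF obligations.
        Ty::Infer(v) => out.push(Obligation::WellFormed(format!("${}", v))),
    }
}

fn main() {
    assert_eq!(wf_obligations(&Ty::Infer(0)), None);
    assert_eq!(wf_obligations(&Ty::Ref(Box::new(Ty::Int))), Some(vec![]));
    let obligations = wf_obligations(&Ty::Slice(Box::new(Ty::Infer(1)))).unwrap();
    assert_eq!(obligations.len(), 2); // element `Sized` + deferred `WellFormed($1)`
}
```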
-pub fn trait_obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - body_id: ast::NodeId, - trait_ref: &ty::TraitRef<'tcx>, - span: Span) - -> Vec> -{ - let mut wf = WfPredicates { infcx, param_env, body_id, span, out: vec![] }; +pub fn trait_obligations<'a, 'gcx, 'tcx>( + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + body_id: ast::NodeId, + trait_ref: &ty::TraitRef<'tcx>, + span: Span, +) -> Vec> { + let mut wf = WfPredicates { + infcx, + param_env, + body_id, + span, + out: vec![], + }; wf.compute_trait_ref(trait_ref, Elaborate::All); wf.normalize() } -pub fn predicate_obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - body_id: ast::NodeId, - predicate: &ty::Predicate<'tcx>, - span: Span) - -> Vec> -{ - let mut wf = WfPredicates { infcx, param_env, body_id, span, out: vec![] }; +pub fn predicate_obligations<'a, 'gcx, 'tcx>( + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + body_id: ast::NodeId, + predicate: &ty::Predicate<'tcx>, + span: Span, +) -> Vec> { + let mut wf = WfPredicates { + infcx, + param_env, + body_id, + span, + out: vec![], + }; // (*) ok to skip binders, because wf code is prepared for it match *predicate { ty::Predicate::Trait(ref t) => { wf.compute_trait_ref(&t.skip_binder().trait_ref, Elaborate::None); // (*) } - ty::Predicate::RegionOutlives(..) => { - } + ty::Predicate::RegionOutlives(..) => {} ty::Predicate::TypeOutlives(ref t) => { wf.compute(t.skip_binder().0); } @@ -80,10 +99,8 @@ pub fn predicate_obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, ty::Predicate::WellFormed(t) => { wf.compute(t); } - ty::Predicate::ObjectSafe(_) => { - } - ty::Predicate::ClosureKind(..) => { - } + ty::Predicate::ObjectSafe(_) => {} + ty::Predicate::ClosureKind(..) 
=> {} ty::Predicate::Subtype(ref data) => { wf.compute(data.skip_binder().a); // (*) wf.compute(data.skip_binder().b); // (*) @@ -101,7 +118,7 @@ pub fn predicate_obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, wf.normalize() } -struct WfPredicates<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +struct WfPredicates<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, param_env: ty::ParamEnv<'tcx>, body_id: ast::NodeId, @@ -147,14 +164,15 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { let cause = self.cause(traits::MiscObligation); let infcx = &mut self.infcx; let param_env = self.param_env; - self.out.iter() - .inspect(|pred| assert!(!pred.has_escaping_bound_vars())) - .flat_map(|pred| { - let mut selcx = traits::SelectionContext::new(infcx); - let pred = traits::normalize(&mut selcx, param_env, cause.clone(), pred); - once(pred.value).chain(pred.obligations) - }) - .collect() + self.out + .iter() + .inspect(|pred| assert!(!pred.has_escaping_bound_vars())) + .flat_map(|pred| { + let mut selcx = traits::SelectionContext::new(infcx); + let pred = traits::normalize(&mut selcx, param_env, cause.clone(), pred); + once(pred.value).chain(pred.obligations) + }) + .collect() } /// Pushes the obligations required for `trait_ref` to be WF into @@ -166,24 +184,27 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { let param_env = self.param_env; if let Elaborate::All = elaborate { - let predicates = obligations.iter() - .map(|obligation| obligation.predicate.clone()) - .collect(); + let predicates = obligations + .iter() + .map(|obligation| obligation.predicate.clone()) + .collect(); let implied_obligations = traits::elaborate_predicates(self.infcx.tcx, predicates); - let implied_obligations = implied_obligations.map(|pred| { - traits::Obligation::new(cause.clone(), param_env, pred) - }); + let implied_obligations = implied_obligations + .map(|pred| traits::Obligation::new(cause.clone(), param_env, pred)); self.out.extend(implied_obligations); } self.out.extend(obligations); self.out.extend( - trait_ref.substs.types() - .filter(|ty| !ty.has_escaping_bound_vars()) - .map(|ty| traits::Obligation::new(cause.clone(), - param_env, - ty::Predicate::WellFormed(ty)))); + trait_ref + .substs + .types() + .filter(|ty| !ty.has_escaping_bound_vars()) + .map(|ty| { + traits::Obligation::new(cause.clone(), param_env, ty::Predicate::WellFormed(ty)) + }), + ); } /// Pushes the obligations required for `trait_ref::Item` to be WF @@ -198,7 +219,8 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { if !data.has_escaping_bound_vars() { let predicate = trait_ref.to_predicate(); let cause = self.cause(traits::ProjectionWf(data)); - self.out.push(traits::Obligation::new(cause, self.param_env, predicate)); + self.out + .push(traits::Obligation::new(cause, self.param_env, predicate)); } } @@ -212,9 +234,8 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { let predicate = ty::Predicate::ConstEvaluatable(def_id, substs); let cause = self.cause(traits::MiscObligation); - self.out.push(traits::Obligation::new(cause, - self.param_env, - predicate)); + self.out + .push(traits::Obligation::new(cause, self.param_env, predicate)); } } @@ -222,10 +243,17 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { if !subty.has_escaping_bound_vars() { let cause = self.cause(cause); let trait_ref = ty::TraitRef { - def_id: self.infcx.tcx.require_lang_item(lang_items::SizedTraitLangItem), + def_id: self + .infcx + .tcx + .require_lang_item(lang_items::SizedTraitLangItem), substs: 
self.infcx.tcx.mk_substs_trait(subty, &[]), }; - self.out.push(traits::Obligation::new(cause, self.param_env, trait_ref.to_predicate())); + self.out.push(traits::Obligation::new( + cause, + self.param_env, + trait_ref.to_predicate(), + )); } } @@ -238,19 +266,19 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { let param_env = self.param_env; while let Some(ty) = subtys.next() { match ty.sty { - ty::Bool | - ty::Char | - ty::Int(..) | - ty::Uint(..) | - ty::Float(..) | - ty::Error | - ty::Str | - ty::GeneratorWitness(..) | - ty::Never | - ty::Param(_) | - ty::Bound(..) | - ty::Placeholder(..) | - ty::Foreign(..) => { + ty::Bool + | ty::Char + | ty::Int(..) + | ty::Uint(..) + | ty::Float(..) + | ty::Error + | ty::Str + | ty::GeneratorWitness(..) + | ty::Never + | ty::Param(_) + | ty::Bound(..) + | ty::Placeholder(..) + | ty::Foreign(..) => { // WfScalar, WfParameter, etc } @@ -298,13 +326,13 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { // WfReference if !r.has_escaping_bound_vars() && !rty.has_escaping_bound_vars() { let cause = self.cause(traits::ReferenceOutlivesReferent(ty)); - self.out.push( - traits::Obligation::new( - cause, - param_env, - ty::Predicate::TypeOutlives( - ty::Binder::dummy( - ty::OutlivesPredicate(rty, r))))); + self.out.push(traits::Obligation::new( + cause, + param_env, + ty::Predicate::TypeOutlives(ty::Binder::dummy(ty::OutlivesPredicate( + rty, r, + ))), + )); } } @@ -385,13 +413,13 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { let cause = self.cause(traits::MiscObligation); let component_traits = data.auto_traits().chain(once(data.principal().def_id())); - self.out.extend( - component_traits.map(|did| traits::Obligation::new( + self.out.extend(component_traits.map(|did| { + traits::Obligation::new( cause.clone(), param_env, - ty::Predicate::ObjectSafe(did) - )) - ); + ty::Predicate::ObjectSafe(did), + ) + })); } // Inference variables are the complicated case, since we don't @@ -409,16 +437,22 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { // is satisfied to ensure termination.) ty::Infer(_) => { let ty = self.infcx.shallow_resolve(ty); - if let ty::Infer(_) = ty.sty { // not yet resolved... - if ty == ty0 { // ...this is the type we started from! no progress. + if let ty::Infer(_) = ty.sty { + // not yet resolved... + if ty == ty0 { + // ...this is the type we started from! no progress. return false; } let cause = self.cause(traits::MiscObligation); - self.out.push( // ...not the type we started from, so we made progress. - traits::Obligation::new(cause, - self.param_env, - ty::Predicate::WellFormed(ty))); + self.out.push( + // ...not the type we started from, so we made progress. + traits::Obligation::new( + cause, + self.param_env, + ty::Predicate::WellFormed(ty), + ), + ); } else { // Yes, resolved, proceed with the // result. 
Should never return false because @@ -433,27 +467,31 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { return true; } - fn nominal_obligations(&mut self, - def_id: DefId, - substs: &Substs<'tcx>) - -> Vec> - { - let predicates = - self.infcx.tcx.predicates_of(def_id) - .instantiate(self.infcx.tcx, substs); + fn nominal_obligations( + &mut self, + def_id: DefId, + substs: &Substs<'tcx>, + ) -> Vec> { + let predicates = self + .infcx + .tcx + .predicates_of(def_id) + .instantiate(self.infcx.tcx, substs); let cause = self.cause(traits::ItemObligation(def_id)); - predicates.predicates - .into_iter() - .map(|pred| traits::Obligation::new(cause.clone(), - self.param_env, - pred)) - .filter(|pred| !pred.has_escaping_bound_vars()) - .collect() + predicates + .predicates + .into_iter() + .map(|pred| traits::Obligation::new(cause.clone(), self.param_env, pred)) + .filter(|pred| !pred.has_escaping_bound_vars()) + .collect() } - fn from_object_ty(&mut self, ty: Ty<'tcx>, - data: ty::Binder<&'tcx ty::List>>, - region: ty::Region<'tcx>) { + fn from_object_ty( + &mut self, + ty: Ty<'tcx>, + data: ty::Binder<&'tcx ty::List>>, + region: ty::Region<'tcx>, + ) { // Imagine a type like this: // // trait Foo { } @@ -487,19 +525,20 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { // am looking forward to the future here. if !data.has_escaping_bound_vars() { - let implicit_bounds = - object_region_bounds(self.infcx.tcx, data); + let implicit_bounds = object_region_bounds(self.infcx.tcx, data); let explicit_bound = region; self.out.reserve(implicit_bounds.len()); for implicit_bound in implicit_bounds { let cause = self.cause(traits::ObjectTypeBound(ty, explicit_bound)); - let outlives = ty::Binder::dummy( - ty::OutlivesPredicate(explicit_bound, implicit_bound)); - self.out.push(traits::Obligation::new(cause, - self.param_env, - outlives.to_predicate())); + let outlives = + ty::Binder::dummy(ty::OutlivesPredicate(explicit_bound, implicit_bound)); + self.out.push(traits::Obligation::new( + cause, + self.param_env, + outlives.to_predicate(), + )); } } } @@ -513,21 +552,23 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { /// `ty::required_region_bounds`, see that for more information. pub fn object_region_bounds<'a, 'gcx, 'tcx>( tcx: TyCtxt<'a, 'gcx, 'tcx>, - existential_predicates: ty::Binder<&'tcx ty::List>>) - -> Vec> -{ + existential_predicates: ty::Binder<&'tcx ty::List>>, +) -> Vec> { // Since we don't actually *know* the self type for an object, // this "open(err)" serves as a kind of dummy standin -- basically // a placeholder type. let open_ty = tcx.mk_infer(ty::FreshTy(0)); - let predicates = existential_predicates.iter().filter_map(|predicate| { - if let ty::ExistentialPredicate::Projection(_) = *predicate.skip_binder() { - None - } else { - Some(predicate.with_self_ty(tcx, open_ty)) - } - }).collect(); + let predicates = existential_predicates + .iter() + .filter_map(|predicate| { + if let ty::ExistentialPredicate::Projection(_) = *predicate.skip_binder() { + None + } else { + Some(predicate.with_self_ty(tcx, open_ty)) + } + }) + .collect(); tcx.required_region_bounds(open_ty, predicates) } diff --git a/src/librustc/util/bug.rs b/src/librustc/util/bug.rs index 7698f5ece98cc..146cf67f161e5 100644 --- a/src/librustc/util/bug.rs +++ b/src/librustc/util/bug.rs @@ -1,8 +1,8 @@ // These functions are used by macro expansion for bug! and span_bug! 
-use ty::tls; use std::fmt; -use syntax_pos::{Span, MultiSpan}; +use syntax_pos::{MultiSpan, Span}; +use ty::tls; #[cold] #[inline(never)] diff --git a/src/librustc/util/captures.rs b/src/librustc/util/captures.rs index 09d576b23c0f5..677ccb31454ea 100644 --- a/src/librustc/util/captures.rs +++ b/src/librustc/util/captures.rs @@ -3,6 +3,6 @@ /// Basically a workaround; see [this comment] for details. /// /// [this comment]: https://github.com/rust-lang/rust/issues/34511#issuecomment-373423999 -pub trait Captures<'a> { } +pub trait Captures<'a> {} -impl<'a, T: ?Sized> Captures<'a> for T { } +impl<'a, T: ?Sized> Captures<'a> for T {} diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs index cc0ca165053d3..d42b094478d82 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -2,20 +2,20 @@ use rustc_data_structures::sync::Lock; -use std::cell::{RefCell, Cell}; +use std::cell::{Cell, RefCell}; use std::collections::HashMap; +use std::env; use std::fmt::Debug; -use std::hash::{Hash, BuildHasher}; +use std::hash::{BuildHasher, Hash}; use std::panic; -use std::env; use std::time::{Duration, Instant}; -use std::sync::mpsc::{Sender}; -use syntax_pos::{SpanData}; -use ty::TyCtxt; -use dep_graph::{DepNode}; +use dep_graph::DepNode; use lazy_static; use session::Session; +use std::sync::mpsc::Sender; +use syntax_pos::SpanData; +use ty::TyCtxt; // The name of the associated type for `Fn` return types pub const FN_OUTPUT_NAME: &str = "Output"; @@ -38,22 +38,24 @@ lazy_static! { fn panic_hook(info: &panic::PanicInfo<'_>) { (*DEFAULT_HOOK)(info); - let backtrace = env::var_os("RUST_BACKTRACE").map(|x| &x != "0").unwrap_or(false); + let backtrace = env::var_os("RUST_BACKTRACE") + .map(|x| &x != "0") + .unwrap_or(false); if backtrace { TyCtxt::try_print_query_stack(); } - #[cfg(windows)] - unsafe { - if env::var("RUSTC_BREAK_ON_ICE").is_ok() { - extern "system" { - fn DebugBreak(); - } - // Trigger a debugger if we crashed during bootstrap - DebugBreak(); + #[cfg(windows)] + unsafe { + if env::var("RUSTC_BREAK_ON_ICE").is_ok() { + extern "system" { + fn DebugBreak(); } + // Trigger a debugger if we crashed during bootstrap + DebugBreak(); } + } } pub fn install_panic_hook() { @@ -61,14 +63,14 @@ pub fn install_panic_hook() { } /// Parameters to the `Dump` variant of type `ProfileQueriesMsg`. -#[derive(Clone,Debug)] +#[derive(Clone, Debug)] pub struct ProfQDumpParams { /// A base path for the files we will dump - pub path:String, + pub path: String, /// To ensure that the compiler waits for us to finish our dumps - pub ack:Sender<()>, + pub ack: Sender<()>, /// toggle dumping a log file with every `ProfileQueriesMsg` - pub dump_profq_msg_log:bool, + pub dump_profq_msg_log: bool, } #[allow(nonstandard_style)] @@ -80,7 +82,7 @@ pub struct QueryMsg { /// A sequence of these messages induce a trace of query-based incremental compilation. /// FIXME(matthewhammer): Determine whether we should include cycle detection here or not. 
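
The `Captures` marker reformatted in util/captures.rs above is easiest to read next to a usage sketch. The trait and blanket impl below are copied from that hunk; the `below` function and the `main` harness are invented for illustration and are not part of this diff or of rustc:

    // Marker trait copied from the captures.rs hunk above.
    trait Captures<'a> {}
    impl<'a, T: ?Sized> Captures<'a> for T {}

    // The returned iterator borrows from both `xs` ('a) and `limit` ('b), but we
    // only promise it outlives 'a. Naming 'b via `Captures<'b>` lets the hidden
    // type mention 'b without turning it into an outlives bound; without it,
    // the pre-2024 RPIT capture rules reject this signature (E0700).
    fn below<'a, 'b: 'a>(
        xs: &'a [u32],
        limit: &'b u32,
    ) -> impl Iterator<Item = u32> + Captures<'b> + 'a {
        xs.iter().cloned().filter(move |x| x < limit)
    }

    fn main() {
        let xs = [1, 5, 2, 9, 3];
        let limit = 4;
        let small: Vec<u32> = below(&xs, &limit).collect();
        assert_eq!(small, vec![1, 2, 3]);
    }
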
-#[derive(Clone,Debug)] +#[derive(Clone, Debug)] pub enum ProfileQueriesMsg { /// begin a timed pass TimeBegin(String), @@ -102,7 +104,7 @@ pub enum ProfileQueriesMsg { /// dump a record of the queries to the given path Dump(ProfQDumpParams), /// halt the profiling/monitoring background thread - Halt + Halt, } /// If enabled, send a message to the profile-queries thread @@ -138,16 +140,20 @@ pub fn set_time_depth(depth: usize) { TIME_DEPTH.with(|slot| slot.set(depth)); } -pub fn time(sess: &Session, what: &str, f: F) -> T where +pub fn time(sess: &Session, what: &str, f: F) -> T +where F: FnOnce() -> T, { time_ext(sess.time_passes(), Some(sess), what, f) } -pub fn time_ext(do_it: bool, sess: Option<&Session>, what: &str, f: F) -> T where +pub fn time_ext(do_it: bool, sess: Option<&Session>, what: &str, f: F) -> T +where F: FnOnce() -> T, { - if !do_it { return f(); } + if !do_it { + return f(); + } let old = TIME_DEPTH.with(|slot| { let r = slot.get(); @@ -178,7 +184,7 @@ pub fn time_ext(do_it: bool, sess: Option<&Session>, what: &str, f: F) -> pub fn print_time_passes_entry(do_it: bool, what: &str, dur: Duration) { if !do_it { - return + return; } let old = TIME_DEPTH.with(|slot| { @@ -202,19 +208,20 @@ fn print_time_passes_entry_internal(what: &str, dur: Duration) { } None => String::new(), }; - println!("{}time: {}{}\t{}", - " ".repeat(indentation), - duration_to_secs_str(dur), - mem_string, - what); + println!( + "{}time: {}{}\t{}", + " ".repeat(indentation), + duration_to_secs_str(dur), + mem_string, + what + ); } // Hack up our own formatting for the duration to make it easier for scripts // to parse (always use the same number of decimal places and the same unit). pub fn duration_to_secs_str(dur: Duration) -> String { const NANOS_PER_SEC: f64 = 1_000_000_000.0; - let secs = dur.as_secs() as f64 + - dur.subsec_nanos() as f64 / NANOS_PER_SEC; + let secs = dur.as_secs() as f64 + dur.subsec_nanos() as f64 / NANOS_PER_SEC; format!("{:.3}", secs) } @@ -239,7 +246,8 @@ pub fn to_readable_str(mut val: usize) -> String { groups.join("_") } -pub fn record_time(accu: &Lock, f: F) -> T where +pub fn record_time(accu: &Lock, f: F) -> T +where F: FnOnce() -> T, { let start = Instant::now(); @@ -288,9 +296,11 @@ fn get_resident() -> Option { #[link(name = "psapi")] extern "system" { fn GetCurrentProcess() -> HANDLE; - fn GetProcessMemoryInfo(Process: HANDLE, - ppsmemCounters: PPROCESS_MEMORY_COUNTERS, - cb: DWORD) -> BOOL; + fn GetProcessMemoryInfo( + Process: HANDLE, + ppsmemCounters: PPROCESS_MEMORY_COUNTERS, + cb: DWORD, + ) -> BOOL; } let mut pmc: PROCESS_MEMORY_COUNTERS = unsafe { mem::zeroed() }; pmc.cb = mem::size_of_val(&pmc) as DWORD; @@ -300,7 +310,8 @@ fn get_resident() -> Option { } } -pub fn indent(op: F) -> R where +pub fn indent(op: F) -> R +where R: Debug, F: FnOnce() -> R, { @@ -317,12 +328,16 @@ pub struct Indenter { } impl Drop for Indenter { - fn drop(&mut self) { debug!("<<"); } + fn drop(&mut self) { + debug!("<<"); + } } pub fn indenter() -> Indenter { debug!(">>"); - Indenter { _cannot_construct_outside_of_this_module: () } + Indenter { + _cannot_construct_outside_of_this_module: (), + } } pub trait MemoizationMap { @@ -337,17 +352,22 @@ pub trait MemoizationMap { /// added into the dep graph. See the `DepTrackingMap` impl for /// more details! 
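
For the `duration_to_secs_str` hunk above, here is a self-contained check of the fixed-width formatting it documents (same number of decimal places, same unit, so scripts can parse it). The function body is copied from the diff; the `main` harness is an illustration only:

    use std::time::Duration;

    // Body copied from the reformatted hunk above.
    pub fn duration_to_secs_str(dur: Duration) -> String {
        const NANOS_PER_SEC: f64 = 1_000_000_000.0;
        let secs = dur.as_secs() as f64 + dur.subsec_nanos() as f64 / NANOS_PER_SEC;
        format!("{:.3}", secs)
    }

    fn main() {
        assert_eq!(duration_to_secs_str(Duration::from_millis(1_234)), "1.234");
        assert_eq!(duration_to_secs_str(Duration::new(2, 500_000_000)), "2.500");
        println!("ok");
    }
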
fn memoize(&self, key: Self::Key, op: OP) -> Self::Value - where OP: FnOnce() -> Self::Value; + where + OP: FnOnce() -> Self::Value; } -impl MemoizationMap for RefCell> - where K: Hash+Eq+Clone, V: Clone, S: BuildHasher +impl MemoizationMap for RefCell> +where + K: Hash + Eq + Clone, + V: Clone, + S: BuildHasher, { type Key = K; type Value = V; fn memoize(&self, key: K, op: OP) -> V - where OP: FnOnce() -> V + where + OP: FnOnce() -> V, { let result = self.borrow().get(&key).cloned(); match result { diff --git a/src/librustc/util/nodemap.rs b/src/librustc/util/nodemap.rs index fe6ab075a1a8a..9e8226705b407 100644 --- a/src/librustc/util/nodemap.rs +++ b/src/librustc/util/nodemap.rs @@ -11,7 +11,7 @@ macro_rules! define_id_collections { ($map_name:ident, $set_name:ident, $key:ty) => { pub type $map_name = FxHashMap<$key, T>; pub type $set_name = FxHashSet<$key>; - } + }; } define_id_collections!(NodeMap, NodeSet, ast::NodeId); diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 79405b124001d..3fb0aa7c531da 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -1,25 +1,25 @@ use hir::def_id::DefId; use hir::map::definitions::DefPathData; -use mir::interpret::ConstValue; use middle::region; +use mir::interpret::ConstValue; use ty::subst::{self, Subst}; +use ty::{self, GenericParamCount, GenericParamDefKind, Ty, TyCtxt, TypeFoldable}; +use ty::{Adt, Bool, Char}; +use ty::{Array, Error, Float, FnDef, FnPtr, Slice, Str}; +use ty::{Bound, Never, Param, RawPtr, Ref, Tuple}; use ty::{BrAnon, BrEnv, BrFresh, BrNamed}; -use ty::{Bool, Char, Adt}; -use ty::{Error, Str, Array, Slice, Float, FnDef, FnPtr}; -use ty::{Param, Bound, RawPtr, Ref, Never, Tuple}; -use ty::{Closure, Generator, GeneratorWitness, Foreign, Projection, Opaque}; -use ty::{Placeholder, UnnormalizedProjection, Dynamic, Int, Uint, Infer}; -use ty::{self, Ty, TyCtxt, TypeFoldable, GenericParamCount, GenericParamDefKind}; +use ty::{Closure, Foreign, Generator, GeneratorWitness, Opaque, Projection}; +use ty::{Dynamic, Infer, Int, Placeholder, Uint, UnnormalizedProjection}; use util::nodemap::FxHashSet; use std::cell::Cell; use std::fmt; use std::usize; +use hir; use rustc_target::spec::abi::Abi; use syntax::ast::CRATE_NODE_ID; -use syntax::symbol::{Symbol, InternedString}; -use hir; +use syntax::symbol::{InternedString, Symbol}; /// The "region highlights" are used to control region printing during /// specific error messages. When a "region highlight" is enabled, it @@ -58,11 +58,7 @@ impl RegionHighlightMode { } /// Internal helper to update current settings during the execution of `op`. - fn set( - old_mode: Self, - new_mode: Self, - op: impl FnOnce() -> R, - ) -> R { + fn set(old_mode: Self, new_mode: Self, op: impl FnOnce() -> R) -> R { REGION_HIGHLIGHT_MODE.with(|c| { c.set(new_mode); let result = op(); @@ -97,7 +93,9 @@ impl RegionHighlightMode { ) -> R { let old_mode = Self::get(); let mut new_mode = old_mode; - let first_avail_slot = new_mode.highlight_regions.iter_mut() + let first_avail_slot = new_mode + .highlight_regions + .iter_mut() .filter(|s| s.is_none()) .next() .unwrap_or_else(|| { @@ -121,13 +119,10 @@ impl RegionHighlightMode { /// Returns true if any placeholders are highlighted. 
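
The `MemoizationMap` impl whose signature is reflowed above follows the usual RefCell-plus-HashMap memoization pattern: probe the cache first, and only run `op` (then store its result) on a miss. A minimal standalone sketch of that pattern, with invented names, a plain `HashMap` standing in for the real map type, and none of the dep-graph integration the real impl has:

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::hash::Hash;

    fn memoize<K, V, OP>(cache: &RefCell<HashMap<K, V>>, key: K, op: OP) -> V
    where
        K: Hash + Eq + Clone,
        V: Clone,
        OP: FnOnce() -> V,
    {
        // Look the key up first; the shared borrow ends with this statement.
        let hit = cache.borrow().get(&key).cloned();
        match hit {
            Some(v) => v,
            None => {
                // Cache miss: compute once, store a clone, return the value.
                let v = op();
                cache.borrow_mut().insert(key, v.clone());
                v
            }
        }
    }

    fn main() {
        let cache = RefCell::new(HashMap::new());
        let first = memoize(&cache, 10u32, || 10 * 10);
        let second = memoize(&cache, 10u32, || unreachable!("hit: op must not rerun"));
        assert_eq!((first, second), (100, 100));
    }
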
fn any_region_vids_highlighted(&self) -> bool { - Self::get() - .highlight_regions - .iter() - .any(|h| match h { - Some((ty::ReVar(_), _)) => true, - _ => false, - }) + Self::get().highlight_regions.iter().any(|h| match h { + Some((ty::ReVar(_), _)) => true, + _ => false, + }) } /// Returns `Some(n)` with the number to use for the given region, @@ -165,13 +160,10 @@ impl RegionHighlightMode { /// Returns true if any placeholders are highlighted. pub fn any_placeholders_highlighted(&self) -> bool { - Self::get() - .highlight_regions - .iter() - .any(|h| match h { - Some((ty::RePlaceholder(_), _)) => true, - _ => false, - }) + Self::get().highlight_regions.iter().any(|h| match h { + Some((ty::RePlaceholder(_), _)) => true, + _ => false, + }) } /// Returns `Some(N)` if the placeholder `p` is highlighted to print as `'N`. @@ -276,15 +268,14 @@ macro_rules! print { }; } - struct LateBoundRegionNameCollector(FxHashSet); impl<'tcx> ty::fold::TypeVisitor<'tcx> for LateBoundRegionNameCollector { fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool { match *r { ty::ReLateBound(_, ty::BrNamed(_, name)) => { self.0.insert(name); - }, - _ => {}, + } + _ => {} } r.super_visit_with(self) } @@ -302,9 +293,14 @@ pub struct PrintContext { impl PrintContext { fn new() -> Self { ty::tls::with_opt(|tcx| { - let (is_verbose, identify_regions) = tcx.map( - |tcx| (tcx.sess.verbose(), tcx.sess.opts.debugging_opts.identify_regions) - ).unwrap_or((false, false)); + let (is_verbose, identify_regions) = tcx + .map(|tcx| { + ( + tcx.sess.verbose(), + tcx.sess.opts.debugging_opts.identify_regions, + ) + }) + .unwrap_or((false, false)); PrintContext { is_debug: false, is_verbose: is_verbose, @@ -316,7 +312,8 @@ impl PrintContext { }) } fn prepare_late_bound_region_info<'tcx, T>(&mut self, value: &ty::Binder) - where T: TypeFoldable<'tcx> + where + T: TypeFoldable<'tcx>, { let mut collector = LateBoundRegionNameCollector(Default::default()); value.visit_with(&mut collector); @@ -359,12 +356,13 @@ pub trait Print { } impl PrintContext { - fn fn_sig(&mut self, - f: &mut F, - inputs: &[Ty<'_>], - variadic: bool, - output: Ty<'_>) - -> fmt::Result { + fn fn_sig( + &mut self, + f: &mut F, + inputs: &[Ty<'_>], + variadic: bool, + output: Ty<'_>, + ) -> fmt::Result { write!(f, "(")?; let mut inputs = inputs.iter(); if let Some(&ty) = inputs.next() { @@ -384,12 +382,13 @@ impl PrintContext { Ok(()) } - fn parameterized(&mut self, - f: &mut F, - substs: &subst::Substs<'_>, - did: DefId, - projections: &[ty::ProjectionPredicate<'_>]) - -> fmt::Result { + fn parameterized( + &mut self, + f: &mut F, + substs: &subst::Substs<'_>, + did: DefId, + projections: &[ty::ProjectionPredicate<'_>], + ) -> fmt::Result { let key = ty::tls::with(|tcx| tcx.def_key(did)); let verbose = self.is_verbose; @@ -406,37 +405,40 @@ impl PrintContext { loop { let key = tcx.def_key(item_def_id); match key.disambiguated_data.data { - DefPathData::AssocTypeInTrait(_) | - DefPathData::AssocTypeInImpl(_) | - DefPathData::AssocExistentialInImpl(_) | - DefPathData::Trait(_) | - DefPathData::Impl | - DefPathData::TypeNs(_) => { + DefPathData::AssocTypeInTrait(_) + | DefPathData::AssocTypeInImpl(_) + | DefPathData::AssocExistentialInImpl(_) + | DefPathData::Trait(_) + | DefPathData::Impl + | DefPathData::TypeNs(_) => { break; } - DefPathData::ValueNs(_) | - DefPathData::EnumVariant(_) => { + DefPathData::ValueNs(_) | DefPathData::EnumVariant(_) => { is_value_path = true; break; } - DefPathData::CrateRoot | - DefPathData::Misc | - DefPathData::Module(_) | - 
DefPathData::MacroDef(_) | - DefPathData::ClosureExpr | - DefPathData::TypeParam(_) | - DefPathData::LifetimeParam(_) | - DefPathData::Field(_) | - DefPathData::StructCtor | - DefPathData::AnonConst | - DefPathData::ImplTrait | - DefPathData::GlobalMetaData(_) => { + DefPathData::CrateRoot + | DefPathData::Misc + | DefPathData::Module(_) + | DefPathData::MacroDef(_) + | DefPathData::ClosureExpr + | DefPathData::TypeParam(_) + | DefPathData::LifetimeParam(_) + | DefPathData::Field(_) + | DefPathData::StructCtor + | DefPathData::AnonConst + | DefPathData::ImplTrait + | DefPathData::GlobalMetaData(_) => { // if we're making a symbol for something, there ought // to be a value or type-def or something in there // *somewhere* item_def_id.index = key.parent.unwrap_or_else(|| { - bug!("finding type for {:?}, encountered def-id {:?} with no \ - parent", did, item_def_id); + bug!( + "finding type for {:?}, encountered def-id {:?} with no \ + parent", + did, + item_def_id + ); }); } } @@ -455,7 +457,13 @@ impl PrintContext { own_counts = generics.own_counts(); if has_self { - print!(f, self, write("<"), print_display(substs.type_at(0)), write(" as "))?; + print!( + f, + self, + write("<"), + print_display(substs.type_at(0)), + write(" as ") + )?; } path_def_id = def_id; @@ -472,13 +480,17 @@ impl PrintContext { } if !verbose { - let mut type_params = - generics.params.iter().rev().filter_map(|param| match param.kind { + let mut type_params = generics + .params + .iter() + .rev() + .filter_map(|param| match param.kind { GenericParamDefKind::Lifetime => None, GenericParamDefKind::Type { has_default, .. } => { Some((param.def_id, has_default)) } - }).peekable(); + }) + .peekable(); let has_default = { let has_default = type_params.peek().map(|(_, has_default)| has_default); *has_default.unwrap_or(&false) @@ -552,9 +564,10 @@ impl PrintContext { print_regions(f, "<", 0, own_counts.lifetimes)?; - let tps = substs.types() - .take(own_counts.types - num_supplied_defaults) - .skip(has_self as usize); + let tps = substs + .types() + .take(own_counts.types - num_supplied_defaults) + .skip(has_self as usize); for ty in tps { start_or_continue(f, "<", ", ")?; @@ -563,12 +576,18 @@ impl PrintContext { for projection in projections { start_or_continue(f, "<", ", ")?; - ty::tls::with(|tcx| - print!(f, self, - write("{}=", - tcx.associated_item(projection.projection_ty.item_def_id).ident), - print_display(projection.ty)) - )?; + ty::tls::with(|tcx| { + print!( + f, + self, + write( + "{}=", + tcx.associated_item(projection.projection_ty.item_def_id) + .ident + ), + print_display(projection.ty) + ) + })?; } start_or_continue(f, "", ">")?; @@ -599,19 +618,25 @@ impl PrintContext { Ok(()) } - fn in_binder<'a, 'gcx, 'tcx, T, U, F>(&mut self, - f: &mut F, - tcx: TyCtxt<'a, 'gcx, 'tcx>, - original: &ty::Binder, - lifted: Option>) -> fmt::Result - where T: Print, U: Print + TypeFoldable<'tcx>, F: fmt::Write + fn in_binder<'a, 'gcx, 'tcx, T, U, F>( + &mut self, + f: &mut F, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + original: &ty::Binder, + lifted: Option>, + ) -> fmt::Result + where + T: Print, + U: Print + TypeFoldable<'tcx>, + F: fmt::Write, { fn name_by_region_index(index: usize) -> InternedString { match index { 0 => Symbol::intern("'r"), 1 => Symbol::intern("'s"), - i => Symbol::intern(&format!("'t{}", i-2)), - }.as_interned_str() + i => Symbol::intern(&format!("'t{}", i - 2)), + } + .as_interned_str() } // Replace any anonymous late-bound regions with named @@ -641,29 +666,29 @@ impl PrintContext { let old_region_index = 
self.region_index; let mut region_index = old_region_index; - let new_value = tcx.replace_late_bound_regions(&value, |br| { - let _ = start_or_continue(f, "for<", ", "); - let br = match br { - ty::BrNamed(_, name) => { - let _ = write!(f, "{}", name); - br - } - ty::BrAnon(_) | - ty::BrFresh(_) | - ty::BrEnv => { - let name = loop { - let name = name_by_region_index(region_index); - region_index += 1; - if !self.is_name_used(&name) { - break name; - } - }; - let _ = write!(f, "{}", name); - ty::BrNamed(tcx.hir().local_def_id(CRATE_NODE_ID), name) - } - }; - tcx.mk_region(ty::ReLateBound(ty::INNERMOST, br)) - }).0; + let new_value = tcx + .replace_late_bound_regions(&value, |br| { + let _ = start_or_continue(f, "for<", ", "); + let br = match br { + ty::BrNamed(_, name) => { + let _ = write!(f, "{}", name); + br + } + ty::BrAnon(_) | ty::BrFresh(_) | ty::BrEnv => { + let name = loop { + let name = name_by_region_index(region_index); + region_index += 1; + if !self.is_name_used(&name) { + break name; + } + }; + let _ = write!(f, "{}", name); + ty::BrNamed(tcx.hir().local_def_id(CRATE_NODE_ID), name) + } + }; + tcx.mk_region(ty::ReLateBound(ty::INNERMOST, br)) + }) + .0; start_or_continue(f, "", "> ")?; // Push current state to gcx, and restore after writing new_value. @@ -691,11 +716,12 @@ pub fn identify_regions() -> bool { ty::tls::with(|tcx| tcx.sess.opts.debugging_opts.identify_regions) } -pub fn parameterized(f: &mut F, - substs: &subst::Substs<'_>, - did: DefId, - projections: &[ty::ProjectionPredicate<'_>]) - -> fmt::Result { +pub fn parameterized( + f: &mut F, + substs: &subst::Substs<'_>, + did: DefId, + projections: &[ty::ProjectionPredicate<'_>], +) -> fmt::Result { PrintContext::new().parameterized(f, substs, did, projections) } @@ -741,53 +767,51 @@ impl fmt::Debug for ty::GenericParamDef { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let type_name = match self.kind { ty::GenericParamDefKind::Lifetime => "Lifetime", - ty::GenericParamDefKind::Type {..} => "Type", + ty::GenericParamDefKind::Type { .. 
} => "Type", }; - write!(f, "{}({}, {:?}, {})", - type_name, - self.name, - self.def_id, - self.index) + write!( + f, + "{}({}, {:?}, {})", + type_name, self.name, self.def_id, self.index + ) } } impl fmt::Debug for ty::TraitDef { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - ty::tls::with(|tcx| { - write!(f, "{}", tcx.item_path_str(self.def_id)) - }) + ty::tls::with(|tcx| write!(f, "{}", tcx.item_path_str(self.def_id))) } } impl fmt::Debug for ty::AdtDef { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - ty::tls::with(|tcx| { - write!(f, "{}", tcx.item_path_str(self.did)) - }) + ty::tls::with(|tcx| write!(f, "{}", tcx.item_path_str(self.did))) } } impl<'tcx> fmt::Debug for ty::ClosureUpvar<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "ClosureUpvar({:?},{:?})", - self.def, - self.ty) + write!(f, "ClosureUpvar({:?},{:?})", self.def, self.ty) } } impl fmt::Debug for ty::UpvarId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "UpvarId({:?};`{}`;{:?})", - self.var_path.hir_id, - ty::tls::with(|tcx| tcx.hir().name(tcx.hir().hir_to_node_id(self.var_path.hir_id))), - self.closure_expr_id) + write!( + f, + "UpvarId({:?};`{}`;{:?})", + self.var_path.hir_id, + ty::tls::with(|tcx| tcx + .hir() + .name(tcx.hir().hir_to_node_id(self.var_path.hir_id))), + self.closure_expr_id + ) } } impl<'tcx> fmt::Debug for ty::UpvarBorrow<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "UpvarBorrow({:?}, {:?})", - self.kind, self.region) + write!(f, "UpvarBorrow({:?}, {:?})", self.kind, self.region) } } diff --git a/src/librustc/util/profiling.rs b/src/librustc/util/profiling.rs index d598709ae3aac..5107562e27b50 100644 --- a/src/librustc/util/profiling.rs +++ b/src/librustc/util/profiling.rs @@ -150,7 +150,7 @@ impl SelfProfiler { match self.timer_stack.last().cloned() { None => { self.current_timer = Instant::now(); - }, + } Some(current_category) if current_category == category => { //since the current category is the same as the new activity's category, //we don't need to do anything with the timer, we just need to push it on the stack @@ -181,10 +181,10 @@ impl SelfProfiler { pub fn end_activity(&mut self, category: ProfileCategory) { match self.timer_stack.pop() { None => bug!("end_activity() was called but there was no running activity"), - Some(c) => - assert!( - c == category, - "end_activity() was called but a different activity was running"), + Some(c) => assert!( + c == category, + "end_activity() was called but a different activity was running" + ), } //check if the new running timer is in the same category as this one @@ -228,13 +228,14 @@ impl SelfProfiler { assert!( self.timer_stack.is_empty(), - "there were timers running when print_results() was called"); + "there were timers running when print_results() was called" + ); let out = io::stderr(); let mut lock = out.lock(); - let crate_name = - opts.crate_name + let crate_name = opts + .crate_name .as_ref() .map(|n| format!(" for {}", n)) .unwrap_or_default(); @@ -247,20 +248,30 @@ impl SelfProfiler { writeln!(lock).unwrap(); writeln!(lock, "Optimization level: {:?}", opts.optimize).unwrap(); - let incremental = if opts.incremental.is_some() { "on" } else { "off" }; + let incremental = if opts.incremental.is_some() { + "on" + } else { + "off" + }; writeln!(lock, "Incremental: {}", incremental).unwrap(); } pub fn save_results(&self, opts: &Options) { let category_data = self.data.json(); - let compilation_options = - format!("{{ 
\"optimization_level\": \"{:?}\", \"incremental\": {} }}", - opts.optimize, - if opts.incremental.is_some() { "true" } else { "false" }); - - let json = format!("{{ \"category_data\": {}, \"compilation_options\": {} }}", - category_data, - compilation_options); + let compilation_options = format!( + "{{ \"optimization_level\": \"{:?}\", \"incremental\": {} }}", + opts.optimize, + if opts.incremental.is_some() { + "true" + } else { + "false" + } + ); + + let json = format!( + "{{ \"category_data\": {}, \"compilation_options\": {} }}", + category_data, compilation_options + ); fs::write("self_profiler_results.json", json).unwrap(); } diff --git a/src/librustc/util/time_graph.rs b/src/librustc/util/time_graph.rs index 4dd383fd234ac..013dedc23c7c5 100644 --- a/src/librustc/util/time_graph.rs +++ b/src/librustc/util/time_graph.rs @@ -45,27 +45,29 @@ struct RaiiToken { timeline: TimelineId, events: Vec<(String, Instant)>, // The token must not be Send: - _marker: PhantomData<*const ()> + _marker: PhantomData<*const ()>, } - impl Drop for RaiiToken { fn drop(&mut self) { - self.graph.end(self.timeline, mem::replace(&mut self.events, Vec::new())); + self.graph + .end(self.timeline, mem::replace(&mut self.events, Vec::new())); } } impl TimeGraph { pub fn new() -> TimeGraph { TimeGraph { - data: Arc::new(Mutex::new(FxHashMap::default())) + data: Arc::new(Mutex::new(FxHashMap::default())), } } - pub fn start(&self, - timeline: TimelineId, - work_package_kind: WorkPackageKind, - name: &str) -> Timeline { + pub fn start( + &self, + timeline: TimelineId, + work_package_kind: WorkPackageKind, + name: &str, + ) -> Timeline { { let mut table = self.data.lock().unwrap(); @@ -114,24 +116,23 @@ impl TimeGraph { assert!(data.open_work_package.is_none()); } - let mut threads: Vec = - table.values().map(|data| data.clone()).collect(); + let mut threads: Vec = table.values().map(|data| data.clone()).collect(); threads.sort_by_key(|timeline| timeline.timings[0].start); let earliest_instant = threads[0].timings[0].start; - let latest_instant = threads.iter() - .map(|timeline| timeline.timings - .last() - .unwrap() - .end) - .max() - .unwrap(); + let latest_instant = threads + .iter() + .map(|timeline| timeline.timings.last().unwrap().end) + .max() + .unwrap(); let max_distance = distance(earliest_instant, latest_instant); let mut file = File::create(format!("{}.html", output_filename)).unwrap(); - writeln!(file, " + writeln!( + file, + " \n\n").unwrap(); @@ -122,174 +127,172 @@ fn profile_queries_thread(r: Receiver) { match (frame.parse_st.clone(), msg) { (_, ProfileQueriesMsg::Halt) | (_, ProfileQueriesMsg::Dump(_)) => { unreachable!(); - }, + } // Parse State: Clear - (ParseState::Clear, - ProfileQueriesMsg::QueryBegin(span, querymsg)) => { + (ParseState::Clear, ProfileQueriesMsg::QueryBegin(span, querymsg)) => { let start = Instant::now(); - frame.parse_st = ParseState::HaveQuery - (Query { span, msg: querymsg }, start) - }, - (ParseState::Clear, - ProfileQueriesMsg::CacheHit) => { + frame.parse_st = ParseState::HaveQuery( + Query { + span, + msg: querymsg, + }, + start, + ) + } + (ParseState::Clear, ProfileQueriesMsg::CacheHit) => { panic!("parse error: unexpected CacheHit; expected QueryBegin") - }, - (ParseState::Clear, - ProfileQueriesMsg::ProviderBegin) => { + } + (ParseState::Clear, ProfileQueriesMsg::ProviderBegin) => { panic!("parse error: expected QueryBegin before beginning a provider") - }, - (ParseState::Clear, - ProfileQueriesMsg::ProviderEnd) => { + } + (ParseState::Clear, 
ProfileQueriesMsg::ProviderEnd) => { let provider_extent = frame.traces; match stack.pop() { - None => - panic!("parse error: expected a stack frame; found an empty stack"), - Some(old_frame) => { - match old_frame.parse_st { - ParseState::HaveQuery(q, start) => { - let duration = start.elapsed(); - frame = StackFrame{ - parse_st: ParseState::Clear, - traces: old_frame.traces - }; - let dur_extent = total_duration(&provider_extent); - let trace = Rec { - effect: Effect::QueryBegin(q, CacheCase::Miss), - extent: Box::new(provider_extent), - start: start, - dur_self: duration - dur_extent, - dur_total: duration, - }; - frame.traces.push( trace ); - }, - _ => panic!("internal parse error: malformed parse stack") - } + None => { + panic!("parse error: expected a stack frame; found an empty stack") } + Some(old_frame) => match old_frame.parse_st { + ParseState::HaveQuery(q, start) => { + let duration = start.elapsed(); + frame = StackFrame { + parse_st: ParseState::Clear, + traces: old_frame.traces, + }; + let dur_extent = total_duration(&provider_extent); + let trace = Rec { + effect: Effect::QueryBegin(q, CacheCase::Miss), + extent: Box::new(provider_extent), + start: start, + dur_self: duration - dur_extent, + dur_total: duration, + }; + frame.traces.push(trace); + } + _ => panic!("internal parse error: malformed parse stack"), + }, } - }, - (ParseState::Clear, - ProfileQueriesMsg::TimeBegin(msg)) => { + } + (ParseState::Clear, ProfileQueriesMsg::TimeBegin(msg)) => { let start = Instant::now(); frame.parse_st = ParseState::HaveTimeBegin(msg, start); stack.push(frame); - frame = StackFrame{parse_st: ParseState::Clear, traces: vec![]}; - }, + frame = StackFrame { + parse_st: ParseState::Clear, + traces: vec![], + }; + } (_, ProfileQueriesMsg::TimeBegin(_)) => { panic!("parse error; did not expect time begin here"); - }, - (ParseState::Clear, - ProfileQueriesMsg::TimeEnd) => { + } + (ParseState::Clear, ProfileQueriesMsg::TimeEnd) => { let provider_extent = frame.traces; match stack.pop() { - None => - panic!("parse error: expected a stack frame; found an empty stack"), - Some(old_frame) => { - match old_frame.parse_st { - ParseState::HaveTimeBegin(msg, start) => { - let duration = start.elapsed(); - frame = StackFrame{ - parse_st: ParseState::Clear, - traces: old_frame.traces - }; - let dur_extent = total_duration(&provider_extent); - let trace = Rec { - effect: Effect::TimeBegin(msg), - extent: Box::new(provider_extent), - start: start, - dur_total: duration, - dur_self: duration - dur_extent, - }; - frame.traces.push( trace ); - }, - _ => panic!("internal parse error: malformed parse stack") - } + None => { + panic!("parse error: expected a stack frame; found an empty stack") } + Some(old_frame) => match old_frame.parse_st { + ParseState::HaveTimeBegin(msg, start) => { + let duration = start.elapsed(); + frame = StackFrame { + parse_st: ParseState::Clear, + traces: old_frame.traces, + }; + let dur_extent = total_duration(&provider_extent); + let trace = Rec { + effect: Effect::TimeBegin(msg), + extent: Box::new(provider_extent), + start: start, + dur_total: duration, + dur_self: duration - dur_extent, + }; + frame.traces.push(trace); + } + _ => panic!("internal parse error: malformed parse stack"), + }, } - }, - (_, ProfileQueriesMsg::TimeEnd) => { - panic!("parse error") - }, - (ParseState::Clear, - ProfileQueriesMsg::TaskBegin(key)) => { + } + (_, ProfileQueriesMsg::TimeEnd) => panic!("parse error"), + (ParseState::Clear, ProfileQueriesMsg::TaskBegin(key)) => { let start = Instant::now(); 
frame.parse_st = ParseState::HaveTaskBegin(key, start); stack.push(frame); - frame = StackFrame{ parse_st: ParseState::Clear, traces: vec![] }; - }, + frame = StackFrame { + parse_st: ParseState::Clear, + traces: vec![], + }; + } (_, ProfileQueriesMsg::TaskBegin(_)) => { panic!("parse error; did not expect time begin here"); - }, - (ParseState::Clear, - ProfileQueriesMsg::TaskEnd) => { + } + (ParseState::Clear, ProfileQueriesMsg::TaskEnd) => { let provider_extent = frame.traces; match stack.pop() { - None => - panic!("parse error: expected a stack frame; found an empty stack"), - Some(old_frame) => { - match old_frame.parse_st { - ParseState::HaveTaskBegin(key, start) => { - let duration = start.elapsed(); - frame = StackFrame{ - parse_st: ParseState::Clear, - traces: old_frame.traces - }; - let dur_extent = total_duration(&provider_extent); - let trace = Rec { - effect: Effect::TaskBegin(key), - extent: Box::new(provider_extent), - start: start, - dur_total: duration, - dur_self: duration - dur_extent, - }; - frame.traces.push( trace ); - }, - _ => panic!("internal parse error: malformed parse stack") - } + None => { + panic!("parse error: expected a stack frame; found an empty stack") } + Some(old_frame) => match old_frame.parse_st { + ParseState::HaveTaskBegin(key, start) => { + let duration = start.elapsed(); + frame = StackFrame { + parse_st: ParseState::Clear, + traces: old_frame.traces, + }; + let dur_extent = total_duration(&provider_extent); + let trace = Rec { + effect: Effect::TaskBegin(key), + extent: Box::new(provider_extent), + start: start, + dur_total: duration, + dur_self: duration - dur_extent, + }; + frame.traces.push(trace); + } + _ => panic!("internal parse error: malformed parse stack"), + }, } - }, - (_, ProfileQueriesMsg::TaskEnd) => { - panic!("parse error") - }, + } + (_, ProfileQueriesMsg::TaskEnd) => panic!("parse error"), // Parse State: HaveQuery - (ParseState::HaveQuery(q,start), - ProfileQueriesMsg::CacheHit) => { + (ParseState::HaveQuery(q, start), ProfileQueriesMsg::CacheHit) => { let duration = start.elapsed(); - let trace : Rec = Rec{ + let trace: Rec = Rec { effect: Effect::QueryBegin(q, CacheCase::Hit), extent: Box::new(vec![]), start: start, dur_self: duration, dur_total: duration, }; - frame.traces.push( trace ); + frame.traces.push(trace); frame.parse_st = ParseState::Clear; - }, - (ParseState::HaveQuery(_, _), - ProfileQueriesMsg::ProviderBegin) => { + } + (ParseState::HaveQuery(_, _), ProfileQueriesMsg::ProviderBegin) => { stack.push(frame); - frame = StackFrame{ parse_st: ParseState::Clear, traces: vec![] }; - }, + frame = StackFrame { + parse_st: ParseState::Clear, + traces: vec![], + }; + } // Parse errors: - - (ParseState::HaveQuery(q, _), - ProfileQueriesMsg::ProviderEnd) => { - panic!("parse error: unexpected ProviderEnd; \ - expected something else to follow BeginQuery for {:?}", q) - }, - (ParseState::HaveQuery(q1, _), - ProfileQueriesMsg::QueryBegin(span2, querymsg2)) => { - panic!("parse error: unexpected QueryBegin; \ - earlier query is unfinished: {:?} and now {:?}", - q1, Query{span:span2, msg: querymsg2}) - }, - (ParseState::HaveTimeBegin(_, _), _) => { - unreachable!() - }, - (ParseState::HaveTaskBegin(_, _), _) => { - unreachable!() - }, + (ParseState::HaveQuery(q, _), ProfileQueriesMsg::ProviderEnd) => panic!( + "parse error: unexpected ProviderEnd; \ + expected something else to follow BeginQuery for {:?}", + q + ), + ( + ParseState::HaveQuery(q1, _), + ProfileQueriesMsg::QueryBegin(span2, querymsg2), + ) => panic!( + "parse 
error: unexpected QueryBegin; \ + earlier query is unfinished: {:?} and now {:?}", + q1, + Query { + span: span2, + msg: querymsg2 + } + ), + (ParseState::HaveTimeBegin(_, _), _) => unreachable!(), + (ParseState::HaveTaskBegin(_, _), _) => unreachable!(), } } } diff --git a/src/librustc_driver/profile/trace.rs b/src/librustc_driver/profile/trace.rs index 95c4ea6ff2347..fc10614391637 100644 --- a/src/librustc_driver/profile/trace.rs +++ b/src/librustc_driver/profile/trace.rs @@ -1,10 +1,10 @@ use super::*; -use syntax_pos::SpanData; -use rustc_data_structures::fx::FxHashMap; +use rustc::dep_graph::DepNode; use rustc::util::common::QueryMsg; +use rustc_data_structures::fx::FxHashMap; use std::fs::File; use std::time::{Duration, Instant}; -use rustc::dep_graph::{DepNode}; +use syntax_pos::SpanData; #[derive(Debug, Clone, Eq, PartialEq)] pub struct Query { @@ -17,7 +17,8 @@ pub enum Effect { TaskBegin(DepNode), } pub enum CacheCase { - Hit, Miss + Hit, + Miss, } /// Recursive trace structure pub struct Rec { @@ -50,23 +51,24 @@ pub fn cons_of_key(k: &DepNode) -> String { // First return value is text; second return value is a CSS class pub fn html_of_effect(eff: &Effect) -> (String, String) { match *eff { - Effect::TimeBegin(ref msg) => { - (msg.clone(), - "time-begin".to_string()) - }, + Effect::TimeBegin(ref msg) => (msg.clone(), "time-begin".to_string()), Effect::TaskBegin(ref key) => { let cons = cons_of_key(key); (cons.clone(), format!("{} task-begin", cons)) - }, + } Effect::QueryBegin(ref qmsg, ref cc) => { let cons = cons_of_query_msg(qmsg); - (cons.clone(), - format!("{} {}", - cons, - match *cc { - CacheCase::Hit => "hit", - CacheCase::Miss => "miss", - })) + ( + cons.clone(), + format!( + "{} {}", + cons, + match *cc { + CacheCase::Hit => "hit", + CacheCase::Miss => "miss", + } + ), + ) } } } @@ -79,16 +81,27 @@ fn html_of_duration(_start: &Instant, dur: &Duration) -> (String, String) { fn html_of_fraction(frac: f64) -> (String, &'static str) { let css = { - if frac > 0.50 { "frac-50" } - else if frac > 0.40 { "frac-40" } - else if frac > 0.30 { "frac-30" } - else if frac > 0.20 { "frac-20" } - else if frac > 0.10 { "frac-10" } - else if frac > 0.05 { "frac-05" } - else if frac > 0.02 { "frac-02" } - else if frac > 0.01 { "frac-01" } - else if frac > 0.001 { "frac-001" } - else { "frac-0" } + if frac > 0.50 { + "frac-50" + } else if frac > 0.40 { + "frac-40" + } else if frac > 0.30 { + "frac-30" + } else if frac > 0.20 { + "frac-20" + } else if frac > 0.10 { + "frac-10" + } else if frac > 0.05 { + "frac-05" + } else if frac > 0.02 { + "frac-02" + } else if frac > 0.01 { + "frac-01" + } else if frac > 0.001 { + "frac-001" + } else { + "frac-0" + } }; let percent = frac * 100.0; @@ -118,15 +131,22 @@ fn write_traces_rec(file: &mut File, traces: &[Rec], total: Duration, depth: usi let fraction = duration_div(t.dur_total, total); let percent = fraction * 100.0; let (frc_text, frc_css_classes) = html_of_fraction(fraction); - writeln!(file, "

", - depth, - t.extent.len(), - /* Heuristic for 'important' CSS class: */ - if t.extent.len() > 5 || percent >= 1.0 { " important" } else { "" }, - eff_css_classes, - dur_css_classes, - frc_css_classes, - ).unwrap(); + writeln!( + file, + "
", + depth, + t.extent.len(), + /* Heuristic for 'important' CSS class: */ + if t.extent.len() > 5 || percent >= 1.0 { + " important" + } else { + "" + }, + eff_css_classes, + dur_css_classes, + frc_css_classes, + ) + .unwrap(); writeln!(file, "
{}
", eff_text).unwrap(); writeln!(file, "
{}
", dur_text).unwrap(); writeln!(file, "
{}
", frc_text).unwrap(); @@ -135,7 +155,7 @@ fn write_traces_rec(file: &mut File, traces: &[Rec], total: Duration, depth: usi } } -fn compute_counts_rec(counts: &mut FxHashMap, traces: &[Rec]) { +fn compute_counts_rec(counts: &mut FxHashMap, traces: &[Rec]) { counts.reserve(traces.len()); for t in traces.iter() { match t.effect { @@ -146,41 +166,39 @@ fn compute_counts_rec(counts: &mut FxHashMap, traces: &[Rec] count: 1, dur_self: t.dur_self, dur_total: t.dur_total, - } + }, }; counts.insert(msg.clone(), qm); - }, + } Effect::TaskBegin(ref key) => { let cons = cons_of_key(key); let qm = match counts.get(&cons) { - Some(qm) => - QueryMetric { - count: qm.count + 1, - dur_self: qm.dur_self + t.dur_self, - dur_total: qm.dur_total + t.dur_total, - }, + Some(qm) => QueryMetric { + count: qm.count + 1, + dur_self: qm.dur_self + t.dur_self, + dur_total: qm.dur_total + t.dur_total, + }, None => QueryMetric { count: 1, dur_self: t.dur_self, dur_total: t.dur_total, - } + }, }; counts.insert(cons, qm); - }, + } Effect::QueryBegin(ref qmsg, ref _cc) => { let qcons = cons_of_query_msg(qmsg); let qm = match counts.get(&qcons) { - Some(qm) => - QueryMetric { - count: qm.count + 1, - dur_total: qm.dur_total + t.dur_total, - dur_self: qm.dur_self + t.dur_self - }, + Some(qm) => QueryMetric { + count: qm.count + 1, + dur_total: qm.dur_total + t.dur_total, + dur_self: qm.dur_self + t.dur_self, + }, None => QueryMetric { count: 1, dur_total: t.dur_total, dur_self: t.dur_self, - } + }, }; counts.insert(qcons, qm); } @@ -193,17 +211,29 @@ pub fn write_counts(count_file: &mut File, counts: &mut FxHashMap>(); + let mut data = counts + .iter() + .map(|(ref cons, ref qm)| { + ( + cons.clone(), + qm.count.clone(), + qm.dur_total.clone(), + qm.dur_self.clone(), + ) + }) + .collect::>(); data.sort_by_key(|k| Reverse(k.3)); for (cons, count, dur_total, dur_self) in data { - writeln!(count_file, "{}, {}, {}, {}", - cons, count, - duration_to_secs_str(dur_total), - duration_to_secs_str(dur_self) - ).unwrap(); + writeln!( + count_file, + "{}, {}, {}, {}", + cons, + count, + duration_to_secs_str(dur_total), + duration_to_secs_str(dur_self) + ) + .unwrap(); } } @@ -218,7 +248,10 @@ pub fn write_traces(html_file: &mut File, counts_file: &mut File, traces: &[Rec] } pub fn write_style(html_file: &mut File) { - write!(html_file, "{}", " + write!( + html_file, + "{}", + " body { font-family: sans-serif; background: black; @@ -300,5 +333,7 @@ body { border-width: 6px; font-size: 14px; } -").unwrap(); +" + ) + .unwrap(); } diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index 9c027f110eb4e..85c6adcd940fd 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -136,7 +136,8 @@ fn test_env_with_pool( None, MakeGlobMap::No, |_| Ok(()), - ).expect("phase 2 aborted") + ) + .expect("phase 2 aborted") }; let mut arenas = ty::AllArenas::new(); @@ -295,7 +296,8 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> { } pub fn make_subtype(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> bool { - match self.infcx + match self + .infcx .at(&ObligationCause::dummy(), self.param_env) .sub(a, b) { diff --git a/src/librustc_errors/diagnostic.rs b/src/librustc_errors/diagnostic.rs index 3fa391c84ab25..0f5296fb39de5 100644 --- a/src/librustc_errors/diagnostic.rs +++ b/src/librustc_errors/diagnostic.rs @@ -1,11 +1,11 @@ -use CodeSuggestion; -use SubstitutionPart; -use Substitution; -use Applicability; -use Level; +use snippet::Style; use std::fmt; use syntax_pos::{MultiSpan, Span}; -use snippet::Style; +use Applicability; +use 
CodeSuggestion; +use Level; +use Substitution; +use SubstitutionPart; #[must_use] #[derive(Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)] @@ -68,7 +68,7 @@ pub enum StringPart { impl StringPart { pub fn content(&self) -> &str { match self { - &StringPart::Normal(ref s) | & StringPart::Highlighted(ref s) => s + &StringPart::Normal(ref s) | &StringPart::Highlighted(ref s) => s, } } } @@ -91,20 +91,11 @@ impl Diagnostic { pub fn is_error(&self) -> bool { match self.level { - Level::Bug | - Level::Fatal | - Level::PhaseFatal | - Level::Error | - Level::FailureNote => { + Level::Bug | Level::Fatal | Level::PhaseFatal | Level::Error | Level::FailureNote => { true } - Level::Warning | - Level::Note | - Level::Help | - Level::Cancelled => { - false - } + Level::Warning | Level::Note | Level::Help | Level::Cancelled => false, } } @@ -140,36 +131,34 @@ impl Diagnostic { self } - pub fn note_expected_found(&mut self, - label: &dyn fmt::Display, - expected: DiagnosticStyledString, - found: DiagnosticStyledString) - -> &mut Self - { + pub fn note_expected_found( + &mut self, + label: &dyn fmt::Display, + expected: DiagnosticStyledString, + found: DiagnosticStyledString, + ) -> &mut Self { self.note_expected_found_extra(label, expected, found, &"", &"") } - pub fn note_expected_found_extra(&mut self, - label: &dyn fmt::Display, - expected: DiagnosticStyledString, - found: DiagnosticStyledString, - expected_extra: &dyn fmt::Display, - found_extra: &dyn fmt::Display) - -> &mut Self - { + pub fn note_expected_found_extra( + &mut self, + label: &dyn fmt::Display, + expected: DiagnosticStyledString, + found: DiagnosticStyledString, + expected_extra: &dyn fmt::Display, + found_extra: &dyn fmt::Display, + ) -> &mut Self { let mut msg: Vec<_> = vec![(format!("expected {} `", label), Style::NoStyle)]; - msg.extend(expected.0.iter() - .map(|x| match *x { - StringPart::Normal(ref s) => (s.to_owned(), Style::NoStyle), - StringPart::Highlighted(ref s) => (s.to_owned(), Style::Highlight), - })); + msg.extend(expected.0.iter().map(|x| match *x { + StringPart::Normal(ref s) => (s.to_owned(), Style::NoStyle), + StringPart::Highlighted(ref s) => (s.to_owned(), Style::Highlight), + })); msg.push((format!("`{}\n", expected_extra), Style::NoStyle)); msg.push((format!(" found {} `", label), Style::NoStyle)); - msg.extend(found.0.iter() - .map(|x| match *x { - StringPart::Normal(ref s) => (s.to_owned(), Style::NoStyle), - StringPart::Highlighted(ref s) => (s.to_owned(), Style::Highlight), - })); + msg.extend(found.0.iter().map(|x| match *x { + StringPart::Normal(ref s) => (s.to_owned(), Style::NoStyle), + StringPart::Highlighted(ref s) => (s.to_owned(), Style::Highlight), + })); msg.push((format!("`{}", found_extra), Style::NoStyle)); // For now, just attach these as notes @@ -181,7 +170,8 @@ impl Diagnostic { self.highlighted_note(vec![ (format!("`{}` from trait: `", name), Style::NoStyle), (signature, Style::Highlight), - ("`".to_string(), Style::NoStyle)]); + ("`".to_string(), Style::NoStyle), + ]); self } @@ -195,10 +185,7 @@ impl Diagnostic { self } - pub fn span_note>(&mut self, - sp: S, - msg: &str) - -> &mut Self { + pub fn span_note>(&mut self, sp: S, msg: &str) -> &mut Self { self.sub(Level::Note, msg, sp.into(), None); self } @@ -208,23 +195,17 @@ impl Diagnostic { self } - pub fn span_warn>(&mut self, - sp: S, - msg: &str) - -> &mut Self { + pub fn span_warn>(&mut self, sp: S, msg: &str) -> &mut Self { self.sub(Level::Warning, msg, sp.into(), None); self } - pub fn help(&mut self , msg: &str) -> 
&mut Self { + pub fn help(&mut self, msg: &str) -> &mut Self { self.sub(Level::Help, msg, MultiSpan::new(), None); self } - pub fn span_help>(&mut self, - sp: S, - msg: &str) - -> &mut Self { + pub fn span_help>(&mut self, sp: S, msg: &str) -> &mut Self { self.sub(Level::Help, msg, sp.into(), None); self } @@ -307,23 +288,19 @@ impl Diagnostic { msg: &str, suggestion: Vec<(Span, String)>, ) -> &mut Self { - self.multipart_suggestion_with_applicability( - msg, - suggestion, - Applicability::Unspecified, - ) + self.multipart_suggestion_with_applicability(msg, suggestion, Applicability::Unspecified) } /// Prints out a message with multiple suggested edits of the code. #[deprecated(note = "Use `span_suggestions_with_applicability`")] pub fn span_suggestions(&mut self, sp: Span, msg: &str, suggestions: Vec) -> &mut Self { self.suggestions.push(CodeSuggestion { - substitutions: suggestions.into_iter().map(|snippet| Substitution { - parts: vec![SubstitutionPart { - snippet, - span: sp, - }], - }).collect(), + substitutions: suggestions + .into_iter() + .map(|snippet| Substitution { + parts: vec![SubstitutionPart { snippet, span: sp }], + }) + .collect(), msg: msg.to_owned(), show_code_when_inline: true, applicability: Applicability::Unspecified, @@ -333,9 +310,13 @@ impl Diagnostic { /// This is a suggestion that may contain mistakes or fillers and should /// be read and understood by a human. - pub fn span_suggestion_with_applicability(&mut self, sp: Span, msg: &str, - suggestion: String, - applicability: Applicability) -> &mut Self { + pub fn span_suggestion_with_applicability( + &mut self, + sp: Span, + msg: &str, + suggestion: String, + applicability: Applicability, + ) -> &mut Self { self.suggestions.push(CodeSuggestion { substitutions: vec![Substitution { parts: vec![SubstitutionPart { @@ -350,16 +331,19 @@ impl Diagnostic { self } - pub fn span_suggestions_with_applicability(&mut self, sp: Span, msg: &str, - suggestions: impl Iterator, applicability: Applicability) -> &mut Self - { + pub fn span_suggestions_with_applicability( + &mut self, + sp: Span, + msg: &str, + suggestions: impl Iterator, + applicability: Applicability, + ) -> &mut Self { self.suggestions.push(CodeSuggestion { - substitutions: suggestions.map(|snippet| Substitution { - parts: vec![SubstitutionPart { - snippet, - span: sp, - }], - }).collect(), + substitutions: suggestions + .map(|snippet| Substitution { + parts: vec![SubstitutionPart { snippet, span: sp }], + }) + .collect(), msg: msg.to_owned(), show_code_when_inline: true, applicability, @@ -368,7 +352,11 @@ impl Diagnostic { } pub fn span_suggestion_short_with_applicability( - &mut self, sp: Span, msg: &str, suggestion: String, applicability: Applicability + &mut self, + sp: Span, + msg: &str, + suggestion: String, + applicability: Applicability, ) -> &mut Self { self.suggestions.push(CodeSuggestion { substitutions: vec![Substitution { @@ -399,7 +387,10 @@ impl Diagnostic { } pub fn message(&self) -> String { - self.message.iter().map(|i| i.0.as_str()).collect::() + self.message + .iter() + .map(|i| i.0.as_str()) + .collect::() } pub fn styled_message(&self) -> &Vec<(String, Style)> { @@ -416,11 +407,13 @@ impl Diagnostic { /// Convenience function for internal use, clients should use one of the /// public methods above. 
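
The suggestion-building methods reflowed above all funnel into the same `CodeSuggestion { substitutions: vec![Substitution { parts: vec![SubstitutionPart { snippet, span }] }], .. }` shape. Below is a standalone sketch of that data shape and of splicing one substitution into source text; the byte-range `Span`, the `apply` helper, and the `main` harness are invented stand-ins, not the real rustc_errors/syntax_pos types:

    #[derive(Clone, Copy)]
    struct Span {
        lo: usize,
        hi: usize,
    }

    struct SubstitutionPart {
        span: Span,
        snippet: String,
    }

    // One alternative fix; a suggestion may carry several of these.
    struct Substitution {
        parts: Vec<SubstitutionPart>,
    }

    struct CodeSuggestion {
        msg: String,
        substitutions: Vec<Substitution>,
    }

    // Apply one alternative by splicing each part's snippet over its span.
    // Parts are applied right-to-left so earlier byte offsets stay valid.
    fn apply(source: &str, sub: &Substitution) -> String {
        let mut out = source.to_string();
        let mut parts: Vec<&SubstitutionPart> = sub.parts.iter().collect();
        parts.sort_by_key(|p| std::cmp::Reverse(p.span.lo));
        for part in parts {
            out.replace_range(part.span.lo..part.span.hi, &part.snippet);
        }
        out
    }

    fn main() {
        let source = "let x: u32 = 0;";
        let sugg = CodeSuggestion {
            msg: "consider using `u64`".to_string(),
            substitutions: vec![Substitution {
                parts: vec![SubstitutionPart {
                    span: Span { lo: 7, hi: 10 }, // the `u32` in `source`
                    snippet: "u64".to_string(),
                }],
            }],
        };
        println!("help: {}", sugg.msg);
        println!("  {}", apply(source, &sugg.substitutions[0]));
    }
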
- pub fn sub(&mut self, - level: Level, - message: &str, - span: MultiSpan, - render_span: Option) { + pub fn sub( + &mut self, + level: Level, + message: &str, + span: MultiSpan, + render_span: Option, + ) { let sub = SubDiagnostic { level, message: vec![(message.to_owned(), Style::NoStyle)], @@ -432,11 +425,13 @@ impl Diagnostic { /// Convenience function for internal use, clients should use one of the /// public methods above. - fn sub_with_highlights(&mut self, - level: Level, - message: Vec<(String, Style)>, - span: MultiSpan, - render_span: Option) { + fn sub_with_highlights( + &mut self, + level: Level, + message: Vec<(String, Style)>, + span: MultiSpan, + render_span: Option, + ) { let sub = SubDiagnostic { level, message, @@ -449,7 +444,10 @@ impl Diagnostic { impl SubDiagnostic { pub fn message(&self) -> String { - self.message.iter().map(|i| i.0.as_str()).collect::() + self.message + .iter() + .map(|i| i.0.as_str()) + .collect::() } pub fn styled_message(&self) -> &Vec<(String, Style)> { diff --git a/src/librustc_errors/diagnostic_builder.rs b/src/librustc_errors/diagnostic_builder.rs index 7449b2b758302..2a38b22816fb8 100644 --- a/src/librustc_errors/diagnostic_builder.rs +++ b/src/librustc_errors/diagnostic_builder.rs @@ -1,14 +1,14 @@ +use Applicability; use Diagnostic; use DiagnosticId; use DiagnosticStyledString; -use Applicability; -use Level; -use Handler; use std::fmt::{self, Debug}; use std::ops::{Deref, DerefMut}; use std::thread::panicking; use syntax_pos::{MultiSpan, Span}; +use Handler; +use Level; /// Used for emitting structured error messages and other diagnostic information. /// @@ -206,66 +206,59 @@ impl<'a> DiagnosticBuilder<'a> { suggestions: Vec, ) -> &mut Self); - pub fn multipart_suggestion_with_applicability(&mut self, - msg: &str, - suggestion: Vec<(Span, String)>, - applicability: Applicability, - ) -> &mut Self { + pub fn multipart_suggestion_with_applicability( + &mut self, + msg: &str, + suggestion: Vec<(Span, String)>, + applicability: Applicability, + ) -> &mut Self { if !self.allow_suggestions { - return self + return self; } - self.diagnostic.multipart_suggestion_with_applicability( - msg, - suggestion, - applicability, - ); + self.diagnostic + .multipart_suggestion_with_applicability(msg, suggestion, applicability); self } - pub fn span_suggestion_with_applicability(&mut self, - sp: Span, - msg: &str, - suggestion: String, - applicability: Applicability) - -> &mut Self { + pub fn span_suggestion_with_applicability( + &mut self, + sp: Span, + msg: &str, + suggestion: String, + applicability: Applicability, + ) -> &mut Self { if !self.allow_suggestions { - return self + return self; } - self.diagnostic.span_suggestion_with_applicability( - sp, - msg, - suggestion, - applicability, - ); + self.diagnostic + .span_suggestion_with_applicability(sp, msg, suggestion, applicability); self } - pub fn span_suggestions_with_applicability(&mut self, - sp: Span, - msg: &str, - suggestions: impl Iterator, - applicability: Applicability) - -> &mut Self { + pub fn span_suggestions_with_applicability( + &mut self, + sp: Span, + msg: &str, + suggestions: impl Iterator, + applicability: Applicability, + ) -> &mut Self { if !self.allow_suggestions { - return self + return self; } - self.diagnostic.span_suggestions_with_applicability( - sp, - msg, - suggestions, - applicability, - ); + self.diagnostic + .span_suggestions_with_applicability(sp, msg, suggestions, applicability); self } - pub fn span_suggestion_short_with_applicability(&mut self, - sp: Span, - msg: 
&str, - suggestion: String, - applicability: Applicability) - -> &mut Self { + pub fn span_suggestion_short_with_applicability( + &mut self, + sp: Span, + msg: &str, + suggestion: String, + applicability: Applicability, + ) -> &mut Self { if !self.allow_suggestions { - return self + return self; } self.diagnostic.span_suggestion_short_with_applicability( sp, @@ -291,19 +284,19 @@ impl<'a> DiagnosticBuilder<'a> { /// Convenience function for internal use, clients should use one of the /// struct_* methods on Handler. - pub fn new_with_code(handler: &'a Handler, - level: Level, - code: Option, - message: &str) - -> DiagnosticBuilder<'a> { + pub fn new_with_code( + handler: &'a Handler, + level: Level, + code: Option, + message: &str, + ) -> DiagnosticBuilder<'a> { let diagnostic = Diagnostic::new_with_code(level, code, message); DiagnosticBuilder::new_diagnostic(handler, diagnostic) } /// Creates a new `DiagnosticBuilder` with an already constructed /// diagnostic. - pub fn new_diagnostic(handler: &'a Handler, diagnostic: Diagnostic) - -> DiagnosticBuilder<'a> { + pub fn new_diagnostic(handler: &'a Handler, diagnostic: Diagnostic) -> DiagnosticBuilder<'a> { DiagnosticBuilder { handler, diagnostic, @@ -323,9 +316,11 @@ impl<'a> Debug for DiagnosticBuilder<'a> { impl<'a> Drop for DiagnosticBuilder<'a> { fn drop(&mut self) { if !panicking() && !self.cancelled() { - let mut db = DiagnosticBuilder::new(self.handler, - Level::Bug, - "Error constructed but not emitted"); + let mut db = DiagnosticBuilder::new( + self.handler, + Level::Bug, + "Error constructed but not emitted", + ); db.emit(); panic!(); } diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 0443b2228e5b4..b450f90421a71 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -1,20 +1,20 @@ use self::Destination::*; -use syntax_pos::{SourceFile, Span, MultiSpan}; +use syntax_pos::{MultiSpan, SourceFile, Span}; -use {Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic, SourceMapperDyn, DiagnosticId}; -use snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, StyledString, Style}; +use snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, Style, StyledString}; use styled_buffer::StyledBuffer; +use {CodeSuggestion, DiagnosticBuilder, DiagnosticId, Level, SourceMapperDyn, SubDiagnostic}; +use atty; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::Lrc; -use atty; use std::borrow::Cow; -use std::io::prelude::*; -use std::io; use std::cmp::{min, Reverse}; -use termcolor::{StandardStream, ColorChoice, ColorSpec, BufferWriter}; -use termcolor::{WriteColor, Color, Buffer}; +use std::io; +use std::io::prelude::*; +use termcolor::{Buffer, Color, WriteColor}; +use termcolor::{BufferWriter, ColorChoice, ColorSpec, StandardStream}; use unicode_width; const ANONYMIZED_LINE_NUM: &str = "LL"; @@ -45,7 +45,8 @@ impl Emitter for EmitterWriter { // don't display long messages as labels sugg.msg.split_whitespace().count() < 10 && // don't display multiline suggestions as labels - !sugg.substitutions[0].parts[0].snippet.contains('\n') { + !sugg.substitutions[0].parts[0].snippet.contains('\n') + { let substitution = &sugg.substitutions[0].parts[0].snippet.trim(); let msg = if substitution.len() == 0 || !sugg.show_code_when_inline { // This substitution is only removal or we explicitly don't want to show the @@ -64,16 +65,20 @@ impl Emitter for EmitterWriter { } } - self.fix_multispans_in_std_macros(&mut primary_span, - &mut children, - 
db.handler.flags.external_macro_backtrace); - - self.emit_messages_default(&db.level, - &db.styled_message(), - &db.code, - &primary_span, - &children, - &suggestions); + self.fix_multispans_in_std_macros( + &mut primary_span, + &mut children, + db.handler.flags.external_macro_backtrace, + ); + + self.emit_messages_default( + &db.level, + &db.styled_message(), + &db.code, + &primary_span, + &children, + &suggestions, + ); } fn should_show_explain(&self) -> bool { @@ -106,9 +111,7 @@ impl ColorConfig { } } ColorConfig::Never => ColorChoice::Never, - ColorConfig::Auto if atty::is(atty::Stream::Stderr) => { - ColorChoice::Auto - } + ColorConfig::Auto if atty::is(atty::Stream::Stderr) => ColorChoice::Auto, ColorConfig::Auto => ColorChoice::Never, } } @@ -129,11 +132,12 @@ struct FileWithAnnotatedLines { } impl EmitterWriter { - pub fn stderr(color_config: ColorConfig, - source_map: Option>, - short_message: bool, - teach: bool) - -> EmitterWriter { + pub fn stderr( + color_config: ColorConfig, + source_map: Option>, + short_message: bool, + teach: bool, + ) -> EmitterWriter { let dst = Destination::from_stderr(color_config); EmitterWriter { dst, @@ -144,11 +148,12 @@ impl EmitterWriter { } } - pub fn new(dst: Box, - source_map: Option>, - short_message: bool, - teach: bool) - -> EmitterWriter { + pub fn new( + dst: Box, + source_map: Option>, + short_message: bool, + teach: bool, + ) -> EmitterWriter { EmitterWriter { dst: Raw(dst), sm: source_map, @@ -172,11 +177,12 @@ impl EmitterWriter { } fn preprocess_annotations(&mut self, msp: &MultiSpan) -> Vec { - fn add_annotation_to_file(file_vec: &mut Vec, - file: Lrc, - line_index: usize, - ann: Annotation) { - + fn add_annotation_to_file( + file_vec: &mut Vec, + file: Lrc, + line_index: usize, + ann: Annotation, + ) { for slot in file_vec.iter_mut() { // Look through each of our files for the one we're adding to if slot.file.name == file.name { @@ -200,9 +206,9 @@ impl EmitterWriter { file_vec.push(FileWithAnnotatedLines { file, lines: vec![Line { - line_index, - annotations: vec![ann], - }], + line_index, + annotations: vec![ann], + }], multiline_depth: 0, }); } @@ -252,10 +258,7 @@ impl EmitterWriter { }; if !ann.is_multiline() { - add_annotation_to_file(&mut output, - lo.file, - lo.line, - ann); + add_annotation_to_file(&mut output, lo.file, lo.line, ann); } } } @@ -268,8 +271,8 @@ impl EmitterWriter { let ref mut a = item.1; // Move all other multiline annotations overlapping with this one // one level to the right. - if &ann != a && - num_overlap(ann.line_start, ann.line_end, a.line_start, a.line_end, true) + if &ann != a + && num_overlap(ann.line_start, ann.line_end, a.line_start, a.line_end, true) { a.increase_depth(); } else { @@ -278,7 +281,7 @@ impl EmitterWriter { } } - let mut max_depth = 0; // max overlapping multiline spans + let mut max_depth = 0; // max overlapping multiline spans for (file, ann) in multiline_annotations { if ann.depth > max_depth { max_depth = ann.depth; @@ -301,12 +304,14 @@ impl EmitterWriter { output } - fn render_source_line(&self, - buffer: &mut StyledBuffer, - file: Lrc, - line: &Line, - width_offset: usize, - code_offset: usize) -> Vec<(usize, Style)> { + fn render_source_line( + &self, + buffer: &mut StyledBuffer, + file: Lrc, + line: &Line, + width_offset: usize, + code_offset: usize, + ) -> Vec<(usize, Style)> { if line.line_index == 0 { return Vec::new(); } @@ -320,10 +325,12 @@ impl EmitterWriter { // First create the source line we will highlight. 
buffer.puts(line_offset, code_offset, &source_string, Style::Quotation); - buffer.puts(line_offset, - 0, - &self.maybe_anonymized(line.line_index), - Style::LineNumber); + buffer.puts( + line_offset, + 0, + &self.maybe_anonymized(line.line_index), + Style::LineNumber, + ); draw_col_separator(buffer, line_offset, width_offset - 2); @@ -346,18 +353,17 @@ impl EmitterWriter { if line.annotations.len() == 1 { if let Some(ref ann) = line.annotations.get(0) { if let AnnotationType::MultilineStart(depth) = ann.annotation_type { - if source_string.chars() - .take(ann.start_col) - .all(|c| c.is_whitespace()) { + if source_string + .chars() + .take(ann.start_col) + .all(|c| c.is_whitespace()) + { let style = if ann.is_primary { Style::UnderlinePrimary } else { Style::UnderlineSecondary }; - buffer.putc(line_offset, - width_offset + depth - 1, - '/', - style); + buffer.putc(line_offset, width_offset + depth - 1, '/', style); return vec![(depth, style)]; } } @@ -469,7 +475,8 @@ impl EmitterWriter { if overlaps(next, annotation, 0) // This label overlaps with another one and both && annotation.has_label() // take space (they have text and are not && j > i // multiline lines). - && p == 0 // We're currently on the first line, move the label one line down + && p == 0 + // We're currently on the first line, move the label one line down { // This annotation needs a new line in the output. p += 1; @@ -478,7 +485,7 @@ impl EmitterWriter { } annotations_position.push((p, annotation)); for (j, next) in annotations.iter().enumerate() { - if j > i { + if j > i { let l = if let Some(ref label) = next.label { label.len() + 2 } else { @@ -505,7 +512,8 @@ impl EmitterWriter { || (overlaps(next, annotation, l) && next.end_col <= annotation.end_col && next.has_label() - && p == 0) // Avoid #42595. + && p == 0) + // Avoid #42595. { // This annotation needs a new line in the output. 
p += 1; @@ -541,10 +549,12 @@ impl EmitterWriter { // | for pos in 0..=line_len { draw_col_separator(buffer, line_offset + pos + 1, width_offset - 2); - buffer.putc(line_offset + pos + 1, - width_offset - 2, - '|', - Style::LineNumber); + buffer.putc( + line_offset + pos + 1, + width_offset - 2, + '|', + Style::LineNumber, + ); } // Write the horizontal lines for multiline annotations @@ -567,21 +577,24 @@ impl EmitterWriter { }; let pos = pos + 1; match annotation.annotation_type { - AnnotationType::MultilineStart(depth) | - AnnotationType::MultilineEnd(depth) => { - draw_range(buffer, - '_', - line_offset + pos, - width_offset + depth, - code_offset + annotation.start_col, - style); + AnnotationType::MultilineStart(depth) | AnnotationType::MultilineEnd(depth) => { + draw_range( + buffer, + '_', + line_offset + pos, + width_offset + depth, + code_offset + annotation.start_col, + style, + ); } _ if self.teach => { - buffer.set_style_range(line_offset, - code_offset + annotation.start_col, - code_offset + annotation.end_col, - style, - annotation.is_primary); + buffer.set_style_range( + line_offset, + code_offset + annotation.start_col, + code_offset + annotation.end_col, + style, + annotation.is_primary, + ); } _ => {} } @@ -608,27 +621,18 @@ impl EmitterWriter { if pos > 1 && (annotation.has_label() || annotation.takes_space()) { for p in line_offset + 1..=line_offset + pos { - buffer.putc(p, - code_offset + annotation.start_col, - '|', - style); + buffer.putc(p, code_offset + annotation.start_col, '|', style); } } match annotation.annotation_type { AnnotationType::MultilineStart(depth) => { for p in line_offset + pos + 1..line_offset + line_len + 2 { - buffer.putc(p, - width_offset + depth - 1, - '|', - style); + buffer.putc(p, width_offset + depth - 1, '|', style); } } AnnotationType::MultilineEnd(depth) => { for p in line_offset..=line_offset + pos { - buffer.putc(p, - width_offset + depth - 1, - '|', - style); + buffer.putc(p, width_offset + depth - 1, '|', style); } } _ => (), @@ -658,10 +662,7 @@ impl EmitterWriter { (pos + 2, annotation.start_col) }; if let Some(ref label) = annotation.label { - buffer.puts(line_offset + pos, - code_offset + col, - &label, - style); + buffer.puts(line_offset + pos, code_offset + col, &label, style); } } @@ -696,14 +697,12 @@ impl EmitterWriter { ('-', Style::UnderlineSecondary) }; for p in annotation.start_col..annotation.end_col { - buffer.putc(line_offset + 1, - code_offset + p, - underline, - style); + buffer.putc(line_offset + 1, code_offset + p, underline, style); } } - annotations_position.iter().filter_map(|&(_, annotation)| { - match annotation.annotation_type { + annotations_position + .iter() + .filter_map(|&(_, annotation)| match annotation.annotation_type { AnnotationType::MultilineStart(p) | AnnotationType::MultilineEnd(p) => { let style = if annotation.is_primary { Style::LabelPrimary @@ -712,10 +711,9 @@ impl EmitterWriter { }; Some((p, style)) } - _ => None - } - - }).collect::>() + _ => None, + }) + .collect::>() } fn get_multispan_max_line_num(&mut self, msp: &MultiSpan) -> usize { @@ -759,9 +757,11 @@ impl EmitterWriter { // This "fixes" MultiSpans that contain Spans that are pointing to locations inside of // <*macros>. Since these locations are often difficult to read, we move these Spans from // <*macros> to their corresponding use site. 
- fn fix_multispan_in_std_macros(&mut self, - span: &mut MultiSpan, - always_backtrace: bool) -> bool { + fn fix_multispan_in_std_macros( + &mut self, + span: &mut MultiSpan, + always_backtrace: bool, + ) -> bool { let mut spans_updated = false; if let Some(ref sm) = self.sm { @@ -786,31 +786,40 @@ impl EmitterWriter { continue; } if always_backtrace { - new_labels.push((def_site, - format!("in this expansion of `{}`{}", - trace.macro_decl_name, - if backtrace_len > 2 { - // if backtrace_len == 1 it'll be pointed - // at by "in this macro invocation" - format!(" (#{})", i + 1) - } else { - String::new() - }))); + new_labels.push(( + def_site, + format!( + "in this expansion of `{}`{}", + trace.macro_decl_name, + if backtrace_len > 2 { + // if backtrace_len == 1 it'll be pointed + // at by "in this macro invocation" + format!(" (#{})", i + 1) + } else { + String::new() + } + ), + )); } // Check to make sure we're not in any <*macros> - if !sm.span_to_filename(def_site).is_macros() && - !trace.macro_decl_name.starts_with("desugaring of ") && - !trace.macro_decl_name.starts_with("#[") || - always_backtrace { - new_labels.push((trace.call_site, - format!("in this macro invocation{}", - if backtrace_len > 2 && always_backtrace { - // only specify order when the macro - // backtrace is multiple levels deep - format!(" (#{})", i + 1) - } else { - String::new() - }))); + if !sm.span_to_filename(def_site).is_macros() + && !trace.macro_decl_name.starts_with("desugaring of ") + && !trace.macro_decl_name.starts_with("#[") + || always_backtrace + { + new_labels.push(( + trace.call_site, + format!( + "in this macro invocation{}", + if backtrace_len > 2 && always_backtrace { + // only specify order when the macro + // backtrace is multiple levels deep + format!(" (#{})", i + 1) + } else { + String::new() + } + ), + )); if !always_backtrace { break; } @@ -825,9 +834,7 @@ impl EmitterWriter { if sp_label.span.is_dummy() { continue; } - if sm.span_to_filename(sp_label.span.clone()).is_macros() && - !always_backtrace - { + if sm.span_to_filename(sp_label.span.clone()).is_macros() && !always_backtrace { let v = sp_label.span.macro_backtrace(); if let Some(use_site) = v.last() { before_after.push((sp_label.span.clone(), use_site.call_site.clone())); @@ -847,10 +854,12 @@ impl EmitterWriter { // This does a small "fix" for multispans by looking to see if it can find any that // point directly at <*macros>. Since these are often difficult to read, this // will change the span to point at the use site. 
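The remapping implemented above, and applied to whole spans plus sub-diagnostics by the helper that follows, comes down to one rule: if a span points into <*macros>, replace it with the outermost call site from its macro backtrace. A toy, self-contained model of that rule; Span, MacroTrace, and remap are simplified stand-ins invented for this sketch, not the real rustc types.

    // Toy stand-ins for the real span/backtrace types, purely for illustration.
    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Span(u32);

    struct MacroTrace {
        call_site: Span,
    }

    // If the span lives inside a macro definition, prefer the outermost call
    // site (the code the user actually wrote), mirroring the logic above.
    fn remap(span: Span, in_macros: bool, backtrace: &[MacroTrace]) -> Span {
        if in_macros {
            if let Some(use_site) = backtrace.last() {
                return use_site.call_site;
            }
        }
        span
    }

    fn main() {
        let backtrace = [MacroTrace { call_site: Span(7) }, MacroTrace { call_site: Span(42) }];
        assert_eq!(remap(Span(1), true, &backtrace), Span(42));
        assert_eq!(remap(Span(1), false, &backtrace), Span(1));
    }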
- fn fix_multispans_in_std_macros(&mut self, - span: &mut MultiSpan, - children: &mut Vec, - backtrace: bool) { + fn fix_multispans_in_std_macros( + &mut self, + span: &mut MultiSpan, + children: &mut Vec, + backtrace: bool, + ) { let mut spans_updated = self.fix_multispan_in_std_macros(span, backtrace); for child in children.iter_mut() { spans_updated |= self.fix_multispan_in_std_macros(&mut child.span, backtrace); @@ -858,12 +867,13 @@ impl EmitterWriter { if spans_updated { children.push(SubDiagnostic { level: Level::Note, - message: vec![ - ("this error originates in a macro outside of the current crate \ - (in Nightly builds, run with -Z external-macro-backtrace \ - for more info)".to_string(), - Style::NoStyle), - ], + message: vec![( + "this error originates in a macro outside of the current crate \ + (in Nightly builds, run with -Z external-macro-backtrace \ + for more info)" + .to_string(), + Style::NoStyle, + )], span: MultiSpan::new(), render_span: None, }); @@ -872,13 +882,14 @@ impl EmitterWriter { /// Add a left margin to every line but the first, given a padding length and the label being /// displayed, keeping the provided highlighting. - fn msg_to_buffer(&self, - buffer: &mut StyledBuffer, - msg: &[(String, Style)], - padding: usize, - label: &str, - override_style: Option -"##)?; +"## + )?; Ok(()) } @@ -72,8 +79,12 @@ impl Formatter for HTMLFormatter { Ok(()) } - fn error_code_block(&self, output: &mut dyn Write, info: &ErrorMetadata, - err_code: &str) -> Result<(), Box> { + fn error_code_block( + &self, + output: &mut dyn Write, + info: &ErrorMetadata, + err_code: &str, + ) -> Result<(), Box> { // Enclose each error in a div so they can be shown/hidden en masse. let desc_desc = match info.description { Some(_) => "error-described", @@ -86,17 +97,22 @@ impl Formatter for HTMLFormatter { write!(output, "
", desc_desc, use_desc)?; // Error title (with self-link). - write!(output, - "

{0}

\n", - err_code)?; + write!( + output, + "

{0}

\n", + err_code + )?; // Description rendered as markdown. match info.description { Some(ref desc) => { let mut id_map = self.0.borrow_mut(); - write!(output, "{}", - Markdown(desc, &[], RefCell::new(&mut id_map), ErrorCodes::Yes))? - }, + write!( + output, + "{}", + Markdown(desc, &[], RefCell::new(&mut id_map), ErrorCodes::Yes) + )? + } None => write!(output, "

<p>No description.</p>
\n")?, } @@ -105,7 +121,9 @@ impl Formatter for HTMLFormatter { } fn footer(&self, output: &mut dyn Write) -> Result<(), Box> { - write!(output, r##" -"##)?; +"## + )?; Ok(()) } } @@ -179,8 +198,12 @@ impl Formatter for MarkdownFormatter { Ok(()) } - fn error_code_block(&self, output: &mut dyn Write, info: &ErrorMetadata, - err_code: &str) -> Result<(), Box> { + fn error_code_block( + &self, + output: &mut dyn Write, + info: &ErrorMetadata, + err_code: &str, + ) -> Result<(), Box> { Ok(match info.description { Some(ref desc) => write!(output, "## {}\n{}\n", err_code, desc)?, None => (), @@ -213,8 +236,11 @@ fn load_all_errors(metadata_dir: &Path) -> Result(err_map: &ErrorMetadataMap, output_path: &Path, - formatter: T) -> Result<(), Box> { +fn render_error_page( + err_map: &ErrorMetadataMap, + output_path: &Path, + formatter: T, +) -> Result<(), Box> { let mut output_file = File::create(output_path)?; formatter.header(&mut output_file)?; @@ -232,8 +258,8 @@ fn main_with_result(format: OutputFormat, dst: &Path) -> Result<(), Box panic!("Unknown output format: {}", s), - OutputFormat::HTML(h) => render_error_page(&err_map, dst, h)?, + OutputFormat::Unknown(s) => panic!("Unknown output format: {}", s), + OutputFormat::HTML(h) => render_error_page(&err_map, dst, h)?, OutputFormat::Markdown(m) => render_error_page(&err_map, dst, m)?, } Ok(()) @@ -241,15 +267,18 @@ fn main_with_result(format: OutputFormat, dst: &Path) -> Result<(), Box (OutputFormat, PathBuf) { let mut args = env::args().skip(1); - let format = args.next().map(|a| OutputFormat::from(&a)) - .unwrap_or(OutputFormat::from("html")); - let dst = args.next().map(PathBuf::from).unwrap_or_else(|| { - match format { + let format = args + .next() + .map(|a| OutputFormat::from(&a)) + .unwrap_or(OutputFormat::from("html")); + let dst = args + .next() + .map(PathBuf::from) + .unwrap_or_else(|| match format { OutputFormat::HTML(..) => PathBuf::from("doc/error-index.html"), OutputFormat::Markdown(..) => PathBuf::from("doc/error-index.md"), OutputFormat::Unknown(..) => PathBuf::from(""), - } - }); + }); (format, dst) } @@ -259,9 +288,7 @@ fn main() { *slot.borrow_mut() = Some((None, String::from("https://play.rust-lang.org/"))); }); let (format, dst) = parse_args(); - let result = syntax::with_globals(move || { - main_with_result(format, &dst) - }); + let result = syntax::with_globals(move || main_with_result(format, &dst)); if let Err(e) = result { panic!("{}", e.description()); } diff --git a/src/tools/linkchecker/main.rs b/src/tools/linkchecker/main.rs index 59662be349dcb..771fee486e189 100644 --- a/src/tools/linkchecker/main.rs +++ b/src/tools/linkchecker/main.rs @@ -18,16 +18,18 @@ use std::collections::hash_map::Entry; use std::collections::{HashMap, HashSet}; use std::env; use std::fs; -use std::path::{Path, PathBuf, Component}; +use std::path::{Component, Path, PathBuf}; use std::rc::Rc; use Redirect::*; macro_rules! 
t { - ($e:expr) => (match $e { - Ok(e) => e, - Err(e) => panic!("{} failed with {:?}", stringify!($e), e), - }) + ($e:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {:?}", stringify!($e), e), + } + }; } fn main() { @@ -61,17 +63,17 @@ type Cache = HashMap; fn small_url_encode(s: &str) -> String { s.replace("<", "%3C") - .replace(">", "%3E") - .replace(" ", "%20") - .replace("?", "%3F") - .replace("'", "%27") - .replace("&", "%26") - .replace(",", "%2C") - .replace(":", "%3A") - .replace(";", "%3B") - .replace("[", "%5B") - .replace("]", "%5D") - .replace("\"", "%22") + .replace(">", "%3E") + .replace(" ", "%20") + .replace("?", "%3F") + .replace("'", "%27") + .replace("&", "%26") + .replace(",", "%2C") + .replace(":", "%3A") + .replace(";", "%3B") + .replace("[", "%5B") + .replace("]", "%5D") + .replace("\"", "%22") } impl FileEntry { @@ -109,11 +111,7 @@ fn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) { } } -fn check(cache: &mut Cache, - root: &Path, - file: &Path, - errors: &mut bool) - -> Option { +fn check(cache: &mut Cache, root: &Path, file: &Path, errors: &mut bool) -> Option { // Ignore none HTML files. if file.extension().and_then(|s| s.to_str()) != Some("html") { return None; @@ -122,17 +120,18 @@ fn check(cache: &mut Cache, // Unfortunately we're not 100% full of valid links today to we need a few // whitelists to get this past `make check` today. // FIXME(#32129) - if file.ends_with("std/string/struct.String.html") || - file.ends_with("interpret/struct.ImmTy.html") || - file.ends_with("symbol/struct.InternedString.html") || - file.ends_with("ast/struct.ThinVec.html") || - file.ends_with("util/struct.ThinVec.html") || - file.ends_with("layout/struct.TyLayout.html") || - file.ends_with("humantime/struct.Timestamp.html") || - file.ends_with("log/index.html") || - file.ends_with("ty/struct.Slice.html") || - file.ends_with("ty/enum.Attributes.html") || - file.ends_with("ty/struct.SymbolName.html") { + if file.ends_with("std/string/struct.String.html") + || file.ends_with("interpret/struct.ImmTy.html") + || file.ends_with("symbol/struct.InternedString.html") + || file.ends_with("ast/struct.ThinVec.html") + || file.ends_with("util/struct.ThinVec.html") + || file.ends_with("layout/struct.TyLayout.html") + || file.ends_with("humantime/struct.Timestamp.html") + || file.ends_with("log/index.html") + || file.ends_with("ty/struct.Slice.html") + || file.ends_with("ty/enum.Attributes.html") + || file.ends_with("ty/struct.SymbolName.html") + { return None; } // FIXME(#32553) @@ -140,13 +139,14 @@ fn check(cache: &mut Cache, return None; } // FIXME(#32130) - if file.ends_with("btree_set/struct.BTreeSet.html") || - file.ends_with("struct.BTreeSet.html") || - file.ends_with("btree_map/struct.BTreeMap.html") || - file.ends_with("hash_map/struct.HashMap.html") || - file.ends_with("hash_set/struct.HashSet.html") || - file.ends_with("sync/struct.Lrc.html") || - file.ends_with("sync/struct.RwLock.html") { + if file.ends_with("btree_set/struct.BTreeSet.html") + || file.ends_with("struct.BTreeSet.html") + || file.ends_with("btree_map/struct.BTreeMap.html") + || file.ends_with("hash_map/struct.HashMap.html") + || file.ends_with("hash_set/struct.HashSet.html") + || file.ends_with("sync/struct.Lrc.html") + || file.ends_with("sync/struct.RwLock.html") + { return None; } @@ -156,17 +156,22 @@ fn check(cache: &mut Cache, Err(_) => return None, }; { - cache.get_mut(&pretty_file) - .unwrap() - .parse_ids(&pretty_file, &contents, errors); + cache + 
.get_mut(&pretty_file) + .unwrap() + .parse_ids(&pretty_file, &contents, errors); } // Search for anything that's the regex 'href[ ]*=[ ]*".*?"' with_attrs_in_source(&contents, " href", |url, i, base| { // Ignore external URLs - if url.starts_with("http:") || url.starts_with("https:") || - url.starts_with("javascript:") || url.starts_with("ftp:") || - url.starts_with("irc:") || url.starts_with("data:") { + if url.starts_with("http:") + || url.starts_with("https:") + || url.starts_with("javascript:") + || url.starts_with("ftp:") + || url.starts_with("irc:") + || url.starts_with("data:") + { return; } let mut parts = url.splitn(2, "#"); @@ -182,21 +187,26 @@ fn check(cache: &mut Cache, path.pop(); for part in Path::new(base).join(url).components() { match part { - Component::Prefix(_) | - Component::RootDir => { + Component::Prefix(_) | Component::RootDir => { // Avoid absolute paths as they make the docs not // relocatable by making assumptions on where the docs // are hosted relative to the site root. *errors = true; - println!("{}:{}: absolute path - {}", - pretty_file.display(), - i + 1, - Path::new(base).join(url).display()); + println!( + "{}:{}: absolute path - {}", + pretty_file.display(), + i + 1, + Path::new(base).join(url).display() + ); return; } Component::CurDir => {} - Component::ParentDir => { path.pop(); } - Component::Normal(s) => { path.push(s); } + Component::ParentDir => { + path.pop(); + } + Component::Normal(s) => { + path.push(s); + } } } } @@ -209,10 +219,12 @@ fn check(cache: &mut Cache, // the docs offline so it's best to avoid them. *errors = true; let pretty_path = path.strip_prefix(root).unwrap_or(&path); - println!("{}:{}: directory link - {}", - pretty_file.display(), - i + 1, - pretty_path.display()); + println!( + "{}:{}: directory link - {}", + pretty_file.display(), + i + 1, + pretty_path.display() + ); return; } if let Some(extension) = path.extension() { @@ -229,10 +241,12 @@ fn check(cache: &mut Cache, } Err(LoadError::BrokenRedirect(target, _)) => { *errors = true; - println!("{}:{}: broken redirect to {}", - pretty_file.display(), - i + 1, - target.display()); + println!( + "{}:{}: broken redirect to {}", + pretty_file.display(), + i + 1, + target.display() + ); return; } Err(LoadError::IsRedirect) => unreachable!(), @@ -241,8 +255,10 @@ fn check(cache: &mut Cache, if let Some(ref fragment) = fragment { // Fragments like `#1-6` are most likely line numbers to be // interpreted by javascript, so we're ignoring these - if fragment.splitn(2, '-') - .all(|f| f.chars().all(|c| c.is_numeric())) { + if fragment + .splitn(2, '-') + .all(|f| f.chars().all(|c| c.is_numeric())) + { return; } @@ -256,9 +272,7 @@ fn check(cache: &mut Cache, if !entry.ids.contains(*fragment) { *errors = true; - print!("{}:{}: broken link fragment ", - pretty_file.display(), - i + 1); + print!("{}:{}: broken link fragment ", pretty_file.display(), i + 1); println!("`#{}` pointing to `{}`", fragment, pretty_path.display()); }; } @@ -272,17 +286,16 @@ fn check(cache: &mut Cache, Some(pretty_file) } -fn load_file(cache: &mut Cache, - root: &Path, - file: &Path, - redirect: Redirect) - -> Result<(PathBuf, Rc), LoadError> { +fn load_file( + cache: &mut Cache, + root: &Path, + file: &Path, + redirect: Redirect, +) -> Result<(PathBuf, Rc), LoadError> { let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file)); let (maybe_redirect, contents) = match cache.entry(pretty_file.clone()) { - Entry::Occupied(entry) => { - (None, entry.get().source.clone()) - } + 
Entry::Occupied(entry) => (None, entry.get().source.clone()), Entry::Vacant(entry) => { let contents = match fs::read_to_string(file) { Ok(s) => Rc::new(s), @@ -310,9 +323,7 @@ fn load_file(cache: &mut Cache, } }; match maybe_redirect.map(|url| file.parent().unwrap().join(url)) { - Some(redirect_file) => { - load_file(cache, root, &redirect_file, FromRedirect(true)) - } + Some(redirect_file) => load_file(cache, root, &redirect_file, FromRedirect(true)), None => Ok((pretty_file, contents)), } } diff --git a/src/tools/remote-test-client/src/main.rs b/src/tools/remote-test-client/src/main.rs index cb9dac27ce491..a1b058495d871 100644 --- a/src/tools/remote-test-client/src/main.rs +++ b/src/tools/remote-test-client/src/main.rs @@ -4,7 +4,6 @@ /// /// Here is also where we bake in the support to spawn the QEMU emulator as /// well. - use std::env; use std::fs::{self, File}; use std::io::prelude::*; @@ -18,36 +17,31 @@ use std::time::Duration; const REMOTE_ADDR_ENV: &'static str = "TEST_DEVICE_ADDR"; macro_rules! t { - ($e:expr) => (match $e { - Ok(e) => e, - Err(e) => panic!("{} failed with {}", stringify!($e), e), - }) + ($e:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + } + }; } fn main() { let mut args = env::args().skip(1); match &args.next().unwrap()[..] { - "spawn-emulator" => { - spawn_emulator(&args.next().unwrap(), - Path::new(&args.next().unwrap()), - Path::new(&args.next().unwrap()), - args.next().map(|s| s.into())) - } - "push" => { - push(Path::new(&args.next().unwrap())) - } - "run" => { - run(args.next().unwrap(), args.collect()) - } + "spawn-emulator" => spawn_emulator( + &args.next().unwrap(), + Path::new(&args.next().unwrap()), + Path::new(&args.next().unwrap()), + args.next().map(|s| s.into()), + ), + "push" => push(Path::new(&args.next().unwrap())), + "run" => run(args.next().unwrap(), args.collect()), cmd => panic!("unknown command: {}", cmd), } } -fn spawn_emulator(target: &str, - server: &Path, - tmpdir: &Path, - rootfs: Option) { +fn spawn_emulator(target: &str, server: &Path, tmpdir: &Path, rootfs: Option) { let device_address = env::var(REMOTE_ADDR_ENV).unwrap_or("127.0.0.1:12345".to_string()); if env::var(REMOTE_ADDR_ENV).is_ok() { @@ -68,7 +62,7 @@ fn spawn_emulator(target: &str, if client.write_all(b"ping").is_ok() { let mut b = [0; 4]; if client.read_exact(&mut b).is_ok() { - break + break; } } } @@ -78,42 +72,36 @@ fn spawn_emulator(target: &str, fn start_android_emulator(server: &Path) { println!("waiting for device to come online"); - let status = Command::new("adb") - .arg("wait-for-device") - .status() - .unwrap(); + let status = Command::new("adb").arg("wait-for-device").status().unwrap(); assert!(status.success()); println!("pushing server"); let status = Command::new("adb") - .arg("push") - .arg(server) - .arg("/data/tmp/testd") - .status() - .unwrap(); + .arg("push") + .arg(server) + .arg("/data/tmp/testd") + .status() + .unwrap(); assert!(status.success()); println!("forwarding tcp"); let status = Command::new("adb") - .arg("forward") - .arg("tcp:12345") - .arg("tcp:12345") - .status() - .unwrap(); + .arg("forward") + .arg("tcp:12345") + .arg("tcp:12345") + .status() + .unwrap(); assert!(status.success()); println!("executing server"); Command::new("adb") - .arg("shell") - .arg("/data/tmp/testd") - .spawn() - .unwrap(); + .arg("shell") + .arg("/data/tmp/testd") + .spawn() + .unwrap(); } -fn start_qemu_emulator(target: &str, - rootfs: &Path, - server: &Path, - tmpdir: &Path) { +fn 
start_qemu_emulator(target: &str, rootfs: &Path, server: &Path, tmpdir: &Path) { // Generate a new rootfs image now that we've updated the test server // executable. This is the equivalent of: // @@ -122,49 +110,64 @@ fn start_qemu_emulator(target: &str, let rootfs_img = tmpdir.join("rootfs.img"); let mut cmd = Command::new("cpio"); cmd.arg("--null") - .arg("-o") - .arg("--format=newc") - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .current_dir(rootfs); + .arg("-o") + .arg("--format=newc") + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .current_dir(rootfs); let mut child = t!(cmd.spawn()); let mut stdin = child.stdin.take().unwrap(); let rootfs = rootfs.to_path_buf(); thread::spawn(move || add_files(&mut stdin, &rootfs, &rootfs)); - t!(io::copy(&mut child.stdout.take().unwrap(), - &mut t!(File::create(&rootfs_img)))); + t!(io::copy( + &mut child.stdout.take().unwrap(), + &mut t!(File::create(&rootfs_img)) + )); assert!(t!(child.wait()).success()); // Start up the emulator, in the background match target { "arm-unknown-linux-gnueabihf" => { let mut cmd = Command::new("qemu-system-arm"); - cmd.arg("-M").arg("vexpress-a15") - .arg("-m").arg("1024") - .arg("-kernel").arg("/tmp/zImage") - .arg("-initrd").arg(&rootfs_img) - .arg("-dtb").arg("/tmp/vexpress-v2p-ca15-tc1.dtb") - .arg("-append") - .arg("console=ttyAMA0 root=/dev/ram rdinit=/sbin/init init=/sbin/init") - .arg("-nographic") - .arg("-redir").arg("tcp:12345::12345"); + cmd.arg("-M") + .arg("vexpress-a15") + .arg("-m") + .arg("1024") + .arg("-kernel") + .arg("/tmp/zImage") + .arg("-initrd") + .arg(&rootfs_img) + .arg("-dtb") + .arg("/tmp/vexpress-v2p-ca15-tc1.dtb") + .arg("-append") + .arg("console=ttyAMA0 root=/dev/ram rdinit=/sbin/init init=/sbin/init") + .arg("-nographic") + .arg("-redir") + .arg("tcp:12345::12345"); t!(cmd.spawn()); } "aarch64-unknown-linux-gnu" => { let mut cmd = Command::new("qemu-system-aarch64"); - cmd.arg("-machine").arg("virt") - .arg("-cpu").arg("cortex-a57") - .arg("-m").arg("1024") - .arg("-kernel").arg("/tmp/Image") - .arg("-initrd").arg(&rootfs_img) - .arg("-append") - .arg("console=ttyAMA0 root=/dev/ram rdinit=/sbin/init init=/sbin/init") - .arg("-nographic") - .arg("-netdev").arg("user,id=net0,hostfwd=tcp::12345-:12345") - .arg("-device").arg("virtio-net-device,netdev=net0,mac=00:00:00:00:00:00"); + cmd.arg("-machine") + .arg("virt") + .arg("-cpu") + .arg("cortex-a57") + .arg("-m") + .arg("1024") + .arg("-kernel") + .arg("/tmp/Image") + .arg("-initrd") + .arg(&rootfs_img) + .arg("-append") + .arg("console=ttyAMA0 root=/dev/ram rdinit=/sbin/init init=/sbin/init") + .arg("-nographic") + .arg("-netdev") + .arg("user,id=net0,hostfwd=tcp::12345-:12345") + .arg("-device") + .arg("virtio-net-device,netdev=net0,mac=00:00:00:00:00:00"); t!(cmd.spawn()); } - _ => panic!("cannot start emulator for: {}"< target), + _ => panic!("cannot start emulator for: {}" < target), } fn add_files(w: &mut dyn Write, root: &Path, cur: &Path) { @@ -216,9 +219,7 @@ fn run(files: String, args: Vec) { // by the client. for (k, v) in env::vars() { match &k[..] 
{ - "PATH" | - "LD_LIBRARY_PATH" | - "PWD" => continue, + "PATH" | "LD_LIBRARY_PATH" | "PWD" => continue, _ => {} } t!(client.write_all(k.as_bytes())); @@ -251,10 +252,10 @@ fn run(files: String, args: Vec) { let mut stderr = io::stderr(); while !stdout_done || !stderr_done { t!(client.read_exact(&mut header)); - let amt = ((header[1] as u64) << 24) | - ((header[2] as u64) << 16) | - ((header[3] as u64) << 8) | - ((header[4] as u64) << 0); + let amt = ((header[1] as u64) << 24) + | ((header[2] as u64) << 16) + | ((header[3] as u64) << 8) + | ((header[4] as u64) << 0); if header[0] == 0 { if amt == 0 { stdout_done = true; @@ -275,10 +276,10 @@ fn run(files: String, args: Vec) { // Finally, read out the exit status let mut status = [0; 5]; t!(client.read_exact(&mut status)); - let code = ((status[1] as i32) << 24) | - ((status[2] as i32) << 16) | - ((status[3] as i32) << 8) | - ((status[4] as i32) << 0); + let code = ((status[1] as i32) << 24) + | ((status[2] as i32) << 16) + | ((status[3] as i32) << 8) + | ((status[4] as i32) << 0); if status[0] == 0 { std::process::exit(code); } else { @@ -295,8 +296,8 @@ fn send(path: &Path, dst: &mut dyn Write) { t!(dst.write_all(&[ (amt >> 24) as u8, (amt >> 16) as u8, - (amt >> 8) as u8, - (amt >> 0) as u8, + (amt >> 8) as u8, + (amt >> 0) as u8, ])); t!(io::copy(&mut file, dst)); } diff --git a/src/tools/remote-test-server/src/main.rs b/src/tools/remote-test-server/src/main.rs index 74dde8bf0e2c9..dc88daf63a507 100644 --- a/src/tools/remote-test-server/src/main.rs +++ b/src/tools/remote-test-server/src/main.rs @@ -9,7 +9,6 @@ /// The server supports running tests concurrently and also supports tests /// themselves having support libraries. All data over the TCP sockets is in a /// basically custom format suiting our needs. - use std::cmp; use std::env; use std::fs::{self, File, Permissions}; @@ -20,15 +19,17 @@ use std::os::unix::prelude::*; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::str; -use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering}; +use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT}; use std::sync::{Arc, Mutex}; use std::thread; macro_rules! t { - ($e:expr) => (match $e { - Ok(e) => e, - Err(e) => panic!("{} failed with {}", stringify!($e), e), - }) + ($e:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + } + }; } static TEST: AtomicUsize = ATOMIC_USIZE_INIT; @@ -54,7 +55,7 @@ impl Config { match &argument[..] { "remote" => { config.remote = true; - }, + } "verbose" | "-v" => { config.verbose = true; } @@ -93,7 +94,7 @@ fn main() { let mut socket = t!(socket); let mut buf = [0; 4]; if socket.read_exact(&mut buf).is_err() { - continue + continue; } if &buf[..] == b"ping" { t!(socket.write_all(b"pong")); @@ -205,15 +206,18 @@ fn handle_run(socket: TcpStream, work: &Path, lock: &Mutex<()>) { // Support libraries were uploaded to `work` earlier, so make sure that's // in `LD_LIBRARY_PATH`. Also include our own current dir which may have // had some libs uploaded. 
- cmd.env("LD_LIBRARY_PATH", - format!("{}:{}", work.display(), path.display())); + cmd.env( + "LD_LIBRARY_PATH", + format!("{}:{}", work.display(), path.display()), + ); // Spawn the child and ferry over stdout/stderr to the socket in a framed // fashion (poor man's style) - let mut child = t!(cmd.stdin(Stdio::null()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .spawn()); + let mut child = t!(cmd + .stdin(Stdio::null()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn()); drop(lock); let mut stdout = child.stdout.take().unwrap(); let mut stderr = child.stderr.take().unwrap(); @@ -233,8 +237,8 @@ fn handle_run(socket: TcpStream, work: &Path, lock: &Mutex<()>) { which, (code >> 24) as u8, (code >> 16) as u8, - (code >> 8) as u8, - (code >> 0) as u8, + (code >> 8) as u8, + (code >> 0) as u8, ])); } @@ -254,8 +258,7 @@ fn recv(dir: &Path, io: &mut B) -> PathBuf { let len = cmp::min(filename.len() - 1, 50); let dst = dir.join(t!(str::from_utf8(&filename[..len]))); let amt = read_u32(io) as u64; - t!(io::copy(&mut io.take(amt), - &mut t!(File::create(&dst)))); + t!(io::copy(&mut io.take(amt), &mut t!(File::create(&dst)))); t!(fs::set_permissions(&dst, Permissions::from_mode(0o755))); dst } @@ -269,13 +272,13 @@ fn my_copy(src: &mut dyn Read, which: u8, dst: &Mutex) { which, (n >> 24) as u8, (n >> 16) as u8, - (n >> 8) as u8, - (n >> 0) as u8, + (n >> 8) as u8, + (n >> 0) as u8, ])); if n > 0 { t!(dst.write_all(&b[..n])); } else { - break + break; } } } @@ -283,8 +286,8 @@ fn my_copy(src: &mut dyn Read, which: u8, dst: &Mutex) { fn read_u32(r: &mut dyn Read) -> u32 { let mut len = [0; 4]; t!(r.read_exact(&mut len)); - ((len[0] as u32) << 24) | - ((len[1] as u32) << 16) | - ((len[2] as u32) << 8) | - ((len[3] as u32) << 0) + ((len[0] as u32) << 24) + | ((len[1] as u32) << 16) + | ((len[2] as u32) << 8) + | ((len[3] as u32) << 0) } diff --git a/src/tools/rustbook/src/main.rs b/src/tools/rustbook/src/main.rs index 80a85dc2ac0e3..4e487310719e5 100644 --- a/src/tools/rustbook/src/main.rs +++ b/src/tools/rustbook/src/main.rs @@ -6,10 +6,10 @@ extern crate clap; use std::env; use std::path::{Path, PathBuf}; -use clap::{App, ArgMatches, SubCommand, AppSettings}; +use clap::{App, AppSettings, ArgMatches, SubCommand}; -use mdbook::MDBook; use mdbook::errors::Result; +use mdbook::MDBook; fn main() { let d_message = "-d, --dest-dir=[dest-dir] @@ -18,15 +18,17 @@ fn main() { 'A directory for your book{n}(Defaults to Current Directory when omitted)'"; let matches = App::new("rustbook") - .about("Build a book with mdBook") - .author("Steve Klabnik ") - .version(&*format!("v{}", crate_version!())) - .setting(AppSettings::SubcommandRequired) - .subcommand(SubCommand::with_name("build") - .about("Build the book from the markdown files") - .arg_from_usage(d_message) - .arg_from_usage(dir_message)) - .get_matches(); + .about("Build a book with mdBook") + .author("Steve Klabnik ") + .version(&*format!("v{}", crate_version!())) + .setting(AppSettings::SubcommandRequired) + .subcommand( + SubCommand::with_name("build") + .about("Build the book from the markdown files") + .arg_from_usage(d_message) + .arg_from_usage(dir_message), + ) + .get_matches(); // Check which subcomamnd the user ran... 
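Both tools above frame stdout/stderr chunks, file lengths, and the exit status as hand-rolled 4-byte big-endian integers (the shifts in run, send, and read_u32). A minimal, self-contained sketch of that packing; encode_be32 and decode_be32 are names invented for this sketch, the tools inline the shifts directly.

    // Standalone illustration of the 4-byte big-endian framing used by the
    // remote-test client and server above.
    fn encode_be32(n: u32) -> [u8; 4] {
        [(n >> 24) as u8, (n >> 16) as u8, (n >> 8) as u8, n as u8]
    }

    fn decode_be32(b: [u8; 4]) -> u32 {
        ((b[0] as u32) << 24) | ((b[1] as u32) << 16) | ((b[2] as u32) << 8) | (b[3] as u32)
    }

    fn main() {
        let len = 0x0001_02FFu32;
        let framed = encode_be32(len);
        assert_eq!(framed, [0x00, 0x01, 0x02, 0xFF]);
        assert_eq!(decode_be32(framed), len);
    }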
let res = match matches.subcommand() { diff --git a/src/tools/rustdoc-themes/main.rs b/src/tools/rustdoc-themes/main.rs index 616b5444832c1..5bde07009b78d 100644 --- a/src/tools/rustdoc-themes/main.rs +++ b/src/tools/rustdoc-themes/main.rs @@ -1,7 +1,7 @@ use std::env::args; use std::fs::read_dir; use std::path::Path; -use std::process::{Command, exit}; +use std::process::{exit, Command}; const FILES_TO_IGNORE: &[&str] = &["light.css"]; @@ -13,11 +13,11 @@ fn get_folders>(folder_path: P) -> Vec { let path = entry.path(); if !path.is_file() { - continue + continue; } let filename = path.file_name().expect("file_name failed"); if FILES_TO_IGNORE.iter().any(|x| x == &filename) { - continue + continue; } ret.push(format!("{}", path.display())); } @@ -39,10 +39,10 @@ fn main() { exit(1); } let status = Command::new(rustdoc_bin) - .args(&["-Z", "unstable-options", "--theme-checker"]) - .args(&themes) - .status() - .expect("failed to execute child"); + .args(&["-Z", "unstable-options", "--theme-checker"]) + .args(&themes) + .status() + .expect("failed to execute child"); if !status.success() { exit(1); } diff --git a/src/tools/rustdoc/main.rs b/src/tools/rustdoc/main.rs index df9d2c6ba96db..692e43555d8b7 100644 --- a/src/tools/rustdoc/main.rs +++ b/src/tools/rustdoc/main.rs @@ -6,10 +6,15 @@ #[cfg_attr(all(windows, target_env = "msvc"), link_args = "/STACK:16777216")] // We only build for msvc and gnu now, but we use a exhaustive condition here // so we can expect either the stack size to be set or the build fails. -#[cfg_attr(all(windows, not(target_env = "msvc")), link_args = "-Wl,--stack,16777216")] +#[cfg_attr( + all(windows, not(target_env = "msvc")), + link_args = "-Wl,--stack,16777216" +)] // See src/rustc/rustc.rs for the corresponding rustc settings. 
-extern {} +extern "C" {} extern crate rustdoc; -fn main() { rustdoc::main() } +fn main() { + rustdoc::main() +} diff --git a/src/tools/tidy/src/bins.rs b/src/tools/tidy/src/bins.rs index 7a5495b6f5a07..5c94eb2cfc330 100644 --- a/src/tools/tidy/src/bins.rs +++ b/src/tools/tidy/src/bins.rs @@ -14,8 +14,8 @@ pub fn check(_path: &Path, _bad: &mut bool) {} #[cfg(unix)] pub fn check(path: &Path, bad: &mut bool) { use std::fs; - use std::process::{Command, Stdio}; use std::os::unix::prelude::*; + use std::process::{Command, Stdio}; if let Ok(contents) = fs::read_to_string("/proc/version") { // Probably on Windows Linux Subsystem or Docker via VirtualBox, @@ -25,32 +25,34 @@ pub fn check(path: &Path, bad: &mut bool) { } } - super::walk(path, - &mut |path| super::filter_dirs(path) || path.ends_with("src/etc"), - &mut |file| { - let filename = file.file_name().unwrap().to_string_lossy(); - let extensions = [".py", ".sh"]; - if extensions.iter().any(|e| filename.ends_with(e)) { - return; - } + super::walk( + path, + &mut |path| super::filter_dirs(path) || path.ends_with("src/etc"), + &mut |file| { + let filename = file.file_name().unwrap().to_string_lossy(); + let extensions = [".py", ".sh"]; + if extensions.iter().any(|e| filename.ends_with(e)) { + return; + } - let metadata = t!(fs::symlink_metadata(&file), &file); - if metadata.mode() & 0o111 != 0 { - let rel_path = file.strip_prefix(path).unwrap(); - let git_friendly_path = rel_path.to_str().unwrap().replace("\\", "/"); - let output = Command::new("git") - .arg("ls-files") - .arg(&git_friendly_path) - .current_dir(path) - .stderr(Stdio::null()) - .output() - .unwrap_or_else(|e| { - panic!("could not run git ls-files: {}", e); - }); - let path_bytes = rel_path.as_os_str().as_bytes(); - if output.status.success() && output.stdout.starts_with(path_bytes) { - tidy_error!(bad, "binary checked into source: {}", file.display()); + let metadata = t!(fs::symlink_metadata(&file), &file); + if metadata.mode() & 0o111 != 0 { + let rel_path = file.strip_prefix(path).unwrap(); + let git_friendly_path = rel_path.to_str().unwrap().replace("\\", "/"); + let output = Command::new("git") + .arg("ls-files") + .arg(&git_friendly_path) + .current_dir(path) + .stderr(Stdio::null()) + .output() + .unwrap_or_else(|e| { + panic!("could not run git ls-files: {}", e); + }); + let path_bytes = rel_path.as_os_str().as_bytes(); + if output.status.success() && output.stdout.starts_with(path_bytes) { + tidy_error!(bad, "binary checked into source: {}", file.display()); + } } - } - }) + }, + ) } diff --git a/src/tools/tidy/src/cargo.rs b/src/tools/tidy/src/cargo.rs index 3369d826ecd76..6350fdd24e8c1 100644 --- a/src/tools/tidy/src/cargo.rs +++ b/src/tools/tidy/src/cargo.rs @@ -10,7 +10,7 @@ use std::path::Path; pub fn check(path: &Path, bad: &mut bool) { if !super::filter_dirs(path) { - return + return; } for entry in t!(path.read_dir(), path).map(|e| t!(e)) { // Look for `Cargo.toml` with a sibling `src/lib.rs` or `lib.rs` @@ -34,7 +34,7 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) { let librs = t!(fs::read_to_string(&libfile)); if toml.contains("name = \"bootstrap\"") { - return + return; } // "Poor man's TOML parser", just assume we use one syntax for now @@ -49,36 +49,41 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) { // If we encounter a line starting with `[` then we assume it's the end of // the dependency section and bail out. 
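For reference, a self-contained version of the "poor man's TOML" scan that the loop just below performs: take everything after [dependencies], stop at the next [ section header, and treat whatever sits left of the = as the crate name. dependency_names and the sample manifest are invented for this sketch.

    // Illustrative stand-alone form of the dependency scan below (tidy does the
    // same walk inline and then checks for a matching `extern crate` line).
    fn dependency_names(toml: &str) -> Vec<String> {
        let deps = match toml.find("[dependencies]") {
            Some(i) => &toml[i + "[dependencies]".len()..],
            None => return Vec::new(),
        };
        let mut names = Vec::new();
        for line in deps.lines() {
            if line.starts_with('[') {
                break; // next section, e.g. [dev-dependencies]
            }
            let mut parts = line.splitn(2, '=');
            let krate = parts.next().unwrap().trim();
            if parts.next().is_some() && !krate.is_empty() {
                names.push(krate.to_string());
            }
        }
        names
    }

    fn main() {
        let toml = "[package]\nname = \"demo\"\n\n[dependencies]\nserde = \"1\"\nlog = \"0.4\"\n\n[dev-dependencies]\nrand = \"0.5\"\n";
        assert_eq!(dependency_names(toml), vec!["serde", "log"]);
    }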
let deps = match toml.find("[dependencies]") { - Some(i) => &toml[i+1..], + Some(i) => &toml[i + 1..], None => return, }; for line in deps.lines() { if line.starts_with('[') { - break + break; } let mut parts = line.splitn(2, '='); let krate = parts.next().unwrap().trim(); if parts.next().is_none() { - continue + continue; } // Don't worry about depending on core/std but not saying `extern crate // core/std`, that's intentional. if krate == "core" || krate == "std" { - continue + continue; } // This is intentional, this dependency just makes the crate available // for others later on. Cover cases let whitelisted = krate.starts_with("panic"); if toml.contains("name = \"std\"") && whitelisted { - continue + continue; } if !librs.contains(&format!("extern crate {}", krate)) { - tidy_error!(bad, "{} doesn't have `extern crate {}`, but Cargo.toml \ - depends on it", libfile.display(), krate); + tidy_error!( + bad, + "{} doesn't have `extern crate {}`, but Cargo.toml \ + depends on it", + libfile.display(), + krate + ); } } } diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs index 7f0f0c6f79fcc..b848a0f34a559 100644 --- a/src/tools/tidy/src/deps.rs +++ b/src/tools/tidy/src/deps.rs @@ -1,6 +1,6 @@ //! Check license of third-party deps by inspecting vendor -use std::collections::{BTreeSet, HashSet, HashMap}; +use std::collections::{BTreeSet, HashMap, HashSet}; use std::fs; use std::path::Path; use std::process::Command; @@ -380,14 +380,15 @@ fn check_crate_duplicate(resolve: &Resolve, bad: &mut bool) { ]; let mut name_to_id: HashMap<_, Vec<_>> = HashMap::new(); for node in resolve.nodes.iter() { - name_to_id.entry(node.id.split_whitespace().next().unwrap()) + name_to_id + .entry(node.id.split_whitespace().next().unwrap()) .or_default() .push(&node.id); } for name in FORBIDDEN_TO_HAVE_DUPLICATES { if name_to_id[name].len() <= 1 { - continue + continue; } println!("crate `{}` is duplicated in `Cargo.lock`", name); for id in name_to_id[name].iter() { diff --git a/src/tools/tidy/src/errors.rs b/src/tools/tidy/src/errors.rs index 212b1a40f655a..c57e20318ab2e 100644 --- a/src/tools/tidy/src/errors.rs +++ b/src/tools/tidy/src/errors.rs @@ -11,54 +11,57 @@ use std::path::Path; pub fn check(path: &Path, bad: &mut bool) { let mut contents = String::new(); let mut map: HashMap<_, Vec<_>> = HashMap::new(); - super::walk(path, - &mut |path| super::filter_dirs(path) || path.ends_with("src/test"), - &mut |file| { - let filename = file.file_name().unwrap().to_string_lossy(); - if filename != "diagnostics.rs" && filename != "diagnostic_list.rs" { - return - } + super::walk( + path, + &mut |path| super::filter_dirs(path) || path.ends_with("src/test"), + &mut |file| { + let filename = file.file_name().unwrap().to_string_lossy(); + if filename != "diagnostics.rs" && filename != "diagnostic_list.rs" { + return; + } - contents.truncate(0); - t!(t!(File::open(file)).read_to_string(&mut contents)); + contents.truncate(0); + t!(t!(File::open(file)).read_to_string(&mut contents)); - // In the register_long_diagnostics! macro, entries look like this: - // - // EXXXX: r##" - // - // "##, - // - // and these long messages often have error codes themselves inside - // them, but we don't want to report duplicates in these cases. This - // variable keeps track of whether we're currently inside one of these - // long diagnostic messages. 
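A self-contained sketch of the duplicate-error-code scan that the loop below performs, under the same assumption spelled out in the comment above: r##" opens a long diagnostic body whose contents are skipped, and "## closes it. collect_error_codes and the sample input are invented for this sketch.

    use std::collections::HashMap;

    // Illustrative stand-in for the tidy scan below: record the lines on which
    // each E-code appears, ignoring the bodies of long diagnostics.
    fn collect_error_codes(contents: &str) -> HashMap<u32, Vec<usize>> {
        let mut map: HashMap<u32, Vec<usize>> = HashMap::new();
        let mut inside_long_diag = false;
        for (num, line) in contents.lines().enumerate() {
            if inside_long_diag {
                inside_long_diag = !line.contains("\"##");
                continue;
            }
            let mut search = line;
            while let Some(i) = search.find('E') {
                search = &search[i + 1..];
                if search.len() >= 4 {
                    if let Ok(code) = search[..4].parse::<u32>() {
                        map.entry(code).or_default().push(num + 1);
                        break;
                    }
                } else {
                    break;
                }
            }
            inside_long_diag = line.contains("r##\"");
        }
        map
    }

    fn main() {
        let src = "E0001: r##\"\nsome text mentioning E0002\n\"##,\nE0001: r##\"\ndup\n\"##,\n";
        let map = collect_error_codes(src);
        assert_eq!(map[&1], vec![1, 4]); // E0001 registered twice
        assert!(!map.contains_key(&2)); // E0002 inside the long block is ignored
    }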
- let mut inside_long_diag = false; - for (num, line) in contents.lines().enumerate() { - if inside_long_diag { - inside_long_diag = !line.contains("\"##"); - continue - } + // In the register_long_diagnostics! macro, entries look like this: + // + // EXXXX: r##" + // + // "##, + // + // and these long messages often have error codes themselves inside + // them, but we don't want to report duplicates in these cases. This + // variable keeps track of whether we're currently inside one of these + // long diagnostic messages. + let mut inside_long_diag = false; + for (num, line) in contents.lines().enumerate() { + if inside_long_diag { + inside_long_diag = !line.contains("\"##"); + continue; + } - let mut search = line; - while let Some(i) = search.find('E') { - search = &search[i + 1..]; - let code = if search.len() > 4 { - search[..4].parse::() - } else { - continue - }; - let code = match code { - Ok(n) => n, - Err(..) => continue, - }; - map.entry(code).or_default() - .push((file.to_owned(), num + 1, line.to_owned())); - break - } + let mut search = line; + while let Some(i) = search.find('E') { + search = &search[i + 1..]; + let code = if search.len() > 4 { + search[..4].parse::() + } else { + continue; + }; + let code = match code { + Ok(n) => n, + Err(..) => continue, + }; + map.entry(code) + .or_default() + .push((file.to_owned(), num + 1, line.to_owned())); + break; + } - inside_long_diag = line.contains("r##\""); - } - }); + inside_long_diag = line.contains("r##\""); + } + }, + ); let mut max = 0; for (&code, entries) in map.iter() { @@ -66,7 +69,7 @@ pub fn check(path: &Path, bad: &mut bool) { max = code; } if entries.len() == 1 { - continue + continue; } tidy_error!(bad, "duplicate error code: {}", code); diff --git a/src/tools/tidy/src/extdeps.rs b/src/tools/tidy/src/extdeps.rs index 8b5021e9e2181..6b3748593b6ef 100644 --- a/src/tools/tidy/src/extdeps.rs +++ b/src/tools/tidy/src/extdeps.rs @@ -4,9 +4,7 @@ use std::fs; use std::path::Path; /// List of whitelisted sources for packages -const WHITELISTED_SOURCES: &[&str] = &[ - "\"registry+https://github.com/rust-lang/crates.io-index\"", -]; +const WHITELISTED_SOURCES: &[&str] = &["\"registry+https://github.com/rust-lang/crates.io-index\""]; /// check for external package sources pub fn check(path: &Path, bad: &mut bool) { @@ -18,9 +16,8 @@ pub fn check(path: &Path, bad: &mut bool) { // process each line for line in cargo_lock.lines() { - // consider only source entries - if ! 
line.starts_with("source = ") { + if !line.starts_with("source = ") { continue; } diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs index 049a0ee49f0a4..42b2452a37aef 100644 --- a/src/tools/tidy/src/features.rs +++ b/src/tools/tidy/src/features.rs @@ -51,73 +51,94 @@ pub fn check(path: &Path, bad: &mut bool, quiet: bool) { let mut contents = String::new(); - super::walk_many(&[&path.join("test/ui"), - &path.join("test/ui-fulldeps"), - &path.join("test/compile-fail")], - &mut |path| super::filter_dirs(path), - &mut |file| { - let filename = file.file_name().unwrap().to_string_lossy(); - if !filename.ends_with(".rs") || filename == "features.rs" || - filename == "diagnostic_list.rs" { - return; - } + super::walk_many( + &[ + &path.join("test/ui"), + &path.join("test/ui-fulldeps"), + &path.join("test/compile-fail"), + ], + &mut |path| super::filter_dirs(path), + &mut |file| { + let filename = file.file_name().unwrap().to_string_lossy(); + if !filename.ends_with(".rs") + || filename == "features.rs" + || filename == "diagnostic_list.rs" + { + return; + } - let filen_underscore = filename.replace('-',"_").replace(".rs",""); - let filename_is_gate_test = test_filen_gate(&filen_underscore, &mut features); + let filen_underscore = filename.replace('-', "_").replace(".rs", ""); + let filename_is_gate_test = test_filen_gate(&filen_underscore, &mut features); - contents.truncate(0); - t!(t!(File::open(&file), &file).read_to_string(&mut contents)); + contents.truncate(0); + t!(t!(File::open(&file), &file).read_to_string(&mut contents)); - for (i, line) in contents.lines().enumerate() { - let mut err = |msg: &str| { - tidy_error!(bad, "{}:{}: {}", file.display(), i + 1, msg); - }; + for (i, line) in contents.lines().enumerate() { + let mut err = |msg: &str| { + tidy_error!(bad, "{}:{}: {}", file.display(), i + 1, msg); + }; - let gate_test_str = "gate-test-"; + let gate_test_str = "gate-test-"; - let feature_name = match line.find(gate_test_str) { - Some(i) => { - line[i+gate_test_str.len()..].splitn(2, ' ').next().unwrap() - }, - None => continue, - }; - match features.get_mut(feature_name) { - Some(f) => { - if filename_is_gate_test { - err(&format!("The file is already marked as gate test \ - through its name, no need for a \ - 'gate-test-{}' comment", - feature_name)); + let feature_name = match line.find(gate_test_str) { + Some(i) => line[i + gate_test_str.len()..] + .splitn(2, ' ') + .next() + .unwrap(), + None => continue, + }; + match features.get_mut(feature_name) { + Some(f) => { + if filename_is_gate_test { + err(&format!( + "The file is already marked as gate test \ + through its name, no need for a \ + 'gate-test-{}' comment", + feature_name + )); + } + f.has_gate_test = true; + } + None => { + err(&format!( + "gate-test test found referencing a nonexistent feature '{}'", + feature_name + )); } - f.has_gate_test = true; - } - None => { - err(&format!("gate-test test found referencing a nonexistent feature '{}'", - feature_name)); } } - } - }); + }, + ); // Only check the number of lang features. // Obligatory testing for library features is dumb. 
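The gate-test check above keys off test file names. Going by the hint text printed below and the replace('-', "_") normalization above, the convention is roughly that a file named feature-gate-<feature>.rs marks <feature> as gate-tested. A self-contained guess at that mapping; gate_tested_feature is invented here, the real check lives in test_filen_gate.

    // Rough, illustrative mapping from a test filename to the feature it gates.
    // Mirrors the `replace('-', "_").replace(".rs", "")` normalization above.
    fn gate_tested_feature(filename: &str) -> Option<String> {
        let stem = filename.replace('-', "_").replace(".rs", "");
        let prefix = "feature_gate_";
        if stem.starts_with(prefix) {
            Some(stem[prefix.len()..].to_string())
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(gate_tested_feature("feature-gate-nll.rs"), Some("nll".to_string()));
        assert_eq!(gate_tested_feature("some-other-test.rs"), None);
    }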
- let gate_untested = features.iter() - .filter(|&(_, f)| f.level == Status::Unstable) - .filter(|&(_, f)| !f.has_gate_test) - .collect::>(); + let gate_untested = features + .iter() + .filter(|&(_, f)| f.level == Status::Unstable) + .filter(|&(_, f)| !f.has_gate_test) + .collect::>(); for &(name, _) in gate_untested.iter() { println!("Expected a gate test for the feature '{}'.", name); - println!("Hint: create a failing test file named 'feature-gate-{}.rs'\ - \n in the 'ui' test suite, with its failures due to\ - \n missing usage of #![feature({})].", name, name); - println!("Hint: If you already have such a test and don't want to rename it,\ - \n you can also add a // gate-test-{} line to the test file.", - name); + println!( + "Hint: create a failing test file named 'feature-gate-{}.rs'\ + \n in the 'ui' test suite, with its failures due to\ + \n missing usage of #![feature({})].", + name, name + ); + println!( + "Hint: If you already have such a test and don't want to rename it,\ + \n you can also add a // gate-test-{} line to the test file.", + name + ); } if !gate_untested.is_empty() { - tidy_error!(bad, "Found {} features without a gate test.", gate_untested.len()); + tidy_error!( + bad, + "Found {} features without a gate test.", + gate_untested.len() + ); } if *bad { @@ -130,18 +151,16 @@ pub fn check(path: &Path, bad: &mut bool, quiet: bool) { let mut lines = Vec::new(); for (name, feature) in features.iter() { - lines.push(format!("{:<32} {:<8} {:<12} {:<8}", - name, - "lang", - feature.level, - feature.since)); + lines.push(format!( + "{:<32} {:<8} {:<12} {:<8}", + name, "lang", feature.level, feature.since + )); } for (name, feature) in lib_features { - lines.push(format!("{:<32} {:<8} {:<12} {:<8}", - name, - "lib", - feature.level, - feature.since)); + lines.push(format!( + "{:<32} {:<8} {:<12} {:<8}", + name, "lib", feature.level, feature.since + )); } lines.sort(); @@ -170,13 +189,17 @@ fn test_filen_gate(filen_underscore: &str, features: &mut Features) -> bool { } pub fn collect_lang_features(base_src_path: &Path, bad: &mut bool) -> Features { - let contents = t!(fs::read_to_string(base_src_path.join("libsyntax/feature_gate.rs"))); + let contents = t!(fs::read_to_string( + base_src_path.join("libsyntax/feature_gate.rs") + )); // we allow rustc-internal features to omit a tracking issue. // these features must be marked with `// rustc internal` in its own group. let mut next_feature_is_rustc_internal = false; - contents.lines().zip(1..) + contents + .lines() + .zip(1..) .filter_map(|(line, line_number)| { let line = line.trim(); if line.starts_with("// rustc internal") { @@ -210,16 +233,24 @@ pub fn collect_lang_features(base_src_path: &Path, bad: &mut bool) -> Features { None } else { next_feature_is_rustc_internal = false; - let s = issue_str.split('(').nth(1).unwrap().split(')').nth(0).unwrap(); + let s = issue_str + .split('(') + .nth(1) + .unwrap() + .split(')') + .nth(0) + .unwrap(); Some(s.parse().unwrap()) }; - Some((name.to_owned(), + Some(( + name.to_owned(), Feature { level, since: since.to_owned(), has_gate_test: false, tracking_issue, - })) + }, + )) }) .collect() } @@ -231,15 +262,17 @@ pub fn collect_lib_features(base_src_path: &Path) -> Features { // has been moved out-of-tree. Now it can no longer be auto-discovered by // `tidy`, because we need to filter out its (submodule) directory. Manually // add it to the set of known library features so we can still generate docs. 
- lib_features.insert("compiler_builtins_lib".to_owned(), Feature { - level: Status::Unstable, - since: String::new(), - has_gate_test: false, - tracking_issue: None, - }); - - map_lib_features(base_src_path, - &mut |res, _, _| { + lib_features.insert( + "compiler_builtins_lib".to_owned(), + Feature { + level: Status::Unstable, + since: String::new(), + has_gate_test: false, + tracking_issue: None, + }, + ); + + map_lib_features(base_src_path, &mut |res, _, _| { if let Ok((name, feature)) = res { if lib_features.contains_key(name) { return; @@ -247,136 +280,141 @@ pub fn collect_lib_features(base_src_path: &Path) -> Features { lib_features.insert(name.to_owned(), feature); } }); - lib_features + lib_features } -fn get_and_check_lib_features(base_src_path: &Path, - bad: &mut bool, - lang_features: &Features) -> Features { +fn get_and_check_lib_features( + base_src_path: &Path, + bad: &mut bool, + lang_features: &Features, +) -> Features { let mut lib_features = Features::new(); - map_lib_features(base_src_path, - &mut |res, file, line| { - match res { - Ok((name, f)) => { - let mut check_features = |f: &Feature, list: &Features, display: &str| { - if let Some(ref s) = list.get(name) { - if f.tracking_issue != s.tracking_issue { - tidy_error!(bad, - "{}:{}: mismatches the `issue` in {}", - file.display(), - line, - display); - } - } - }; - check_features(&f, &lang_features, "corresponding lang feature"); - check_features(&f, &lib_features, "previous"); - lib_features.insert(name.to_owned(), f); - }, - Err(msg) => { - tidy_error!(bad, "{}:{}: {}", file.display(), line, msg); - }, - } - + map_lib_features(base_src_path, &mut |res, file, line| match res { + Ok((name, f)) => { + let mut check_features = |f: &Feature, list: &Features, display: &str| { + if let Some(ref s) = list.get(name) { + if f.tracking_issue != s.tracking_issue { + tidy_error!( + bad, + "{}:{}: mismatches the `issue` in {}", + file.display(), + line, + display + ); + } + } + }; + check_features(&f, &lang_features, "corresponding lang feature"); + check_features(&f, &lib_features, "previous"); + lib_features.insert(name.to_owned(), f); + } + Err(msg) => { + tidy_error!(bad, "{}:{}: {}", file.display(), line, msg); + } }); lib_features } -fn map_lib_features(base_src_path: &Path, - mf: &mut dyn FnMut(Result<(&str, Feature), &str>, &Path, usize)) { +fn map_lib_features( + base_src_path: &Path, + mf: &mut dyn FnMut(Result<(&str, Feature), &str>, &Path, usize), +) { let mut contents = String::new(); - super::walk(base_src_path, - &mut |path| super::filter_dirs(path) || path.ends_with("src/test"), - &mut |file| { - let filename = file.file_name().unwrap().to_string_lossy(); - if !filename.ends_with(".rs") || filename == "features.rs" || - filename == "diagnostic_list.rs" { - return; - } + super::walk( + base_src_path, + &mut |path| super::filter_dirs(path) || path.ends_with("src/test"), + &mut |file| { + let filename = file.file_name().unwrap().to_string_lossy(); + if !filename.ends_with(".rs") + || filename == "features.rs" + || filename == "diagnostic_list.rs" + { + return; + } - contents.truncate(0); - t!(t!(File::open(&file), &file).read_to_string(&mut contents)); + contents.truncate(0); + t!(t!(File::open(&file), &file).read_to_string(&mut contents)); - let mut becoming_feature: Option<(String, Feature)> = None; - for (i, line) in contents.lines().enumerate() { - macro_rules! 
err { - ($msg:expr) => {{ - mf(Err($msg), file, i + 1); + let mut becoming_feature: Option<(String, Feature)> = None; + for (i, line) in contents.lines().enumerate() { + macro_rules! err { + ($msg:expr) => {{ + mf(Err($msg), file, i + 1); + continue; + }}; + }; + if let Some((ref name, ref mut f)) = becoming_feature { + if f.tracking_issue.is_none() { + f.tracking_issue = find_attr_val(line, "issue").map(|s| s.parse().unwrap()); + } + if line.ends_with(']') { + mf(Ok((name, f.clone())), file, i + 1); + } else if !line.ends_with(',') && !line.ends_with('\\') { + // We need to bail here because we might have missed the + // end of a stability attribute above because the ']' + // might not have been at the end of the line. + // We could then get into the very unfortunate situation that + // we continue parsing the file assuming the current stability + // attribute has not ended, and ignoring possible feature + // attributes in the process. + err!("malformed stability attribute"); + } else { + continue; + } + } + becoming_feature = None; + if line.contains("rustc_const_unstable(") { + // const fn features are handled specially + let feature_name = match find_attr_val(line, "feature") { + Some(name) => name, + None => err!("malformed stability attribute"), + }; + let feature = Feature { + level: Status::Unstable, + since: "None".to_owned(), + has_gate_test: false, + // Whether there is a common tracking issue + // for these feature gates remains an open question + // https://github.com/rust-lang/rust/issues/24111#issuecomment-340283184 + // But we take 24111 otherwise they will be shown as + // "internal to the compiler" which they are not. + tracking_issue: Some(24111), + }; + mf(Ok((feature_name, feature)), file, i + 1); continue; - }}; - }; - if let Some((ref name, ref mut f)) = becoming_feature { - if f.tracking_issue.is_none() { - f.tracking_issue = find_attr_val(line, "issue") - .map(|s| s.parse().unwrap()); } - if line.ends_with(']') { - mf(Ok((name, f.clone())), file, i + 1); - } else if !line.ends_with(',') && !line.ends_with('\\') { - // We need to bail here because we might have missed the - // end of a stability attribute above because the ']' - // might not have been at the end of the line. - // We could then get into the very unfortunate situation that - // we continue parsing the file assuming the current stability - // attribute has not ended, and ignoring possible feature - // attributes in the process. 
- err!("malformed stability attribute"); + let level = if line.contains("[unstable(") { + Status::Unstable + } else if line.contains("[stable(") { + Status::Stable } else { continue; - } - } - becoming_feature = None; - if line.contains("rustc_const_unstable(") { - // const fn features are handled specially + }; let feature_name = match find_attr_val(line, "feature") { Some(name) => name, None => err!("malformed stability attribute"), }; + let since = match find_attr_val(line, "since") { + Some(name) => name, + None if level == Status::Stable => { + err!("malformed stability attribute"); + } + None => "None", + }; + let tracking_issue = find_attr_val(line, "issue").map(|s| s.parse().unwrap()); + let feature = Feature { - level: Status::Unstable, - since: "None".to_owned(), + level, + since: since.to_owned(), has_gate_test: false, - // Whether there is a common tracking issue - // for these feature gates remains an open question - // https://github.com/rust-lang/rust/issues/24111#issuecomment-340283184 - // But we take 24111 otherwise they will be shown as - // "internal to the compiler" which they are not. - tracking_issue: Some(24111), + tracking_issue, }; - mf(Ok((feature_name, feature)), file, i + 1); - continue; - } - let level = if line.contains("[unstable(") { - Status::Unstable - } else if line.contains("[stable(") { - Status::Stable - } else { - continue; - }; - let feature_name = match find_attr_val(line, "feature") { - Some(name) => name, - None => err!("malformed stability attribute"), - }; - let since = match find_attr_val(line, "since") { - Some(name) => name, - None if level == Status::Stable => { - err!("malformed stability attribute"); + if line.contains(']') { + mf(Ok((feature_name, feature)), file, i + 1); + } else { + becoming_feature = Some((feature_name.to_owned(), feature)); } - None => "None", - }; - let tracking_issue = find_attr_val(line, "issue").map(|s| s.parse().unwrap()); - - let feature = Feature { - level, - since: since.to_owned(), - has_gate_test: false, - tracking_issue, - }; - if line.contains(']') { - mf(Ok((feature_name, feature)), file, i + 1); - } else { - becoming_feature = Some((feature_name.to_owned(), feature)); } - } - }); + }, + ); } diff --git a/src/tools/tidy/src/lib.rs b/src/tools/tidy/src/lib.rs index 58220c44db530..ff5c98c7a43e8 100644 --- a/src/tools/tidy/src/lib.rs +++ b/src/tools/tidy/src/lib.rs @@ -13,15 +13,19 @@ use std::fs; use std::path::Path; macro_rules! t { - ($e:expr, $p:expr) => (match $e { - Ok(e) => e, - Err(e) => panic!("{} failed on {} with {}", stringify!($e), ($p).display(), e), - }); + ($e:expr, $p:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed on {} with {}", stringify!($e), ($p).display(), e), + } + }; - ($e:expr) => (match $e { - Ok(e) => e, - Err(e) => panic!("{} failed with {}", stringify!($e), e), - }) + ($e:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + } + }; } macro_rules! tidy_error { @@ -33,16 +37,16 @@ macro_rules! 
tidy_error { } pub mod bins; -pub mod style; -pub mod errors; -pub mod features; pub mod cargo; -pub mod pal; pub mod deps; +pub mod errors; pub mod extdeps; +pub mod features; +pub mod libcoretest; +pub mod pal; +pub mod style; pub mod ui_tests; pub mod unstable_book; -pub mod libcoretest; fn filter_dirs(path: &Path) -> bool { let skip = [ diff --git a/src/tools/tidy/src/libcoretest.rs b/src/tools/tidy/src/libcoretest.rs index b15b9c3462f79..29074f54459c2 100644 --- a/src/tools/tidy/src/libcoretest.rs +++ b/src/tools/tidy/src/libcoretest.rs @@ -3,8 +3,8 @@ //! `#![no_core]` libraries cannot be tested directly due to duplicating lang //! item. All tests must be written externally in `libcore/tests`. -use std::path::Path; use std::fs::read_to_string; +use std::path::Path; pub fn check(path: &Path, bad: &mut bool) { let libcore_path = path.join("libcore"); @@ -19,7 +19,7 @@ pub fn check(path: &Path, bad: &mut bool) { tidy_error!( bad, "{} contains #[test]; libcore tests must be placed inside \ - `src/libcore/tests/`", + `src/libcore/tests/`", subpath.display() ); } diff --git a/src/tools/tidy/src/main.rs b/src/tools/tidy/src/main.rs index e705e2d5b96d6..b21cdda69f3b6 100644 --- a/src/tools/tidy/src/main.rs +++ b/src/tools/tidy/src/main.rs @@ -9,9 +9,9 @@ extern crate tidy; use tidy::*; -use std::process; -use std::path::PathBuf; use std::env; +use std::path::PathBuf; +use std::process; fn main() { let path: PathBuf = env::args_os().nth(1).expect("need path to src").into(); diff --git a/src/tools/tidy/src/pal.rs b/src/tools/tidy/src/pal.rs index a34712d8b709b..e26667a92137c 100644 --- a/src/tools/tidy/src/pal.rs +++ b/src/tools/tidy/src/pal.rs @@ -33,8 +33,8 @@ use std::fs::File; use std::io::Read; -use std::path::Path; use std::iter::Iterator; +use std::path::Path; // Paths that may contain platform-specific code const EXCEPTION_PATHS: &[&str] = &[ @@ -43,10 +43,9 @@ const EXCEPTION_PATHS: &[&str] = &[ "src/libpanic_unwind", "src/libunwind", "src/libstd/sys/", // Platform-specific code for std lives here. - // This has the trailing slash so that sys_common is not excepted. + // This has the trailing slash so that sys_common is not excepted. "src/libstd/os", // Platform-specific public interfaces "src/rtstartup", // Not sure what to do about this. magic stuff for mingw - // temporary exceptions "src/libstd/lib.rs", "src/libstd/path.rs", @@ -56,18 +55,15 @@ const EXCEPTION_PATHS: &[&str] = &[ "src/libstd/sys_common/net.rs", "src/libterm", // Not sure how to make this crate portable, but test needs it "src/libtest", // Probably should defer to unstable std::sys APIs - // std testing crates, ok for now at least "src/libcore/tests", "src/liballoc/tests/lib.rs", - // The `VaList` implementation must have platform specific code. // The Windows implementation of a `va_list` is always a character // pointer regardless of the target architecture. As a result, // we must use `#[cfg(windows)]` to conditionally compile the // correct `VaList` structure for windows. 
"src/libcore/ffi.rs", - // non-std crates "src/test", "src/tools", @@ -84,20 +80,35 @@ pub fn check(path: &Path, bad: &mut bool) { let mut saw_cfg_bang = false; super::walk(path, &mut super::filter_dirs, &mut |file| { let filestr = file.to_string_lossy().replace("\\", "/"); - if !filestr.ends_with(".rs") { return } + if !filestr.ends_with(".rs") { + return; + } let is_exception_path = EXCEPTION_PATHS.iter().any(|s| filestr.contains(&**s)); - if is_exception_path { return } + if is_exception_path { + return; + } - check_cfgs(&mut contents, &file, bad, &mut saw_target_arch, &mut saw_cfg_bang); + check_cfgs( + &mut contents, + &file, + bad, + &mut saw_target_arch, + &mut saw_cfg_bang, + ); }); assert!(saw_target_arch); assert!(saw_cfg_bang); } -fn check_cfgs(contents: &mut String, file: &Path, - bad: &mut bool, saw_target_arch: &mut bool, saw_cfg_bang: &mut bool) { +fn check_cfgs( + contents: &mut String, + file: &Path, + bad: &mut bool, + saw_target_arch: &mut bool, + saw_cfg_bang: &mut bool, +) { contents.truncate(0); t!(t!(File::open(file), file).read_to_string(contents)); @@ -115,24 +126,35 @@ fn check_cfgs(contents: &mut String, file: &Path, let line_numbers = line_numbers.as_ref().expect(""); let line = match line_numbers.binary_search(&idx) { Ok(_) => unreachable!(), - Err(i) => i + 1 + Err(i) => i + 1, }; - tidy_error!(bad, "{}:{}: platform-specific cfg: {}", file.display(), line, cfg); + tidy_error!( + bad, + "{}:{}: platform-specific cfg: {}", + file.display(), + line, + cfg + ); }; for (idx, cfg) in cfgs { // Sanity check that the parsing here works - if !*saw_target_arch && cfg.contains("target_arch") { *saw_target_arch = true } - if !*saw_cfg_bang && cfg.contains("cfg!") { *saw_cfg_bang = true } + if !*saw_target_arch && cfg.contains("target_arch") { + *saw_target_arch = true + } + if !*saw_cfg_bang && cfg.contains("cfg!") { + *saw_cfg_bang = true + } - let contains_platform_specific_cfg = - cfg.contains("target_os") + let contains_platform_specific_cfg = cfg.contains("target_os") || cfg.contains("target_env") || cfg.contains("target_vendor") || cfg.contains("unix") || cfg.contains("windows"); - if !contains_platform_specific_cfg { continue } + if !contains_platform_specific_cfg { + continue; + } let preceeded_by_doc_comment = { let pre_contents = &contents[..idx]; @@ -145,7 +167,9 @@ fn check_cfgs(contents: &mut String, file: &Path, } }; - if preceeded_by_doc_comment { continue } + if preceeded_by_doc_comment { + continue; + } err(idx, cfg); } @@ -154,12 +178,15 @@ fn check_cfgs(contents: &mut String, file: &Path, fn find_test_mod(contents: &str) -> usize { if let Some(mod_tests_idx) = contents.find("mod tests") { // Also capture a previous line indicating "mod tests" in cfg-ed out - let prev_newline_idx = contents[..mod_tests_idx].rfind('\n').unwrap_or(mod_tests_idx); + let prev_newline_idx = contents[..mod_tests_idx] + .rfind('\n') + .unwrap_or(mod_tests_idx); let prev_newline_idx = contents[..prev_newline_idx].rfind('\n'); if let Some(nl) = prev_newline_idx { - let prev_line = &contents[nl + 1 .. mod_tests_idx]; + let prev_line = &contents[nl + 1..mod_tests_idx]; if prev_line.contains("cfg(all(test, not(target_os") - || prev_line.contains("cfg(all(test, not(any(target_os") { + || prev_line.contains("cfg(all(test, not(any(target_os") + { nl } else { mod_tests_idx @@ -179,7 +206,9 @@ fn parse_cfgs<'a>(contents: &'a str) -> Vec<(usize, &'a str)> { // that appear to be tokens succeeded by a paren. 
let cfgs = candidate_cfg_idxs.filter(|i| { let pre_idx = i.saturating_sub(*i); - let succeeds_non_ident = !contents.as_bytes().get(pre_idx) + let succeeds_non_ident = !contents + .as_bytes() + .get(pre_idx) .cloned() .map(char::from) .map(char::is_alphanumeric) @@ -187,10 +216,14 @@ fn parse_cfgs<'a>(contents: &'a str) -> Vec<(usize, &'a str)> { let contents_after = &contents[*i..]; let first_paren = contents_after.find('('); let paren_idx = first_paren.map(|ip| i + ip); - let preceeds_whitespace_and_paren = paren_idx.map(|ip| { - let maybe_space = &contents[*i + "cfg".len() .. ip]; - maybe_space.chars().all(|c| char::is_whitespace(c) || c == '!') - }).unwrap_or(false); + let preceeds_whitespace_and_paren = paren_idx + .map(|ip| { + let maybe_space = &contents[*i + "cfg".len()..ip]; + maybe_space + .chars() + .all(|c| char::is_whitespace(c) || c == '!') + }) + .unwrap_or(false); succeeds_non_ident && preceeds_whitespace_and_paren }); @@ -209,10 +242,11 @@ fn parse_cfgs<'a>(contents: &'a str) -> Vec<(usize, &'a str)> { return (i, &contents_from[..=j]); } } - _ => { } + _ => {} } } unreachable!() - }).collect() + }) + .collect() } diff --git a/src/tools/tidy/src/style.rs b/src/tools/tidy/src/style.rs index 42e803bfe3a2a..ba6fe0cfe31fc 100644 --- a/src/tools/tidy/src/style.rs +++ b/src/tools/tidy/src/style.rs @@ -37,10 +37,12 @@ Use llvm::report_fatal_error for increased robustness."; /// Parser states for line_is_url. #[derive(PartialEq)] #[allow(non_camel_case_types)] -enum LIUState { EXP_COMMENT_START, - EXP_LINK_LABEL_OR_URL, - EXP_URL, - EXP_END } +enum LIUState { + EXP_COMMENT_START, + EXP_LINK_LABEL_OR_URL, + EXP_URL, + EXP_END, +} /// True if LINE appears to be a line comment containing an URL, /// possibly with a Markdown link label in front, and nothing else. 
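// A condensed, standalone restatement (not part of this patch) of the state
// machine reformatted in the next hunk: a line is exempt from the length check
// when it is nothing but a line comment holding a URL, optionally preceded by
// a Markdown link label such as `[foo]:`. The `../` relative-link case of the
// real function is omitted here.
fn line_is_url_sketch(line: &str) -> bool {
    #[derive(PartialEq)]
    enum State {
        CommentStart,
        LabelOrUrl,
        Url,
        End,
    }
    let mut state = State::CommentStart;
    for tok in line.split_whitespace() {
        state = match (state, tok) {
            (State::CommentStart, "//")
            | (State::CommentStart, "///")
            | (State::CommentStart, "//!") => State::LabelOrUrl,
            (State::LabelOrUrl, w) if w.len() >= 4 && w.starts_with('[') && w.ends_with("]:") => {
                State::Url
            }
            (State::LabelOrUrl, w) | (State::Url, w)
                if w.starts_with("http://") || w.starts_with("https://") =>
            {
                State::End
            }
            _ => return false,
        };
    }
    state == State::End
}

fn main() {
    assert!(line_is_url_sketch("/// [rfc]: https://github.com/rust-lang/rfcs/pull/2115"));
    assert!(line_is_url_sketch("// https://doc.rust-lang.org/book/"));
    assert!(!line_is_url_sketch("// see the docs at https://doc.rust-lang.org for details"));
}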
@@ -59,16 +61,22 @@ fn line_is_url(line: &str) -> bool { (EXP_COMMENT_START, "//!") => state = EXP_LINK_LABEL_OR_URL, (EXP_LINK_LABEL_OR_URL, w) - if w.len() >= 4 && w.starts_with('[') && w.ends_with("]:") - => state = EXP_URL, + if w.len() >= 4 && w.starts_with('[') && w.ends_with("]:") => + { + state = EXP_URL + } - (EXP_LINK_LABEL_OR_URL, w) - if w.starts_with("http://") || w.starts_with("https://") - => state = EXP_END, + (EXP_LINK_LABEL_OR_URL, w) if w.starts_with("http://") || w.starts_with("https://") => { + state = EXP_END + } (EXP_URL, w) - if w.starts_with("http://") || w.starts_with("https://") || w.starts_with("../") - => state = EXP_END, + if w.starts_with("http://") + || w.starts_with("https://") + || w.starts_with("../") => + { + state = EXP_END + } (_, _) => return false, } @@ -93,9 +101,8 @@ pub fn check(path: &Path, bad: &mut bool) { super::walk(path, &mut super::filter_dirs, &mut |file| { let filename = file.file_name().unwrap().to_string_lossy(); let extensions = [".rs", ".py", ".js", ".sh", ".c", ".cpp", ".h"]; - if extensions.iter().all(|e| !filename.ends_with(e)) || - filename.starts_with(".#") { - return + if extensions.iter().all(|e| !filename.ends_with(e)) || filename.starts_with(".#") { + return; } contents.truncate(0); @@ -114,9 +121,8 @@ pub fn check(path: &Path, bad: &mut bool) { let mut err = |msg: &str| { tidy_error!(bad, "{}:{}: {}", file.display(), i + 1, msg); }; - if !skip_length && line.chars().count() > COLS - && !long_line_is_ok(line) { - err(&format!("line longer than {} chars", COLS)); + if !skip_length && line.chars().count() > COLS && !long_line_is_ok(line) { + err(&format!("line longer than {} chars", COLS)); } if line.contains('\t') && !skip_tab { err("tab character"); @@ -150,7 +156,12 @@ pub fn check(path: &Path, bad: &mut bool) { match trailing_new_lines { 0 => tidy_error!(bad, "{}: missing trailing newline", file.display()), 1 | 2 => {} - n => tidy_error!(bad, "{}: too many trailing newlines ({})", file.display(), n), + n => tidy_error!( + bad, + "{}: too many trailing newlines ({})", + file.display(), + n + ), }; }) } diff --git a/src/tools/tidy/src/unstable_book.rs b/src/tools/tidy/src/unstable_book.rs index c09a04952e6b1..261716b8aa693 100644 --- a/src/tools/tidy/src/unstable_book.rs +++ b/src/tools/tidy/src/unstable_book.rs @@ -1,7 +1,7 @@ +use features::{collect_lang_features, collect_lib_features, Features, Status}; use std::collections::BTreeSet; use std::fs; use std::path; -use features::{collect_lang_features, collect_lib_features, Features, Status}; pub const PATH_STR: &str = "doc/unstable-book/src"; @@ -58,8 +58,9 @@ pub fn collect_unstable_book_section_file_names(dir: &path::Path) -> BTreeSet BTreeSet { +fn collect_unstable_book_lang_features_section_file_names( + base_src_path: &path::Path, +) -> BTreeSet { collect_unstable_book_section_file_names(&unstable_book_lang_features_path(base_src_path)) } @@ -67,31 +68,34 @@ fn collect_unstable_book_lang_features_section_file_names(base_src_path: &path:: /// /// * hyphens replaced by underscores /// * the markdown suffix ('.md') removed -fn collect_unstable_book_lib_features_section_file_names(base_src_path: &path::Path) - -> BTreeSet { +fn collect_unstable_book_lib_features_section_file_names( + base_src_path: &path::Path, +) -> BTreeSet { collect_unstable_book_section_file_names(&unstable_book_lib_features_path(base_src_path)) } pub fn check(path: &path::Path, bad: &mut bool) { - // Library features let lang_features = collect_lang_features(path, bad); - let lib_features = 
collect_lib_features(path).into_iter().filter(|&(ref name, _)| { - !lang_features.contains_key(name) - }).collect(); + let lib_features = collect_lib_features(path) + .into_iter() + .filter(|&(ref name, _)| !lang_features.contains_key(name)) + .collect(); let unstable_lib_feature_names = collect_unstable_feature_names(&lib_features); let unstable_book_lib_features_section_file_names = collect_unstable_book_lib_features_section_file_names(path); // Check for Unstable Book sections that don't have a corresponding unstable feature - for feature_name in &unstable_book_lib_features_section_file_names - - &unstable_lib_feature_names { - tidy_error!(bad, - "The Unstable Book has a 'library feature' section '{}' which doesn't \ - correspond to an unstable library feature", - feature_name) + for feature_name in &unstable_book_lib_features_section_file_names - &unstable_lib_feature_names + { + tidy_error!( + bad, + "The Unstable Book has a 'library feature' section '{}' which doesn't \ + correspond to an unstable library feature", + feature_name + ) } // Language features @@ -101,12 +105,15 @@ pub fn check(path: &path::Path, bad: &mut bool) { collect_unstable_book_lang_features_section_file_names(path); // Check for Unstable Book sections that don't have a corresponding unstable feature - for feature_name in &unstable_book_lang_features_section_file_names - - &unstable_lang_feature_names { - tidy_error!(bad, - "The Unstable Book has a 'language feature' section '{}' which doesn't \ - correspond to an unstable language feature", - feature_name) + for feature_name in + &unstable_book_lang_features_section_file_names - &unstable_lang_feature_names + { + tidy_error!( + bad, + "The Unstable Book has a 'language feature' section '{}' which doesn't \ + correspond to an unstable language feature", + feature_name + ) } // List unstable features that don't have Unstable Book sections diff --git a/src/tools/unstable-book-gen/src/main.rs b/src/tools/unstable-book-gen/src/main.rs index df12eaf0cb028..22ea166737b40 100644 --- a/src/tools/unstable-book-gen/src/main.rs +++ b/src/tools/unstable-book-gen/src/main.rs @@ -4,14 +4,16 @@ extern crate tidy; -use tidy::features::{Feature, Features, collect_lib_features, collect_lang_features}; -use tidy::unstable_book::{collect_unstable_feature_names, collect_unstable_book_section_file_names, - PATH_STR, LANG_FEATURES_DIR, LIB_FEATURES_DIR}; use std::collections::BTreeSet; -use std::io::Write; -use std::fs::{self, File}; use std::env; +use std::fs::{self, File}; +use std::io::Write; use std::path::Path; +use tidy::features::{collect_lang_features, collect_lib_features, Feature, Features}; +use tidy::unstable_book::{ + collect_unstable_book_section_file_names, collect_unstable_feature_names, LANG_FEATURES_DIR, + LIB_FEATURES_DIR, PATH_STR, +}; /// A helper macro to `unwrap` a result except also print out details like: /// @@ -19,57 +21,52 @@ use std::path::Path; /// * The expression that failed /// * The error itself macro_rules! 
t { - ($e:expr) => (match $e { - Ok(e) => e, - Err(e) => panic!("{} failed with {}", stringify!($e), e), - }) + ($e:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + } + }; } fn generate_stub_issue(path: &Path, name: &str, issue: u32) { let mut file = t!(File::create(path)); - t!(file.write_fmt(format_args!(include_str!("stub-issue.md"), - name = name, - issue = issue))); + t!(file.write_fmt(format_args!( + include_str!("stub-issue.md"), + name = name, + issue = issue + ))); } fn generate_stub_no_issue(path: &Path, name: &str) { let mut file = t!(File::create(path)); - t!(file.write_fmt(format_args!(include_str!("stub-no-issue.md"), - name = name))); + t!(file.write_fmt(format_args!(include_str!("stub-no-issue.md"), name = name))); } -fn set_to_summary_str(set: &BTreeSet, dir: &str -) -> String { - set - .iter() - .map(|ref n| format!(" - [{}]({}/{}.md)", - n.replace('-', "_"), - dir, - n)) +fn set_to_summary_str(set: &BTreeSet, dir: &str) -> String { + set.iter() + .map(|ref n| format!(" - [{}]({}/{}.md)", n.replace('-', "_"), dir, n)) .fold("".to_owned(), |s, a| s + &a + "\n") } fn generate_summary(path: &Path, lang_features: &Features, lib_features: &Features) { - let compiler_flags = collect_unstable_book_section_file_names( - &path.join("compiler-flags")); + let compiler_flags = collect_unstable_book_section_file_names(&path.join("compiler-flags")); - let compiler_flags_str = set_to_summary_str(&compiler_flags, - "compiler-flags"); + let compiler_flags_str = set_to_summary_str(&compiler_flags, "compiler-flags"); let unstable_lang_features = collect_unstable_feature_names(&lang_features); let unstable_lib_features = collect_unstable_feature_names(&lib_features); - let lang_features_str = set_to_summary_str(&unstable_lang_features, - LANG_FEATURES_DIR); - let lib_features_str = set_to_summary_str(&unstable_lib_features, - LIB_FEATURES_DIR); + let lang_features_str = set_to_summary_str(&unstable_lang_features, LANG_FEATURES_DIR); + let lib_features_str = set_to_summary_str(&unstable_lib_features, LIB_FEATURES_DIR); let mut file = t!(File::create(&path.join("SUMMARY.md"))); - t!(file.write_fmt(format_args!(include_str!("SUMMARY.md"), - compiler_flags = compiler_flags_str, - language_features = lang_features_str, - library_features = lib_features_str))); - + t!(file.write_fmt(format_args!( + include_str!("SUMMARY.md"), + compiler_flags = compiler_flags_str, + language_features = lang_features_str, + library_features = lib_features_str + ))); } fn has_valid_tracking_issue(f: &Feature) -> bool { @@ -81,7 +78,7 @@ fn has_valid_tracking_issue(f: &Feature) -> bool { false } -fn generate_unstable_book_files(src :&Path, out: &Path, features :&Features) { +fn generate_unstable_book_files(src: &Path, out: &Path, features: &Features) { let unstable_features = collect_unstable_feature_names(features); let unstable_section_file_names = collect_unstable_book_section_file_names(src); t!(fs::create_dir_all(&out)); @@ -92,9 +89,11 @@ fn generate_unstable_book_files(src :&Path, out: &Path, features :&Features) { let feature = &features[&feature_name_underscore]; if has_valid_tracking_issue(&feature) { - generate_stub_issue(&out_file_path, - &feature_name_underscore, - feature.tracking_issue.unwrap()); + generate_stub_issue( + &out_file_path, + &feature_name_underscore, + feature.tracking_issue.unwrap(), + ); } else { generate_stub_no_issue(&out_file_path, &feature_name_underscore); } @@ -117,25 +116,33 @@ fn copy_recursive(path: &Path, to: &Path) { fn 
main() { let src_path_str = env::args_os().skip(1).next().expect("source path required"); - let dest_path_str = env::args_os().skip(2).next().expect("destination path required"); + let dest_path_str = env::args_os() + .skip(2) + .next() + .expect("destination path required"); let src_path = Path::new(&src_path_str); let dest_path = Path::new(&dest_path_str).join("src"); let lang_features = collect_lang_features(src_path, &mut false); - let lib_features = collect_lib_features(src_path).into_iter().filter(|&(ref name, _)| { - !lang_features.contains_key(name) - }).collect(); + let lib_features = collect_lib_features(src_path) + .into_iter() + .filter(|&(ref name, _)| !lang_features.contains_key(name)) + .collect(); let doc_src_path = src_path.join(PATH_STR); t!(fs::create_dir_all(&dest_path)); - generate_unstable_book_files(&doc_src_path.join(LANG_FEATURES_DIR), - &dest_path.join(LANG_FEATURES_DIR), - &lang_features); - generate_unstable_book_files(&doc_src_path.join(LIB_FEATURES_DIR), - &dest_path.join(LIB_FEATURES_DIR), - &lib_features); + generate_unstable_book_files( + &doc_src_path.join(LANG_FEATURES_DIR), + &dest_path.join(LANG_FEATURES_DIR), + &lang_features, + ); + generate_unstable_book_files( + &doc_src_path.join(LIB_FEATURES_DIR), + &dest_path.join(LIB_FEATURES_DIR), + &lib_features, + ); copy_recursive(&doc_src_path, &dest_path);
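// A standalone sketch (not part of this patch) of the shape `set_to_summary_str`
// above gives each SUMMARY.md entry: the displayed label swaps hyphens for
// underscores while the link target keeps the hyphenated file name. The
// leading indentation of the real entries is omitted, and the feature name is
// only an example.
use std::collections::BTreeSet;

fn summary_entries(set: &BTreeSet<String>, dir: &str) -> String {
    set.iter()
        .map(|n| format!("- [{}]({}/{}.md)", n.replace('-', "_"), dir, n))
        .fold(String::new(), |acc, entry| acc + &entry + "\n")
}

fn main() {
    let mut names = BTreeSet::new();
    names.insert("non-ascii-idents".to_owned());
    let summary = summary_entries(&names, "language-features");
    assert!(summary.contains("[non_ascii_idents](language-features/non-ascii-idents.md)"));
}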

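// A standalone sketch (not part of this patch) of the internal `select_fold1`
// helper that the `Iterator::max`/`min` family below delegates to, together
// with the stability rule the comments describe: `max` switches to a later
// element even on ties (so the last maximum wins), while `min` switches only
// on a strictly smaller element (so the first minimum wins). This is a
// simplified rendition, not the exact libcore implementation; the closure's
// argument order follows the hunks below (current projection, current item,
// new projection, new item).
fn select_fold1<I, B>(
    mut it: I,
    mut project: impl FnMut(&I::Item) -> B,
    mut switch: impl FnMut(&B, &I::Item, &B, &I::Item) -> bool,
) -> Option<(B, I::Item)>
where
    I: Iterator,
{
    let first = it.next()?;
    let mut sel = (project(&first), first);
    for x in it {
        let x_p = project(&x);
        if switch(&sel.0, &sel.1, &x_p, &x) {
            sel = (x_p, x);
        }
    }
    Some(sel)
}

fn main() {
    // Two pairs of items that tie on the projected key.
    let items = vec![(1, "first-min"), (3, "first-max"), (3, "last-max"), (1, "last-min")];

    // max: switch even on ties, so the later of the two equal keys is kept.
    let max = select_fold1(items.iter(), |t| t.0, |sel_p, _, x_p, _| sel_p <= x_p);
    assert_eq!(max.unwrap().1, &(3, "last-max"));

    // min: switch only when strictly smaller, so the earlier element survives ties.
    let min = select_fold1(items.iter(), |t| t.0, |sel_p, _, x_p, _| sel_p > x_p);
    assert_eq!(min.unwrap().1, &(1, "first-min"));
}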
(&mut self, mut predicate: P) -> Option + where P: FnMut(Self::Item) -> bool, - Self: Sized + ExactSizeIterator + DoubleEndedIterator + Self: Sized + ExactSizeIterator + DoubleEndedIterator, { // No need for an overflow check here, because `ExactSizeIterator` // implies that the number of elements fits into a `usize`. let n = self.len(); self.try_rfold(n, move |i, x| { let i = i - 1; - if predicate(x) { LoopState::Break(i) } - else { LoopState::Continue(i) } - }).break_value() + if predicate(x) { + LoopState::Break(i) + } else { + LoopState::Continue(i) + } + }) + .break_value() } /// Returns the maximum element of an iterator. @@ -2009,14 +2143,19 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn max(self) -> Option where Self: Sized, Self::Item: Ord + fn max(self) -> Option + where + Self: Sized, + Self::Item: Ord, { - select_fold1(self, - |_| (), - // switch to y even if it is only equal, to preserve - // stability. - |_, x, _, y| *x <= *y) - .map(|(_, x)| x) + select_fold1( + self, + |_| (), + // switch to y even if it is only equal, to preserve + // stability. + |_, x, _, y| *x <= *y, + ) + .map(|(_, x)| x) } /// Returns the minimum element of an iterator. @@ -2039,14 +2178,19 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn min(self) -> Option where Self: Sized, Self::Item: Ord + fn min(self) -> Option + where + Self: Sized, + Self::Item: Ord, { - select_fold1(self, - |_| (), - // only switch to y if it is strictly smaller, to - // preserve stability. - |_, x, _, y| *x > *y) - .map(|(_, x)| x) + select_fold1( + self, + |_| (), + // only switch to y if it is strictly smaller, to + // preserve stability. + |_, x, _, y| *x > *y, + ) + .map(|(_, x)| x) } /// Returns the element that gives the maximum value from the @@ -2066,14 +2210,18 @@ pub trait Iterator { #[inline] #[stable(feature = "iter_cmp_by_key", since = "1.6.0")] fn max_by_key(self, f: F) -> Option - where Self: Sized, F: FnMut(&Self::Item) -> B, + where + Self: Sized, + F: FnMut(&Self::Item) -> B, { - select_fold1(self, - f, - // switch to y even if it is only equal, to preserve - // stability. - |x_p, _, y_p, _| x_p <= y_p) - .map(|(_, x)| x) + select_fold1( + self, + f, + // switch to y even if it is only equal, to preserve + // stability. + |x_p, _, y_p, _| x_p <= y_p, + ) + .map(|(_, x)| x) } /// Returns the element that gives the maximum value with respect to the @@ -2093,14 +2241,18 @@ pub trait Iterator { #[inline] #[stable(feature = "iter_max_by", since = "1.15.0")] fn max_by(self, mut compare: F) -> Option - where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering, + where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering, { - select_fold1(self, - |_| (), - // switch to y even if it is only equal, to preserve - // stability. - |_, x, _, y| Ordering::Greater != compare(x, y)) - .map(|(_, x)| x) + select_fold1( + self, + |_| (), + // switch to y even if it is only equal, to preserve + // stability. + |_, x, _, y| Ordering::Greater != compare(x, y), + ) + .map(|(_, x)| x) } /// Returns the element that gives the minimum value from the @@ -2119,14 +2271,18 @@ pub trait Iterator { /// ``` #[stable(feature = "iter_cmp_by_key", since = "1.6.0")] fn min_by_key(self, f: F) -> Option - where Self: Sized, F: FnMut(&Self::Item) -> B, + where + Self: Sized, + F: FnMut(&Self::Item) -> B, { - select_fold1(self, - f, - // only switch to y if it is strictly smaller, to - // preserve stability. 
- |x_p, _, y_p, _| x_p > y_p) - .map(|(_, x)| x) + select_fold1( + self, + f, + // only switch to y if it is strictly smaller, to + // preserve stability. + |x_p, _, y_p, _| x_p > y_p, + ) + .map(|(_, x)| x) } /// Returns the element that gives the minimum value with respect to the @@ -2146,17 +2302,20 @@ pub trait Iterator { #[inline] #[stable(feature = "iter_min_by", since = "1.15.0")] fn min_by(self, mut compare: F) -> Option - where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering, + where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering, { - select_fold1(self, - |_| (), - // switch to y even if it is strictly smaller, to - // preserve stability. - |_, x, _, y| Ordering::Greater == compare(x, y)) - .map(|(_, x)| x) + select_fold1( + self, + |_| (), + // switch to y even if it is strictly smaller, to + // preserve stability. + |_, x, _, y| Ordering::Greater == compare(x, y), + ) + .map(|(_, x)| x) } - /// Reverses an iterator's direction. /// /// Usually, iterators iterate from left to right. After using `rev()`, @@ -2182,8 +2341,11 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn rev(self) -> Rev where Self: Sized + DoubleEndedIterator { - Rev{iter: self} + fn rev(self) -> Rev + where + Self: Sized + DoubleEndedIterator, + { + Rev { iter: self } } /// Converts an iterator of pairs into a pair of containers. @@ -2209,10 +2371,11 @@ pub trait Iterator { /// assert_eq!(right, [2, 4]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - fn unzip(self) -> (FromA, FromB) where + fn unzip(self) -> (FromA, FromB) + where FromA: Default + Extend, FromB: Default + Extend, - Self: Sized + Iterator, + Self: Sized + Iterator, { let mut ts: FromA = Default::default(); let mut us: FromB = Default::default(); @@ -2249,7 +2412,9 @@ pub trait Iterator { /// ``` #[unstable(feature = "iter_copied", issue = "57127")] fn copied<'a, T: 'a>(self) -> Copied - where Self: Sized + Iterator, T: Copy + where + Self: Sized + Iterator, + T: Copy, { Copied { it: self } } @@ -2278,7 +2443,9 @@ pub trait Iterator { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn cloned<'a, T: 'a>(self) -> Cloned - where Self: Sized + Iterator, T: Clone + where + Self: Sized + Iterator, + T: Clone, { Cloned { it: self } } @@ -2310,8 +2477,14 @@ pub trait Iterator { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] - fn cycle(self) -> Cycle where Self: Sized + Clone { - Cycle{orig: self.clone(), iter: self} + fn cycle(self) -> Cycle + where + Self: Sized + Clone, + { + Cycle { + orig: self.clone(), + iter: self, + } } /// Sums the elements of an iterator. @@ -2338,8 +2511,9 @@ pub trait Iterator { /// ``` #[stable(feature = "iter_arith", since = "1.11.0")] fn sum(self) -> S - where Self: Sized, - S: Sum, + where + Self: Sized, + S: Sum, { Sum::sum(self) } @@ -2366,8 +2540,9 @@ pub trait Iterator { /// ``` #[stable(feature = "iter_arith", since = "1.11.0")] fn product