diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs
new file mode 100644
index 00000000000..30708ebc806
--- /dev/null
+++ b/src/cargo/core/compiler/build_context/mod.rs
@@ -0,0 +1,323 @@
+use std::env;
+use std::path::Path;
+use std::str::{self, FromStr};
+
+use core::profiles::Profiles;
+use core::{Dependency, Workspace};
+use core::{Package, PackageId, PackageSet, Resolve};
+use util::errors::CargoResult;
+use util::{profile, Cfg, CfgExpr, Config};
+
+use super::{BuildConfig, Kind, TargetConfig, Unit};
+
+mod target_info;
+pub use self::target_info::{FileFlavor, TargetInfo};
+
+/// The build context, containing all information about a build task
+pub struct BuildContext<'a, 'cfg: 'a> {
+    /// The workspace the build is for
+    pub ws: &'a Workspace<'cfg>,
+    /// The cargo configuration
+    pub config: &'cfg Config,
+    /// The dependency graph for our build
+    pub resolve: &'a Resolve,
+    pub profiles: &'a Profiles,
+    pub build_config: &'a BuildConfig,
+    /// This is a workaround to carry the extra compiler args for either
+    /// `rustc` or `rustdoc` given on the command-line for the commands `cargo
+    /// rustc` and `cargo rustdoc`. These commands only support one target,
+    /// but we don't want the args passed to any dependencies, so we include
+    /// the `Unit` corresponding to the top-level target.
+    pub extra_compiler_args: Option<(Unit<'a>, Vec<String>)>,
+    pub packages: &'a PackageSet<'cfg>,
+
+    pub target_info: TargetInfo,
+    pub host_info: TargetInfo,
+    pub incremental_env: Option<bool>,
+}
+
+impl<'a, 'cfg> BuildContext<'a, 'cfg> {
+    pub fn new(
+        ws: &'a Workspace<'cfg>,
+        resolve: &'a Resolve,
+        packages: &'a PackageSet<'cfg>,
+        config: &'cfg Config,
+        build_config: &'a BuildConfig,
+        profiles: &'a Profiles,
+        extra_compiler_args: Option<(Unit<'a>, Vec<String>)>,
+    ) -> CargoResult<BuildContext<'a, 'cfg>> {
+        let incremental_env = match env::var("CARGO_INCREMENTAL") {
+            Ok(v) => Some(v == "1"),
+            Err(_) => None,
+        };
+
+        let (host_info, target_info) = {
+            let _p = profile::start("BuildContext::probe_target_info");
+            debug!("probe_target_info");
+            let host_info = TargetInfo::new(config, &build_config, Kind::Host)?;
+            let target_info = TargetInfo::new(config, &build_config, Kind::Target)?;
+            (host_info, target_info)
+        };
+
+        Ok(BuildContext {
+            ws,
+            resolve,
+            packages,
+            config,
+            target_info,
+            host_info,
+            build_config,
+            profiles,
+            incremental_env,
+            extra_compiler_args,
+        })
+    }
+
+    pub fn extern_crate_name(&self, unit: &Unit<'a>, dep: &Unit<'a>) -> CargoResult<String> {
+        let deps = {
+            let a = unit.pkg.package_id();
+            let b = dep.pkg.package_id();
+            if a == b {
+                &[]
+            } else {
+                self.resolve.dependencies_listed(a, b)
+            }
+        };
+
+        let crate_name = dep.target.crate_name();
+        let mut names = deps.iter()
+            .map(|d| d.rename().unwrap_or(&crate_name));
+        let name = names.next().unwrap_or(&crate_name);
+        for n in names {
+            if n == name {
+                continue
+            }
+            bail!("multiple dependencies listed for the same crate must \
+                   all have the same name, but the dependency on `{}` \
+                   is listed as having different names", dep.pkg.package_id());
+        }
+        Ok(name.to_string())
+    }
+
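A standalone sketch of the rename-agreement rule that `extern_crate_name` enforces above: every dependency edge pointing at the same crate must agree on the name it is imported under, otherwise the build is rejected. `Dep` and its `rename` field are hypothetical stand-ins for cargo's `Dependency::rename()`, not the real types.

```rust
// Minimal sketch, not cargo's types: `Dep::rename` stands in for `Dependency::rename()`.
struct Dep {
    rename: Option<String>,
}

fn extern_name(deps: &[Dep], crate_name: &str) -> Result<String, String> {
    // Fall back to the crate's own name when a dependency edge has no rename.
    let mut names = deps
        .iter()
        .map(|d| d.rename.as_ref().map(|s| s.as_str()).unwrap_or(crate_name));
    let first = names.next().unwrap_or(crate_name).to_string();
    // Every remaining edge must agree with the first one.
    if names.all(|n| n == first) {
        Ok(first)
    } else {
        Err("multiple dependencies on the same crate use different names".to_string())
    }
}

fn main() {
    let deps = vec![Dep { rename: Some("foo_renamed".to_string()) }];
    assert_eq!(extern_name(&deps, "foo").unwrap(), "foo_renamed");
    let conflicting = [Dep { rename: None }, Dep { rename: Some("bar".to_string()) }];
    assert!(extern_name(&conflicting, "foo").is_err());
}
```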
+    /// Whether a dependency should be compiled for the host or target platform,
+    /// specified by `Kind`.
+    pub fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool {
+        // If this dependency is only available for certain platforms,
+        // make sure we're only enabling it for that platform.
+        let platform = match dep.platform() {
+            Some(p) => p,
+            None => return true,
+        };
+        let (name, info) = match kind {
+            Kind::Host => (self.build_config.host_triple(), &self.host_info),
+            Kind::Target => (self.build_config.target_triple(), &self.target_info),
+        };
+        platform.matches(name, info.cfg())
+    }
+
+    /// Gets a package for the given package id.
+    pub fn get_package(&self, id: &PackageId) -> CargoResult<&'a Package> {
+        self.packages.get(id)
+    }
+
+    /// Get the user-specified linker for a particular host or target
+    pub fn linker(&self, kind: Kind) -> Option<&Path> {
+        self.target_config(kind).linker.as_ref().map(|s| s.as_ref())
+    }
+
+    /// Get the user-specified `ar` program for a particular host or target
+    pub fn ar(&self, kind: Kind) -> Option<&Path> {
+        self.target_config(kind).ar.as_ref().map(|s| s.as_ref())
+    }
+
+    /// Get the list of cfg printed out from the compiler for the specified kind
+    pub fn cfg(&self, kind: Kind) -> &[Cfg] {
+        let info = match kind {
+            Kind::Host => &self.host_info,
+            Kind::Target => &self.target_info,
+        };
+        info.cfg().unwrap_or(&[])
+    }
+
+    /// Get the target configuration for a particular host or target
+    fn target_config(&self, kind: Kind) -> &TargetConfig {
+        match kind {
+            Kind::Host => &self.build_config.host,
+            Kind::Target => &self.build_config.target,
+        }
+    }
+
+    /// Number of jobs specified for this build
+    pub fn jobs(&self) -> u32 {
+        self.build_config.jobs
+    }
+
+    pub fn rustflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
+        env_args(
+            self.config,
+            &self.build_config,
+            self.info(&unit.kind).cfg(),
+            unit.kind,
+            "RUSTFLAGS",
+        )
+    }
+
+    pub fn rustdocflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
+        env_args(
+            self.config,
+            &self.build_config,
+            self.info(&unit.kind).cfg(),
+            unit.kind,
+            "RUSTDOCFLAGS",
+        )
+    }
+
+    pub fn show_warnings(&self, pkg: &PackageId) -> bool {
+        pkg.source_id().is_path() || self.config.extra_verbose()
+    }
+
+    fn info(&self, kind: &Kind) -> &TargetInfo {
+        match *kind {
+            Kind::Host => &self.host_info,
+            Kind::Target => &self.target_info,
+        }
+    }
+
+    pub fn extra_args_for(&self, unit: &Unit<'a>) -> Option<&Vec<String>> {
+        if let Some((ref args_unit, ref args)) = self.extra_compiler_args {
+            if args_unit == unit {
+                return Some(args);
+            }
+        }
+        None
+    }
+}
+
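The `env_args` helper that follows implements the precedence documented below: the `RUSTFLAGS`/`RUSTDOCFLAGS` environment variable first, then matching `[target]` tables, then `build.rustflags`, and host units get nothing at all when `--target` is in effect. A minimal sketch of that selection logic, using plain `Vec<String>` inputs instead of cargo's `Config` lookups (all names here are illustrative):

```rust
// Illustrative only: flag sources are passed in directly rather than read from
// the environment / .cargo/config as env_args does.
fn pick_flags(
    env_flags: Option<Vec<String>>,  // e.g. the RUSTFLAGS environment variable
    target_flags: Vec<String>,       // target.*.rustflags plus matching target.'cfg(..)'.rustflags
    build_flags: Vec<String>,        // build.rustflags
    requested_target: Option<&str>,  // Some(..) when --target was passed
    unit_is_for_target: bool,        // false for build scripts / plugins (host units)
) -> Vec<String> {
    // With --target, host units get no flags at all.
    if requested_target.is_some() && !unit_is_for_target {
        return Vec::new();
    }
    // The environment variable wins outright.
    if let Some(flags) = env_flags {
        return flags;
    }
    // Then any matching [target] sections (sorted for determinism in env_args).
    if !target_flags.is_empty() {
        return target_flags;
    }
    // Finally build.rustflags.
    build_flags
}

fn main() {
    let flags = pick_flags(
        None,
        vec!["-C".to_string(), "target-cpu=native".to_string()],
        vec![],
        None,
        true,
    );
    assert_eq!(flags, vec!["-C".to_string(), "target-cpu=native".to_string()]);
}
```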
+/// Acquire extra flags to pass to the compiler from various locations.
+///
+/// The locations are:
+///
+///  - the `RUSTFLAGS` environment variable
+///
+/// then if this was not found
+///
+///  - `target.*.rustflags` from the manifest (Cargo.toml)
+///  - `target.cfg(..).rustflags` from the manifest
+///
+/// then if neither of these were found
+///
+///  - `build.rustflags` from the manifest
+///
+/// Note that if a `target` is specified, no args will be passed to host code (plugins, build
+/// scripts, ...), even if it is the same as the target.
+fn env_args(
+    config: &Config,
+    build_config: &BuildConfig,
+    target_cfg: Option<&[Cfg]>,
+    kind: Kind,
+    name: &str,
+) -> CargoResult<Vec<String>> {
+    // We *want* to apply RUSTFLAGS only to builds for the
+    // requested target architecture, and not to things like build
+    // scripts and plugins, which may be for an entirely different
+    // architecture. Cargo's present architecture makes it quite
+    // hard to only apply flags to things that are not build
+    // scripts and plugins though, so we do something more hacky
+    // instead to avoid applying the same RUSTFLAGS to multiple target
+    // arches:
+    //
+    // 1) If --target is not specified we just apply RUSTFLAGS to
+    // all builds; they are all going to have the same target.
+    //
+    // 2) If --target *is* specified then we only apply RUSTFLAGS
+    // to compilation units with the Target kind, which indicates
+    // it was chosen by the --target flag.
+    //
+    // This means that, e.g. even if the specified --target is the
+    // same as the host, build scripts and plugins won't get
+    // RUSTFLAGS.
+    let compiling_with_target = build_config.requested_target.is_some();
+    let is_target_kind = kind == Kind::Target;
+
+    if compiling_with_target && !is_target_kind {
+        // This is probably a build script or plugin and we're
+        // compiling with --target. In this scenario there are
+        // no rustflags we can apply.
+        return Ok(Vec::new());
+    }
+
+    // First try RUSTFLAGS from the environment
+    if let Ok(a) = env::var(name) {
+        let args = a.split(' ')
+            .map(str::trim)
+            .filter(|s| !s.is_empty())
+            .map(str::to_string);
+        return Ok(args.collect());
+    }
+
+    let mut rustflags = Vec::new();
+
+    let name = name.chars()
+        .flat_map(|c| c.to_lowercase())
+        .collect::<String>();
+    // Then the target.*.rustflags value...
+    let target = build_config
+        .requested_target
+        .as_ref()
+        .map(|s| s.as_str())
+        .unwrap_or(build_config.host_triple());
+    let key = format!("target.{}.{}", target, name);
+    if let Some(args) = config.get_list_or_split_string(&key)? {
+        let args = args.val.into_iter();
+        rustflags.extend(args);
+    }
+    // ...including target.'cfg(...)'.rustflags
+    if let Some(target_cfg) = target_cfg {
+        if let Some(table) = config.get_table("target")? {
+            let cfgs = table.val.keys().filter_map(|t| {
+                if t.starts_with("cfg(") && t.ends_with(')') {
+                    let cfg = &t[4..t.len() - 1];
+                    CfgExpr::from_str(cfg).ok().and_then(|c| {
+                        if c.matches(target_cfg) {
+                            Some(t)
+                        } else {
+                            None
+                        }
+                    })
+                } else {
+                    None
+                }
+            });
+
+            // Note that we may have multiple matching `[target]` sections and
+            // because we're passing flags to the compiler this can affect
+            // cargo's caching and whether it rebuilds. Ensure a deterministic
+            // ordering through sorting for now. We may perhaps one day wish to
+            // ensure a deterministic ordering via the order keys were defined
+            // in files perhaps.
+            let mut cfgs = cfgs.collect::<Vec<_>>();
+            cfgs.sort();
+
+            for n in cfgs {
+                let key = format!("target.{}.{}", n, name);
+                if let Some(args) = config.get_list_or_split_string(&key)? {
+                    let args = args.val.into_iter();
+                    rustflags.extend(args);
+                }
+            }
+        }
+    }
+
+    if !rustflags.is_empty() {
+        return Ok(rustflags);
+    }
+
+    // Then the build.rustflags value
+    let key = format!("build.{}", name);
+    if let Some(args) = config.get_list_or_split_string(&key)?
{ + let args = args.val.into_iter(); + return Ok(args.collect()); + } + + Ok(Vec::new()) +} diff --git a/src/cargo/core/compiler/context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs similarity index 100% rename from src/cargo/core/compiler/context/target_info.rs rename to src/cargo/core/compiler/build_context/target_info.rs diff --git a/src/cargo/core/compiler/compilation.rs b/src/cargo/core/compiler/compilation.rs index 71bddf20fae..ece186c9dc1 100644 --- a/src/cargo/core/compiler/compilation.rs +++ b/src/cargo/core/compiler/compilation.rs @@ -175,7 +175,7 @@ impl<'cfg> Compilation<'cfg> { // When adding new environment variables depending on // crate properties which might require rebuild upon change // consider adding the corresponding properties to the hash - // in Context::target_metadata() + // in BuildContext::target_metadata() cmd.env("CARGO_MANIFEST_DIR", pkg.root()) .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string()) .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string()) diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/context/compilation_files.rs index 9fcacb33614..cf3046eec2d 100644 --- a/src/cargo/core/compiler/context/compilation_files.rs +++ b/src/cargo/core/compiler/context/compilation_files.rs @@ -7,7 +7,7 @@ use std::sync::Arc; use lazycell::LazyCell; -use super::{Context, FileFlavor, Kind, Layout, Unit}; +use super::{BuildContext, Context, FileFlavor, Kind, Layout, Unit}; use core::{TargetKind, Workspace}; use util::{self, CargoResult}; @@ -172,10 +172,10 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { pub(super) fn outputs( &self, unit: &Unit<'a>, - cx: &Context<'a, 'cfg>, + bcx: &BuildContext<'a, 'cfg>, ) -> CargoResult>> { self.outputs[unit] - .try_borrow_with(|| self.calc_outputs(unit, cx)) + .try_borrow_with(|| self.calc_outputs(unit, bcx)) .map(Arc::clone) } @@ -230,15 +230,15 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { fn calc_outputs( &self, unit: &Unit<'a>, - cx: &Context<'a, 'cfg>, + bcx: &BuildContext<'a, 'cfg>, ) -> CargoResult>> { let out_dir = self.out_dir(unit); let file_stem = self.file_stem(unit); let link_stem = self.link_stem(unit); let info = if unit.target.for_host() { - &cx.host_info + &bcx.host_info } else { - &cx.target_info + &bcx.target_info }; let mut ret = Vec::new(); @@ -268,7 +268,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { crate_type, flavor, unit.target.kind(), - cx.build_config.target_triple(), + bcx.build_config.target_triple(), )?; match file_types { @@ -324,14 +324,14 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { does not support these crate types", unsupported.join(", "), unit.pkg, - cx.build_config.target_triple() + bcx.build_config.target_triple() ) } bail!( "cannot compile `{}` as the target `{}` does not \ support any of the output crate types", unit.pkg, - cx.build_config.target_triple() + bcx.build_config.target_triple() ); } info!("Target filenames: {:?}", ret); @@ -380,10 +380,11 @@ fn compute_metadata<'a, 'cfg>( // This environment variable should not be relied on! It's // just here for rustbuild. We need a more principled method // doing this eventually. 
+ let bcx = &cx.bcx; let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA"); if !(unit.mode.is_any_test() || unit.mode.is_check()) && (unit.target.is_dylib() || unit.target.is_cdylib() - || (unit.target.is_bin() && cx.build_config.target_triple().starts_with("wasm32-"))) + || (unit.target.is_bin() && bcx.build_config.target_triple().starts_with("wasm32-"))) && unit.pkg.package_id().source_id().is_path() && __cargo_default_lib_metadata.is_err() { @@ -396,7 +397,7 @@ fn compute_metadata<'a, 'cfg>( // to pull crates from anywhere w/o worrying about conflicts unit.pkg .package_id() - .stable_hash(cx.ws.root()) + .stable_hash(bcx.ws.root()) .hash(&mut hasher); // Add package properties which map to environment variables @@ -408,7 +409,7 @@ fn compute_metadata<'a, 'cfg>( // Also mix in enabled features to our metadata. This'll ensure that // when changing feature sets each lib is separately cached. - cx.resolve + bcx.resolve .features_sorted(unit.pkg.package_id()) .hash(&mut hasher); @@ -427,7 +428,7 @@ fn compute_metadata<'a, 'cfg>( // settings like debuginfo and whatnot. unit.profile.hash(&mut hasher); unit.mode.hash(&mut hasher); - if let Some(ref args) = cx.extra_args_for(unit) { + if let Some(ref args) = bcx.extra_args_for(unit) { args.hash(&mut hasher); } @@ -441,7 +442,7 @@ fn compute_metadata<'a, 'cfg>( unit.target.name().hash(&mut hasher); unit.target.kind().hash(&mut hasher); - cx.build_config.rustc.verbose_version.hash(&mut hasher); + bcx.build_config.rustc.verbose_version.hash(&mut hasher); // Seed the contents of __CARGO_DEFAULT_LIB_METADATA to the hasher if present. // This should be the release channel, to get a different hash for each channel. diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs index f3d1e6d18ed..a0e166860ea 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/context/mod.rs @@ -1,26 +1,22 @@ #![allow(deprecated)] use std::collections::{HashMap, HashSet}; -use std::env; -use std::path::{Path, PathBuf}; -use std::str::{self, FromStr}; +use std::fmt::Write; +use std::path::PathBuf; use std::sync::Arc; use jobserver::Client; -use core::profiles::{Profile, Profiles}; -use core::{Dependency, Workspace}; -use core::{Package, PackageId, PackageSet, Resolve, Target}; +use core::{Package, PackageId, Resolve, Target}; +use core::profiles::Profile; use ops::CompileMode; use util::errors::{CargoResult, CargoResultExt}; -use util::{internal, profile, Cfg, CfgExpr, Config}; +use util::{internal, profile, Config}; use super::custom_build::{self, BuildDeps, BuildScripts, BuildState}; use super::fingerprint::Fingerprint; use super::job_queue::JobQueue; use super::layout::Layout; -use super::links::Links; -use super::TargetConfig; -use super::{BuildConfig, Compilation, Executor, Kind}; +use super::{BuildContext, Compilation, Executor, FileFlavor, Kind}; mod unit_dependencies; use self::unit_dependencies::build_unit_dependencies; @@ -29,9 +25,6 @@ mod compilation_files; pub use self::compilation_files::Metadata; use self::compilation_files::{CompilationFiles, OutputFile}; -mod target_info; -pub use self::target_info::{FileFlavor, TargetInfo}; - /// All information needed to define a Unit. /// /// A unit is an object that has enough information so that cargo knows how to build it. 
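A rough sketch of what the `compute_metadata` hunks above boil down to: the unit's identifying inputs (package identity, enabled features, profile settings, extra compiler args, compiler version) are all folded into one hash so that any change yields a distinct metadata suffix and therefore a separately cached artifact. The field list here is abbreviated and the helper is illustrative, not cargo's API.

```rust
#![allow(deprecated)] // SipHasher, mirroring the allow at the top of context/mod.rs
use std::hash::{Hash, Hasher, SipHasher};

// Illustrative only: a subset of the inputs hashed by compute_metadata.
fn metadata_hash(
    pkg_stable_hash: u64,         // package id hashed relative to the workspace root
    features: &[&str],            // sorted, enabled features
    profile_hash: u64,            // profile + mode + extra args
    rustc_verbose_version: &str,  // output of `rustc -vV`
) -> u64 {
    let mut hasher = SipHasher::new_with_keys(0, 0);
    pkg_stable_hash.hash(&mut hasher);
    features.hash(&mut hasher);
    profile_hash.hash(&mut hasher);
    rustc_verbose_version.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let a = metadata_hash(1, &["serde"], 7, "rustc 1.26.0");
    let b = metadata_hash(1, &["serde", "rayon"], 7, "rustc 1.26.0");
    // Adding a feature changes the metadata, forcing a separate cached artifact.
    assert_ne!(a, b);
}
```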
@@ -70,58 +63,24 @@ pub struct Unit<'a> { pub mode: CompileMode, } -/// The build context, containing all information about a build task pub struct Context<'a, 'cfg: 'a> { - /// The workspace the build is for - pub ws: &'a Workspace<'cfg>, - /// The cargo configuration - pub config: &'cfg Config, - /// The dependency graph for our build - pub resolve: &'a Resolve, - /// Information on the compilation output + pub bcx: &'a BuildContext<'a, 'cfg>, pub compilation: Compilation<'cfg>, - pub packages: &'a PackageSet<'cfg>, pub build_state: Arc, pub build_script_overridden: HashSet<(PackageId, Kind)>, pub build_explicit_deps: HashMap, BuildDeps>, pub fingerprints: HashMap, Arc>, pub compiled: HashSet>, - pub build_config: BuildConfig, pub build_scripts: HashMap, Arc>, pub links: Links<'a>, pub used_in_plugin: HashSet>, pub jobserver: Client, - pub profiles: &'a Profiles, - /// This is a workaround to carry the extra compiler args for either - /// `rustc` or `rustdoc` given on the command-line for the commands `cargo - /// rustc` and `cargo rustdoc`. These commands only support one target, - /// but we don't want the args passed to any dependencies, so we include - /// the `Unit` corresponding to the top-level target. - extra_compiler_args: Option<(Unit<'a>, Vec)>, - - target_info: TargetInfo, - host_info: TargetInfo, - incremental_env: Option, - unit_dependencies: HashMap, Vec>>, files: Option>, } impl<'a, 'cfg> Context<'a, 'cfg> { - pub fn new( - ws: &'a Workspace<'cfg>, - resolve: &'a Resolve, - packages: &'a PackageSet<'cfg>, - config: &'cfg Config, - build_config: BuildConfig, - profiles: &'a Profiles, - extra_compiler_args: Option<(Unit<'a>, Vec)>, - ) -> CargoResult> { - let incremental_env = match env::var("CARGO_INCREMENTAL") { - Ok(v) => Some(v == "1"), - Err(_) => None, - }; - + pub fn new(config: &'cfg Config, bcx: &'a BuildContext<'a, 'cfg>) -> CargoResult { // Load up the jobserver that we'll use to manage our parallelism. This // is the same as the GNU make implementation of a jobserver, and // intentionally so! It's hoped that we can interact with GNU make and @@ -132,47 +91,29 @@ impl<'a, 'cfg> Context<'a, 'cfg> { // is ourself, a running process. 
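The struct change above is the heart of this refactor: `Context` no longer owns the workspace-wide data and instead borrows it through `bcx`, keeping only mutable per-run state for itself. A toy illustration of that ownership split, with hypothetical names rather than cargo's types:

```rust
// Hypothetical miniature of the split: BuildCtx holds the immutable whole-build
// inputs, Ctx holds mutable state for one compilation run and borrows BuildCtx.
struct BuildCtx {
    jobs: u32,
}

struct Ctx<'a> {
    bcx: &'a BuildCtx,
    compiled: Vec<String>,
}

impl<'a> Ctx<'a> {
    fn new(bcx: &'a BuildCtx) -> Ctx<'a> {
        Ctx { bcx, compiled: Vec::new() }
    }

    fn record(&mut self, unit: &str) {
        // Mutable bookkeeping lives on Ctx; shared configuration is read through bcx.
        self.compiled.push(format!("{} (jobs={})", unit, self.bcx.jobs));
    }
}

fn main() {
    let bcx = BuildCtx { jobs: 4 };
    let mut cx = Ctx::new(&bcx);
    cx.record("foo v0.1.0");
    println!("{:?}", cx.compiled);
}
```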
let jobserver = match config.jobserver_from_env() { Some(c) => c.clone(), - None => Client::new(build_config.jobs as usize - 1) + None => Client::new(bcx.build_config.jobs as usize - 1) .chain_err(|| "failed to create jobserver")?, }; - let (host_info, target_info) = { - let _p = profile::start("Context::probe_target_info"); - debug!("probe_target_info"); - let host_info = TargetInfo::new(config, &build_config, Kind::Host)?; - let target_info = TargetInfo::new(config, &build_config, Kind::Target)?; - (host_info, target_info) - }; - - let mut cx = Context { - ws, - resolve, - packages, - config, - target_info, - host_info, - compilation: Compilation::new(config, build_config.rustc.process()), - build_state: Arc::new(BuildState::new(&build_config)), - build_config, + let mut compilation = Compilation::new(config, bcx.build_config.rustc.process()); + compilation.host_dylib_path = bcx.host_info.sysroot_libdir.clone(); + compilation.target_dylib_path = bcx.target_info.sysroot_libdir.clone(); + Ok(Self { + bcx, + compilation, + build_state: Arc::new(BuildState::new(&bcx.build_config)), fingerprints: HashMap::new(), - profiles, compiled: HashSet::new(), build_scripts: HashMap::new(), build_explicit_deps: HashMap::new(), links: Links::new(), used_in_plugin: HashSet::new(), - incremental_env, jobserver, build_script_overridden: HashSet::new(), unit_dependencies: HashMap::new(), files: None, - extra_compiler_args, - }; - - cx.compilation.host_dylib_path = cx.host_info.sysroot_libdir.clone(); - cx.compilation.target_dylib_path = cx.target_info.sysroot_libdir.clone(); - Ok(cx) + }) } // Returns a mapping of the root package plus its immediate dependencies to @@ -183,7 +124,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { export_dir: Option, exec: &Arc, ) -> CargoResult> { - let mut queue = JobQueue::new(&self); + let mut queue = JobQueue::new(self.bcx); self.prepare_units(export_dir, units)?; self.prepare()?; self.build_used_in_plugin_map(units)?; @@ -264,7 +205,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { ); } - let feats = self.resolve.features(unit.pkg.package_id()); + let feats = self.bcx.resolve.features(unit.pkg.package_id()); if !feats.is_empty() { self.compilation .cfgs @@ -276,7 +217,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { .collect() }); } - let rustdocflags = self.rustdocflags_args(unit)?; + let rustdocflags = self.bcx.rustdocflags_args(unit)?; if !rustdocflags.is_empty() { self.compilation .rustdocflags @@ -304,8 +245,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> { self.compilation.native_dirs.insert(dir.clone()); } } - self.compilation.host = self.build_config.host_triple().to_string(); - self.compilation.target = self.build_config.target_triple().to_string(); + self.compilation.host = self.bcx.build_config.host_triple().to_string(); + self.compilation.target = self.bcx.build_config.target_triple().to_string(); Ok(self.compilation) } @@ -314,21 +255,26 @@ impl<'a, 'cfg> Context<'a, 'cfg> { export_dir: Option, units: &[Unit<'a>], ) -> CargoResult<()> { - let dest = if self.build_config.release { + let dest = if self.bcx.build_config.release { "release" } else { "debug" }; - let host_layout = Layout::new(self.ws, None, dest)?; - let target_layout = match self.build_config.requested_target.as_ref() { - Some(target) => Some(Layout::new(self.ws, Some(target), dest)?), + let host_layout = Layout::new(self.bcx.ws, None, dest)?; + let target_layout = match self.bcx.build_config.requested_target.as_ref() { + Some(target) => Some(Layout::new(self.bcx.ws, Some(target), dest)?), None => None, }; - let deps = 
build_unit_dependencies(units, self)?; - self.unit_dependencies = deps; - let files = - CompilationFiles::new(units, host_layout, target_layout, export_dir, self.ws, self); + build_unit_dependencies(units, self.bcx, &mut self.unit_dependencies)?; + let files = CompilationFiles::new( + units, + host_layout, + target_layout, + export_dir, + self.bcx.ws, + self, + ); self.files = Some(files); Ok(()) } @@ -403,7 +349,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { /// - link_dst: Optional file to link/copy the result to (without metadata suffix) /// - linkable: Whether possible to link against file (eg it's a library) pub fn outputs(&mut self, unit: &Unit<'a>) -> CargoResult>> { - self.files.as_ref().unwrap().outputs(unit, self) + self.files.as_ref().unwrap().outputs(unit, self.bcx) } /// For a package, return all targets which are registered as dependencies @@ -429,85 +375,6 @@ impl<'a, 'cfg> Context<'a, 'cfg> { self.unit_dependencies[unit].clone() } - pub fn extern_crate_name(&self, unit: &Unit<'a>, dep: &Unit<'a>) -> CargoResult { - let deps = { - let a = unit.pkg.package_id(); - let b = dep.pkg.package_id(); - if a == b { - &[] - } else { - self.resolve.dependencies_listed(a, b) - } - }; - - let crate_name = dep.target.crate_name(); - let mut names = deps.iter() - .map(|d| d.rename().unwrap_or(&crate_name)); - let name = names.next().unwrap_or(&crate_name); - for n in names { - if n == name { - continue - } - bail!("multiple dependencies listed for the same crate must \ - all have the same name, but the dependency on `{}` \ - is listed as having different names", dep.pkg.package_id()); - } - Ok(name.to_string()) - } - - /// Whether a dependency should be compiled for the host or target platform, - /// specified by `Kind`. - fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool { - // If this dependency is only available for certain platforms, - // make sure we're only enabling it for that platform. - let platform = match dep.platform() { - Some(p) => p, - None => return true, - }; - let (name, info) = match kind { - Kind::Host => (self.build_config.host_triple(), &self.host_info), - Kind::Target => (self.build_config.target_triple(), &self.target_info), - }; - platform.matches(name, info.cfg()) - } - - /// Gets a package for the given package id. - pub fn get_package(&self, id: &PackageId) -> CargoResult<&'a Package> { - self.packages.get(id) - } - - /// Get the user-specified linker for a particular host or target - pub fn linker(&self, kind: Kind) -> Option<&Path> { - self.target_config(kind).linker.as_ref().map(|s| s.as_ref()) - } - - /// Get the user-specified `ar` program for a particular host or target - pub fn ar(&self, kind: Kind) -> Option<&Path> { - self.target_config(kind).ar.as_ref().map(|s| s.as_ref()) - } - - /// Get the list of cfg printed out from the compiler for the specified kind - pub fn cfg(&self, kind: Kind) -> &[Cfg] { - let info = match kind { - Kind::Host => &self.host_info, - Kind::Target => &self.target_info, - }; - info.cfg().unwrap_or(&[]) - } - - /// Get the target configuration for a particular host or target - fn target_config(&self, kind: Kind) -> &TargetConfig { - match kind { - Kind::Host => &self.build_config.host, - Kind::Target => &self.build_config.target, - } - } - - /// Number of jobs specified for this build - pub fn jobs(&self) -> u32 { - self.build_config.jobs - } - pub fn incremental_args(&self, unit: &Unit) -> CargoResult> { // There's a number of ways to configure incremental compilation right // now. 
In order of descending priority (first is highest priority) we @@ -530,8 +397,15 @@ impl<'a, 'cfg> Context<'a, 'cfg> { // incremental compilation or not. Primarily development profiles // have it enabled by default while release profiles have it disabled // by default. - let global_cfg = self.config.get_bool("build.incremental")?.map(|c| c.val); - let incremental = match (self.incremental_env, global_cfg, unit.profile.incremental) { + let global_cfg = self.bcx + .config + .get_bool("build.incremental")? + .map(|c| c.val); + let incremental = match ( + self.bcx.incremental_env, + global_cfg, + unit.profile.incremental, + ) { (Some(v), _, _) => v, (None, Some(false), _) => false, (None, _, other) => other, @@ -554,173 +428,70 @@ impl<'a, 'cfg> Context<'a, 'cfg> { let dir = self.files().layout(unit.kind).incremental().display(); Ok(vec!["-C".to_string(), format!("incremental={}", dir)]) } +} - pub fn rustflags_args(&self, unit: &Unit) -> CargoResult> { - env_args( - self.config, - &self.build_config, - self.info(&unit.kind).cfg(), - unit.kind, - "RUSTFLAGS", - ) - } - - pub fn rustdocflags_args(&self, unit: &Unit) -> CargoResult> { - env_args( - self.config, - &self.build_config, - self.info(&unit.kind).cfg(), - unit.kind, - "RUSTDOCFLAGS", - ) - } - - pub fn show_warnings(&self, pkg: &PackageId) -> bool { - pkg.source_id().is_path() || self.config.extra_verbose() - } +#[derive(Default)] +pub struct Links<'a> { + validated: HashSet<&'a PackageId>, + links: HashMap, +} - fn info(&self, kind: &Kind) -> &TargetInfo { - match *kind { - Kind::Host => &self.host_info, - Kind::Target => &self.target_info, +impl<'a> Links<'a> { + pub fn new() -> Links<'a> { + Links { + validated: HashSet::new(), + links: HashMap::new(), } } - pub fn extra_args_for(&self, unit: &Unit<'a>) -> Option<&Vec> { - if let Some((ref args_unit, ref args)) = self.extra_compiler_args { - if args_unit == unit { - return Some(args); - } + pub fn validate(&mut self, resolve: &Resolve, unit: &Unit<'a>) -> CargoResult<()> { + if !self.validated.insert(unit.pkg.package_id()) { + return Ok(()); } - None - } -} - -/// Acquire extra flags to pass to the compiler from various locations. -/// -/// The locations are: -/// -/// - the `RUSTFLAGS` environment variable -/// -/// then if this was not found -/// -/// - `target.*.rustflags` from the manifest (Cargo.toml) -/// - `target.cfg(..).rustflags` from the manifest -/// -/// then if neither of these were found -/// -/// - `build.rustflags` from the manifest -/// -/// Note that if a `target` is specified, no args will be passed to host code (plugins, build -/// scripts, ...), even if it is the same as the target. -fn env_args( - config: &Config, - build_config: &BuildConfig, - target_cfg: Option<&[Cfg]>, - kind: Kind, - name: &str, -) -> CargoResult> { - // We *want* to apply RUSTFLAGS only to builds for the - // requested target architecture, and not to things like build - // scripts and plugins, which may be for an entirely different - // architecture. Cargo's present architecture makes it quite - // hard to only apply flags to things that are not build - // scripts and plugins though, so we do something more hacky - // instead to avoid applying the same RUSTFLAGS to multiple targets - // arches: - // - // 1) If --target is not specified we just apply RUSTFLAGS to - // all builds; they are all going to have the same target. 
- // - // 2) If --target *is* specified then we only apply RUSTFLAGS - // to compilation units with the Target kind, which indicates - // it was chosen by the --target flag. - // - // This means that, e.g. even if the specified --target is the - // same as the host, build scripts in plugins won't get - // RUSTFLAGS. - let compiling_with_target = build_config.requested_target.is_some(); - let is_target_kind = kind == Kind::Target; - - if compiling_with_target && !is_target_kind { - // This is probably a build script or plugin and we're - // compiling with --target. In this scenario there are - // no rustflags we can apply. - return Ok(Vec::new()); - } - - // First try RUSTFLAGS from the environment - if let Ok(a) = env::var(name) { - let args = a.split(' ') - .map(str::trim) - .filter(|s| !s.is_empty()) - .map(str::to_string); - return Ok(args.collect()); - } - - let mut rustflags = Vec::new(); - - let name = name.chars() - .flat_map(|c| c.to_lowercase()) - .collect::(); - // Then the target.*.rustflags value... - let target = build_config - .requested_target - .as_ref() - .map(|s| s.as_str()) - .unwrap_or(build_config.host_triple()); - let key = format!("target.{}.{}", target, name); - if let Some(args) = config.get_list_or_split_string(&key)? { - let args = args.val.into_iter(); - rustflags.extend(args); - } - // ...including target.'cfg(...)'.rustflags - if let Some(target_cfg) = target_cfg { - if let Some(table) = config.get_table("target")? { - let cfgs = table.val.keys().filter_map(|t| { - if t.starts_with("cfg(") && t.ends_with(')') { - let cfg = &t[4..t.len() - 1]; - CfgExpr::from_str(cfg).ok().and_then(|c| { - if c.matches(target_cfg) { - Some(t) - } else { - None - } - }) - } else { - None - } - }); - - // Note that we may have multiple matching `[target]` sections and - // because we're passing flags to the compiler this can affect - // cargo's caching and whether it rebuilds. Ensure a deterministic - // ordering through sorting for now. We may perhaps one day wish to - // ensure a deterministic ordering via the order keys were defined - // in files perhaps. - let mut cfgs = cfgs.collect::>(); - cfgs.sort(); - - for n in cfgs { - let key = format!("target.{}.{}", n, name); - if let Some(args) = config.get_list_or_split_string(&key)? { - let args = args.val.into_iter(); - rustflags.extend(args); + let lib = match unit.pkg.manifest().links() { + Some(lib) => lib, + None => return Ok(()), + }; + if let Some(prev) = self.links.get(lib) { + let pkg = unit.pkg.package_id(); + + let describe_path = |pkgid: &PackageId| -> String { + let dep_path = resolve.path_to_top(pkgid); + let mut dep_path_desc = format!("package `{}`", dep_path[0]); + for dep in dep_path.iter().skip(1) { + write!(dep_path_desc, "\n ... 
which is depended on by `{}`", dep).unwrap(); } - } + dep_path_desc + }; + + bail!( + "multiple packages link to native library `{}`, \ + but a native library can be linked only once\n\ + \n\ + {}\nlinks to native library `{}`\n\ + \n\ + {}\nalso links to native library `{}`", + lib, + describe_path(prev), + lib, + describe_path(pkg), + lib + ) } + if !unit.pkg + .manifest() + .targets() + .iter() + .any(|t| t.is_custom_build()) + { + bail!( + "package `{}` specifies that it links to `{}` but does not \ + have a custom build script", + unit.pkg.package_id(), + lib + ) + } + self.links.insert(lib.to_string(), unit.pkg.package_id()); + Ok(()) } - - if !rustflags.is_empty() { - return Ok(rustflags); - } - - // Then the build.rustflags value - let key = format!("build.{}", name); - if let Some(args) = config.get_list_or_split_string(&key)? { - let args = args.val.into_iter(); - return Ok(args.collect()); - } - - Ok(Vec::new()) } diff --git a/src/cargo/core/compiler/context/unit_dependencies.rs b/src/cargo/core/compiler/context/unit_dependencies.rs index 84ee755b732..ee66f62d970 100644 --- a/src/cargo/core/compiler/context/unit_dependencies.rs +++ b/src/cargo/core/compiler/context/unit_dependencies.rs @@ -15,7 +15,7 @@ //! (for example, with and without tests), so we actually build a dependency //! graph of `Unit`s, which capture these properties. -use super::{Context, Kind, Unit}; +use super::{BuildContext, Kind, Unit}; use core::dependency::Kind as DepKind; use core::profiles::ProfileFor; use core::{Package, Target}; @@ -25,9 +25,9 @@ use CargoResult; pub fn build_unit_dependencies<'a, 'cfg>( roots: &[Unit<'a>], - cx: &Context<'a, 'cfg>, -) -> CargoResult, Vec>>> { - let mut deps = HashMap::new(); + bcx: &BuildContext<'a, 'cfg>, + mut deps: &mut HashMap, Vec>>, +) -> CargoResult<()> { for unit in roots.iter() { // Dependencies of tests/benches should not have `panic` set. // We check the global test mode to see if we are running in `cargo @@ -35,20 +35,20 @@ pub fn build_unit_dependencies<'a, 'cfg>( // cleared, and avoid building the lib thrice (once with `panic`, once // without, once for --test). In particular, the lib included for // doctests and examples are `Build` mode here. - let profile_for = if unit.mode.is_any_test() || cx.build_config.test { + let profile_for = if unit.mode.is_any_test() || bcx.build_config.test { ProfileFor::TestDependency } else { ProfileFor::Any }; - deps_of(unit, cx, &mut deps, profile_for)?; + deps_of(unit, bcx, &mut deps, profile_for)?; } - Ok(deps) + Ok(()) } fn deps_of<'a, 'b, 'cfg>( unit: &Unit<'a>, - cx: &Context<'a, 'cfg>, + bcx: &BuildContext<'a, 'cfg>, deps: &'b mut HashMap, Vec>>, profile_for: ProfileFor, ) -> CargoResult<&'b [Unit<'a>]> { @@ -59,11 +59,11 @@ fn deps_of<'a, 'b, 'cfg>( // requested unit's settings are the same as `Any`, `CustomBuild` can't // affect anything else in the hierarchy. if !deps.contains_key(unit) { - let unit_deps = compute_deps(unit, cx, deps, profile_for)?; + let unit_deps = compute_deps(unit, bcx, deps, profile_for)?; let to_insert: Vec<_> = unit_deps.iter().map(|&(unit, _)| unit).collect(); deps.insert(*unit, to_insert); for (unit, profile_for) in unit_deps { - deps_of(&unit, cx, deps, profile_for)?; + deps_of(&unit, bcx, deps, profile_for)?; } } Ok(deps[unit].as_ref()) @@ -75,19 +75,19 @@ fn deps_of<'a, 'b, 'cfg>( /// is the profile type that should be used for dependencies of the unit. 
fn compute_deps<'a, 'b, 'cfg>( unit: &Unit<'a>, - cx: &Context<'a, 'cfg>, + bcx: &BuildContext<'a, 'cfg>, deps: &'b mut HashMap, Vec>>, profile_for: ProfileFor, ) -> CargoResult, ProfileFor)>> { if unit.mode.is_run_custom_build() { - return compute_deps_custom_build(unit, cx, deps); + return compute_deps_custom_build(unit, bcx, deps); } else if unit.mode.is_doc() && !unit.mode.is_any_test() { // Note: This does not include Doctest. - return compute_deps_doc(unit, cx); + return compute_deps_doc(unit, bcx); } let id = unit.pkg.package_id(); - let deps = cx.resolve.deps(id); + let deps = bcx.resolve.deps(id); let mut ret = deps.filter(|&(_id, deps)| { assert!(deps.len() > 0); deps.iter().any(|dep| { @@ -108,13 +108,13 @@ fn compute_deps<'a, 'b, 'cfg>( // If this dependency is only available for certain platforms, // make sure we're only enabling it for that platform. - if !cx.dep_platform_activated(dep, unit.kind) { + if !bcx.dep_platform_activated(dep, unit.kind) { return false; } // If the dependency is optional, then we're only activating it // if the corresponding feature was activated - if dep.is_optional() && !cx.resolve.features(id).contains(&*dep.name()) { + if dep.is_optional() && !bcx.resolve.features(id).contains(&*dep.name()) { return false; } @@ -122,10 +122,10 @@ fn compute_deps<'a, 'b, 'cfg>( // actually used! true }) - }).filter_map(|(id, _)| match cx.get_package(id) { + }).filter_map(|(id, _)| match bcx.get_package(id) { Ok(pkg) => pkg.targets().iter().find(|t| t.is_lib()).map(|t| { let mode = check_or_build_mode(&unit.mode, t); - let unit = new_unit(cx, pkg, t, profile_for, unit.kind.for_target(t), mode); + let unit = new_unit(bcx, pkg, t, profile_for, unit.kind.for_target(t), mode); Ok((unit, profile_for)) }), Err(e) => Some(Err(e)), @@ -138,7 +138,7 @@ fn compute_deps<'a, 'b, 'cfg>( if unit.target.is_custom_build() { return Ok(ret); } - ret.extend(dep_build_script(unit, cx)); + ret.extend(dep_build_script(unit, bcx)); // If this target is a binary, test, example, etc, then it depends on // the library of the same package. The call to `resolve.deps` above @@ -147,7 +147,7 @@ fn compute_deps<'a, 'b, 'cfg>( if unit.target.is_lib() && unit.mode != CompileMode::Doctest { return Ok(ret); } - ret.extend(maybe_lib(unit, cx, profile_for)); + ret.extend(maybe_lib(unit, bcx, profile_for)); Ok(ret) } @@ -158,7 +158,7 @@ fn compute_deps<'a, 'b, 'cfg>( /// the returned set of units must all be run before `unit` is run. 
fn compute_deps_custom_build<'a, 'cfg>( unit: &Unit<'a>, - cx: &Context<'a, 'cfg>, + bcx: &BuildContext<'a, 'cfg>, deps: &mut HashMap, Vec>>, ) -> CargoResult, ProfileFor)>> { // When not overridden, then the dependencies to run a build script are: @@ -178,17 +178,17 @@ fn compute_deps_custom_build<'a, 'cfg>( kind: unit.kind, mode: CompileMode::Build, }; - let deps = deps_of(&tmp, cx, deps, ProfileFor::Any)?; + let deps = deps_of(&tmp, bcx, deps, ProfileFor::Any)?; Ok(deps.iter() .filter_map(|unit| { if !unit.target.linkable() || unit.pkg.manifest().links().is_none() { return None; } - dep_build_script(unit, cx) + dep_build_script(unit, bcx) }) .chain(Some(( new_unit( - cx, + bcx, unit.pkg, unit.target, ProfileFor::CustomBuild, @@ -205,17 +205,17 @@ fn compute_deps_custom_build<'a, 'cfg>( /// Returns the dependencies necessary to document a package fn compute_deps_doc<'a, 'cfg>( unit: &Unit<'a>, - cx: &Context<'a, 'cfg>, + bcx: &BuildContext<'a, 'cfg>, ) -> CargoResult, ProfileFor)>> { - let deps = cx.resolve + let deps = bcx.resolve .deps(unit.pkg.package_id()) .filter(|&(_id, deps)| { deps.iter().any(|dep| match dep.kind() { - DepKind::Normal => cx.dep_platform_activated(dep, unit.kind), + DepKind::Normal => bcx.dep_platform_activated(dep, unit.kind), _ => false, }) }) - .map(|(id, _deps)| cx.get_package(id)); + .map(|(id, _deps)| bcx.get_package(id)); // To document a library, we depend on dependencies actually being // built. If we're documenting *all* libraries, then we also depend on @@ -231,7 +231,7 @@ fn compute_deps_doc<'a, 'cfg>( // However, for plugins/proc-macros, deps should be built like normal. let mode = check_or_build_mode(&unit.mode, lib); let lib_unit = new_unit( - cx, + bcx, dep, lib, ProfileFor::Any, @@ -242,7 +242,7 @@ fn compute_deps_doc<'a, 'cfg>( if let CompileMode::Doc { deps: true } = unit.mode { // Document this lib as well. let doc_unit = new_unit( - cx, + bcx, dep, lib, ProfileFor::Any, @@ -254,23 +254,23 @@ fn compute_deps_doc<'a, 'cfg>( } // Be sure to build/run the build script for documented libraries as - ret.extend(dep_build_script(unit, cx)); + ret.extend(dep_build_script(unit, bcx)); // If we document a binary, we need the library available if unit.target.is_bin() { - ret.extend(maybe_lib(unit, cx, ProfileFor::Any)); + ret.extend(maybe_lib(unit, bcx, ProfileFor::Any)); } Ok(ret) } fn maybe_lib<'a>( unit: &Unit<'a>, - cx: &Context, + bcx: &BuildContext, profile_for: ProfileFor, ) -> Option<(Unit<'a>, ProfileFor)> { let mode = check_or_build_mode(&unit.mode, unit.target); unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| { - let unit = new_unit(cx, unit.pkg, t, profile_for, unit.kind.for_target(t), mode); + let unit = new_unit(bcx, unit.pkg, t, profile_for, unit.kind.for_target(t), mode); (unit, profile_for) }) } @@ -282,7 +282,7 @@ fn maybe_lib<'a>( /// script itself doesn't have any dependencies, so even in that case a unit /// of work is still returned. `None` is only returned if the package has no /// build script. 
-fn dep_build_script<'a>(unit: &Unit<'a>, cx: &Context) -> Option<(Unit<'a>, ProfileFor)> { +fn dep_build_script<'a>(unit: &Unit<'a>, bcx: &BuildContext) -> Option<(Unit<'a>, ProfileFor)> { unit.pkg .targets() .iter() @@ -294,7 +294,7 @@ fn dep_build_script<'a>(unit: &Unit<'a>, cx: &Context) -> Option<(Unit<'a>, Prof Unit { pkg: unit.pkg, target: t, - profile: cx.profiles.get_profile_run_custom_build(&unit.profile), + profile: bcx.profiles.get_profile_run_custom_build(&unit.profile), kind: unit.kind, mode: CompileMode::RunCustomBuild, }, @@ -322,19 +322,19 @@ fn check_or_build_mode(mode: &CompileMode, target: &Target) -> CompileMode { } fn new_unit<'a>( - cx: &Context, + bcx: &BuildContext, pkg: &'a Package, target: &'a Target, profile_for: ProfileFor, kind: Kind, mode: CompileMode, ) -> Unit<'a> { - let profile = cx.profiles.get_profile( + let profile = bcx.profiles.get_profile( &pkg.name(), - cx.ws.is_member(pkg), + bcx.ws.is_member(pkg), profile_for, mode, - cx.build_config.release, + bcx.build_config.release, ); Unit { pkg, diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs index 59cb9b81aee..b23b1256d66 100644 --- a/src/cargo/core/compiler/custom_build.rs +++ b/src/cargo/core/compiler/custom_build.rs @@ -103,6 +103,7 @@ pub fn prepare<'a, 'cfg>( fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<(Work, Work)> { assert!(unit.mode.is_run_custom_build()); + let bcx = &cx.bcx; let dependencies = cx.dep_targets(unit); let build_script_unit = dependencies .iter() @@ -126,30 +127,30 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes let debug = unit.profile.debuginfo.unwrap_or(0) != 0; cmd.env("OUT_DIR", &build_output) .env("CARGO_MANIFEST_DIR", unit.pkg.root()) - .env("NUM_JOBS", &cx.jobs().to_string()) + .env("NUM_JOBS", &bcx.jobs().to_string()) .env( "TARGET", &match unit.kind { - Kind::Host => &cx.build_config.host_triple(), - Kind::Target => cx.build_config.target_triple(), + Kind::Host => &bcx.build_config.host_triple(), + Kind::Target => bcx.build_config.target_triple(), }, ) .env("DEBUG", debug.to_string()) .env("OPT_LEVEL", &unit.profile.opt_level.to_string()) .env( "PROFILE", - if cx.build_config.release { + if bcx.build_config.release { "release" } else { "debug" }, ) - .env("HOST", &cx.build_config.host_triple()) - .env("RUSTC", &cx.build_config.rustc.path) - .env("RUSTDOC", &*cx.config.rustdoc()?) + .env("HOST", &bcx.build_config.host_triple()) + .env("RUSTC", &bcx.build_config.rustc.path) + .env("RUSTDOC", &*bcx.config.rustdoc()?) .inherit_jobserver(&cx.jobserver); - if let Some(ref linker) = cx.build_config.target.linker { + if let Some(ref linker) = bcx.build_config.target.linker { cmd.env("RUSTC_LINKER", linker); } @@ -159,12 +160,12 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes // Be sure to pass along all enabled features for this package, this is the // last piece of statically known information that we have. 
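For reference, here is what the environment set up in `build_work` above (including the `CARGO_FEATURE_*` variables emitted in the loop that follows) looks like from the receiving end. A minimal `build.rs` sketch that consumes a few of those variables; the generated file and its contents are purely illustrative.

```rust
// build.rs (sketch): read a few of the environment variables cargo sets for
// build scripts, as wired up in build_work above.
use std::env;
use std::fs;
use std::path::PathBuf;

fn main() {
    let out_dir = PathBuf::from(env::var("OUT_DIR").expect("cargo sets OUT_DIR"));
    let target = env::var("TARGET").unwrap_or_default();
    let profile = env::var("PROFILE").unwrap_or_default();
    // CARGO_FEATURE_<NAME> is set to "1" for every enabled feature.
    let with_foo = env::var_os("CARGO_FEATURE_FOO").is_some();

    let generated = format!(
        "pub const TARGET: &'static str = {:?};\npub const PROFILE: &'static str = {:?};\npub const WITH_FOO: bool = {};\n",
        target, profile, with_foo
    );
    fs::write(out_dir.join("build_info.rs"), generated).expect("write generated file");
}
```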
- for feat in cx.resolve.features(unit.pkg.package_id()).iter() { + for feat in bcx.resolve.features(unit.pkg.package_id()).iter() { cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1"); } let mut cfg_map = HashMap::new(); - for cfg in cx.cfg(unit.kind) { + for cfg in bcx.cfg(unit.kind) { match *cfg { Cfg::Name(ref n) => { cfg_map.insert(n.clone(), None); @@ -230,7 +231,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes ); let build_scripts = super::load_build_deps(cx, unit); let kind = unit.kind; - let json_messages = cx.build_config.json_messages; + let json_messages = bcx.build_config.json_messages; // Check to see if the build script has already run, and if it has keep // track of whether it has told us about some explicit dependencies diff --git a/src/cargo/core/compiler/fingerprint.rs b/src/cargo/core/compiler/fingerprint.rs index 73bd44bf3eb..78b18af265f 100644 --- a/src/cargo/core/compiler/fingerprint.rs +++ b/src/cargo/core/compiler/fingerprint.rs @@ -15,7 +15,7 @@ use util::errors::{CargoResult, CargoResultExt}; use util::paths; use util::{internal, profile, Dirty, Fresh, Freshness}; -use super::context::{Context, FileFlavor, Unit}; +use super::{Context, BuildContext, FileFlavor, Unit}; use super::custom_build::BuildDeps; use super::job::Work; @@ -56,6 +56,7 @@ pub fn prepare_target<'a, 'cfg>( unit.pkg.package_id(), unit.target.name() )); + let bcx = cx.bcx; let new = cx.files().fingerprint_dir(unit); let loc = new.join(&filename(cx, unit)); @@ -77,7 +78,7 @@ pub fn prepare_target<'a, 'cfg>( // changed then an error is issued. if compare.is_err() { let source_id = unit.pkg.package_id().source_id(); - let sources = cx.packages.sources(); + let sources = bcx.packages.sources(); let source = sources .get(source_id) .ok_or_else(|| internal("missing package source"))?; @@ -102,7 +103,7 @@ pub fn prepare_target<'a, 'cfg>( } } - let allow_failure = cx.extra_args_for(unit).is_some(); + let allow_failure = bcx.extra_args_for(unit).is_some(); let target_root = cx.files().target_root().to_path_buf(); let write_fingerprint = Work::new(move |_| { match fingerprint.update_local(&target_root) { @@ -414,6 +415,7 @@ fn calculate<'a, 'cfg>( cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>, ) -> CargoResult> { + let bcx = cx.bcx; if let Some(s) = cx.fingerprints.get(unit) { return Ok(Arc::clone(s)); } @@ -430,7 +432,7 @@ fn calculate<'a, 'cfg>( .filter(|u| !u.target.is_custom_build() && !u.target.is_bin()) .map(|dep| { calculate(cx, dep).and_then(|fingerprint| { - let name = cx.extern_crate_name(unit, dep)?; + let name = cx.bcx.extern_crate_name(unit, dep)?; Ok((dep.pkg.package_id().to_string(), name, fingerprint)) }) }) @@ -442,30 +444,30 @@ fn calculate<'a, 'cfg>( let mtime = dep_info_mtime_if_fresh(unit.pkg, &dep_info)?; LocalFingerprint::mtime(cx.files().target_root(), mtime, &dep_info) } else { - let fingerprint = pkg_fingerprint(cx, unit.pkg)?; + let fingerprint = pkg_fingerprint(&cx.bcx, unit.pkg)?; LocalFingerprint::Precalculated(fingerprint) }; let mut deps = deps; deps.sort_by(|&(ref a, _, _), &(ref b, _, _)| a.cmp(b)); let extra_flags = if unit.mode.is_doc() { - cx.rustdocflags_args(unit)? + bcx.rustdocflags_args(unit)? } else { - cx.rustflags_args(unit)? + bcx.rustflags_args(unit)? 
}; let profile_hash = util::hash_u64(&( &unit.profile, unit.mode, - cx.extra_args_for(unit), + bcx.extra_args_for(unit), cx.incremental_args(unit)?, )); let fingerprint = Arc::new(Fingerprint { - rustc: util::hash_u64(&cx.build_config.rustc.verbose_version), + rustc: util::hash_u64(&bcx.build_config.rustc.verbose_version), target: util::hash_u64(&unit.target), profile: profile_hash, // Note that .0 is hashed here, not .1 which is the cwd. That doesn't // actually affect the output artifact so there's no need to hash it. - path: util::hash_u64(&super::path_args(cx, unit).0), - features: format!("{:?}", cx.resolve.features_sorted(unit.pkg.package_id())), + path: util::hash_u64(&super::path_args(&cx.bcx, unit).0), + features: format!("{:?}", bcx.resolve.features_sorted(unit.pkg.package_id())), deps, local: vec![local], memoized_hash: Mutex::new(None), @@ -591,7 +593,7 @@ fn build_script_local_fingerprints<'a, 'cfg>( let output = deps.build_script_output.clone(); if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() { debug!("old local fingerprints deps"); - let s = pkg_fingerprint(cx, unit.pkg)?; + let s = pkg_fingerprint(&cx.bcx, unit.pkg)?; return Ok((vec![LocalFingerprint::Precalculated(s)], Some(output))); } @@ -705,9 +707,9 @@ fn dep_info_mtime_if_fresh(pkg: &Package, dep_info: &Path) -> CargoResult
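To summarize the `calculate` changes above: a unit's fingerprint is the combination of its recorded inputs, and the unit is only considered fresh when the newly computed fingerprint matches the one written by the previous build. An illustrative reduction of that comparison, with simplified field names rather than cargo's actual `Fingerprint` struct:

```rust
// Illustrative only: a stripped-down stand-in for fingerprint::Fingerprint.
#[derive(Debug, PartialEq)]
struct Fp {
    rustc: u64,        // hash of `rustc -vV`
    profile: u64,      // profile + mode + extra args
    path: u64,         // source path component
    features: String,  // enabled features, formatted
}

fn is_fresh(previous: Option<&Fp>, current: &Fp) -> bool {
    // Missing or mismatching fingerprint => the unit must be rebuilt.
    previous.map_or(false, |old| old == current)
}

fn main() {
    let old = Fp { rustc: 1, profile: 2, path: 3, features: "[]".to_string() };
    let same = Fp { rustc: 1, profile: 2, path: 3, features: "[]".to_string() };
    let changed = Fp { rustc: 9, profile: 2, path: 3, features: "[]".to_string() }; // compiler updated
    assert!(is_fresh(Some(&old), &same));
    assert!(!is_fresh(Some(&old), &changed));
    assert!(!is_fresh(None, &same)); // first build: nothing on disk, so not fresh
}
```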