From 1691104c84059b2a9a49f73266359881d0c146f7 Mon Sep 17 00:00:00 2001 From: Serhii Plyhun Date: Wed, 31 Jul 2019 14:53:04 +0200 Subject: [PATCH 01/21] Target features build, tests fail. --- crates/resolver-tests/src/lib.rs | 10 +- src/cargo/core/compiler/build_context/mod.rs | 20 +- src/cargo/core/compiler/context/mod.rs | 6 +- .../compiler/context/unit_dependencies.rs | 15 +- src/cargo/core/compiler/custom_build.rs | 2 +- src/cargo/core/compiler/mod.rs | 18 +- src/cargo/core/dependency.rs | 53 +- src/cargo/core/resolver/context.rs | 9 +- src/cargo/core/resolver/dep_cache.rs | 928 +++++++++--------- src/cargo/core/resolver/mod.rs | 8 +- src/cargo/core/resolver/resolve.rs | 16 +- src/cargo/core/resolver/types.rs | 20 +- src/cargo/core/summary.rs | 18 +- src/cargo/ops/cargo_compile.rs | 8 +- src/cargo/ops/registry.rs | 2 +- src/cargo/sources/registry/index.rs | 6 +- src/cargo/util/cfg.rs | 52 + src/cargo/util/mod.rs | 2 +- src/cargo/util/toml/mod.rs | 33 +- tests/testsuite/cfg_features.rs | 224 +++++ tests/testsuite/main.rs | 1 + tests/testsuite/metadata.rs | 13 +- 22 files changed, 879 insertions(+), 585 deletions(-) create mode 100644 tests/testsuite/cfg_features.rs diff --git a/crates/resolver-tests/src/lib.rs b/crates/resolver-tests/src/lib.rs index dce94689eb3..b410d56df0e 100644 --- a/crates/resolver-tests/src/lib.rs +++ b/crates/resolver-tests/src/lib.rs @@ -12,7 +12,7 @@ use cargo::core::resolver::{self, ResolveOpts}; use cargo::core::source::{GitReference, SourceId}; use cargo::core::Resolve; use cargo::core::{Dependency, PackageId, Registry, Summary}; -use cargo::util::{CargoResult, Config, Graph, IntoUrl}; +use cargo::util::{CargoResult, Config, Graph, IntoUrl, Platform}; use proptest::collection::{btree_map, vec}; use proptest::prelude::*; @@ -170,7 +170,7 @@ pub fn resolve_with_config_raw( let summary = Summary::new( pkg_id("root"), deps, - &BTreeMap::>::new(), + &BTreeMap::, Vec)>::new(), None::, false, ) @@ -571,7 +571,7 @@ pub fn pkg_dep(name: T, 
dep: Vec) -> Summary { Summary::new( name.to_pkgid(), dep, - &BTreeMap::>::new(), + &BTreeMap::, Vec)>::new(), link, false, ) @@ -599,7 +599,7 @@ pub fn pkg_loc(name: &str, loc: &str) -> Summary { Summary::new( pkg_id_loc(name, loc), Vec::new(), - &BTreeMap::>::new(), + &BTreeMap::, Vec)>::new(), link, false, ) @@ -613,7 +613,7 @@ pub fn remove_dep(sum: &Summary, ind: usize) -> Summary { Summary::new( sum.package_id(), deps, - &BTreeMap::>::new(), + &BTreeMap::, Vec)>::new(), sum.links().map(|a| a.as_str()), sum.namespaced_features(), ) diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs index 18956632152..3e67718fbfe 100644 --- a/src/cargo/core/compiler/build_context/mod.rs +++ b/src/cargo/core/compiler/build_context/mod.rs @@ -10,7 +10,7 @@ use crate::core::profiles::Profiles; use crate::core::{Dependency, Workspace}; use crate::core::{PackageId, PackageSet, Resolve}; use crate::util::errors::CargoResult; -use crate::util::{profile, Cfg, Config, Rustc}; +use crate::util::{profile, Cfg, Config, Platform, Rustc}; mod target_info; pub use self::target_info::{FileFlavor, TargetInfo}; @@ -95,12 +95,10 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> { .is_public_dep(unit.pkg.package_id(), dep.pkg.package_id()) } - /// Whether a dependency should be compiled for the host or target platform, + /// Whether a given platform matches the host or target platform, /// specified by `Kind`. - pub fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool { - // If this dependency is only available for certain platforms, - // make sure we're only enabling it for that platform. 
- let platform = match dep.platform() { + pub fn platform_activated(&self, platform: Option<&Platform>, kind: Kind) -> bool { + let platform = match platform { Some(p) => p, None => return true, }; @@ -111,7 +109,15 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> { platform.matches(name, info.cfg()) } - /// Gets the user-specified linker for a particular host or target. + /// Whether a dependency should be compiled for the host or target platform, + /// specified by `Kind`. + pub fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool { + // If this dependency is only available for certain platforms, + // make sure we're only enabling it for that platform. + self.platform_activated(dep.platform(), kind) + } + + /// Gets the user-specified linker for a particular host or target pub fn linker(&self, kind: Kind) -> Option<&Path> { self.target_config(kind).linker.as_ref().map(|s| s.as_ref()) } diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs index 9325c8c63e2..3cb75e344ba 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/context/mod.rs @@ -205,7 +205,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> { }); } - let feats = self.bcx.resolve.features(unit.pkg.package_id()); + let bcx = self.bcx; + let feats = bcx.resolve.features(unit.pkg.package_id()); if !feats.is_empty() { self.compilation .cfgs @@ -213,7 +214,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> { .or_insert_with(|| { feats .iter() - .map(|feat| format!("feature=\"{}\"", feat)) + .filter(|feat| bcx.platform_activated(feat.1.as_ref(), unit.kind)) + .map(|feat| format!("feature=\"{}\"", feat.0)) .collect() }); } diff --git a/src/cargo/core/compiler/context/unit_dependencies.rs b/src/cargo/core/compiler/context/unit_dependencies.rs index 6e80f31a195..6b68e202cec 100644 --- a/src/cargo/core/compiler/context/unit_dependencies.rs +++ b/src/cargo/core/compiler/context/unit_dependencies.rs @@ -162,6 +162,19 @@ fn compute_deps<'a, 'cfg, 'tmp>( return 
false; } + // If the dependency is optional, then we're only activating it + // if the corresponding feature was activated + if dep.is_optional() { + // Same for features this dependency is referenced + if let Some(platform) = bcx.resolve.features(id).get(&*dep.name_in_toml()) { + if !bcx.platform_activated(platform.as_ref(), unit.kind) { + return false; + } + } else { + return false; + } + } + // If we've gotten past all that, then this dependency is // actually used! true @@ -228,7 +241,7 @@ fn compute_deps<'a, 'cfg, 'tmp>( t.is_bin() && // Skip binaries with required features that have not been selected. t.required_features().unwrap_or(&no_required_features).iter().all(|f| { - bcx.resolve.features(id).contains(f) + bcx.resolve.features(id).contains_key(f) && bcx.platform_activated(bcx.resolve.features(id).get(f).unwrap().as_ref(), unit.kind) }) }) .map(|t| { diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs index 7978d1d480e..f991e961b3e 100644 --- a/src/cargo/core/compiler/custom_build.rs +++ b/src/cargo/core/compiler/custom_build.rs @@ -182,7 +182,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes // Be sure to pass along all enabled features for this package, this is the // last piece of statically known information that we have. 
for feat in bcx.resolve.features(unit.pkg.package_id()).iter() { - cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1"); + cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat.0)), "1"); } let mut cfg_map = HashMap::new(); diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index 4962e774506..a0a933918a0 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -631,8 +631,16 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult rustdoc.arg("-o").arg(doc_dir); + // Need to keep a correct order on the features, so get the sorted name first, + // then resolve the specified platform. for feat in bcx.resolve.features_sorted(unit.pkg.package_id()) { - rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); + if let Some(platform) = bcx.resolve.features(unit.pkg.package_id()).get(feat) { + if bcx.platform_activated(platform.as_ref(), unit.kind) { + rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); + } + } else { + bail!("Failed to get the target for the feature `{}`", feat); + } } add_error_format(cx, &mut rustdoc, false, false)?; @@ -922,7 +930,13 @@ fn build_base_args<'a, 'cfg>( // rustc-caching strategies like sccache are able to cache more, so sort the // feature list here. 
for feat in bcx.resolve.features_sorted(unit.pkg.package_id()) { - cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); + if let Some(platform) = bcx.resolve.features(unit.pkg.package_id()).get(feat) { + if bcx.platform_activated(platform.as_ref(), unit.kind) { + cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); + } + } else { + bail!("Failed to get the target for the feature `{}`", feat); + } } match cx.files().metadata(unit) { diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs index 1da54590ef4..27859338666 100644 --- a/src/cargo/core/dependency.rs +++ b/src/cargo/core/dependency.rs @@ -1,6 +1,4 @@ -use std::fmt; use std::rc::Rc; -use std::str::FromStr; use log::trace; use semver::ReqParseError; @@ -12,7 +10,7 @@ use url::Url; use crate::core::interning::InternedString; use crate::core::{PackageId, SourceId, Summary}; use crate::util::errors::{CargoResult, CargoResultExt}; -use crate::util::{Cfg, CfgExpr, Config}; +use crate::util::{Config, Platform}; /// Information about a dependency requested by a Cargo manifest. /// Cheap to copy. 
@@ -49,12 +47,6 @@ struct Inner { platform: Option, } -#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)] -pub enum Platform { - Name(String), - Cfg(CfgExpr), -} - #[derive(Serialize)] struct SerializedDependency<'a> { name: &'a str, @@ -457,46 +449,3 @@ impl Dependency { } } } - -impl Platform { - pub fn matches(&self, name: &str, cfg: &[Cfg]) -> bool { - match *self { - Platform::Name(ref p) => p == name, - Platform::Cfg(ref p) => p.matches(cfg), - } - } -} - -impl ser::Serialize for Platform { - fn serialize(&self, s: S) -> Result - where - S: ser::Serializer, - { - self.to_string().serialize(s) - } -} - -impl FromStr for Platform { - type Err = failure::Error; - - fn from_str(s: &str) -> CargoResult { - if s.starts_with("cfg(") && s.ends_with(')') { - let s = &s[4..s.len() - 1]; - let p = s.parse().map(Platform::Cfg).chain_err(|| { - failure::format_err!("failed to parse `{}` as a cfg expression", s) - })?; - Ok(p) - } else { - Ok(Platform::Name(s.to_string())) - } - } -} - -impl fmt::Display for Platform { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - Platform::Name(ref n) => n.fmt(f), - Platform::Cfg(ref e) => write!(f, "cfg({})", e), - } - } -} diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index 27b9a0585eb..2aed55806a9 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -13,7 +13,7 @@ use crate::util::CargoResult; use crate::util::Graph; use super::dep_cache::RegistryQueryer; -use super::types::{ConflictMap, FeaturesSet, ResolveOpts}; +use super::types::{ConflictMap, FeaturesMap, ResolveOpts}; pub use super::encode::Metadata; pub use super::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; @@ -27,7 +27,7 @@ pub use super::resolve::Resolve; pub struct Context { pub activations: Activations, /// list the features that are activated for each package - pub resolve_features: im_rc::HashMap, + pub resolve_features: 
im_rc::HashMap, /// get the package that will be linking to a native library by its links attribute pub links: im_rc::HashMap, /// for each package the list of names it can see, @@ -165,9 +165,10 @@ impl Context { let has_default_feature = summary.features().contains_key("default"); Ok(match self.resolve_features.get(&id) { Some(prev) => { - opts.features.is_subset(prev) + // opts.features.keys().is_subset(prev.keys()) does not work for BTreeMap::Keys + opts.features.keys().filter(|k| !prev.contains_key(k.as_str())).next().is_none() && (!opts.uses_default_features - || prev.contains("default") + || prev.contains_key("default") || !has_default_feature) } None => { diff --git a/src/cargo/core/resolver/dep_cache.rs b/src/cargo/core/resolver/dep_cache.rs index e20a78a66ae..c3ae84a171a 100644 --- a/src/cargo/core/resolver/dep_cache.rs +++ b/src/cargo/core/resolver/dep_cache.rs @@ -1,463 +1,465 @@ -//! There are 2 sources of facts for the resolver: -//! -//! - The `Registry` tells us for a `Dependency` what versions are available to fulfil it. -//! - The `Summary` tells us for a version (and features) what dependencies need to be fulfilled for it to be activated. -//! -//! These constitute immutable facts, the soled ground truth that all other inference depends on. -//! Theoretically this could all be enumerated ahead of time, but we want to be lazy and only -//! look up things we need to. The compromise is to cache the results as they are computed. -//! -//! 
This module impl that cache in all the gory details - -use std::cmp::Ordering; -use std::collections::{BTreeSet, HashMap, HashSet}; -use std::rc::Rc; - -use log::debug; - -use crate::core::interning::InternedString; -use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec, Registry, Summary}; -use crate::util::errors::CargoResult; - -use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesSet}; -use crate::core::resolver::{ActivateResult, ResolveOpts}; - -pub struct RegistryQueryer<'a> { - pub registry: &'a mut (dyn Registry + 'a), - replacements: &'a [(PackageIdSpec, Dependency)], - try_to_use: &'a HashSet, - /// If set the list of dependency candidates will be sorted by minimal - /// versions first. That allows `cargo update -Z minimal-versions` which will - /// specify minimum dependency versions to be used. - minimal_versions: bool, - /// a cache of `Candidate`s that fulfil a `Dependency` - registry_cache: HashMap>>, - /// a cache of `Dependency`s that are required for a `Summary` - summary_cache: HashMap< - (Option, Summary, ResolveOpts), - Rc<(HashSet, Rc>)>, - >, - /// all the cases we ended up using a supplied replacement - used_replacements: HashMap, -} - -impl<'a> RegistryQueryer<'a> { - pub fn new( - registry: &'a mut dyn Registry, - replacements: &'a [(PackageIdSpec, Dependency)], - try_to_use: &'a HashSet, - minimal_versions: bool, - ) -> Self { - RegistryQueryer { - registry, - replacements, - try_to_use, - minimal_versions, - registry_cache: HashMap::new(), - summary_cache: HashMap::new(), - used_replacements: HashMap::new(), - } - } - - pub fn used_replacement_for(&self, p: PackageId) -> Option<(PackageId, PackageId)> { - self.used_replacements.get(&p).map(|r| (p, r.package_id())) - } - - pub fn replacement_summary(&self, p: PackageId) -> Option<&Summary> { - self.used_replacements.get(&p) - } - - /// Queries the `registry` to return a list of candidates for `dep`. 
- /// - /// This method is the location where overrides are taken into account. If - /// any candidates are returned which match an override then the override is - /// applied by performing a second query for what the override should - /// return. - pub fn query(&mut self, dep: &Dependency) -> CargoResult>> { - if let Some(out) = self.registry_cache.get(dep).cloned() { - return Ok(out); - } - - let mut ret = Vec::new(); - self.registry.query( - dep, - &mut |s| { - ret.push(s); - }, - false, - )?; - for summary in ret.iter_mut() { - let mut potential_matches = self - .replacements - .iter() - .filter(|&&(ref spec, _)| spec.matches(summary.package_id())); - - let &(ref spec, ref dep) = match potential_matches.next() { - None => continue, - Some(replacement) => replacement, - }; - debug!( - "found an override for {} {}", - dep.package_name(), - dep.version_req() - ); - - let mut summaries = self.registry.query_vec(dep, false)?.into_iter(); - let s = summaries.next().ok_or_else(|| { - failure::format_err!( - "no matching package for override `{}` found\n\ - location searched: {}\n\ - version required: {}", - spec, - dep.source_id(), - dep.version_req() - ) - })?; - let summaries = summaries.collect::>(); - if !summaries.is_empty() { - let bullets = summaries - .iter() - .map(|s| format!(" * {}", s.package_id())) - .collect::>(); - failure::bail!( - "the replacement specification `{}` matched \ - multiple packages:\n * {}\n{}", - spec, - s.package_id(), - bullets.join("\n") - ); - } - - // The dependency should be hard-coded to have the same name and an - // exact version requirement, so both of these assertions should - // never fail. 
- assert_eq!(s.version(), summary.version()); - assert_eq!(s.name(), summary.name()); - - let replace = if s.source_id() == summary.source_id() { - debug!("Preventing\n{:?}\nfrom replacing\n{:?}", summary, s); - None - } else { - Some(s) - }; - let matched_spec = spec.clone(); - - // Make sure no duplicates - if let Some(&(ref spec, _)) = potential_matches.next() { - failure::bail!( - "overlapping replacement specifications found:\n\n \ - * {}\n * {}\n\nboth specifications match: {}", - matched_spec, - spec, - summary.package_id() - ); - } - - for dep in summary.dependencies() { - debug!("\t{} => {}", dep.package_name(), dep.version_req()); - } - if let Some(r) = replace { - self.used_replacements.insert(summary.package_id(), r); - } - } - - // When we attempt versions for a package we'll want to do so in a - // sorted fashion to pick the "best candidates" first. Currently we try - // prioritized summaries (those in `try_to_use`) and failing that we - // list everything from the maximum version to the lowest version. - ret.sort_unstable_by(|a, b| { - let a_in_previous = self.try_to_use.contains(&a.package_id()); - let b_in_previous = self.try_to_use.contains(&b.package_id()); - let previous_cmp = a_in_previous.cmp(&b_in_previous).reverse(); - match previous_cmp { - Ordering::Equal => { - let cmp = a.version().cmp(b.version()); - if self.minimal_versions { - // Lower version ordered first. - cmp - } else { - // Higher version ordered first. - cmp.reverse() - } - } - _ => previous_cmp, - } - }); - - let out = Rc::new(ret); - - self.registry_cache.insert(dep.clone(), out.clone()); - - Ok(out) - } - - /// Find out what dependencies will be added by activating `candidate`, - /// with features described in `opts`. Then look up in the `registry` - /// the candidates that will fulfil each of these dependencies, as it is the - /// next obvious question. 
- pub fn build_deps( - &mut self, - parent: Option, - candidate: &Summary, - opts: &ResolveOpts, - ) -> ActivateResult, Rc>)>> { - // if we have calculated a result before, then we can just return it, - // as it is a "pure" query of its arguments. - if let Some(out) = self - .summary_cache - .get(&(parent, candidate.clone(), opts.clone())) - .cloned() - { - return Ok(out); - } - // First, figure out our set of dependencies based on the requested set - // of features. This also calculates what features we're going to enable - // for our own dependencies. - let (used_features, deps) = resolve_features(parent, candidate, opts)?; - - // Next, transform all dependencies into a list of possible candidates - // which can satisfy that dependency. - let mut deps = deps - .into_iter() - .map(|(dep, features)| { - let candidates = self.query(&dep)?; - Ok((dep, candidates, features)) - }) - .collect::>>()?; - - // Attempt to resolve dependencies with fewer candidates before trying - // dependencies with more candidates. This way if the dependency with - // only one candidate can't be resolved we don't have to do a bunch of - // work before we figure that out. - deps.sort_by_key(|&(_, ref a, _)| a.len()); - - let out = Rc::new((used_features, Rc::new(deps))); - - // If we succeed we add the result to the cache so we can use it again next time. - // We dont cache the failure cases as they dont impl Clone. - self.summary_cache - .insert((parent, candidate.clone(), opts.clone()), out.clone()); - - Ok(out) - } -} - -/// Returns the features we ended up using and -/// all dependencies and the features we want from each of them. -pub fn resolve_features<'b>( - parent: Option, - s: &'b Summary, - opts: &'b ResolveOpts, -) -> ActivateResult<(HashSet, Vec<(Dependency, FeaturesSet)>)> { - // First, filter by dev-dependencies. 
- let deps = s.dependencies(); - let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps); - - let reqs = build_requirements(s, opts)?; - let mut ret = Vec::new(); - let mut used_features = HashSet::new(); - let default_dep = (false, BTreeSet::new()); - - // Next, collect all actually enabled dependencies and their features. - for dep in deps { - // Skip optional dependencies, but not those enabled through a - // feature - if dep.is_optional() && !reqs.deps.contains_key(&dep.name_in_toml()) { - continue; - } - // So we want this dependency. Move the features we want from - // `feature_deps` to `ret` and register ourselves as using this - // name. - let base = reqs.deps.get(&dep.name_in_toml()).unwrap_or(&default_dep); - used_features.insert(dep.name_in_toml()); - let always_required = !dep.is_optional() - && !s - .dependencies() - .iter() - .any(|d| d.is_optional() && d.name_in_toml() == dep.name_in_toml()); - if always_required && base.0 { - return Err(match parent { - None => failure::format_err!( - "Package `{}` does not have feature `{}`. It has a required dependency \ - with that name, but only optional dependencies can be used as features.", - s.package_id(), - dep.name_in_toml() - ) - .into(), - Some(p) => ( - p, - ConflictReason::RequiredDependencyAsFeatures(dep.name_in_toml()), - ) - .into(), - }); - } - let mut base = base.1.clone(); - base.extend(dep.features().iter()); - for feature in base.iter() { - if feature.contains('/') { - return Err(failure::format_err!( - "feature names may not contain slashes: `{}`", - feature - ) - .into()); - } - } - ret.push((dep.clone(), Rc::new(base))); - } - - // Any entries in `reqs.dep` which weren't used are bugs in that the - // package does not actually have those dependencies. We classified - // them as dependencies in the first place because there is no such - // feature, either. 
- let remaining = reqs - .deps - .keys() - .cloned() - .filter(|s| !used_features.contains(s)) - .collect::>(); - if !remaining.is_empty() { - let features = remaining.join(", "); - return Err(match parent { - None => failure::format_err!( - "Package `{}` does not have these features: `{}`", - s.package_id(), - features - ) - .into(), - Some(p) => (p, ConflictReason::MissingFeatures(features)).into(), - }); - } - - Ok((reqs.into_used(), ret)) -} - -/// Takes requested features for a single package from the input `ResolveOpts` and -/// recurses to find all requested features, dependencies and requested -/// dependency features in a `Requirements` object, returning it to the resolver. -fn build_requirements<'a, 'b: 'a>( - s: &'a Summary, - opts: &'b ResolveOpts, -) -> CargoResult> { - let mut reqs = Requirements::new(s); - - if opts.all_features { - for key in s.features().keys() { - reqs.require_feature(*key)?; - } - for dep in s.dependencies().iter().filter(|d| d.is_optional()) { - reqs.require_dependency(dep.name_in_toml()); - } - } else { - for &f in opts.features.iter() { - reqs.require_value(&FeatureValue::new(f, s))?; - } - } - - if opts.uses_default_features { - if s.features().contains_key("default") { - reqs.require_feature(InternedString::new("default"))?; - } - } - - Ok(reqs) -} - -struct Requirements<'a> { - summary: &'a Summary, - // The deps map is a mapping of package name to list of features enabled. - // Each package should be enabled, and each package should have the - // specified set of features enabled. The boolean indicates whether this - // package was specifically requested (rather than just requesting features - // *within* this package). - deps: HashMap)>, - // The used features set is the set of features which this local package had - // enabled, which is later used when compiling to instruct the code what - // features were enabled. 
- used: HashSet, - visited: HashSet, -} - -impl Requirements<'_> { - fn new(summary: &Summary) -> Requirements<'_> { - Requirements { - summary, - deps: HashMap::new(), - used: HashSet::new(), - visited: HashSet::new(), - } - } - - fn into_used(self) -> HashSet { - self.used - } - - fn require_crate_feature(&mut self, package: InternedString, feat: InternedString) { - // If `package` is indeed an optional dependency then we activate the - // feature named `package`, but otherwise if `package` is a required - // dependency then there's no feature associated with it. - if let Some(dep) = self - .summary - .dependencies() - .iter() - .find(|p| p.name_in_toml() == package) - { - if dep.is_optional() { - self.used.insert(package); - } - } - self.deps - .entry(package) - .or_insert((false, BTreeSet::new())) - .1 - .insert(feat); - } - - fn seen(&mut self, feat: InternedString) -> bool { - if self.visited.insert(feat) { - self.used.insert(feat); - false - } else { - true - } - } - - fn require_dependency(&mut self, pkg: InternedString) { - if self.seen(pkg) { - return; - } - self.deps.entry(pkg).or_insert((false, BTreeSet::new())).0 = true; - } - - fn require_feature(&mut self, feat: InternedString) -> CargoResult<()> { - if feat.is_empty() || self.seen(feat) { - return Ok(()); - } - for fv in self - .summary - .features() - .get(feat.as_str()) - .expect("must be a valid feature") - { - match *fv { - FeatureValue::Feature(ref dep_feat) if **dep_feat == *feat => failure::bail!( - "cyclic feature dependency: feature `{}` depends on itself", - feat - ), - _ => {} - } - self.require_value(fv)?; - } - Ok(()) - } - - fn require_value(&mut self, fv: &FeatureValue) -> CargoResult<()> { - match fv { - FeatureValue::Feature(feat) => self.require_feature(*feat)?, - FeatureValue::Crate(dep) => self.require_dependency(*dep), - FeatureValue::CrateFeature(dep, dep_feat) => { - self.require_crate_feature(*dep, *dep_feat) - } - }; - Ok(()) - } -} +//! 
There are 2 sources of facts for the resolver: +//! +//! - The `Registry` tells us for a `Dependency` what versions are available to fulfil it. +//! - The `Summary` tells us for a version (and features) what dependencies need to be fulfilled for it to be activated. +//! +//! These constitute immutable facts, the soled ground truth that all other inference depends on. +//! Theoretically this could all be enumerated ahead of time, but we want to be lazy and only +//! look up things we need to. The compromise is to cache the results as they are computed. +//! +//! This module impl that cache in all the gory details + +use std::cmp::Ordering; +use std::collections::{BTreeMap, HashMap, HashSet}; +use std::rc::Rc; + +use log::debug; + +use crate::core::interning::InternedString; +use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec, Registry, Summary}; +use crate::util::errors::CargoResult; +use crate::util::Platform; + +use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesMap}; +use crate::core::resolver::{ActivateResult, ResolveOpts}; + +pub struct RegistryQueryer<'a> { + pub registry: &'a mut (dyn Registry + 'a), + replacements: &'a [(PackageIdSpec, Dependency)], + try_to_use: &'a HashSet, + /// If set the list of dependency candidates will be sorted by minimal + /// versions first. That allows `cargo update -Z minimal-versions` which will + /// specify minimum dependency versions to be used. 
+ minimal_versions: bool, + /// a cache of `Candidate`s that fulfil a `Dependency` + registry_cache: HashMap>>, + /// a cache of `Dependency`s that are required for a `Summary` + summary_cache: HashMap< + (Option, Summary, ResolveOpts), + Rc<(HashMap>, Rc>)>, + >, + /// all the cases we ended up using a supplied replacement + used_replacements: HashMap, +} + +impl<'a> RegistryQueryer<'a> { + pub fn new( + registry: &'a mut dyn Registry, + replacements: &'a [(PackageIdSpec, Dependency)], + try_to_use: &'a HashSet, + minimal_versions: bool, + ) -> Self { + RegistryQueryer { + registry, + replacements, + try_to_use, + minimal_versions, + registry_cache: HashMap::new(), + summary_cache: HashMap::new(), + used_replacements: HashMap::new(), + } + } + + pub fn used_replacement_for(&self, p: PackageId) -> Option<(PackageId, PackageId)> { + self.used_replacements.get(&p).map(|r| (p, r.package_id())) + } + + pub fn replacement_summary(&self, p: PackageId) -> Option<&Summary> { + self.used_replacements.get(&p) + } + + /// Queries the `registry` to return a list of candidates for `dep`. + /// + /// This method is the location where overrides are taken into account. If + /// any candidates are returned which match an override then the override is + /// applied by performing a second query for what the override should + /// return. 
+ pub fn query(&mut self, dep: &Dependency) -> CargoResult>> { + if let Some(out) = self.registry_cache.get(dep).cloned() { + return Ok(out); + } + + let mut ret = Vec::new(); + self.registry.query( + dep, + &mut |s| { + ret.push(s); + }, + false, + )?; + for summary in ret.iter_mut() { + let mut potential_matches = self + .replacements + .iter() + .filter(|&&(ref spec, _)| spec.matches(summary.package_id())); + + let &(ref spec, ref dep) = match potential_matches.next() { + None => continue, + Some(replacement) => replacement, + }; + debug!( + "found an override for {} {}", + dep.package_name(), + dep.version_req() + ); + + let mut summaries = self.registry.query_vec(dep, false)?.into_iter(); + let s = summaries.next().ok_or_else(|| { + failure::format_err!( + "no matching package for override `{}` found\n\ + location searched: {}\n\ + version required: {}", + spec, + dep.source_id(), + dep.version_req() + ) + })?; + let summaries = summaries.collect::>(); + if !summaries.is_empty() { + let bullets = summaries + .iter() + .map(|s| format!(" * {}", s.package_id())) + .collect::>(); + failure::bail!( + "the replacement specification `{}` matched \ + multiple packages:\n * {}\n{}", + spec, + s.package_id(), + bullets.join("\n") + ); + } + + // The dependency should be hard-coded to have the same name and an + // exact version requirement, so both of these assertions should + // never fail. 
+ assert_eq!(s.version(), summary.version()); + assert_eq!(s.name(), summary.name()); + + let replace = if s.source_id() == summary.source_id() { + debug!("Preventing\n{:?}\nfrom replacing\n{:?}", summary, s); + None + } else { + Some(s) + }; + let matched_spec = spec.clone(); + + // Make sure no duplicates + if let Some(&(ref spec, _)) = potential_matches.next() { + failure::bail!( + "overlapping replacement specifications found:\n\n \ + * {}\n * {}\n\nboth specifications match: {}", + matched_spec, + spec, + summary.package_id() + ); + } + + for dep in summary.dependencies() { + debug!("\t{} => {}", dep.package_name(), dep.version_req()); + } + if let Some(r) = replace { + self.used_replacements.insert(summary.package_id(), r); + } + } + + // When we attempt versions for a package we'll want to do so in a + // sorted fashion to pick the "best candidates" first. Currently we try + // prioritized summaries (those in `try_to_use`) and failing that we + // list everything from the maximum version to the lowest version. + ret.sort_unstable_by(|a, b| { + let a_in_previous = self.try_to_use.contains(&a.package_id()); + let b_in_previous = self.try_to_use.contains(&b.package_id()); + let previous_cmp = a_in_previous.cmp(&b_in_previous).reverse(); + match previous_cmp { + Ordering::Equal => { + let cmp = a.version().cmp(b.version()); + if self.minimal_versions { + // Lower version ordered first. + cmp + } else { + // Higher version ordered first. + cmp.reverse() + } + } + _ => previous_cmp, + } + }); + + let out = Rc::new(ret); + + self.registry_cache.insert(dep.clone(), out.clone()); + + Ok(out) + } + + /// Find out what dependencies will be added by activating `candidate`, + /// with features described in `opts`. Then look up in the `registry` + /// the candidates that will fulfil each of these dependencies, as it is the + /// next obvious question. 
+ pub fn build_deps( + &mut self, + parent: Option, + candidate: &Summary, + opts: &ResolveOpts, + ) -> ActivateResult>, Rc>)>> { + // if we have calculated a result before, then we can just return it, + // as it is a "pure" query of its arguments. + if let Some(out) = self + .summary_cache + .get(&(parent, candidate.clone(), opts.clone())) + .cloned() + { + return Ok(out); + } + // First, figure out our set of dependencies based on the requested set + // of features. This also calculates what features we're going to enable + // for our own dependencies. + let (used_features, deps) = resolve_features(parent, candidate, opts)?; + + // Next, transform all dependencies into a list of possible candidates + // which can satisfy that dependency. + let mut deps = deps + .into_iter() + .map(|(dep, features)| { + let candidates = self.query(&dep)?; + Ok((dep, candidates, features)) + }) + .collect::>>()?; + + // Attempt to resolve dependencies with fewer candidates before trying + // dependencies with more candidates. This way if the dependency with + // only one candidate can't be resolved we don't have to do a bunch of + // work before we figure that out. + deps.sort_by_key(|&(_, ref a, _)| a.len()); + + let out = Rc::new((used_features, Rc::new(deps))); + + // If we succeed we add the result to the cache so we can use it again next time. + // We dont cache the failure cases as they dont impl Clone. + self.summary_cache + .insert((parent, candidate.clone(), opts.clone()), out.clone()); + + Ok(out) + } +} + +/// Returns the features we ended up using and +/// all dependencies and the features we want from each of them. +pub fn resolve_features<'b>( + parent: Option, + s: &'b Summary, + opts: &'b ResolveOpts, +) -> ActivateResult<(HashMap>, Vec<(Dependency, FeaturesMap)>)> { + // First, filter by dev-dependencies. 
+ let deps = s.dependencies(); + let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps); + + let reqs = build_requirements(s, opts)?; + let mut ret = Vec::new(); + let mut used_features = HashSet::new(); + let default_dep = (false, BTreeMap::new()); + + // Next, collect all actually enabled dependencies and their features. + for dep in deps { + // Skip optional dependencies, but not those enabled through a + // feature + if dep.is_optional() && !reqs.deps.contains_key(&dep.name_in_toml()) { + continue; + } + // So we want this dependency. Move the features we want from + // `feature_deps` to `ret` and register ourselves as using this + // name. + let base = reqs.deps.get(&dep.name_in_toml()).unwrap_or(&default_dep); + used_features.insert(dep.name_in_toml()); + let always_required = !dep.is_optional() + && !s + .dependencies() + .iter() + .any(|d| d.is_optional() && d.name_in_toml() == dep.name_in_toml()); + if always_required && base.0 { + return Err(match parent { + None => failure::format_err!( + "Package `{}` does not have feature `{}`. It has a required dependency \ + with that name, but only optional dependencies can be used as features.", + s.package_id(), + dep.name_in_toml() + ) + .into(), + Some(p) => ( + p, + ConflictReason::RequiredDependencyAsFeatures(dep.name_in_toml()), + ) + .into(), + }); + } + let mut base = base.1.clone(); + base.extend(dep.features().iter().map(|f| (f.clone(), dep.platform().map(|p| p.clone())))); + for feature in base.iter() { + if feature.0.contains('/') { + return Err(failure::format_err!( + "feature names may not contain slashes: `{}`", + feature.0 + ) + .into()); + } + } + ret.push((dep.clone(), Rc::new(base))); + } + + // Any entries in `reqs.dep` which weren't used are bugs in that the + // package does not actually have those dependencies. We classified + // them as dependencies in the first place because there is no such + // feature, either. 
+ let remaining = reqs + .deps + .keys() + .cloned() + .filter(|s| !used_features.contains(s)) + .collect::>(); + if !remaining.is_empty() { + let features = remaining.join(", "); + return Err(match parent { + None => failure::format_err!( + "Package `{}` does not have these features: `{}`", + s.package_id(), + features + ) + .into(), + Some(p) => (p, ConflictReason::MissingFeatures(features)).into(), + }); + } + + Ok((reqs.into_used(), ret)) +} + +/// Takes requested features for a single package from the input `ResolveOpts` and +/// recurses to find all requested features, dependencies and requested +/// dependency features in a `Requirements` object, returning it to the resolver. +fn build_requirements<'a, 'b: 'a>( + s: &'a Summary, + opts: &'b ResolveOpts, +) -> CargoResult> { + let mut reqs = Requirements::new(s); + + if opts.all_features { + for (key,val) in s.features() { + reqs.require_feature(*key, &val.0)?; + } + for dep in s.dependencies().iter().filter(|d| d.is_optional()) { + reqs.require_dependency(dep.name_in_toml(), &dep.platform().map(|p| p.clone())); + } + } else { + for (f, p) in opts.features.iter() { + reqs.require_value(&FeatureValue::new(*f, s), &p)?; + } + } + + if opts.uses_default_features { + if s.features().contains_key("default") { + reqs.require_feature(InternedString::new("default"), &s.features().get("default").unwrap().0)?; + } + } + + Ok(reqs) +} + +struct Requirements<'a> { + summary: &'a Summary, + // The deps map is a mapping of package name to list of features enabled. + // Each package should be enabled, and each package should have the + // specified set of features enabled. The boolean indicates whether this + // package was specifically requested (rather than just requesting features + // *within* this package). + deps: HashMap>)>, + // The used features set is the set of features which this local package had + // enabled, which is later used when compiling to instruct the code what + // features were enabled. 
+ used: HashMap>, + visited: HashMap>, +} + +impl Requirements<'_> { + fn new(summary: &Summary) -> Requirements<'_> { + Requirements { + summary, + deps: HashMap::new(), + used: HashMap::new(), + visited: HashMap::new(), + } + } + + fn into_used(self) -> HashMap> { + self.used + } + + fn require_crate_feature(&mut self, package: InternedString, feat: InternedString, platform: Option) { + // If `package` is indeed an optional dependency then we activate the + // feature named `package`, but otherwise if `package` is a required + // dependency then there's no feature associated with it. + if let Some(dep) = self + .summary + .dependencies() + .iter() + .find(|p| p.name_in_toml() == package) + { + if dep.is_optional() { + self.used.insert(package, platform.clone()); + } + } + self.deps + .entry(package) + .or_insert((false, BTreeMap::new())) + .1 + .insert(feat, platform); + } + + fn seen(&mut self, feat: InternedString, platform: &Option) -> bool { + if self.visited.insert(feat, platform.clone()).is_some() { + self.used.insert(feat, platform.clone()); + false + } else { + true + } + } + + fn require_dependency(&mut self, pkg: InternedString, platform: &Option) { + if self.seen(pkg, platform) { + return; + } + self.deps.entry(pkg).or_insert((false, BTreeMap::new())).0 = true; + } + + fn require_feature(&mut self, feat: InternedString, platform: &Option) -> CargoResult<()> { + if feat.is_empty() || self.seen(feat, platform) { + return Ok(()); + } + let feature = self + .summary + .features() + .get(feat.as_str()) + .expect("must be a valid feature"); + for fv in feature.1.as_slice() + { + match *fv { + FeatureValue::Feature(ref dep_feat) if **dep_feat == *feat => failure::bail!( + "cyclic feature dependency: feature `{}` depends on itself", + feat + ), + _ => {} + } + self.require_value(fv, &feature.0)?; + } + Ok(()) + } + + fn require_value(&mut self, fv: &FeatureValue, platform: &Option) -> CargoResult<()> { + match fv { + FeatureValue::Feature(feat) => 
self.require_feature(*feat, platform)?, + FeatureValue::Crate(dep) => self.require_dependency(*dep, platform), + FeatureValue::CrateFeature(dep, dep_feat) => { + self.require_crate_feature(*dep, *dep_feat, platform.clone()) + } + }; + Ok(()) + } +} diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 4aaa7eeafed..3ffe0cfadd1 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -63,7 +63,7 @@ use crate::util::profile; use self::context::Context; use self::dep_cache::RegistryQueryer; use self::types::{ConflictMap, ConflictReason, DepsFrame}; -use self::types::{FeaturesSet, RcVecIter, RemainingDeps, ResolverProgress}; +use self::types::{FeaturesMap, RcVecIter, RemainingDeps, ResolverProgress}; pub use self::encode::Metadata; pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; @@ -146,7 +146,7 @@ pub fn resolve( cx.resolve_replacements(®istry), cx.resolve_features .iter() - .map(|(k, v)| (*k, v.iter().map(|x| x.to_string()).collect())) + .map(|(k, v)| (*k, v.iter().map(|(f,p)| (f.to_string(), p.clone())).collect())) .collect(), cksums, BTreeMap::new(), @@ -691,7 +691,7 @@ fn activate( .entry(candidate.package_id()) .or_insert_with(Rc::default), ) - .extend(used_features); + .extend(used_features.clone()); } let frame = DepsFrame { @@ -709,7 +709,7 @@ struct BacktrackFrame { remaining_candidates: RemainingCandidates, parent: Summary, dep: Dependency, - features: FeaturesSet, + features: FeaturesMap, conflicting_activations: ConflictMap, } diff --git a/src/cargo/core/resolver/resolve.rs b/src/cargo/core/resolver/resolve.rs index 9ced48f4d67..7f210a1e812 100644 --- a/src/cargo/core/resolver/resolve.rs +++ b/src/cargo/core/resolver/resolve.rs @@ -8,7 +8,7 @@ use url::Url; use crate::core::dependency::Kind; use crate::core::{Dependency, PackageId, PackageIdSpec, Summary, Target}; use crate::util::errors::CargoResult; -use crate::util::Graph; +use crate::util::{Graph, Platform}; use 
super::encode::Metadata; @@ -27,12 +27,12 @@ pub struct Resolve { replacements: HashMap, /// Inverted version of `replacements`. reverse_replacements: HashMap, - /// An empty `HashSet` to avoid creating a new `HashSet` for every package + /// An empty `HashMap` to avoid creating a new `HashMap` for every package /// that does not have any features, and to avoid using `Option` to /// simplify the API. - empty_features: HashSet, + empty_features: HashMap>, /// Features enabled for a given package. - features: HashMap>, + features: HashMap>>, /// Checksum for each package. A SHA256 hash of the `.crate` file used to /// validate the correct crate file is used. This is `None` for sources /// that do not use `.crate` files, like path or git dependencies. @@ -71,7 +71,7 @@ impl Resolve { pub fn new( graph: Graph>, replacements: HashMap, - features: HashMap>, + features: HashMap>>, checksums: HashMap>, metadata: Metadata, unused_patches: Vec, @@ -101,7 +101,7 @@ impl Resolve { checksums, metadata, unused_patches, - empty_features: HashSet::new(), + empty_features: HashMap::new(), reverse_replacements, public_dependencies, version, @@ -269,7 +269,7 @@ unable to verify that `{0}` is the same as when the lockfile was generated &self.replacements } - pub fn features(&self, pkg: PackageId) -> &HashSet { + pub fn features(&self, pkg: PackageId) -> &HashMap> { self.features.get(&pkg).unwrap_or(&self.empty_features) } @@ -281,7 +281,7 @@ unable to verify that `{0}` is the same as when the lockfile was generated } pub fn features_sorted(&self, pkg: PackageId) -> Vec<&str> { - let mut v = Vec::from_iter(self.features(pkg).iter().map(|s| s.as_ref())); + let mut v = Vec::from_iter(self.features(pkg).iter().map(|(s, _)| s.as_ref())); v.sort_unstable(); v } diff --git a/src/cargo/core/resolver/types.rs b/src/cargo/core/resolver/types.rs index 881869ef16f..02f078460d9 100644 --- a/src/cargo/core/resolver/types.rs +++ b/src/cargo/core/resolver/types.rs @@ -1,5 +1,5 @@ use 
std::cmp::Ordering; -use std::collections::{BTreeMap, BTreeSet}; +use std::collections::BTreeMap; use std::ops::Range; use std::rc::Rc; use std::time::{Duration, Instant}; @@ -7,7 +7,7 @@ use std::time::{Duration, Instant}; use crate::core::interning::InternedString; use crate::core::{Dependency, PackageId, Summary}; use crate::util::errors::CargoResult; -use crate::util::Config; +use crate::util::{Config, Platform}; use im_rc; @@ -89,7 +89,7 @@ impl ResolverProgress { } } -/// The preferred way to store the set of activated features for a package. +/// The preferred way to store the map of activated feature-platform pairs for a package. /// This is sorted so that it impls Hash, and owns its contents, /// needed so it can be part of the key for caching in the `DepsCache`. /// It is also cloned often as part of `Context`, hence the `RC`. @@ -97,7 +97,7 @@ impl ResolverProgress { /// but this can change with improvements to std, im, or llvm. /// Using a consistent type for this allows us to use the highly /// optimized comparison operators like `is_subset` at the interfaces. -pub type FeaturesSet = Rc>; +pub type FeaturesMap = Rc>>; /// Options for how the resolve should work. #[derive(Clone, Debug, Eq, PartialEq, Hash)] @@ -107,7 +107,7 @@ pub struct ResolveOpts { /// This may be set to `false` by things like `cargo install` or `-Z avoid-dev-deps`. pub dev_deps: bool, /// Set of features to enable (`--features=…`). - pub features: FeaturesSet, + pub features: FeaturesMap, /// Indicates *all* features should be enabled (`--all-features`). pub all_features: bool, /// Include the `default` feature (`--no-default-features` sets this false). 
@@ -119,7 +119,7 @@ impl ResolveOpts { pub fn everything() -> ResolveOpts { ResolveOpts { dev_deps: true, - features: Rc::new(BTreeSet::new()), + features: Rc::new(BTreeMap::new()), all_features: true, uses_default_features: true, } @@ -139,14 +139,14 @@ impl ResolveOpts { } } - fn split_features(features: &[String]) -> BTreeSet { + fn split_features(features: &[String]) -> BTreeMap> { features .iter() .flat_map(|s| s.split_whitespace()) .flat_map(|s| s.split(',')) .filter(|s| !s.is_empty()) - .map(InternedString::new) - .collect::>() + .map(|s| (InternedString::new(s), None)) + .collect::>>() } } @@ -253,7 +253,7 @@ impl RemainingDeps { /// Information about the dependencies for a crate, a tuple of: /// /// (dependency info, candidates, features activated) -pub type DepInfo = (Dependency, Rc>, FeaturesSet); +pub type DepInfo = (Dependency, Rc>, FeaturesMap); /// All possible reasons that a package might fail to activate. /// diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs index fb52b9179d7..0dbfd2456b3 100644 --- a/src/cargo/core/summary.rs +++ b/src/cargo/core/summary.rs @@ -11,7 +11,7 @@ use crate::core::interning::InternedString; use crate::core::{Dependency, PackageId, SourceId}; use semver::Version; -use crate::util::CargoResult; +use crate::util::{CargoResult, Platform}; /// Subset of a `Manifest`. Contains only the most important information about /// a package. @@ -36,7 +36,7 @@ impl Summary { pub fn new( pkg_id: PackageId, dependencies: Vec, - features: &BTreeMap>>, + features: &BTreeMap, Vec>)>, links: Option>, namespaced_features: bool, ) -> CargoResult @@ -149,7 +149,7 @@ impl Hash for Summary { // Checks features for errors, bailing out a CargoResult:Err if invalid, // and creates FeatureValues for each feature. 
fn build_feature_map( - features: &BTreeMap>>, + features: &BTreeMap, Vec>)>, dependencies: &[Dependency], namespaced: bool, ) -> CargoResult @@ -201,7 +201,7 @@ where }; let mut values = vec![]; - for dep in list { + for dep in list.1.as_slice() { let val = FeatureValue::build( InternedString::new(dep.as_ref()), |fs| features.contains_key(fs.as_str()), @@ -239,7 +239,7 @@ where // we don't have to do so here. (&Feature(feat), _, true) => { if namespaced && !features.contains_key(&*feat) { - map.insert(feat, vec![FeatureValue::Crate(feat)]); + map.insert(feat, (list.0.clone(), vec![FeatureValue::Crate(feat)])); } } // If features are namespaced and the value is not defined as a feature @@ -337,7 +337,10 @@ where ) } - map.insert(InternedString::new(feature.borrow()), values); + map.insert( + InternedString::new(feature.borrow()), + (list.0.clone(), values), + ); } Ok(map) } @@ -416,4 +419,5 @@ impl Serialize for FeatureValue { } } -pub type FeatureMap = BTreeMap>; +pub type FeatureMap = BTreeMap, Vec)>; +pub type RefFeatureMap<'a> = BTreeMap; diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index c68d4fc1b1d..653b1a6e8e0 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -36,7 +36,7 @@ use crate::core::{Package, Target}; use crate::core::{PackageId, PackageIdSpec, TargetKind, Workspace}; use crate::ops; use crate::util::config::Config; -use crate::util::{closest_msg, profile, CargoResult}; +use crate::util::{closest_msg, profile, CargoResult, Platform}; /// Contains information about how a package should be compiled. 
#[derive(Debug)] @@ -791,7 +791,7 @@ fn generate_targets<'a>( let features = features_map .entry(pkg) .or_insert_with(|| resolve_all_features(resolve, pkg.package_id())); - rf.iter().filter(|f| !features.contains(*f)).collect() + rf.iter().filter(|f| !features.contains_key(*f)).collect() } None => Vec::new(), }; @@ -821,14 +821,14 @@ fn generate_targets<'a>( fn resolve_all_features( resolve_with_overrides: &Resolve, package_id: PackageId, -) -> HashSet { +) -> HashMap> { let mut features = resolve_with_overrides.features(package_id).clone(); // Include features enabled for use by dependencies so targets can also use them with the // required-features field when deciding whether to be built or skipped. for (dep, _) in resolve_with_overrides.deps(package_id) { for feature in resolve_with_overrides.features(dep) { - features.insert(dep.name().to_string() + "/" + feature); + features.insert(dep.name().to_string() + "/" + feature.0, feature.1.clone()); } } diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs index 5ecdaf6f53c..20d7aa01ea8 100644 --- a/src/cargo/ops/registry.rs +++ b/src/cargo/ops/registry.rs @@ -237,7 +237,7 @@ fn transmit( .map(|(feat, values)| { ( feat.to_string(), - values.iter().map(|fv| fv.to_string(summary)).collect(), + values.1.iter().map(|fv| fv.to_string(&summary)).collect(), ) }) .collect::>>(); diff --git a/src/cargo/sources/registry/index.rs b/src/cargo/sources/registry/index.rs index 67116074cb7..57d24a8f0aa 100644 --- a/src/cargo/sources/registry/index.rs +++ b/src/cargo/sources/registry/index.rs @@ -722,7 +722,11 @@ impl IndexSummary { .into_iter() .map(|dep| dep.into_dep(source_id)) .collect::>>()?; - let mut summary = Summary::new(pkgid, deps, &features, links, false)?; + let ftrs = features + .iter() + .map(|(k, v)| (k.clone(), (None, v.clone()))) + .collect(); + let mut summary = Summary::new(pkgid, deps, &ftrs, links, false)?; summary.set_checksum(cksum.clone()); Ok(IndexSummary { summary, diff --git 
a/src/cargo/util/cfg.rs b/src/cargo/util/cfg.rs index 4c4ad232df4..4826c3b4bce 100644 --- a/src/cargo/util/cfg.rs +++ b/src/cargo/util/cfg.rs @@ -2,6 +2,9 @@ use std::fmt; use std::iter; use std::str::{self, FromStr}; +use serde::ser; + +use crate::util::errors::CargoResultExt; use crate::util::CargoResult; #[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)] @@ -28,6 +31,12 @@ enum Token<'a> { String(&'a str), } +#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)] +pub enum Platform { + Name(String), + Cfg(CfgExpr), +} + struct Tokenizer<'a> { s: iter::Peekable>, orig: &'a str, @@ -276,3 +285,46 @@ impl<'a> Token<'a> { } } } + +impl Platform { + pub fn matches(&self, name: &str, cfg: &[Cfg]) -> bool { + match *self { + Platform::Name(ref p) => p == name, + Platform::Cfg(ref p) => p.matches(cfg), + } + } +} + +impl ser::Serialize for Platform { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + self.to_string().serialize(s) + } +} + +impl FromStr for Platform { + type Err = failure::Error; + + fn from_str(s: &str) -> CargoResult { + if s.starts_with("cfg(") && s.ends_with(')') { + let s = &s[4..s.len() - 1]; + let p = s.parse().map(Platform::Cfg).chain_err(|| { + failure::format_err!("failed to parse `{}` as a cfg expression", s) + })?; + Ok(p) + } else { + Ok(Platform::Name(s.to_string())) + } + } +} + +impl fmt::Display for Platform { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + Platform::Name(ref n) => n.fmt(f), + Platform::Cfg(ref e) => write!(f, "cfg({})", e), + } + } +} diff --git a/src/cargo/util/mod.rs b/src/cargo/util/mod.rs index 50fbd8c2b1b..424b525f4cc 100644 --- a/src/cargo/util/mod.rs +++ b/src/cargo/util/mod.rs @@ -1,6 +1,6 @@ use std::time::Duration; -pub use self::cfg::{Cfg, CfgExpr}; +pub use self::cfg::{Cfg, CfgExpr, Platform}; pub use self::config::{homedir, Config, ConfigValue}; pub use self::dependency_queue::DependencyQueue; pub use 
self::diagnostic_server::RustfixDiagnosticServer; diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs index e2232827885..3633ff25b13 100644 --- a/src/cargo/util/toml/mod.rs +++ b/src/cargo/util/toml/mod.rs @@ -13,7 +13,7 @@ use serde::ser; use serde::{Deserialize, Serialize}; use url::Url; -use crate::core::dependency::{Kind, Platform}; +use crate::core::dependency::Kind; use crate::core::manifest::{LibKind, ManifestMetadata, TargetSourcePath, Warnings}; use crate::core::profiles::Profiles; use crate::core::{Dependency, Manifest, PackageId, Summary, Target}; @@ -21,7 +21,7 @@ use crate::core::{Edition, EitherManifest, Feature, Features, VirtualManifest}; use crate::core::{GitReference, PackageIdSpec, SourceId, WorkspaceConfig, WorkspaceRootConfig}; use crate::sources::{CRATES_IO_INDEX, CRATES_IO_REGISTRY}; use crate::util::errors::{CargoResult, CargoResultExt, ManifestError}; -use crate::util::{self, paths, validate_package_name, Config, IntoUrl}; +use crate::util::{self, paths, validate_package_name, Config, IntoUrl, Platform}; mod targets; use self::targets::targets; @@ -754,6 +754,7 @@ impl TomlManifest { Ok(( k.clone(), TomlPlatform { + features: v.features.clone(), dependencies: map_deps(config, v.dependencies.as_ref())?, dev_dependencies: map_deps( config, @@ -892,6 +893,7 @@ impl TomlManifest { } let mut deps = Vec::new(); + let mut ftrs = BTreeMap::new(); let replace; let patch; @@ -924,6 +926,21 @@ impl TomlManifest { Ok(()) } + fn process_features( + ftrs: &mut BTreeMap, Vec)>, + new_ftrs: Option<&BTreeMap>>, + platform: Option<&Platform>, + ) -> CargoResult<()> { + let features = match new_ftrs { + Some(features) => features, + None => return Ok(()), + }; + for (n, v) in features.iter() { + ftrs.insert(n.clone(), (platform.cloned(), v.clone())); + } + + Ok(()) + } // Collect the dependencies. 
process_dependencies(&mut cx, me.dependencies.as_ref(), None)?; @@ -937,6 +954,7 @@ impl TomlManifest { .as_ref() .or_else(|| me.build_dependencies2.as_ref()); process_dependencies(&mut cx, build_deps, Some(Kind::Build))?; + process_features(&mut ftrs, me.features.as_ref(), None)?; for (name, platform) in me.target.iter().flat_map(|t| t) { cx.platform = Some(name.parse()?); @@ -951,6 +969,7 @@ impl TomlManifest { .as_ref() .or_else(|| platform.dev_dependencies2.as_ref()); process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?; + process_features(&mut ftrs, platform.features.as_ref(), cx.platform.as_ref())?; } replace = me.replace(&mut cx)?; @@ -982,14 +1001,7 @@ impl TomlManifest { let summary = Summary::new( pkgid, deps, - &me.features - .as_ref() - .map(|x| { - x.iter() - .map(|(k, v)| (k.as_str(), v.iter().collect())) - .collect() - }) - .unwrap_or_else(BTreeMap::new), + &ftrs, project.links.as_ref().map(|x| x.as_str()), project.namespaced_features.unwrap_or(false), )?; @@ -1543,6 +1555,7 @@ impl ser::Serialize for PathValue { /// Corresponds to a `target` entry, but `TomlTarget` is already used. #[derive(Serialize, Deserialize, Debug)] struct TomlPlatform { + features: Option>>, dependencies: Option>, #[serde(rename = "build-dependencies")] build_dependencies: Option>, diff --git a/tests/testsuite/cfg_features.rs b/tests/testsuite/cfg_features.rs new file mode 100644 index 00000000000..99d0742e642 --- /dev/null +++ b/tests/testsuite/cfg_features.rs @@ -0,0 +1,224 @@ +use crate::support::project; + +#[cargo_test] +fn syntax() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.features] + b = [] + [target.'cfg(windows)'.features] + b = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn bb() {} + "#, + ) + .build(); + p.cargo("build") + .with_stderr( + "\ +[COMPILING] a v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn include_by_param() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.features] + b = [] + [target.'cfg(windows)'.features] + c = [] + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "b")] + pub const BB: usize = 0; + #[cfg(feature = "c")] + pub const BB: usize = 1; + + pub fn bb() -> Result<(), ()> { if BB > 0 { Ok(()) } else { Err(()) } } + "#, + ) + .build(); + p.cargo(format!("build --features {}", if cfg!(unix) { "b" } else { "c" }).as_str()) + .with_stderr( + "\ +[COMPILING] a v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn dont_include_by_platform() { + let other_family = if cfg!(unix) { "windows" } else { "unix" }; + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg({})'.features] + b = [] + "#, + other_family + ), + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "b")] + pub const BB: usize = 0; + + pub fn bb() { let _ = BB; } + "#, + ) + .build(); + p.cargo("build --features b -vv") + .with_status(101) + .with_stderr_contains( + "\ + error[E0425]: cannot find value `BB` in this scope", + ) + .run(); +} + +#[cargo_test] +fn dont_include_by_param() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.features] + b = [] + [target.'cfg(windows)'.features] + c = [] + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "b")] + pub const BB: usize = 0; + #[cfg(feature = "c")] + pub const BB: usize = 1; + + pub fn bb() -> Result<(), ()> { if BB > 0 { Ok(()) } else { Err(()) } } + "#, + ) + .build(); + p.cargo("build -v") + .with_status(101) + .with_stderr_contains( + "\ + error[E0425]: cannot find value `BB` in this scope", + ) + .run(); +} + +#[cargo_test] +fn dont_include_default() 
{ + let other_family = if cfg!(unix) { "windows" } else { "unix" }; + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg({})'.features] + b = [] + + [features] + default = ["b"] + "#, + other_family + ), + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "b")] + pub const BB: usize = 0; + + pub fn bb() { let _ = BB; } + "#, + ) + .build(); + p.cargo("build -v") + .with_status(101) + .with_stderr_contains( + "\ + error[E0425]: cannot find value `BB` in this scope", + ) + .run(); +} + +// https://github.com/rust-lang/cargo/issues/5313 +#[cargo_test] +#[cfg(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu"))] +fn cfg_looks_at_rustflags_for_target() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(with_b)'.features] + b = [] + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(with_b)] + pub const BB: usize = 0; + + fn main() { let _ = BB; } + "#, + ) + .build(); + + p.cargo("build --target x86_64-unknown-linux-gnu") + .env("RUSTFLAGS", "--cfg with_b") + .run(); +} diff --git a/tests/testsuite/main.rs b/tests/testsuite/main.rs index 618c92ceb23..a8a2d7cc309 100644 --- a/tests/testsuite/main.rs +++ b/tests/testsuite/main.rs @@ -27,6 +27,7 @@ mod cargo_alias_config; mod cargo_command; mod cargo_features; mod cfg; +mod cfg_features; mod check; mod clean; mod clippy; diff --git a/tests/testsuite/metadata.rs b/tests/testsuite/metadata.rs index 10f5636c67d..33f67344ac4 100644 --- a/tests/testsuite/metadata.rs +++ b/tests/testsuite/metadata.rs @@ -217,10 +217,19 @@ optional_feat = [] ], "features": { "default": [ + null, + [ "default_feat" + ] ], - "default_feat": [], - "optional_feat": [] + "default_feat": [ + null, + [] + ], + "optional_feat": [ + null, + [] + ] }, "manifest_path": "[..]Cargo.toml", "metadata": null From adce8798dc3f0cb31b45bc4d06917471287932c9 Mon Sep 17 00:00:00 2001 From: 
Serhii Plyhun Date: Wed, 31 Jul 2019 17:38:26 +0200 Subject: [PATCH 02/21] Most of the tests fixed. --- src/cargo/core/resolver/dep_cache.rs | 930 +++++++++++++-------------- 1 file changed, 465 insertions(+), 465 deletions(-) diff --git a/src/cargo/core/resolver/dep_cache.rs b/src/cargo/core/resolver/dep_cache.rs index c3ae84a171a..47d5261aeb0 100644 --- a/src/cargo/core/resolver/dep_cache.rs +++ b/src/cargo/core/resolver/dep_cache.rs @@ -1,465 +1,465 @@ -//! There are 2 sources of facts for the resolver: -//! -//! - The `Registry` tells us for a `Dependency` what versions are available to fulfil it. -//! - The `Summary` tells us for a version (and features) what dependencies need to be fulfilled for it to be activated. -//! -//! These constitute immutable facts, the soled ground truth that all other inference depends on. -//! Theoretically this could all be enumerated ahead of time, but we want to be lazy and only -//! look up things we need to. The compromise is to cache the results as they are computed. -//! -//! This module impl that cache in all the gory details - -use std::cmp::Ordering; -use std::collections::{BTreeMap, HashMap, HashSet}; -use std::rc::Rc; - -use log::debug; - -use crate::core::interning::InternedString; -use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec, Registry, Summary}; -use crate::util::errors::CargoResult; -use crate::util::Platform; - -use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesMap}; -use crate::core::resolver::{ActivateResult, ResolveOpts}; - -pub struct RegistryQueryer<'a> { - pub registry: &'a mut (dyn Registry + 'a), - replacements: &'a [(PackageIdSpec, Dependency)], - try_to_use: &'a HashSet, - /// If set the list of dependency candidates will be sorted by minimal - /// versions first. That allows `cargo update -Z minimal-versions` which will - /// specify minimum dependency versions to be used. 
- minimal_versions: bool, - /// a cache of `Candidate`s that fulfil a `Dependency` - registry_cache: HashMap>>, - /// a cache of `Dependency`s that are required for a `Summary` - summary_cache: HashMap< - (Option, Summary, ResolveOpts), - Rc<(HashMap>, Rc>)>, - >, - /// all the cases we ended up using a supplied replacement - used_replacements: HashMap, -} - -impl<'a> RegistryQueryer<'a> { - pub fn new( - registry: &'a mut dyn Registry, - replacements: &'a [(PackageIdSpec, Dependency)], - try_to_use: &'a HashSet, - minimal_versions: bool, - ) -> Self { - RegistryQueryer { - registry, - replacements, - try_to_use, - minimal_versions, - registry_cache: HashMap::new(), - summary_cache: HashMap::new(), - used_replacements: HashMap::new(), - } - } - - pub fn used_replacement_for(&self, p: PackageId) -> Option<(PackageId, PackageId)> { - self.used_replacements.get(&p).map(|r| (p, r.package_id())) - } - - pub fn replacement_summary(&self, p: PackageId) -> Option<&Summary> { - self.used_replacements.get(&p) - } - - /// Queries the `registry` to return a list of candidates for `dep`. - /// - /// This method is the location where overrides are taken into account. If - /// any candidates are returned which match an override then the override is - /// applied by performing a second query for what the override should - /// return. 
- pub fn query(&mut self, dep: &Dependency) -> CargoResult>> { - if let Some(out) = self.registry_cache.get(dep).cloned() { - return Ok(out); - } - - let mut ret = Vec::new(); - self.registry.query( - dep, - &mut |s| { - ret.push(s); - }, - false, - )?; - for summary in ret.iter_mut() { - let mut potential_matches = self - .replacements - .iter() - .filter(|&&(ref spec, _)| spec.matches(summary.package_id())); - - let &(ref spec, ref dep) = match potential_matches.next() { - None => continue, - Some(replacement) => replacement, - }; - debug!( - "found an override for {} {}", - dep.package_name(), - dep.version_req() - ); - - let mut summaries = self.registry.query_vec(dep, false)?.into_iter(); - let s = summaries.next().ok_or_else(|| { - failure::format_err!( - "no matching package for override `{}` found\n\ - location searched: {}\n\ - version required: {}", - spec, - dep.source_id(), - dep.version_req() - ) - })?; - let summaries = summaries.collect::>(); - if !summaries.is_empty() { - let bullets = summaries - .iter() - .map(|s| format!(" * {}", s.package_id())) - .collect::>(); - failure::bail!( - "the replacement specification `{}` matched \ - multiple packages:\n * {}\n{}", - spec, - s.package_id(), - bullets.join("\n") - ); - } - - // The dependency should be hard-coded to have the same name and an - // exact version requirement, so both of these assertions should - // never fail. 
- assert_eq!(s.version(), summary.version()); - assert_eq!(s.name(), summary.name()); - - let replace = if s.source_id() == summary.source_id() { - debug!("Preventing\n{:?}\nfrom replacing\n{:?}", summary, s); - None - } else { - Some(s) - }; - let matched_spec = spec.clone(); - - // Make sure no duplicates - if let Some(&(ref spec, _)) = potential_matches.next() { - failure::bail!( - "overlapping replacement specifications found:\n\n \ - * {}\n * {}\n\nboth specifications match: {}", - matched_spec, - spec, - summary.package_id() - ); - } - - for dep in summary.dependencies() { - debug!("\t{} => {}", dep.package_name(), dep.version_req()); - } - if let Some(r) = replace { - self.used_replacements.insert(summary.package_id(), r); - } - } - - // When we attempt versions for a package we'll want to do so in a - // sorted fashion to pick the "best candidates" first. Currently we try - // prioritized summaries (those in `try_to_use`) and failing that we - // list everything from the maximum version to the lowest version. - ret.sort_unstable_by(|a, b| { - let a_in_previous = self.try_to_use.contains(&a.package_id()); - let b_in_previous = self.try_to_use.contains(&b.package_id()); - let previous_cmp = a_in_previous.cmp(&b_in_previous).reverse(); - match previous_cmp { - Ordering::Equal => { - let cmp = a.version().cmp(b.version()); - if self.minimal_versions { - // Lower version ordered first. - cmp - } else { - // Higher version ordered first. - cmp.reverse() - } - } - _ => previous_cmp, - } - }); - - let out = Rc::new(ret); - - self.registry_cache.insert(dep.clone(), out.clone()); - - Ok(out) - } - - /// Find out what dependencies will be added by activating `candidate`, - /// with features described in `opts`. Then look up in the `registry` - /// the candidates that will fulfil each of these dependencies, as it is the - /// next obvious question. 
- pub fn build_deps( - &mut self, - parent: Option, - candidate: &Summary, - opts: &ResolveOpts, - ) -> ActivateResult>, Rc>)>> { - // if we have calculated a result before, then we can just return it, - // as it is a "pure" query of its arguments. - if let Some(out) = self - .summary_cache - .get(&(parent, candidate.clone(), opts.clone())) - .cloned() - { - return Ok(out); - } - // First, figure out our set of dependencies based on the requested set - // of features. This also calculates what features we're going to enable - // for our own dependencies. - let (used_features, deps) = resolve_features(parent, candidate, opts)?; - - // Next, transform all dependencies into a list of possible candidates - // which can satisfy that dependency. - let mut deps = deps - .into_iter() - .map(|(dep, features)| { - let candidates = self.query(&dep)?; - Ok((dep, candidates, features)) - }) - .collect::>>()?; - - // Attempt to resolve dependencies with fewer candidates before trying - // dependencies with more candidates. This way if the dependency with - // only one candidate can't be resolved we don't have to do a bunch of - // work before we figure that out. - deps.sort_by_key(|&(_, ref a, _)| a.len()); - - let out = Rc::new((used_features, Rc::new(deps))); - - // If we succeed we add the result to the cache so we can use it again next time. - // We dont cache the failure cases as they dont impl Clone. - self.summary_cache - .insert((parent, candidate.clone(), opts.clone()), out.clone()); - - Ok(out) - } -} - -/// Returns the features we ended up using and -/// all dependencies and the features we want from each of them. -pub fn resolve_features<'b>( - parent: Option, - s: &'b Summary, - opts: &'b ResolveOpts, -) -> ActivateResult<(HashMap>, Vec<(Dependency, FeaturesMap)>)> { - // First, filter by dev-dependencies. 
- let deps = s.dependencies(); - let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps); - - let reqs = build_requirements(s, opts)?; - let mut ret = Vec::new(); - let mut used_features = HashSet::new(); - let default_dep = (false, BTreeMap::new()); - - // Next, collect all actually enabled dependencies and their features. - for dep in deps { - // Skip optional dependencies, but not those enabled through a - // feature - if dep.is_optional() && !reqs.deps.contains_key(&dep.name_in_toml()) { - continue; - } - // So we want this dependency. Move the features we want from - // `feature_deps` to `ret` and register ourselves as using this - // name. - let base = reqs.deps.get(&dep.name_in_toml()).unwrap_or(&default_dep); - used_features.insert(dep.name_in_toml()); - let always_required = !dep.is_optional() - && !s - .dependencies() - .iter() - .any(|d| d.is_optional() && d.name_in_toml() == dep.name_in_toml()); - if always_required && base.0 { - return Err(match parent { - None => failure::format_err!( - "Package `{}` does not have feature `{}`. It has a required dependency \ - with that name, but only optional dependencies can be used as features.", - s.package_id(), - dep.name_in_toml() - ) - .into(), - Some(p) => ( - p, - ConflictReason::RequiredDependencyAsFeatures(dep.name_in_toml()), - ) - .into(), - }); - } - let mut base = base.1.clone(); - base.extend(dep.features().iter().map(|f| (f.clone(), dep.platform().map(|p| p.clone())))); - for feature in base.iter() { - if feature.0.contains('/') { - return Err(failure::format_err!( - "feature names may not contain slashes: `{}`", - feature.0 - ) - .into()); - } - } - ret.push((dep.clone(), Rc::new(base))); - } - - // Any entries in `reqs.dep` which weren't used are bugs in that the - // package does not actually have those dependencies. We classified - // them as dependencies in the first place because there is no such - // feature, either. 
- let remaining = reqs - .deps - .keys() - .cloned() - .filter(|s| !used_features.contains(s)) - .collect::>(); - if !remaining.is_empty() { - let features = remaining.join(", "); - return Err(match parent { - None => failure::format_err!( - "Package `{}` does not have these features: `{}`", - s.package_id(), - features - ) - .into(), - Some(p) => (p, ConflictReason::MissingFeatures(features)).into(), - }); - } - - Ok((reqs.into_used(), ret)) -} - -/// Takes requested features for a single package from the input `ResolveOpts` and -/// recurses to find all requested features, dependencies and requested -/// dependency features in a `Requirements` object, returning it to the resolver. -fn build_requirements<'a, 'b: 'a>( - s: &'a Summary, - opts: &'b ResolveOpts, -) -> CargoResult> { - let mut reqs = Requirements::new(s); - - if opts.all_features { - for (key,val) in s.features() { - reqs.require_feature(*key, &val.0)?; - } - for dep in s.dependencies().iter().filter(|d| d.is_optional()) { - reqs.require_dependency(dep.name_in_toml(), &dep.platform().map(|p| p.clone())); - } - } else { - for (f, p) in opts.features.iter() { - reqs.require_value(&FeatureValue::new(*f, s), &p)?; - } - } - - if opts.uses_default_features { - if s.features().contains_key("default") { - reqs.require_feature(InternedString::new("default"), &s.features().get("default").unwrap().0)?; - } - } - - Ok(reqs) -} - -struct Requirements<'a> { - summary: &'a Summary, - // The deps map is a mapping of package name to list of features enabled. - // Each package should be enabled, and each package should have the - // specified set of features enabled. The boolean indicates whether this - // package was specifically requested (rather than just requesting features - // *within* this package). - deps: HashMap>)>, - // The used features set is the set of features which this local package had - // enabled, which is later used when compiling to instruct the code what - // features were enabled. 
- used: HashMap>, - visited: HashMap>, -} - -impl Requirements<'_> { - fn new(summary: &Summary) -> Requirements<'_> { - Requirements { - summary, - deps: HashMap::new(), - used: HashMap::new(), - visited: HashMap::new(), - } - } - - fn into_used(self) -> HashMap> { - self.used - } - - fn require_crate_feature(&mut self, package: InternedString, feat: InternedString, platform: Option) { - // If `package` is indeed an optional dependency then we activate the - // feature named `package`, but otherwise if `package` is a required - // dependency then there's no feature associated with it. - if let Some(dep) = self - .summary - .dependencies() - .iter() - .find(|p| p.name_in_toml() == package) - { - if dep.is_optional() { - self.used.insert(package, platform.clone()); - } - } - self.deps - .entry(package) - .or_insert((false, BTreeMap::new())) - .1 - .insert(feat, platform); - } - - fn seen(&mut self, feat: InternedString, platform: &Option) -> bool { - if self.visited.insert(feat, platform.clone()).is_some() { - self.used.insert(feat, platform.clone()); - false - } else { - true - } - } - - fn require_dependency(&mut self, pkg: InternedString, platform: &Option) { - if self.seen(pkg, platform) { - return; - } - self.deps.entry(pkg).or_insert((false, BTreeMap::new())).0 = true; - } - - fn require_feature(&mut self, feat: InternedString, platform: &Option) -> CargoResult<()> { - if feat.is_empty() || self.seen(feat, platform) { - return Ok(()); - } - let feature = self - .summary - .features() - .get(feat.as_str()) - .expect("must be a valid feature"); - for fv in feature.1.as_slice() - { - match *fv { - FeatureValue::Feature(ref dep_feat) if **dep_feat == *feat => failure::bail!( - "cyclic feature dependency: feature `{}` depends on itself", - feat - ), - _ => {} - } - self.require_value(fv, &feature.0)?; - } - Ok(()) - } - - fn require_value(&mut self, fv: &FeatureValue, platform: &Option) -> CargoResult<()> { - match fv { - FeatureValue::Feature(feat) => 
self.require_feature(*feat, platform)?,
-            FeatureValue::Crate(dep) => self.require_dependency(*dep, platform),
-            FeatureValue::CrateFeature(dep, dep_feat) => {
-                self.require_crate_feature(*dep, *dep_feat, platform.clone())
-            }
-        };
-        Ok(())
-    }
-}
+//! There are 2 sources of facts for the resolver:
+//!
+//! - The `Registry` tells us for a `Dependency` what versions are available to fulfil it.
+//! - The `Summary` tells us for a version (and features) what dependencies need to be fulfilled for it to be activated.
+//!
+//! These constitute immutable facts, the sole ground truth that all other inference depends on.
+//! Theoretically this could all be enumerated ahead of time, but we want to be lazy and only
+//! look up things we need to. The compromise is to cache the results as they are computed.
+//!
+//! This module implements that cache in all the gory details
+
+use std::cmp::Ordering;
+use std::collections::{BTreeMap, HashMap, HashSet};
+use std::rc::Rc;
+
+use log::debug;
+
+use crate::core::interning::InternedString;
+use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec, Registry, Summary};
+use crate::util::errors::CargoResult;
+use crate::util::Platform;
+
+use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesMap};
+use crate::core::resolver::{ActivateResult, ResolveOpts};
+
+pub struct RegistryQueryer<'a> {
+    pub registry: &'a mut (dyn Registry + 'a),
+    replacements: &'a [(PackageIdSpec, Dependency)],
+    try_to_use: &'a HashSet<PackageId>,
+    /// If set the list of dependency candidates will be sorted by minimal
+    /// versions first. That allows `cargo update -Z minimal-versions` which will
+    /// specify minimum dependency versions to be used. 
+ minimal_versions: bool, + /// a cache of `Candidate`s that fulfil a `Dependency` + registry_cache: HashMap>>, + /// a cache of `Dependency`s that are required for a `Summary` + summary_cache: HashMap< + (Option, Summary, ResolveOpts), + Rc<(HashMap>, Rc>)>, + >, + /// all the cases we ended up using a supplied replacement + used_replacements: HashMap, +} + +impl<'a> RegistryQueryer<'a> { + pub fn new( + registry: &'a mut dyn Registry, + replacements: &'a [(PackageIdSpec, Dependency)], + try_to_use: &'a HashSet, + minimal_versions: bool, + ) -> Self { + RegistryQueryer { + registry, + replacements, + try_to_use, + minimal_versions, + registry_cache: HashMap::new(), + summary_cache: HashMap::new(), + used_replacements: HashMap::new(), + } + } + + pub fn used_replacement_for(&self, p: PackageId) -> Option<(PackageId, PackageId)> { + self.used_replacements.get(&p).map(|r| (p, r.package_id())) + } + + pub fn replacement_summary(&self, p: PackageId) -> Option<&Summary> { + self.used_replacements.get(&p) + } + + /// Queries the `registry` to return a list of candidates for `dep`. + /// + /// This method is the location where overrides are taken into account. If + /// any candidates are returned which match an override then the override is + /// applied by performing a second query for what the override should + /// return. 
+ pub fn query(&mut self, dep: &Dependency) -> CargoResult>> { + if let Some(out) = self.registry_cache.get(dep).cloned() { + return Ok(out); + } + + let mut ret = Vec::new(); + self.registry.query( + dep, + &mut |s| { + ret.push(s); + }, + false, + )?; + for summary in ret.iter_mut() { + let mut potential_matches = self + .replacements + .iter() + .filter(|&&(ref spec, _)| spec.matches(summary.package_id())); + + let &(ref spec, ref dep) = match potential_matches.next() { + None => continue, + Some(replacement) => replacement, + }; + debug!( + "found an override for {} {}", + dep.package_name(), + dep.version_req() + ); + + let mut summaries = self.registry.query_vec(dep, false)?.into_iter(); + let s = summaries.next().ok_or_else(|| { + failure::format_err!( + "no matching package for override `{}` found\n\ + location searched: {}\n\ + version required: {}", + spec, + dep.source_id(), + dep.version_req() + ) + })?; + let summaries = summaries.collect::>(); + if !summaries.is_empty() { + let bullets = summaries + .iter() + .map(|s| format!(" * {}", s.package_id())) + .collect::>(); + failure::bail!( + "the replacement specification `{}` matched \ + multiple packages:\n * {}\n{}", + spec, + s.package_id(), + bullets.join("\n") + ); + } + + // The dependency should be hard-coded to have the same name and an + // exact version requirement, so both of these assertions should + // never fail. 
+ assert_eq!(s.version(), summary.version()); + assert_eq!(s.name(), summary.name()); + + let replace = if s.source_id() == summary.source_id() { + debug!("Preventing\n{:?}\nfrom replacing\n{:?}", summary, s); + None + } else { + Some(s) + }; + let matched_spec = spec.clone(); + + // Make sure no duplicates + if let Some(&(ref spec, _)) = potential_matches.next() { + failure::bail!( + "overlapping replacement specifications found:\n\n \ + * {}\n * {}\n\nboth specifications match: {}", + matched_spec, + spec, + summary.package_id() + ); + } + + for dep in summary.dependencies() { + debug!("\t{} => {}", dep.package_name(), dep.version_req()); + } + if let Some(r) = replace { + self.used_replacements.insert(summary.package_id(), r); + } + } + + // When we attempt versions for a package we'll want to do so in a + // sorted fashion to pick the "best candidates" first. Currently we try + // prioritized summaries (those in `try_to_use`) and failing that we + // list everything from the maximum version to the lowest version. + ret.sort_unstable_by(|a, b| { + let a_in_previous = self.try_to_use.contains(&a.package_id()); + let b_in_previous = self.try_to_use.contains(&b.package_id()); + let previous_cmp = a_in_previous.cmp(&b_in_previous).reverse(); + match previous_cmp { + Ordering::Equal => { + let cmp = a.version().cmp(b.version()); + if self.minimal_versions { + // Lower version ordered first. + cmp + } else { + // Higher version ordered first. + cmp.reverse() + } + } + _ => previous_cmp, + } + }); + + let out = Rc::new(ret); + + self.registry_cache.insert(dep.clone(), out.clone()); + + Ok(out) + } + + /// Find out what dependencies will be added by activating `candidate`, + /// with features described in `opts`. Then look up in the `registry` + /// the candidates that will fulfil each of these dependencies, as it is the + /// next obvious question. 
+    pub fn build_deps(
+        &mut self,
+        parent: Option<PackageId>,
+        candidate: &Summary,
+        opts: &ResolveOpts,
+    ) -> ActivateResult<Rc<(HashMap<InternedString, Option<Platform>>, Rc<Vec<DepInfo>>)>> {
+        // if we have calculated a result before, then we can just return it,
+        // as it is a "pure" query of its arguments.
+        if let Some(out) = self
+            .summary_cache
+            .get(&(parent, candidate.clone(), opts.clone()))
+            .cloned()
+        {
+            return Ok(out);
+        }
+        // First, figure out our set of dependencies based on the requested set
+        // of features. This also calculates what features we're going to enable
+        // for our own dependencies.
+        let (used_features, deps) = resolve_features(parent, candidate, opts)?;
+
+        // Next, transform all dependencies into a list of possible candidates
+        // which can satisfy that dependency.
+        let mut deps = deps
+            .into_iter()
+            .map(|(dep, features)| {
+                let candidates = self.query(&dep)?;
+                Ok((dep, candidates, features))
+            })
+            .collect::<CargoResult<Vec<DepInfo>>>()?;
+
+        // Attempt to resolve dependencies with fewer candidates before trying
+        // dependencies with more candidates. This way if the dependency with
+        // only one candidate can't be resolved we don't have to do a bunch of
+        // work before we figure that out.
+        deps.sort_by_key(|&(_, ref a, _)| a.len());
+
+        let out = Rc::new((used_features, Rc::new(deps)));
+
+        // If we succeed we add the result to the cache so we can use it again next time.
+        // We don't cache the failure cases as they don't impl Clone.
+        self.summary_cache
+            .insert((parent, candidate.clone(), opts.clone()), out.clone());
+
+        Ok(out)
+    }
+}
+
+/// Returns the features we ended up using and
+/// all dependencies and the features we want from each of them.
+pub fn resolve_features<'b>(
+    parent: Option<PackageId>,
+    s: &'b Summary,
+    opts: &'b ResolveOpts,
+) -> ActivateResult<(HashMap<InternedString, Option<Platform>>, Vec<(Dependency, FeaturesMap)>)> {
+    // First, filter by dev-dependencies. 
+ let deps = s.dependencies(); + let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps); + + let reqs = build_requirements(s, opts)?; + let mut ret = Vec::new(); + let mut used_features = HashSet::new(); + let default_dep = (false, BTreeMap::new()); + + // Next, collect all actually enabled dependencies and their features. + for dep in deps { + // Skip optional dependencies, but not those enabled through a + // feature + if dep.is_optional() && !reqs.deps.contains_key(&dep.name_in_toml()) { + continue; + } + // So we want this dependency. Move the features we want from + // `feature_deps` to `ret` and register ourselves as using this + // name. + let base = reqs.deps.get(&dep.name_in_toml()).unwrap_or(&default_dep); + used_features.insert(dep.name_in_toml()); + let always_required = !dep.is_optional() + && !s + .dependencies() + .iter() + .any(|d| d.is_optional() && d.name_in_toml() == dep.name_in_toml()); + if always_required && base.0 { + return Err(match parent { + None => failure::format_err!( + "Package `{}` does not have feature `{}`. It has a required dependency \ + with that name, but only optional dependencies can be used as features.", + s.package_id(), + dep.name_in_toml() + ) + .into(), + Some(p) => ( + p, + ConflictReason::RequiredDependencyAsFeatures(dep.name_in_toml()), + ) + .into(), + }); + } + let mut base = base.1.clone(); + base.extend(dep.features().iter().map(|f| (f.clone(), dep.platform().map(|p| p.clone())))); + for feature in base.iter() { + if feature.0.contains('/') { + return Err(failure::format_err!( + "feature names may not contain slashes: `{}`", + feature.0 + ) + .into()); + } + } + ret.push((dep.clone(), Rc::new(base))); + } + + // Any entries in `reqs.dep` which weren't used are bugs in that the + // package does not actually have those dependencies. We classified + // them as dependencies in the first place because there is no such + // feature, either. 
+ let remaining = reqs + .deps + .keys() + .cloned() + .filter(|s| !used_features.contains(s)) + .collect::>(); + if !remaining.is_empty() { + let features = remaining.join(", "); + return Err(match parent { + None => failure::format_err!( + "Package `{}` does not have these features: `{}`", + s.package_id(), + features + ) + .into(), + Some(p) => (p, ConflictReason::MissingFeatures(features)).into(), + }); + } + + Ok((reqs.into_used(), ret)) +} + +/// Takes requested features for a single package from the input `ResolveOpts` and +/// recurses to find all requested features, dependencies and requested +/// dependency features in a `Requirements` object, returning it to the resolver. +fn build_requirements<'a, 'b: 'a>( + s: &'a Summary, + opts: &'b ResolveOpts, +) -> CargoResult> { + let mut reqs = Requirements::new(s); + + if opts.all_features { + for (key,val) in s.features() { + reqs.require_feature(*key, &val.0)?; + } + for dep in s.dependencies().iter().filter(|d| d.is_optional()) { + reqs.require_dependency(dep.name_in_toml(), &dep.platform().map(|p| p.clone())); + } + } else { + for (f, p) in opts.features.iter() { + reqs.require_value(&FeatureValue::new(*f, s), &p)?; + } + } + + if opts.uses_default_features { + if s.features().contains_key("default") { + reqs.require_feature(InternedString::new("default"), &s.features().get("default").unwrap().0)?; + } + } + + Ok(reqs) +} + +struct Requirements<'a> { + summary: &'a Summary, + // The deps map is a mapping of package name to list of features enabled. + // Each package should be enabled, and each package should have the + // specified set of features enabled. The boolean indicates whether this + // package was specifically requested (rather than just requesting features + // *within* this package). + deps: HashMap>)>, + // The used features set is the set of features which this local package had + // enabled, which is later used when compiling to instruct the code what + // features were enabled. 
+ used: HashMap>, + visited: HashMap>, +} + +impl Requirements<'_> { + fn new(summary: &Summary) -> Requirements<'_> { + Requirements { + summary, + deps: HashMap::new(), + used: HashMap::new(), + visited: HashMap::new(), + } + } + + fn into_used(self) -> HashMap> { + self.used + } + + fn require_crate_feature(&mut self, package: InternedString, feat: InternedString, platform: &Option) { + // If `package` is indeed an optional dependency then we activate the + // feature named `package`, but otherwise if `package` is a required + // dependency then there's no feature associated with it. + if let Some(dep) = self + .summary + .dependencies() + .iter() + .find(|p| p.name_in_toml() == package) + { + if dep.is_optional() { + self.used.insert(package, platform.clone()); + } + } + self.deps + .entry(package) + .or_insert((false, BTreeMap::new())) + .1 + .insert(feat, platform.clone()); + } + + fn seen(&mut self, feat: InternedString, platform: &Option) -> bool { + if self.visited.insert(feat, platform.clone()).is_none() { + self.used.insert(feat, platform.clone()); + false + } else { + true + } + } + + fn require_dependency(&mut self, pkg: InternedString, platform: &Option) { + if self.seen(pkg, platform) { + return; + } + self.deps.entry(pkg).or_insert((false, BTreeMap::new())).0 = true; + } + + fn require_feature(&mut self, feat: InternedString, platform: &Option) -> CargoResult<()> { + if feat.is_empty() || self.seen(feat, platform) { + return Ok(()); + } + let feature = self + .summary + .features() + .get(feat.as_str()) + .expect("must be a valid feature"); + for fv in feature.1.as_slice() + { + match *fv { + FeatureValue::Feature(ref dep_feat) if **dep_feat == *feat => failure::bail!( + "cyclic feature dependency: feature `{}` depends on itself", + feat + ), + _ => {} + } + self.require_value(fv, &feature.0)?; + } + Ok(()) + } + + fn require_value(&mut self, fv: &FeatureValue, platform: &Option) -> CargoResult<()> { + match fv { + FeatureValue::Feature(feat) => 
self.require_feature(*feat, platform)?, + FeatureValue::Crate(dep) => self.require_dependency(*dep, platform), + FeatureValue::CrateFeature(dep, dep_feat) => { + self.require_crate_feature(*dep, *dep_feat, platform) + } + }; + Ok(()) + } +} From 2a42ceeb7f1a03a3791f44839ea863953535c15e Mon Sep 17 00:00:00 2001 From: Serhii Plyhun Date: Thu, 1 Aug 2019 00:21:26 +0200 Subject: [PATCH 03/21] `dont_include_by_platform` fixed --- src/cargo/core/resolver/dep_cache.rs | 31 ++++++++++++++-------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/src/cargo/core/resolver/dep_cache.rs b/src/cargo/core/resolver/dep_cache.rs index 47d5261aeb0..67a0505396f 100644 --- a/src/cargo/core/resolver/dep_cache.rs +++ b/src/cargo/core/resolver/dep_cache.rs @@ -340,23 +340,24 @@ fn build_requirements<'a, 'b: 'a>( opts: &'b ResolveOpts, ) -> CargoResult> { let mut reqs = Requirements::new(s); + let empty_ftrs = (None, vec![]); if opts.all_features { for (key,val) in s.features() { - reqs.require_feature(*key, &val.0)?; + reqs.require_feature(*key, val.0.as_ref())?; } for dep in s.dependencies().iter().filter(|d| d.is_optional()) { - reqs.require_dependency(dep.name_in_toml(), &dep.platform().map(|p| p.clone())); + reqs.require_dependency(dep.name_in_toml(), dep.platform()); } } else { - for (f, p) in opts.features.iter() { - reqs.require_value(&FeatureValue::new(*f, s), &p)?; + for (f, _) in opts.features.iter() { + reqs.require_value(&FeatureValue::new(*f, s), s.features().get(f).unwrap_or(&empty_ftrs).0.as_ref())?; } } if opts.uses_default_features { if s.features().contains_key("default") { - reqs.require_feature(InternedString::new("default"), &s.features().get("default").unwrap().0)?; + reqs.require_feature(InternedString::new("default"), s.features().get("default").unwrap_or(&empty_ftrs).0.as_ref())?; } } @@ -392,7 +393,7 @@ impl Requirements<'_> { self.used } - fn require_crate_feature(&mut self, package: InternedString, feat: InternedString, platform: &Option) { 
+ fn require_crate_feature(&mut self, package: InternedString, feat: InternedString, platform: Option<&Platform>) { // If `package` is indeed an optional dependency then we activate the // feature named `package`, but otherwise if `package` is a required // dependency then there's no feature associated with it. @@ -403,33 +404,33 @@ impl Requirements<'_> { .find(|p| p.name_in_toml() == package) { if dep.is_optional() { - self.used.insert(package, platform.clone()); + self.used.insert(package, platform.cloned()); } } self.deps .entry(package) .or_insert((false, BTreeMap::new())) .1 - .insert(feat, platform.clone()); + .insert(feat, platform.cloned()); } - fn seen(&mut self, feat: InternedString, platform: &Option) -> bool { - if self.visited.insert(feat, platform.clone()).is_none() { - self.used.insert(feat, platform.clone()); + fn seen(&mut self, feat: InternedString, platform: Option<&Platform>) -> bool { + if self.visited.insert(feat, platform.cloned()).is_none() { + self.used.insert(feat, platform.cloned()); false } else { true } } - fn require_dependency(&mut self, pkg: InternedString, platform: &Option) { + fn require_dependency(&mut self, pkg: InternedString, platform: Option<&Platform>) { if self.seen(pkg, platform) { return; } self.deps.entry(pkg).or_insert((false, BTreeMap::new())).0 = true; } - fn require_feature(&mut self, feat: InternedString, platform: &Option) -> CargoResult<()> { + fn require_feature(&mut self, feat: InternedString, platform: Option<&Platform>) -> CargoResult<()> { if feat.is_empty() || self.seen(feat, platform) { return Ok(()); } @@ -447,12 +448,12 @@ impl Requirements<'_> { ), _ => {} } - self.require_value(fv, &feature.0)?; + self.require_value(fv, feature.0.as_ref())?; } Ok(()) } - fn require_value(&mut self, fv: &FeatureValue, platform: &Option) -> CargoResult<()> { + fn require_value(&mut self, fv: &FeatureValue, platform: Option<&Platform>) -> CargoResult<()> { match fv { FeatureValue::Feature(feat) => 
self.require_feature(*feat, platform)?, FeatureValue::Crate(dep) => self.require_dependency(*dep, platform), From 4cf22abe2f65c083aac29d6111fe16015bb51c5d Mon Sep 17 00:00:00 2001 From: Serhii Plyhun Date: Thu, 1 Aug 2019 17:10:51 +0200 Subject: [PATCH 04/21] Tests pass. --- src/cargo/core/resolver/dep_cache.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cargo/core/resolver/dep_cache.rs b/src/cargo/core/resolver/dep_cache.rs index 67a0505396f..e9c49cf3e75 100644 --- a/src/cargo/core/resolver/dep_cache.rs +++ b/src/cargo/core/resolver/dep_cache.rs @@ -455,7 +455,7 @@ impl Requirements<'_> { fn require_value(&mut self, fv: &FeatureValue, platform: Option<&Platform>) -> CargoResult<()> { match fv { - FeatureValue::Feature(feat) => self.require_feature(*feat, platform)?, + FeatureValue::Feature(feat) => self.require_feature(*feat, self.summary.features().get(feat).expect("must be a valid feature").0.as_ref())?, FeatureValue::Crate(dep) => self.require_dependency(*dep, platform), FeatureValue::CrateFeature(dep, dep_feat) => { self.require_crate_feature(*dep, *dep_feat, platform) From 7df90482f92dcf5a8190e2f7427467899137def9 Mon Sep 17 00:00:00 2001 From: Serhii Plyhun Date: Fri, 2 Aug 2019 12:53:11 +0200 Subject: [PATCH 05/21] Transitive dependency test --- tests/testsuite/cfg_features.rs | 139 ++++++++++++++++++++++++++++++++ 1 file changed, 139 insertions(+) diff --git a/tests/testsuite/cfg_features.rs b/tests/testsuite/cfg_features.rs index 99d0742e642..e54d0749b7b 100644 --- a/tests/testsuite/cfg_features.rs +++ b/tests/testsuite/cfg_features.rs @@ -190,6 +190,145 @@ fn dont_include_default() { .run(); } +#[cargo_test] +fn transitive() { + #[cfg(target_os = "macos")] + let config = "target_os = \"macos\""; + #[cfg(target_os = "windows")] + let config = "target_os = \"windows\""; + #[cfg(all(not(target_os = "macos"), not(target_os = "windows")))] + let config = "unix"; + + let p = project() + .no_manifest() + // root depends on a and 
c="1.1.0" + .file( + "root/Cargo.toml", + r#" + [package] + name = "root" + version = "0.0.1" + authors = [] + + [dependencies] + a = { version = "*", path = "../a" } + c = { version = "1.1.0", path = "../c1" } + "#, + ) + .file( + "root/src/main.rs", + r#" + fn main() { + println!("Hello, world!"); + } + "#, + ) + // a depends on b and on OSX depends on b's flag maybe + .file( + "a/Cargo.toml", + &format!( + r#" + [package] + name = "a" + version = "0.1.0" + + [lib] + name = "a" + + [target.'cfg(not({}))'.dependencies] + b = {{ version = "*", path = "../b" }} + + [target.'cfg({})'.dependencies] + b = {{ version = "*", path = "../b", features = ["maybe"] }} + "#, + config, + config, + ) + ) + .file( + "a/src/lib.rs", + r#" + "#, + ) + // b depends on c="=1.0.0" if maybe is active. + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.1.0" + + [dependencies] + c = { version = "1.0.0", path = "../c0", optional = true } + + [features] + maybe = ["c"] + "#, + ) + .file( + "b/src/lib.rs", + r#" + #[cfg(feature = "maybe")] + pub fn maybe() { + c::cee(); + } + "#, + ) + // c 1.0.0 + .file( + "c0/Cargo.toml", + r#" + [package] + name = "c" + version = "1.0.0" + + [lib] + name = "c" + + [dependencies] + "#, + ) + .file( + "c0/src/lib.rs", + r#" + pub fn cee() {} + "#, + ) + // c 1.1.0 + .file( + "c1/Cargo.toml", + r#" + [package] + name = "c" + version = "1.1.0" + + [lib] + name = "c" + + [dependencies] + "#, + ) + .file( + "c1/src/lib.rs", + r#" + "#, + ) + .build(); + + p.cargo("build").cwd("root") + .with_stderr( + "\ +[COMPILING] c v1.0.0 ([..]) +[COMPILING] c v1.1.0 ([..]) +[COMPILING] b v0.1.0 ([..]) +[COMPILING] a v0.1.0 ([..]) +[COMPILING] root v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + // https://github.com/rust-lang/cargo/issues/5313 #[cargo_test] #[cfg(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu"))] From ff3d63822be687e97290be7c21702cad6688decb Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Mon, 8 Apr 2019 07:53:00 +0200 Subject: [PATCH 06/21] Add tests for symlinks to git submodules or directories. --- tests/testsuite/package.rs | 43 ++++++++++++++++++++++++++++++++++ tests/testsuite/support/mod.rs | 21 +++++++++++++++-- 2 files changed, 62 insertions(+), 2 deletions(-) diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs index 92be2b2eee2..71afb01f9d6 100644 --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -504,6 +504,37 @@ fn package_git_submodule() { .run(); } +#[cargo_test] +fn package_symlink_to_submodule() { + let project = git::new("foo", |project| { + project + .file("src/lib.rs", "pub fn foo() {}") + .symlink("submodule", "submodule-link") + }).unwrap(); + + let library = git::new("submodule", |library| { + library.no_manifest().file("Makefile", "all:") + }).unwrap(); + + let repository = git2::Repository::open(&project.root()).unwrap(); + let url = path2url(library.root()).to_string(); + git::add_submodule(&repository, &url, Path::new("submodule")); + git::commit(&repository); + + let repository = git2::Repository::open(&project.root().join("submodule")).unwrap(); + repository + .reset( + &repository.revparse_single("HEAD").unwrap(), + git2::ResetType::Hard, + None + ).unwrap(); + + project + .cargo("package --no-verify -v") + .with_stderr_contains("[ARCHIVING] submodule/Makefile") + .run(); +} + #[cargo_test] fn no_duplicates_from_modified_tracked_files() { let root = paths::root().join("all"); @@ -699,6 +730,18 @@ Caused by: .run(); } +#[cargo_test] +fn package_symlink_to_dir() { + project() + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .file("bla/Makefile", "all:") + .symlink_dir("bla", "foo") + .build() + .cargo("package -v") + 
.with_stderr_contains("[ARCHIVING] foo/Makefile") + .run(); +} + #[cargo_test] fn do_not_package_if_repository_is_dirty() { let p = project().build(); diff --git a/tests/testsuite/support/mod.rs b/tests/testsuite/support/mod.rs index 01abfdba550..f64ac8dda71 100644 --- a/tests/testsuite/support/mod.rs +++ b/tests/testsuite/support/mod.rs @@ -178,11 +178,16 @@ impl FileBuilder { struct SymlinkBuilder { dst: PathBuf, src: PathBuf, + src_is_dir: bool, } impl SymlinkBuilder { pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { - SymlinkBuilder { dst, src } + SymlinkBuilder { dst, src, src_is_dir: false } + } + + pub fn new_dir(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { + SymlinkBuilder { dst, src, src_is_dir: true } } #[cfg(unix)] @@ -194,7 +199,11 @@ impl SymlinkBuilder { #[cfg(windows)] fn mk(&self) { self.dirname().mkdir_p(); - t!(os::windows::fs::symlink_file(&self.dst, &self.src)); + if self.src_is_dir { + t!(os::window::fs::symlink_dir(&self.dst, &self.src)); + } else { + t!(os::windows::fs::symlink_file(&self.dst, &self.src)); + } } fn dirname(&self) -> &Path { @@ -261,6 +270,14 @@ impl ProjectBuilder { self } + pub fn symlink_dir>(mut self, dst: T, src: T) -> Self { + self.symlinks.push(SymlinkBuilder::new_dir( + self.root.root().join(dst), + self.root.root().join(src), + )); + self + } + pub fn no_manifest(mut self) -> Self { self.no_manifest = true; self From a5fe06a40142e3bad0272c871a609e32d380e2c2 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Wed, 3 Apr 2019 15:07:04 +0200 Subject: [PATCH 07/21] Check if symlinks are directories Fixes #2748. Uses @ehuss's suggested fix. 
See https://github.com/rust-lang/cargo/pull/6817#issuecomment-480538976 --- src/cargo/sources/path.rs | 14 +++++++++++--- tests/testsuite/package.rs | 8 +++++++- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index af6d458c345..db8e7d99876 100644 --- a/src/cargo/sources/path.rs +++ b/src/cargo/sources/path.rs @@ -219,9 +219,17 @@ impl<'cfg> PathSource<'cfg> { // the untracked files are often part of a build and may become relevant // as part of a future commit. let index_files = index.iter().map(|entry| { - use libgit2_sys::GIT_FILEMODE_COMMIT; - let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32; - (join(root, &entry.path), Some(is_dir)) + use libgit2_sys::{GIT_FILEMODE_COMMIT, GIT_FILEMODE_LINK}; + // ``is_dir`` is an optimization to avoid calling + // ``fs::metadata`` on every file. + let is_dir = if entry.mode == GIT_FILEMODE_LINK as u32 { + // Let the code below figure out if this symbolic link points + // to a directory or not. 
+ None + } else { + Some(entry.mode == GIT_FILEMODE_COMMIT as u32) + }; + (join(root, &entry.path), is_dir) }); let mut opts = git2::StatusOptions::new(); opts.include_untracked(true); diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs index 71afb01f9d6..d895dca7812 100644 --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -506,10 +506,14 @@ fn package_git_submodule() { #[cargo_test] fn package_symlink_to_submodule() { + #[cfg(unix)] + use std::os::unix::fs::symlink as symlink; + #[cfg(windows)] + use std::os::unix::fs::symlink_dir as symlink; + let project = git::new("foo", |project| { project .file("src/lib.rs", "pub fn foo() {}") - .symlink("submodule", "submodule-link") }).unwrap(); let library = git::new("submodule", |library| { @@ -519,6 +523,8 @@ fn package_symlink_to_submodule() { let repository = git2::Repository::open(&project.root()).unwrap(); let url = path2url(library.root()).to_string(); git::add_submodule(&repository, &url, Path::new("submodule")); + t!(symlink(&project.root().join("submodule"), &project.root().join("submodule-link"))); + git::add(&repository); git::commit(&repository); let repository = git2::Repository::open(&project.root().join("submodule")).unwrap(); From 57de13f2ab8de02c9dfdc67d9d8637d0c2301f44 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Thu, 11 Apr 2019 13:28:45 +0200 Subject: [PATCH 08/21] enable the broken_symlink test on Windows --- tests/testsuite/package.rs | 10 ++++++---- tests/testsuite/support/mod.rs | 5 +++-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs index d895dca7812..19bcbdbdee7 100644 --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -509,7 +509,7 @@ fn package_symlink_to_submodule() { #[cfg(unix)] use std::os::unix::fs::symlink as symlink; #[cfg(windows)] - use std::os::unix::fs::symlink_dir as symlink; + use std::os::windows::fs::symlink_dir as symlink; let project = 
git::new("foo", |project| { project @@ -697,9 +697,11 @@ See [..] } #[cargo_test] -#[cfg(unix)] fn broken_symlink() { - use std::os::unix::fs; + #[cfg(unix)] + use std::os::unix::fs::symlink as symlink; + #[cfg(windows)] + use std::os::windows::fs::symlink_dir as symlink; let p = project() .file( @@ -718,7 +720,7 @@ fn broken_symlink() { ) .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); - t!(fs::symlink("nowhere", &p.root().join("src/foo.rs"))); + t!(symlink("nowhere", &p.root().join("src/foo.rs"))); p.cargo("package -v") .with_status(101) diff --git a/tests/testsuite/support/mod.rs b/tests/testsuite/support/mod.rs index f64ac8dda71..cf1c3a0ec0e 100644 --- a/tests/testsuite/support/mod.rs +++ b/tests/testsuite/support/mod.rs @@ -200,7 +200,7 @@ impl SymlinkBuilder { fn mk(&self) { self.dirname().mkdir_p(); if self.src_is_dir { - t!(os::window::fs::symlink_dir(&self.dst, &self.src)); + t!(os::windows::fs::symlink_dir(&self.dst, &self.src)); } else { t!(os::windows::fs::symlink_file(&self.dst, &self.src)); } @@ -261,7 +261,7 @@ impl ProjectBuilder { .push(FileBuilder::new(self.root.root().join(path), body)); } - /// Adds a symlink to the project. + /// Adds a symlink to a file to the project. 
pub fn symlink>(mut self, dst: T, src: T) -> Self { self.symlinks.push(SymlinkBuilder::new( self.root.root().join(dst), @@ -270,6 +270,7 @@ impl ProjectBuilder { self } + /// Create a symlink to a directory pub fn symlink_dir>(mut self, dst: T, src: T) -> Self { self.symlinks.push(SymlinkBuilder::new_dir( self.root.root().join(dst), From 06bb4749adef965f3a6f24602d082fd882784e86 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Thu, 11 Apr 2019 14:06:19 +0200 Subject: [PATCH 09/21] handle symlinks correctly in support/paths.rs --- tests/testsuite/support/paths.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/testsuite/support/paths.rs b/tests/testsuite/support/paths.rs index 7dfb65c69a3..bf3c763de87 100644 --- a/tests/testsuite/support/paths.rs +++ b/tests/testsuite/support/paths.rs @@ -153,7 +153,7 @@ impl CargoPathExt for Path { where F: Fn(i64, u32) -> ((i64, u32)), { - let stat = t!(path.metadata()); + let stat = t!(path.symlink_metadata()); let mtime = FileTime::from_last_modification_time(&stat); From 74238e529cb0db188b84740cd815f492d2b47e0a Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Thu, 4 Jul 2019 13:21:50 +0200 Subject: [PATCH 10/21] Ignore tests that need Administrator privileges on Windows. This patch allows you to run them when wanted with ``--ignored`` on Windows. --- ci/azure-test-all.yml | 7 +++++ tests/testsuite/build.rs | 10 +++---- tests/testsuite/package.rs | 53 ++++++++++++++++++++++++++++------ tests/testsuite/support/mod.rs | 18 ++++++++---- 4 files changed, 69 insertions(+), 19 deletions(-) diff --git a/ci/azure-test-all.yml b/ci/azure-test-all.yml index 626858431e8..df700161c63 100644 --- a/ci/azure-test-all.yml +++ b/ci/azure-test-all.yml @@ -26,3 +26,10 @@ steps: # fix the link errors. - bash: cargo test --features 'deny-warnings curl/force-system-lib-on-osx' displayName: "cargo test" + +# Run any tests that have been marked ignore. 
+# +# `--include-ignored` is only supported on nightly so far, so we have to call +# this separately for now. +- bash: cargo test --features 'deny-warnings curl/force-system-lib-on-osx' -- --ignored + displayName: "cargo test -- --ignored" diff --git a/tests/testsuite/build.rs b/tests/testsuite/build.rs index 59ed086708d..ce888880e15 100644 --- a/tests/testsuite/build.rs +++ b/tests/testsuite/build.rs @@ -1495,12 +1495,12 @@ package `test v0.0.0 ([CWD])`", } #[cargo_test] +#[cfg_attr(windows, ignore)] +/// Make sure ignored symlinks don't break the build +/// +/// This test is marked ``ignore`` on Windows because it needs admin permissions. +/// Run it with ``--ignored``. fn ignore_broken_symlinks() { - // windows and symlinks don't currently agree that well - if cfg!(windows) { - return; - } - let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs index 19bcbdbdee7..fa59ccf8588 100644 --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -505,25 +505,39 @@ fn package_git_submodule() { } #[cargo_test] +#[cfg_attr(windows, ignore)] +/// Tests if a symlink to a git submodule is properly handled. +/// +/// This test is ignored on Windows, because it needs Administrator +/// permissions to run. If you do want to run this test, please +/// run the tests with ``--ignored``, e.g. 
+/// +/// ```text +/// cargo test -- --ignored +/// ``` fn package_symlink_to_submodule() { #[cfg(unix)] - use std::os::unix::fs::symlink as symlink; + use std::os::unix::fs::symlink; #[cfg(windows)] use std::os::windows::fs::symlink_dir as symlink; let project = git::new("foo", |project| { - project - .file("src/lib.rs", "pub fn foo() {}") - }).unwrap(); + project.file("src/lib.rs", "pub fn foo() {}") + }) + .unwrap(); let library = git::new("submodule", |library| { library.no_manifest().file("Makefile", "all:") - }).unwrap(); + }) + .unwrap(); let repository = git2::Repository::open(&project.root()).unwrap(); let url = path2url(library.root()).to_string(); git::add_submodule(&repository, &url, Path::new("submodule")); - t!(symlink(&project.root().join("submodule"), &project.root().join("submodule-link"))); + t!(symlink( + &project.root().join("submodule"), + &project.root().join("submodule-link") + )); git::add(&repository); git::commit(&repository); @@ -532,8 +546,9 @@ fn package_symlink_to_submodule() { .reset( &repository.revparse_single("HEAD").unwrap(), git2::ResetType::Hard, - None - ).unwrap(); + None, + ) + .unwrap(); project .cargo("package --no-verify -v") @@ -697,9 +712,19 @@ See [..] } #[cargo_test] +#[cfg_attr(windows, ignore)] +/// Tests if a broken symlink is properly handled when packaging. +/// +/// This test is ignored on Windows, because it needs Administrator +/// permissions to run. If you do want to run this test, please +/// run the tests with ``--ignored``, e.g. +/// +/// ```text +/// cargo test -- --ignored +/// ``` fn broken_symlink() { #[cfg(unix)] - use std::os::unix::fs::symlink as symlink; + use std::os::unix::fs::symlink; #[cfg(windows)] use std::os::windows::fs::symlink_dir as symlink; @@ -739,6 +764,16 @@ Caused by: } #[cargo_test] +#[cfg_attr(windows, ignore)] +/// Tests if a symlink to a directory is proberly included. +/// +/// This test is ignored on Windows, because it needs Administrator +/// permissions to run. 
If you do want to run this test, please +/// run the tests with ``--ignored``, e.g. +/// +/// ```text +/// cargo test -- --ignored +/// ``` fn package_symlink_to_dir() { project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) diff --git a/tests/testsuite/support/mod.rs b/tests/testsuite/support/mod.rs index cf1c3a0ec0e..f1da02e255c 100644 --- a/tests/testsuite/support/mod.rs +++ b/tests/testsuite/support/mod.rs @@ -183,11 +183,19 @@ struct SymlinkBuilder { impl SymlinkBuilder { pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { - SymlinkBuilder { dst, src, src_is_dir: false } + SymlinkBuilder { + dst, + src, + src_is_dir: false, + } } pub fn new_dir(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { - SymlinkBuilder { dst, src, src_is_dir: true } + SymlinkBuilder { + dst, + src, + src_is_dir: true, + } } #[cfg(unix)] @@ -273,9 +281,9 @@ impl ProjectBuilder { /// Create a symlink to a directory pub fn symlink_dir>(mut self, dst: T, src: T) -> Self { self.symlinks.push(SymlinkBuilder::new_dir( - self.root.root().join(dst), - self.root.root().join(src), - )); + self.root.root().join(dst), + self.root.root().join(src), + )); self } From 22f137ccff34c2a9f74da89606eb3b75bd0531eb Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Thu, 18 Jul 2019 14:37:29 +0200 Subject: [PATCH 11/21] Fix doctests that used ignore to hide code that didn't compile. --- src/cargo/core/features.rs | 2 +- src/cargo/util/network.rs | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/cargo/core/features.rs b/src/cargo/core/features.rs index 9fd8161ce35..b910eb72661 100644 --- a/src/cargo/core/features.rs +++ b/src/cargo/core/features.rs @@ -21,7 +21,7 @@ //! 3. To actually perform the feature gate, you'll want to have code that looks //! like: //! -//! ```rust,ignore +//! ```rust,compile_fail //! use core::{Feature, Features}; //! //! 
let feature = Feature::launch_into_space(); diff --git a/src/cargo/util/network.rs b/src/cargo/util/network.rs index 2873dea253b..c0d5b2a5ae9 100644 --- a/src/cargo/util/network.rs +++ b/src/cargo/util/network.rs @@ -73,9 +73,12 @@ fn maybe_spurious(err: &Error) -> bool { /// /// # Examples /// -/// ```ignore -/// use util::network; -/// cargo_result = network::with_retry(&config, || something.download()); +/// ``` +/// # use crate::cargo::util::{CargoResult, Config}; +/// # let download_something = || return Ok(()); +/// # let config = Config::default().unwrap(); +/// use cargo::util::network; +/// let cargo_result = network::with_retry(&config, || download_something()); /// ``` pub fn with_retry(config: &Config, mut callback: F) -> CargoResult where From 2f73513548a5f0e495b97a2df5da29b2fe3a1328 Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Mon, 29 Jul 2019 08:23:30 +0200 Subject: [PATCH 12/21] Remove appveyor-aimed #[ignore] Seems like it's no longer necessary (this test ran fine when `--ignored` was specified --- tests/testsuite/small_fd_limits.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/testsuite/small_fd_limits.rs b/tests/testsuite/small_fd_limits.rs index 27558a8657f..f0ca6988e32 100644 --- a/tests/testsuite/small_fd_limits.rs +++ b/tests/testsuite/small_fd_limits.rs @@ -98,9 +98,6 @@ fn use_git_gc() { } #[cargo_test] -// it looks like this test passes on some windows machines but not others, -// notably not on AppVeyor's machines. Sounds like another but for another day. 
-#[cfg_attr(windows, ignore)] fn avoid_using_git() { let path = env::var_os("PATH").unwrap_or_default(); let mut paths = env::split_paths(&path).collect::>(); From 1ef495f133e3f5f8a15e36f6efbd6e223dab6a0a Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Mon, 29 Jul 2019 08:36:13 +0200 Subject: [PATCH 13/21] Don't run symlink tests based on `symlink_supported` --- tests/testsuite/build.rs | 16 +++++++------ tests/testsuite/package.rs | 47 +++++++++++++++++--------------------- 2 files changed, 30 insertions(+), 33 deletions(-) diff --git a/tests/testsuite/build.rs b/tests/testsuite/build.rs index ce888880e15..7c6323425aa 100644 --- a/tests/testsuite/build.rs +++ b/tests/testsuite/build.rs @@ -4,11 +4,10 @@ use std::io::prelude::*; use crate::support::paths::{root, CargoPathExt}; use crate::support::registry::Package; -use crate::support::ProjectBuilder; use crate::support::{ - basic_bin_manifest, basic_lib_manifest, basic_manifest, rustc_host, sleep_ms, + basic_bin_manifest, basic_lib_manifest, basic_manifest, main_file, project, rustc_host, + sleep_ms, symlink_supported, Execs, ProjectBuilder, }; -use crate::support::{main_file, project, Execs}; use cargo::util::paths::dylib_path_envvar; #[cargo_test] @@ -1495,12 +1494,15 @@ package `test v0.0.0 ([CWD])`", } #[cargo_test] -#[cfg_attr(windows, ignore)] -/// Make sure ignored symlinks don't break the build +/// Make sure broken symlinks don't break the build /// -/// This test is marked ``ignore`` on Windows because it needs admin permissions. -/// Run it with ``--ignored``. +/// This test requires you to be able to make symlinks. +/// For windows, this may require you to enable developer mode. 
fn ignore_broken_symlinks() { + if !symlink_supported() { + return; + } + let p = project() .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs index fa59ccf8588..c099672daa8 100644 --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -3,11 +3,11 @@ use std::fs::File; use std::io::prelude::*; use std::path::Path; -use crate::support::cargo_process; use crate::support::paths::CargoPathExt; use crate::support::registry::Package; use crate::support::{ - basic_manifest, git, path2url, paths, project, publish::validate_crate_contents, registry, + basic_manifest, cargo_process, git, path2url, paths, project, publish::validate_crate_contents, + registry, symlink_supported, }; use git2; @@ -505,22 +505,20 @@ fn package_git_submodule() { } #[cargo_test] -#[cfg_attr(windows, ignore)] /// Tests if a symlink to a git submodule is properly handled. /// -/// This test is ignored on Windows, because it needs Administrator -/// permissions to run. If you do want to run this test, please -/// run the tests with ``--ignored``, e.g. -/// -/// ```text -/// cargo test -- --ignored -/// ``` +/// This test requires you to be able to make symlinks. +/// For windows, this may require you to enable developer mode. fn package_symlink_to_submodule() { #[cfg(unix)] use std::os::unix::fs::symlink; #[cfg(windows)] use std::os::windows::fs::symlink_dir as symlink; + if !symlink_supported() { + return; + } + let project = git::new("foo", |project| { project.file("src/lib.rs", "pub fn foo() {}") }) @@ -712,22 +710,20 @@ See [..] } #[cargo_test] -#[cfg_attr(windows, ignore)] /// Tests if a broken symlink is properly handled when packaging. /// -/// This test is ignored on Windows, because it needs Administrator -/// permissions to run. If you do want to run this test, please -/// run the tests with ``--ignored``, e.g. 
-/// -/// ```text -/// cargo test -- --ignored -/// ``` +/// This test requires you to be able to make symlinks. +/// For windows, this may require you to enable developer mode. fn broken_symlink() { #[cfg(unix)] use std::os::unix::fs::symlink; #[cfg(windows)] use std::os::windows::fs::symlink_dir as symlink; + if !symlink_supported() { + return; + } + let p = project() .file( "Cargo.toml", @@ -764,17 +760,16 @@ Caused by: } #[cargo_test] -#[cfg_attr(windows, ignore)] /// Tests if a symlink to a directory is proberly included. /// -/// This test is ignored on Windows, because it needs Administrator -/// permissions to run. If you do want to run this test, please -/// run the tests with ``--ignored``, e.g. -/// -/// ```text -/// cargo test -- --ignored -/// ``` +/// This test requires you to be able to make symlinks. +/// For windows, this may require you to enable developer mode. fn package_symlink_to_dir() { + + if !symlink_supported() { + return; + } + project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .file("bla/Makefile", "all:") From 3ac678ec14b0d73ab3460430305b8b0567d49f1f Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Tue, 30 Jul 2019 09:32:00 +0200 Subject: [PATCH 14/21] Remove ``--ignored`` from CI --- ci/azure-test-all.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/ci/azure-test-all.yml b/ci/azure-test-all.yml index df700161c63..626858431e8 100644 --- a/ci/azure-test-all.yml +++ b/ci/azure-test-all.yml @@ -26,10 +26,3 @@ steps: # fix the link errors. - bash: cargo test --features 'deny-warnings curl/force-system-lib-on-osx' displayName: "cargo test" - -# Run any tests that have been marked ignore. -# -# `--include-ignored` is only supported on nightly so far, so we have to call -# this separately for now. 
-- bash: cargo test --features 'deny-warnings curl/force-system-lib-on-osx' -- --ignored - displayName: "cargo test -- --ignored" From 8596fbf94d7f50d93a1b070dc100dc61e8dee13c Mon Sep 17 00:00:00 2001 From: Thom Wiggers Date: Tue, 30 Jul 2019 09:51:14 +0200 Subject: [PATCH 15/21] Cargo fmt --- tests/testsuite/package.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs index c099672daa8..6a070ea0acf 100644 --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -765,7 +765,6 @@ Caused by: /// This test requires you to be able to make symlinks. /// For windows, this may require you to enable developer mode. fn package_symlink_to_dir() { - if !symlink_supported() { return; } From 82adbb6a74eb1906ee09e5df40affc2a7092557e Mon Sep 17 00:00:00 2001 From: debris Date: Tue, 30 Jul 2019 16:29:50 +0200 Subject: [PATCH 16/21] fix #7007, improve error message for unmatched prerelease dependencies --- src/cargo/core/resolver/errors.rs | 55 +++++++++++++++++++------------ tests/testsuite/registry.rs | 49 +++++++++++++++++++++++++++ 2 files changed, 83 insertions(+), 21 deletions(-) diff --git a/src/cargo/core/resolver/errors.rs b/src/cargo/core/resolver/errors.rs index e67ec9ede01..359ca186c50 100644 --- a/src/cargo/core/resolver/errors.rs +++ b/src/cargo/core/resolver/errors.rs @@ -251,14 +251,14 @@ pub(super) fn activation_error( // Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing` // was meant. So we try asking the registry for a `fuzzy` search for suggestions. 
let mut candidates = Vec::new(); - if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.name()), true) { + if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.clone()), true) { return to_resolve_err(e); }; - candidates.sort_unstable(); - candidates.dedup(); + candidates.sort_unstable_by(|a, b| a.name().cmp(&b.name())); + candidates.dedup_by(|a, b| a.name() == b.name()); let mut candidates: Vec<_> = candidates .iter() - .map(|n| (lev_distance(&*new_dep.package_name(), &*n), n)) + .map(|n| (lev_distance(&*new_dep.package_name(), &*n.name()), n)) .filter(|&(d, _)| d < 4) .collect(); candidates.sort_by_key(|o| o.0); @@ -269,25 +269,38 @@ pub(super) fn activation_error( dep.source_id() ); if !candidates.is_empty() { - let mut names = candidates - .iter() - .take(3) - .map(|c| c.1.as_str()) - .collect::>(); - - if candidates.len() > 3 { - names.push("..."); + // If dependency package name is equal to the name of the candidate here + // it may be a prerelease package which hasn't been speficied correctly + if dep.package_name() == candidates[0].1.name() && + candidates[0].1.package_id().version().is_prerelease() { + msg.push_str("prerelease package needs to be specified explicitly\n"); + msg.push_str(&format!( + "{name} = {{ version = \"{version}\" }}", + name = candidates[0].1.name(), + version = candidates[0].1.package_id().version() + )); + } else { + let mut names = candidates + .iter() + .take(3) + .map(|c| c.1.name().as_str()) + .collect::>(); + + if candidates.len() > 3 { + names.push("..."); + } + + msg.push_str("perhaps you meant: "); + msg.push_str(&names.iter().enumerate().fold( + String::default(), + |acc, (i, el)| match i { + 0 => acc + el, + i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el, + _ => acc + ", " + el, + }, + )); } - msg.push_str("perhaps you meant: "); - msg.push_str(&names.iter().enumerate().fold( - String::default(), - |acc, (i, el)| match i { - 0 => acc + el, - i if names.len() - 1 == i && 
candidates.len() <= 3 => acc + " or " + el, - _ => acc + ", " + el, - }, - )); msg.push_str("\n"); } msg.push_str("required by "); diff --git a/tests/testsuite/registry.rs b/tests/testsuite/registry.rs index 9fb92d80f65..2db759ac1e2 100644 --- a/tests/testsuite/registry.rs +++ b/tests/testsuite/registry.rs @@ -1395,6 +1395,55 @@ fn use_semver() { p.cargo("build").run(); } +#[cargo_test] +fn use_semver_package_incorrectly() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.1-alpha.0" + authors = [] + "#, + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.1.0" + authors = [] + + [dependencies] + a = { version = "^0.1", path = "../a" } + "#, + ) + .file("a/src/main.rs", "fn main() {}") + .file("b/src/main.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: no matching package named `a` found +location searched: [..] +prerelease package needs to be specified explicitly +a = { version = \"0.1.1-alpha.0\" } +required by package `b v0.1.0 ([..])` +", + ) + .run(); +} + #[cargo_test] fn only_download_relevant() { let p = project() From fb794247e25752270b895f868f4db1bac955e468 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Tue, 30 Jul 2019 08:55:57 -0700 Subject: [PATCH 17/21] Fix excluding target dirs from backups on OSX This fixes an accidental regression from #6880 identified in #7189 by moving where the configuration of backup preferences happens since it was accidentally never happening due to the folder always having been created. 
--- src/cargo/core/compiler/layout.rs | 71 ++++++++++++++++--------------- 1 file changed, 37 insertions(+), 34 deletions(-) diff --git a/src/cargo/core/compiler/layout.rs b/src/cargo/core/compiler/layout.rs index 40140e8ce98..e4ae2bc5174 100644 --- a/src/cargo/core/compiler/layout.rs +++ b/src/cargo/core/compiler/layout.rs @@ -109,6 +109,14 @@ impl Layout { /// /// This function will block if the directory is already locked. pub fn at(config: &Config, root: Filesystem) -> CargoResult { + // If the root directory doesn't already exist go ahead and create it + // here. Use this opportunity to exclude it from backups as well if the + // system supports it since this is a freshly created folder. + if !root.as_path_unlocked().exists() { + root.create_dir()?; + exclude_from_backups(root.as_path_unlocked()); + } + // For now we don't do any more finer-grained locking on the artifact // directory, so just lock the entire thing for the duration of this // compile. @@ -127,42 +135,8 @@ impl Layout { }) } - #[cfg(not(target_os = "macos"))] - fn exclude_from_backups(&self, _: &Path) {} - - #[cfg(target_os = "macos")] - /// Marks files or directories as excluded from Time Machine on macOS - /// - /// This is recommended to prevent derived/temporary files from bloating backups. 
- fn exclude_from_backups(&self, path: &Path) { - use core_foundation::base::TCFType; - use core_foundation::{number, string, url}; - use std::ptr; - - // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey - let is_excluded_key: Result = "NSURLIsExcludedFromBackupKey".parse(); - let path = url::CFURL::from_path(path, false); - if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) { - unsafe { - url::CFURLSetResourcePropertyForKey( - path.as_concrete_TypeRef(), - is_excluded_key.as_concrete_TypeRef(), - number::kCFBooleanTrue as *const _, - ptr::null_mut(), - ); - } - } - // Errors are ignored, since it's an optional feature and failure - // doesn't prevent Cargo from working - } - /// Makes sure all directories stored in the Layout exist on the filesystem. pub fn prepare(&mut self) -> io::Result<()> { - if fs::metadata(&self.root).is_err() { - fs::create_dir_all(&self.root)?; - self.exclude_from_backups(&self.root); - } - mkdir(&self.deps)?; mkdir(&self.native)?; mkdir(&self.incremental)?; @@ -209,3 +183,32 @@ impl Layout { &self.build } } + +#[cfg(not(target_os = "macos"))] +fn exclude_from_backups(_: &Path) {} + +#[cfg(target_os = "macos")] +/// Marks files or directories as excluded from Time Machine on macOS +/// +/// This is recommended to prevent derived/temporary files from bloating backups. 
+fn exclude_from_backups(path: &Path) { + use core_foundation::base::TCFType; + use core_foundation::{number, string, url}; + use std::ptr; + + // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey + let is_excluded_key: Result = "NSURLIsExcludedFromBackupKey".parse(); + let path = url::CFURL::from_path(path, false); + if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) { + unsafe { + url::CFURLSetResourcePropertyForKey( + path.as_concrete_TypeRef(), + is_excluded_key.as_concrete_TypeRef(), + number::kCFBooleanTrue as *const _, + ptr::null_mut(), + ); + } + } + // Errors are ignored, since it's an optional feature and failure + // doesn't prevent Cargo from working +} From 15fe1e8216dfc766b81b40c6ff965c98fa9d0438 Mon Sep 17 00:00:00 2001 From: debris Date: Wed, 31 Jul 2019 10:49:32 +0200 Subject: [PATCH 18/21] cargo fmt --- src/cargo/core/resolver/errors.rs | 194 +++++++++++++++--------------- 1 file changed, 98 insertions(+), 96 deletions(-) diff --git a/src/cargo/core/resolver/errors.rs b/src/cargo/core/resolver/errors.rs index 359ca186c50..6d56cb33ca8 100644 --- a/src/cargo/core/resolver/errors.rs +++ b/src/cargo/core/resolver/errors.rs @@ -203,113 +203,115 @@ pub(super) fn activation_error( }; candidates.sort_unstable_by(|a, b| b.version().cmp(a.version())); - let mut msg = if !candidates.is_empty() { - let versions = { - let mut versions = candidates - .iter() - .take(3) - .map(|cand| cand.version().to_string()) - .collect::>(); - - if candidates.len() > 3 { - versions.push("...".into()); - } - - versions.join(", ") - }; - - let mut msg = format!( - "failed to select a version for the requirement `{} = \"{}\"`\n \ - candidate versions found which didn't match: {}\n \ - location searched: {}\n", - dep.package_name(), - dep.version_req(), - versions, - registry.describe_source(dep.source_id()), - ); - msg.push_str("required by "); - msg.push_str(&describe_path( - 
&cx.parents.path_to_bottom(&parent.package_id()), - )); - - // If we have a path dependency with a locked version, then this may - // indicate that we updated a sub-package and forgot to run `cargo - // update`. In this case try to print a helpful error! - if dep.source_id().is_path() && dep.version_req().to_string().starts_with('=') { - msg.push_str( - "\nconsider running `cargo update` to update \ - a path dependency's locked version", - ); - } - - if registry.is_replaced(dep.source_id()) { - msg.push_str("\nperhaps a crate was updated and forgotten to be re-vendored?"); - } - - msg - } else { - // Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing` - // was meant. So we try asking the registry for a `fuzzy` search for suggestions. - let mut candidates = Vec::new(); - if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.clone()), true) { - return to_resolve_err(e); - }; - candidates.sort_unstable_by(|a, b| a.name().cmp(&b.name())); - candidates.dedup_by(|a, b| a.name() == b.name()); - let mut candidates: Vec<_> = candidates - .iter() - .map(|n| (lev_distance(&*new_dep.package_name(), &*n.name()), n)) - .filter(|&(d, _)| d < 4) - .collect(); - candidates.sort_by_key(|o| o.0); - let mut msg = format!( - "no matching package named `{}` found\n\ - location searched: {}\n", - dep.package_name(), - dep.source_id() - ); + let mut msg = if !candidates.is_empty() { - // If dependency package name is equal to the name of the candidate here - // it may be a prerelease package which hasn't been speficied correctly - if dep.package_name() == candidates[0].1.name() && - candidates[0].1.package_id().version().is_prerelease() { - msg.push_str("prerelease package needs to be specified explicitly\n"); - msg.push_str(&format!( - "{name} = {{ version = \"{version}\" }}", - name = candidates[0].1.name(), - version = candidates[0].1.package_id().version() - )); - } else { - let mut names = candidates + let versions = { + let mut versions = candidates 
.iter() .take(3) - .map(|c| c.1.name().as_str()) + .map(|cand| cand.version().to_string()) .collect::>(); if candidates.len() > 3 { - names.push("..."); + versions.push("...".into()); } - msg.push_str("perhaps you meant: "); - msg.push_str(&names.iter().enumerate().fold( - String::default(), - |acc, (i, el)| match i { - 0 => acc + el, - i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el, - _ => acc + ", " + el, - }, - )); + versions.join(", ") + }; + + let mut msg = format!( + "failed to select a version for the requirement `{} = \"{}\"`\n \ + candidate versions found which didn't match: {}\n \ + location searched: {}\n", + dep.package_name(), + dep.version_req(), + versions, + registry.describe_source(dep.source_id()), + ); + msg.push_str("required by "); + msg.push_str(&describe_path( + &cx.parents.path_to_bottom(&parent.package_id()), + )); + + // If we have a path dependency with a locked version, then this may + // indicate that we updated a sub-package and forgot to run `cargo + // update`. In this case try to print a helpful error! + if dep.source_id().is_path() && dep.version_req().to_string().starts_with('=') { + msg.push_str( + "\nconsider running `cargo update` to update \ + a path dependency's locked version", + ); } - msg.push_str("\n"); - } - msg.push_str("required by "); - msg.push_str(&describe_path( - &cx.parents.path_to_bottom(&parent.package_id()), - )); + if registry.is_replaced(dep.source_id()) { + msg.push_str("\nperhaps a crate was updated and forgotten to be re-vendored?"); + } - msg - }; + msg + } else { + // Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing` + // was meant. So we try asking the registry for a `fuzzy` search for suggestions. 
+ let mut candidates = Vec::new(); + if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.clone()), true) { + return to_resolve_err(e); + }; + candidates.sort_unstable_by(|a, b| a.name().cmp(&b.name())); + candidates.dedup_by(|a, b| a.name() == b.name()); + let mut candidates: Vec<_> = candidates + .iter() + .map(|n| (lev_distance(&*new_dep.package_name(), &*n.name()), n)) + .filter(|&(d, _)| d < 4) + .collect(); + candidates.sort_by_key(|o| o.0); + let mut msg = format!( + "no matching package named `{}` found\n\ + location searched: {}\n", + dep.package_name(), + dep.source_id() + ); + if !candidates.is_empty() { + // If dependency package name is equal to the name of the candidate here + // it may be a prerelease package which hasn't been speficied correctly + if dep.package_name() == candidates[0].1.name() + && candidates[0].1.package_id().version().is_prerelease() + { + msg.push_str("prerelease package needs to be specified explicitly\n"); + msg.push_str(&format!( + "{name} = {{ version = \"{version}\" }}", + name = candidates[0].1.name(), + version = candidates[0].1.package_id().version() + )); + } else { + let mut names = candidates + .iter() + .take(3) + .map(|c| c.1.name().as_str()) + .collect::>(); + + if candidates.len() > 3 { + names.push("..."); + } + + msg.push_str("perhaps you meant: "); + msg.push_str(&names.iter().enumerate().fold( + String::default(), + |acc, (i, el)| match i { + 0 => acc + el, + i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el, + _ => acc + ", " + el, + }, + )); + } + + msg.push_str("\n"); + } + msg.push_str("required by "); + msg.push_str(&describe_path( + &cx.parents.path_to_bottom(&parent.package_id()), + )); + + msg + }; if let Some(config) = config { if config.offline() { From a4fa8685a9842d092a004d5bc18d9ee53f99aadf Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Wed, 31 Jul 2019 22:46:29 +0300 Subject: [PATCH 19/21] tests: Enable features to fix unstabilized `#[bench]` --- 
tests/testsuite/bench.rs | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/testsuite/bench.rs b/tests/testsuite/bench.rs index 42f4415279d..35488793671 100644 --- a/tests/testsuite/bench.rs +++ b/tests/testsuite/bench.rs @@ -58,7 +58,7 @@ fn bench_bench_implicit() { .file( "src/main.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { } @@ -364,7 +364,7 @@ fn bench_with_lib_dep() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; /// @@ -432,7 +432,7 @@ fn bench_with_deep_lib_dep() { .file( "src/lib.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate foo; #[cfg(test)] @@ -448,7 +448,7 @@ fn bench_with_deep_lib_dep() { .file( "src/lib.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; @@ -495,7 +495,7 @@ fn external_bench_explicit() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; pub fn get_hello() -> &'static str { "Hello" } @@ -541,7 +541,7 @@ fn external_bench_implicit() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; @@ -760,7 +760,7 @@ fn lib_bin_same_name() { .file( "src/lib.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; #[bench] fn lib_bench(_b: &mut test::Bencher) {} @@ -769,7 +769,7 @@ fn lib_bin_same_name() { .file( "src/main.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[allow(unused_extern_crates)] extern crate foo; #[cfg(test)] @@ -804,7 +804,7 @@ fn lib_with_standard_name() { .file( "src/lib.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; @@ -919,7 +919,7 @@ fn bench_dylib() { .file( "src/lib.rs", r#" - #![cfg_attr(test, 
feature(test))] + #![feature(test)] extern crate bar as the_bar; #[cfg(test)] extern crate test; @@ -1061,7 +1061,7 @@ fn bench_with_examples() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; #[cfg(test)] From 9cde46cb2592f6744b8dd3f9f98c4e3458ee881f Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Wed, 17 Jul 2019 12:41:27 -0700 Subject: [PATCH 20/21] Enable pipelined compilation by default This commit enables pipelined compilation by default in Cargo now that the requisite support has been stabilized in rust-lang/rust#62766. This involved minor updates in a number of locations here and there, but nothing of meat has changed from the original implementation (just tweaks to how rustc is called). --- .../compiler/build_context/target_info.rs | 14 ++++ src/cargo/core/compiler/context/mod.rs | 2 +- src/cargo/core/compiler/mod.rs | 81 +++++++------------ src/cargo/ops/fix.rs | 2 +- tests/testsuite/build_script.rs | 7 +- tests/testsuite/cache_messages.rs | 58 ++++++++++--- tests/testsuite/dep_info.rs | 2 +- tests/testsuite/profile_overrides.rs | 14 ++-- tests/testsuite/rustc_info_cache.rs | 5 ++ tests/testsuite/rustdoc.rs | 6 ++ 10 files changed, 114 insertions(+), 77 deletions(-) diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs index ecdb4239c65..81a2d622c63 100644 --- a/src/cargo/core/compiler/build_context/target_info.rs +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -34,6 +34,7 @@ pub struct TargetInfo { pub rustflags: Vec, /// Extra flags to pass to `rustdoc`, see `env_args`. pub rustdocflags: Vec, + pub supports_pipelining: Option, } /// Kind of each file generated by a Unit, part of `FileType`. @@ -98,6 +99,18 @@ impl TargetInfo { .args(&rustflags) .env_remove("RUSTC_LOG"); + // NOTE: set this unconditionally to `true` once support for `--json`
+ // + // Also note that we only learn about this functionality for the host + // compiler since the host/target rustc are always the same. + let mut pipelining_test = process.clone(); + pipelining_test.args(&["--error-format=json", "--json=artifacts"]); + let supports_pipelining = match kind { + Kind::Host => Some(rustc.cached_output(&pipelining_test).is_ok()), + Kind::Target => None, + }; + let target_triple = requested_target .as_ref() .map(|s| s.as_str()) @@ -179,6 +192,7 @@ impl TargetInfo { "RUSTDOCFLAGS", )?, cfg, + supports_pipelining, }) } diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs index 3cb75e344ba..24661b9d189 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/context/mod.rs @@ -77,7 +77,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { .config .get_bool("build.pipelining")? .map(|t| t.val) - .unwrap_or(false); + .unwrap_or(bcx.host_info.supports_pipelining.unwrap()); Ok(Self { bcx, diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index a0a933918a0..3b402c17007 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -18,7 +18,7 @@ use std::io::Write; use std::path::{Path, PathBuf}; use std::sync::Arc; -use failure::{bail, Error}; +use failure::Error; use lazycell::LazyCell; use log::debug; use same_file::is_same_file; @@ -614,7 +614,6 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult rustdoc.arg("--crate-name").arg(&unit.target.crate_name()); add_path_args(bcx, unit, &mut rustdoc); add_cap_lints(bcx, unit, &mut rustdoc); - add_color(bcx, &mut rustdoc); if unit.kind != Kind::Host { if let Some(ref target) = bcx.build_config.requested_target { @@ -643,7 +642,7 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult } } - add_error_format(cx, &mut rustdoc, false, false)?; + add_error_format_and_color(cx, &mut rustdoc, false)?; if let Some(args) = bcx.extra_args_for(unit) 
{ rustdoc.args(args); @@ -730,39 +729,20 @@ fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit<'_>, cmd: &mut ProcessB } } -fn add_color(bcx: &BuildContext<'_, '_>, cmd: &mut ProcessBuilder) { - let shell = bcx.config.shell(); - let color = if shell.supports_color() { - "always" - } else { - "never" - }; - cmd.args(&["--color", color]); -} - /// Add error-format flags to the command. /// -/// This is rather convoluted right now. The general overview is: -/// - If -Zcache-messages or `build.pipelining` is enabled, Cargo always uses -/// JSON output. This has several benefits, such as being easier to parse, -/// handles changing formats (for replaying cached messages), ensures -/// atomic output (so messages aren't interleaved), etc. -/// - `supports_termcolor` is a temporary flag. rustdoc does not yet support -/// the `--json-rendered` flag, but it is intended to fix that soon. -/// - `short` output is not yet supported for JSON output. We haven't yet -/// decided how this problem will be resolved. Probably either adding -/// "short" to the JSON output, or more ambitiously moving diagnostic -/// rendering to an external library that Cargo can share with rustc. +/// This is somewhat odd right now, but the general overview is that if +/// `-Zcache-messages` or `pipelined` is enabled then Cargo always uses JSON +/// output. This has several benefits, such as being easier to parse, handles +/// changing formats (for replaying cached messages), ensures atomic output (so +/// messages aren't interleaved), etc. /// -/// It is intended in the future that Cargo *always* uses the JSON output, and -/// this function can be simplified. The above issues need to be resolved, the -/// flags need to be stabilized, and we need more testing to ensure there -/// aren't any regressions. -fn add_error_format( +/// It is intended in the future that Cargo *always* uses the JSON output (by +/// turning on cache-messages by default), and this function can be simplified. 
+fn add_error_format_and_color( cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, pipelined: bool, - supports_termcolor: bool, ) -> CargoResult<()> { // If this unit is producing a required rmeta file then we need to know // when the rmeta file is ready so we can signal to the rest of Cargo that @@ -777,26 +757,15 @@ fn add_error_format( // internally understand that we should extract the `rendered` field and // present it if we can. if cx.bcx.build_config.cache_messages() || pipelined { - cmd.arg("--error-format=json").arg("-Zunstable-options"); - if supports_termcolor { - cmd.arg("--json-rendered=termcolor"); + cmd.arg("--error-format=json"); + let mut json = String::from("--json=diagnostic-rendered-ansi"); + if pipelined { + json.push_str(",artifacts"); } if cx.bcx.build_config.message_format == MessageFormat::Short { - // FIXME(rust-lang/rust#60419): right now we have no way of - // turning on JSON messages from the compiler and also asking - // the rendered field to be in the `short` format. 
- bail!( - "currently `--message-format short` is incompatible with {}", - if pipelined { - "pipelined compilation" - } else { - "cached output" - } - ); - } - if pipelined { - cmd.arg("-Zemit-artifact-notifications"); + json.push_str(",diagnostic-short"); } + cmd.arg(json); } else { match cx.bcx.build_config.message_format { MessageFormat::Human => (), @@ -807,6 +776,13 @@ fn add_error_format( cmd.arg("--error-format").arg("short"); } } + + let color = if cx.bcx.config.shell().supports_color() { + "always" + } else { + "never" + }; + cmd.args(&["--color", color]); } Ok(()) } @@ -837,8 +813,7 @@ fn build_base_args<'a, 'cfg>( cmd.arg("--crate-name").arg(&unit.target.crate_name()); add_path_args(bcx, unit, cmd); - add_color(bcx, cmd); - add_error_format(cx, cmd, cx.rmeta_required(unit), true)?; + add_error_format_and_color(cx, cmd, cx.rmeta_required(unit))?; if !test { for crate_type in crate_types.iter() { @@ -1248,11 +1223,11 @@ fn on_stderr_line( } else { // Remove color information from the rendered string. rustc has not // included color in the past, so to avoid breaking anything, strip it - // out when --json-rendered=termcolor is used. This runs + // out when --json=diagnostic-rendered-ansi is used. This runs // unconditionally under the assumption that Cargo will eventually // move to this as the default mode. Perhaps in the future, cargo // could allow the user to enable/disable color (such as with a - // `--json-rendered` or `--color` or `--message-format` flag). + // `--json` or `--color` or `--message-format` flag). #[derive(serde::Deserialize, serde::Serialize)] struct CompilerMessage { rendered: String, @@ -1318,10 +1293,8 @@ fn replay_output_cache( ) -> Work { let target = target.clone(); let extract_rendered_messages = match format { - MessageFormat::Human => true, + MessageFormat::Human | MessageFormat::Short => true, MessageFormat::Json => false, - // FIXME: short not supported. 
- MessageFormat::Short => false, }; let mut options = OutputOptions { extract_rendered_messages, diff --git a/src/cargo/ops/fix.rs b/src/cargo/ops/fix.rs index 1ac31974b72..a43c68687b7 100644 --- a/src/cargo/ops/fix.rs +++ b/src/cargo/ops/fix.rs @@ -621,7 +621,7 @@ impl FixArgs { ret.enabled_edition = Some(s[prefix.len()..].to_string()); continue; } - if s.starts_with("--error-format=") || s.starts_with("--json-rendered=") { + if s.starts_with("--error-format=") || s.starts_with("--json=") { // Cargo may add error-format in some cases, but `cargo // fix` wants to add its own. continue; diff --git a/tests/testsuite/build_script.rs b/tests/testsuite/build_script.rs index 0952033bacc..d26509e2954 100644 --- a/tests/testsuite/build_script.rs +++ b/tests/testsuite/build_script.rs @@ -2155,6 +2155,11 @@ fn flags_go_into_tests() { #[cargo_test] fn diamond_passes_args_only_once() { + // FIXME: when pipelining rides to stable, enable this test on all channels. + if !crate::support::is_nightly() { + return; + } + let p = project() .file( "Cargo.toml", @@ -2229,7 +2234,7 @@ fn diamond_passes_args_only_once() { [COMPILING] a v0.5.0 ([..] [RUNNING] `rustc [..]` [COMPILING] foo v0.5.0 ([..] -[RUNNING] `[..]rlib -L native=test` +[RUNNING] `[..]rmeta -L native=test` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
", ) diff --git a/tests/testsuite/cache_messages.rs b/tests/testsuite/cache_messages.rs index b283a285f4e..2582d191810 100644 --- a/tests/testsuite/cache_messages.rs +++ b/tests/testsuite/cache_messages.rs @@ -54,6 +54,52 @@ fn simple() { assert!(cargo_output2.stdout.is_empty()); } +// same as `simple`, except everything is using the short format +#[cargo_test] +fn simple_short() { + if !is_nightly() { + // --json-rendered is unstable + return; + } + let p = project() + .file( + "src/lib.rs", + " + fn a() {} + fn b() {} + ", + ) + .build(); + + let agnostic_path = Path::new("src").join("lib.rs"); + let agnostic_path_s = agnostic_path.to_str().unwrap(); + + let rustc_output = process("rustc") + .cwd(p.root()) + .args(&["--crate-type=lib", agnostic_path_s, "--error-format=short"]) + .exec_with_output() + .expect("rustc to run"); + + assert!(rustc_output.stdout.is_empty()); + assert!(rustc_output.status.success()); + + let cargo_output1 = p + .cargo("check -Zcache-messages -q --color=never --message-format=short") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo to run"); + assert_eq!(as_str(&rustc_output.stderr), as_str(&cargo_output1.stderr)); + // assert!(cargo_output1.stdout.is_empty()); + let cargo_output2 = p + .cargo("check -Zcache-messages -q --message-format=short") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo to run"); + println!("{}", String::from_utf8_lossy(&cargo_output2.stdout)); + assert_eq!(as_str(&rustc_output.stderr), as_str(&cargo_output2.stderr)); + assert!(cargo_output2.stdout.is_empty()); +} + #[cargo_test] fn color() { if !is_nightly() { @@ -334,15 +380,3 @@ fn very_verbose() { .with_stderr_contains("[..]not_used[..]") .run(); } - -#[cargo_test] -fn short_incompatible() { - let p = project().file("src/lib.rs", "").build(); - p.cargo("check -Zcache-messages --message-format=short") - .masquerade_as_nightly_cargo() - .with_stderr( - "[ERROR] currently `--message-format short` is incompatible with 
cached output", - ) - .with_status(101) - .run(); -} diff --git a/tests/testsuite/dep_info.rs b/tests/testsuite/dep_info.rs index dce5c4025d1..439621e42e9 100644 --- a/tests/testsuite/dep_info.rs +++ b/tests/testsuite/dep_info.rs @@ -511,6 +511,6 @@ fn canonical_path() { assert_deps_contains( &p, "target/debug/.fingerprint/foo-*/dep-lib-foo-*", - &[(1, "src/lib.rs"), (2, "debug/deps/libregdep-*.rlib")], + &[(1, "src/lib.rs"), (2, "debug/deps/libregdep-*.rmeta")], ); } diff --git a/tests/testsuite/profile_overrides.rs b/tests/testsuite/profile_overrides.rs index 8445460b571..17b3c25c18b 100644 --- a/tests/testsuite/profile_overrides.rs +++ b/tests/testsuite/profile_overrides.rs @@ -321,17 +321,17 @@ fn profile_override_hierarchy() { p.cargo("build -v").masquerade_as_nightly_cargo().with_stderr_unordered("\ [COMPILING] m3 [..] [COMPILING] dep [..] -[RUNNING] `rustc --crate-name m3 m3/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=4 [..] -[RUNNING] `rustc --crate-name dep [..]dep/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=3 [..] -[RUNNING] `rustc --crate-name m3 m3/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=1 [..] -[RUNNING] `rustc --crate-name build_script_build m1/build.rs --color never --crate-type bin --emit=[..]link -C codegen-units=4 [..] +[RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=4 [..] +[RUNNING] `rustc --crate-name dep [..]dep/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=3 [..] +[RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=1 [..] +[RUNNING] `rustc --crate-name build_script_build m1/build.rs [..] --crate-type bin --emit=[..]link -C codegen-units=4 [..] [COMPILING] m2 [..] -[RUNNING] `rustc --crate-name build_script_build m2/build.rs --color never --crate-type bin --emit=[..]link -C codegen-units=2 [..] 
+[RUNNING] `rustc --crate-name build_script_build m2/build.rs [..] --crate-type bin --emit=[..]link -C codegen-units=2 [..] [RUNNING] `[..]/m1-[..]/build-script-build` [RUNNING] `[..]/m2-[..]/build-script-build` -[RUNNING] `rustc --crate-name m2 m2/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=2 [..] +[RUNNING] `rustc --crate-name m2 m2/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=2 [..] [COMPILING] m1 [..] -[RUNNING] `rustc --crate-name m1 m1/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=1 [..] +[RUNNING] `rustc --crate-name m1 m1/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=1 [..] [FINISHED] dev [unoptimized + debuginfo] [..] ", ) diff --git a/tests/testsuite/rustc_info_cache.rs b/tests/testsuite/rustc_info_cache.rs index 51dc4a42881..ceed53ee3a8 100644 --- a/tests/testsuite/rustc_info_cache.rs +++ b/tests/testsuite/rustc_info_cache.rs @@ -4,6 +4,11 @@ use std::env; #[cargo_test] fn rustc_info_cache() { + // FIXME: when pipelining rides to stable, enable this test on all channels. + if !crate::support::is_nightly() { + return; + } + let p = project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); diff --git a/tests/testsuite/rustdoc.rs b/tests/testsuite/rustdoc.rs index 6525054444f..195b47c0342 100644 --- a/tests/testsuite/rustdoc.rs +++ b/tests/testsuite/rustdoc.rs @@ -10,6 +10,7 @@ fn rustdoc_simple() { [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", @@ -27,6 +28,7 @@ fn rustdoc_args() { [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] \ --cfg=foo \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
@@ -66,6 +68,7 @@ fn rustdoc_foo_with_bar_dependency() { [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] \ --cfg=foo \ -L dependency=[CWD]/target/debug/deps \ --extern [..]` @@ -104,6 +107,7 @@ fn rustdoc_only_bar_dependency() { [DOCUMENTING] bar v0.0.1 ([..]) [RUNNING] `rustdoc --crate-name bar [..]bar/src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] \ --cfg=foo \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] @@ -125,6 +129,7 @@ fn rustdoc_same_name_documents_lib() { [DOCUMENTING] foo v0.0.1 ([..]) [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] \ --cfg=foo \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] @@ -168,6 +173,7 @@ fn rustdoc_target() { [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ --target x86_64-unknown-linux-gnu \ -o [CWD]/target/x86_64-unknown-linux-gnu/doc \ + [..] \ -L dependency=[CWD]/target/x86_64-unknown-linux-gnu/debug/deps \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", From 47412b0c403b29e4dcdec38518be3d16a9e025f7 Mon Sep 17 00:00:00 2001 From: Serhii Plyhun Date: Fri, 2 Aug 2019 15:16:59 +0200 Subject: [PATCH 21/21] Merge fix --- src/cargo/core/compiler/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index 3b402c17007..795e2d148dc 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -18,7 +18,7 @@ use std::io::Write; use std::path::{Path, PathBuf}; use std::sync::Arc; -use failure::Error; +use failure::{bail, Error}; use lazycell::LazyCell; use log::debug; use same_file::is_same_file;