diff --git a/crates/resolver-tests/src/lib.rs b/crates/resolver-tests/src/lib.rs index dce94689eb3..b410d56df0e 100644 --- a/crates/resolver-tests/src/lib.rs +++ b/crates/resolver-tests/src/lib.rs @@ -12,7 +12,7 @@ use cargo::core::resolver::{self, ResolveOpts}; use cargo::core::source::{GitReference, SourceId}; use cargo::core::Resolve; use cargo::core::{Dependency, PackageId, Registry, Summary}; -use cargo::util::{CargoResult, Config, Graph, IntoUrl}; +use cargo::util::{CargoResult, Config, Graph, IntoUrl, Platform}; use proptest::collection::{btree_map, vec}; use proptest::prelude::*; @@ -170,7 +170,7 @@ pub fn resolve_with_config_raw( let summary = Summary::new( pkg_id("root"), deps, - &BTreeMap::>::new(), + &BTreeMap::, Vec)>::new(), None::, false, ) @@ -571,7 +571,7 @@ pub fn pkg_dep(name: T, dep: Vec) -> Summary { Summary::new( name.to_pkgid(), dep, - &BTreeMap::>::new(), + &BTreeMap::, Vec)>::new(), link, false, ) @@ -599,7 +599,7 @@ pub fn pkg_loc(name: &str, loc: &str) -> Summary { Summary::new( pkg_id_loc(name, loc), Vec::new(), - &BTreeMap::>::new(), + &BTreeMap::, Vec)>::new(), link, false, ) @@ -613,7 +613,7 @@ pub fn remove_dep(sum: &Summary, ind: usize) -> Summary { Summary::new( sum.package_id(), deps, - &BTreeMap::>::new(), + &BTreeMap::, Vec)>::new(), sum.links().map(|a| a.as_str()), sum.namespaced_features(), ) diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs index 18956632152..3e67718fbfe 100644 --- a/src/cargo/core/compiler/build_context/mod.rs +++ b/src/cargo/core/compiler/build_context/mod.rs @@ -10,7 +10,7 @@ use crate::core::profiles::Profiles; use crate::core::{Dependency, Workspace}; use crate::core::{PackageId, PackageSet, Resolve}; use crate::util::errors::CargoResult; -use crate::util::{profile, Cfg, Config, Rustc}; +use crate::util::{profile, Cfg, Config, Platform, Rustc}; mod target_info; pub use self::target_info::{FileFlavor, TargetInfo}; @@ -95,12 +95,10 @@ impl<'a, 'cfg> 
BuildContext<'a, 'cfg> { .is_public_dep(unit.pkg.package_id(), dep.pkg.package_id()) } - /// Whether a dependency should be compiled for the host or target platform, + /// Whether a given platform matches the host or target platform, /// specified by `Kind`. - pub fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool { - // If this dependency is only available for certain platforms, - // make sure we're only enabling it for that platform. - let platform = match dep.platform() { + pub fn platform_activated(&self, platform: Option<&Platform>, kind: Kind) -> bool { + let platform = match platform { Some(p) => p, None => return true, }; @@ -111,7 +109,15 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> { platform.matches(name, info.cfg()) } - /// Gets the user-specified linker for a particular host or target. + /// Whether a dependency should be compiled for the host or target platform, + /// specified by `Kind`. + pub fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool { + // If this dependency is only available for certain platforms, + // make sure we're only enabling it for that platform. + self.platform_activated(dep.platform(), kind) + } + + /// Gets the user-specified linker for a particular host or target. pub fn linker(&self, kind: Kind) -> Option<&Path> { self.target_config(kind).linker.as_ref().map(|s| s.as_ref()) } diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs index ecdb4239c65..81a2d622c63 100644 --- a/src/cargo/core/compiler/build_context/target_info.rs +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -34,6 +34,7 @@ pub struct TargetInfo { pub rustflags: Vec, /// Extra flags to pass to `rustdoc`, see `env_args`. pub rustdocflags: Vec, + pub supports_pipelining: Option, } /// Kind of each file generated by a Unit, part of `FileType`. 
@@ -98,6 +99,18 @@ impl TargetInfo { .args(&rustflags) .env_remove("RUSTC_LOG"); + // NOTE: set this unconditionally to `true` once support for `--json` + // rides to stable. + // + // Also note that we only learn about this functionality for the host + // compiler since the host/target rustc are always the same. + let mut pipelining_test = process.clone(); + pipelining_test.args(&["--error-format=json", "--json=artifacts"]); + let supports_pipelining = match kind { + Kind::Host => Some(rustc.cached_output(&pipelining_test).is_ok()), + Kind::Target => None, + }; + let target_triple = requested_target .as_ref() .map(|s| s.as_str()) @@ -179,6 +192,7 @@ impl TargetInfo { "RUSTDOCFLAGS", )?, cfg, + supports_pipelining, }) } diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs index 9325c8c63e2..24661b9d189 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/context/mod.rs @@ -77,7 +77,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { .config .get_bool("build.pipelining")? 
.map(|t| t.val) - .unwrap_or(false); + .unwrap_or(bcx.host_info.supports_pipelining.unwrap()); Ok(Self { bcx, @@ -205,7 +205,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> { }); } - let feats = self.bcx.resolve.features(unit.pkg.package_id()); + let bcx = self.bcx; + let feats = bcx.resolve.features(unit.pkg.package_id()); if !feats.is_empty() { self.compilation .cfgs @@ -213,7 +214,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> { .or_insert_with(|| { feats .iter() - .map(|feat| format!("feature=\"{}\"", feat)) + .filter(|feat| bcx.platform_activated(feat.1.as_ref(), unit.kind)) + .map(|feat| format!("feature=\"{}\"", feat.0)) .collect() }); } diff --git a/src/cargo/core/compiler/context/unit_dependencies.rs b/src/cargo/core/compiler/context/unit_dependencies.rs index 6e80f31a195..6b68e202cec 100644 --- a/src/cargo/core/compiler/context/unit_dependencies.rs +++ b/src/cargo/core/compiler/context/unit_dependencies.rs @@ -162,6 +162,19 @@ fn compute_deps<'a, 'cfg, 'tmp>( return false; } + // If the dependency is optional, then we're only activating it + // if the corresponding feature was activated + if dep.is_optional() { + // Same for the features by which this dependency is referenced + if let Some(platform) = bcx.resolve.features(id).get(&*dep.name_in_toml()) { + if !bcx.platform_activated(platform.as_ref(), unit.kind) { + return false; + } + } else { + return false; + } + } + // If we've gotten past all that, then this dependency is // actually used! true @@ -228,7 +241,7 @@ fn compute_deps<'a, 'cfg, 'tmp>( t.is_bin() && // Skip binaries with required features that have not been selected. 
t.required_features().unwrap_or(&no_required_features).iter().all(|f| { - bcx.resolve.features(id).contains(f) + bcx.resolve.features(id).contains_key(f) && bcx.platform_activated(bcx.resolve.features(id).get(f).unwrap().as_ref(), unit.kind) }) }) .map(|t| { diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs index 7978d1d480e..f991e961b3e 100644 --- a/src/cargo/core/compiler/custom_build.rs +++ b/src/cargo/core/compiler/custom_build.rs @@ -182,7 +182,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes // Be sure to pass along all enabled features for this package, this is the // last piece of statically known information that we have. for feat in bcx.resolve.features(unit.pkg.package_id()).iter() { - cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1"); + cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat.0)), "1"); } let mut cfg_map = HashMap::new(); diff --git a/src/cargo/core/compiler/layout.rs b/src/cargo/core/compiler/layout.rs index 40140e8ce98..e4ae2bc5174 100644 --- a/src/cargo/core/compiler/layout.rs +++ b/src/cargo/core/compiler/layout.rs @@ -109,6 +109,14 @@ impl Layout { /// /// This function will block if the directory is already locked. pub fn at(config: &Config, root: Filesystem) -> CargoResult { + // If the root directory doesn't already exist go ahead and create it + // here. Use this opportunity to exclude it from backups as well if the + // system supports it since this is a freshly created folder. + if !root.as_path_unlocked().exists() { + root.create_dir()?; + exclude_from_backups(root.as_path_unlocked()); + } + // For now we don't do any more finer-grained locking on the artifact // directory, so just lock the entire thing for the duration of this // compile. 
@@ -127,42 +135,8 @@ impl Layout { }) } - #[cfg(not(target_os = "macos"))] - fn exclude_from_backups(&self, _: &Path) {} - - #[cfg(target_os = "macos")] - /// Marks files or directories as excluded from Time Machine on macOS - /// - /// This is recommended to prevent derived/temporary files from bloating backups. - fn exclude_from_backups(&self, path: &Path) { - use core_foundation::base::TCFType; - use core_foundation::{number, string, url}; - use std::ptr; - - // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey - let is_excluded_key: Result = "NSURLIsExcludedFromBackupKey".parse(); - let path = url::CFURL::from_path(path, false); - if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) { - unsafe { - url::CFURLSetResourcePropertyForKey( - path.as_concrete_TypeRef(), - is_excluded_key.as_concrete_TypeRef(), - number::kCFBooleanTrue as *const _, - ptr::null_mut(), - ); - } - } - // Errors are ignored, since it's an optional feature and failure - // doesn't prevent Cargo from working - } - /// Makes sure all directories stored in the Layout exist on the filesystem. pub fn prepare(&mut self) -> io::Result<()> { - if fs::metadata(&self.root).is_err() { - fs::create_dir_all(&self.root)?; - self.exclude_from_backups(&self.root); - } - mkdir(&self.deps)?; mkdir(&self.native)?; mkdir(&self.incremental)?; @@ -209,3 +183,32 @@ impl Layout { &self.build } } + +#[cfg(not(target_os = "macos"))] +fn exclude_from_backups(_: &Path) {} + +#[cfg(target_os = "macos")] +/// Marks files or directories as excluded from Time Machine on macOS +/// +/// This is recommended to prevent derived/temporary files from bloating backups. 
+fn exclude_from_backups(path: &Path) { + use core_foundation::base::TCFType; + use core_foundation::{number, string, url}; + use std::ptr; + + // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey + let is_excluded_key: Result = "NSURLIsExcludedFromBackupKey".parse(); + let path = url::CFURL::from_path(path, false); + if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) { + unsafe { + url::CFURLSetResourcePropertyForKey( + path.as_concrete_TypeRef(), + is_excluded_key.as_concrete_TypeRef(), + number::kCFBooleanTrue as *const _, + ptr::null_mut(), + ); + } + } + // Errors are ignored, since it's an optional feature and failure + // doesn't prevent Cargo from working +} diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index 4962e774506..795e2d148dc 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -614,7 +614,6 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult rustdoc.arg("--crate-name").arg(&unit.target.crate_name()); add_path_args(bcx, unit, &mut rustdoc); add_cap_lints(bcx, unit, &mut rustdoc); - add_color(bcx, &mut rustdoc); if unit.kind != Kind::Host { if let Some(ref target) = bcx.build_config.requested_target { @@ -631,11 +630,19 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult rustdoc.arg("-o").arg(doc_dir); + // Need to keep a correct order on the features, so get the sorted name first, + // then resolve the specified platform. 
for feat in bcx.resolve.features_sorted(unit.pkg.package_id()) { - rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); + if let Some(platform) = bcx.resolve.features(unit.pkg.package_id()).get(feat) { + if bcx.platform_activated(platform.as_ref(), unit.kind) { + rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); + } + } else { + bail!("Failed to get the target for the feature `{}`", feat); + } } - add_error_format(cx, &mut rustdoc, false, false)?; + add_error_format_and_color(cx, &mut rustdoc, false)?; if let Some(args) = bcx.extra_args_for(unit) { rustdoc.args(args); @@ -722,39 +729,20 @@ fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit<'_>, cmd: &mut ProcessB } } -fn add_color(bcx: &BuildContext<'_, '_>, cmd: &mut ProcessBuilder) { - let shell = bcx.config.shell(); - let color = if shell.supports_color() { - "always" - } else { - "never" - }; - cmd.args(&["--color", color]); -} - /// Add error-format flags to the command. /// -/// This is rather convoluted right now. The general overview is: -/// - If -Zcache-messages or `build.pipelining` is enabled, Cargo always uses -/// JSON output. This has several benefits, such as being easier to parse, -/// handles changing formats (for replaying cached messages), ensures -/// atomic output (so messages aren't interleaved), etc. -/// - `supports_termcolor` is a temporary flag. rustdoc does not yet support -/// the `--json-rendered` flag, but it is intended to fix that soon. -/// - `short` output is not yet supported for JSON output. We haven't yet -/// decided how this problem will be resolved. Probably either adding -/// "short" to the JSON output, or more ambitiously moving diagnostic -/// rendering to an external library that Cargo can share with rustc. +/// This is somewhat odd right now, but the general overview is that if +/// `-Zcache-messages` or `pipelined` is enabled then Cargo always uses JSON +/// output. 
This has several benefits, such as being easier to parse, handles +/// changing formats (for replaying cached messages), ensures atomic output (so +/// messages aren't interleaved), etc. /// -/// It is intended in the future that Cargo *always* uses the JSON output, and -/// this function can be simplified. The above issues need to be resolved, the -/// flags need to be stabilized, and we need more testing to ensure there -/// aren't any regressions. -fn add_error_format( +/// It is intended in the future that Cargo *always* uses the JSON output (by +/// turning on cache-messages by default), and this function can be simplified. +fn add_error_format_and_color( cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, pipelined: bool, - supports_termcolor: bool, ) -> CargoResult<()> { // If this unit is producing a required rmeta file then we need to know // when the rmeta file is ready so we can signal to the rest of Cargo that @@ -769,26 +757,15 @@ fn add_error_format( // internally understand that we should extract the `rendered` field and // present it if we can. if cx.bcx.build_config.cache_messages() || pipelined { - cmd.arg("--error-format=json").arg("-Zunstable-options"); - if supports_termcolor { - cmd.arg("--json-rendered=termcolor"); + cmd.arg("--error-format=json"); + let mut json = String::from("--json=diagnostic-rendered-ansi"); + if pipelined { + json.push_str(",artifacts"); } if cx.bcx.build_config.message_format == MessageFormat::Short { - // FIXME(rust-lang/rust#60419): right now we have no way of - // turning on JSON messages from the compiler and also asking - // the rendered field to be in the `short` format. 
- bail!( - "currently `--message-format short` is incompatible with {}", - if pipelined { - "pipelined compilation" - } else { - "cached output" - } - ); - } - if pipelined { - cmd.arg("-Zemit-artifact-notifications"); + json.push_str(",diagnostic-short"); } + cmd.arg(json); } else { match cx.bcx.build_config.message_format { MessageFormat::Human => (), @@ -799,6 +776,13 @@ fn add_error_format( cmd.arg("--error-format").arg("short"); } } + + let color = if cx.bcx.config.shell().supports_color() { + "always" + } else { + "never" + }; + cmd.args(&["--color", color]); } Ok(()) } @@ -829,8 +813,7 @@ fn build_base_args<'a, 'cfg>( cmd.arg("--crate-name").arg(&unit.target.crate_name()); add_path_args(bcx, unit, cmd); - add_color(bcx, cmd); - add_error_format(cx, cmd, cx.rmeta_required(unit), true)?; + add_error_format_and_color(cx, cmd, cx.rmeta_required(unit))?; if !test { for crate_type in crate_types.iter() { @@ -922,7 +905,13 @@ fn build_base_args<'a, 'cfg>( // rustc-caching strategies like sccache are able to cache more, so sort the // feature list here. for feat in bcx.resolve.features_sorted(unit.pkg.package_id()) { - cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); + if let Some(platform) = bcx.resolve.features(unit.pkg.package_id()).get(feat) { + if bcx.platform_activated(platform.as_ref(), unit.kind) { + cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); + } + } else { + bail!("Failed to get the target for the feature `{}`", feat); + } } match cx.files().metadata(unit) { @@ -1234,11 +1223,11 @@ fn on_stderr_line( } else { // Remove color information from the rendered string. rustc has not // included color in the past, so to avoid breaking anything, strip it - // out when --json-rendered=termcolor is used. This runs + // out when --json=diagnostic-rendered-ansi is used. This runs // unconditionally under the assumption that Cargo will eventually // move to this as the default mode. 
Perhaps in the future, cargo // could allow the user to enable/disable color (such as with a - // `--json-rendered` or `--color` or `--message-format` flag). + // `--json` or `--color` or `--message-format` flag). #[derive(serde::Deserialize, serde::Serialize)] struct CompilerMessage { rendered: String, @@ -1304,10 +1293,8 @@ fn replay_output_cache( ) -> Work { let target = target.clone(); let extract_rendered_messages = match format { - MessageFormat::Human => true, + MessageFormat::Human | MessageFormat::Short => true, MessageFormat::Json => false, - // FIXME: short not supported. - MessageFormat::Short => false, }; let mut options = OutputOptions { extract_rendered_messages, diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs index 1da54590ef4..27859338666 100644 --- a/src/cargo/core/dependency.rs +++ b/src/cargo/core/dependency.rs @@ -1,6 +1,4 @@ -use std::fmt; use std::rc::Rc; -use std::str::FromStr; use log::trace; use semver::ReqParseError; @@ -12,7 +10,7 @@ use url::Url; use crate::core::interning::InternedString; use crate::core::{PackageId, SourceId, Summary}; use crate::util::errors::{CargoResult, CargoResultExt}; -use crate::util::{Cfg, CfgExpr, Config}; +use crate::util::{Config, Platform}; /// Information about a dependency requested by a Cargo manifest. /// Cheap to copy. 
@@ -49,12 +47,6 @@ struct Inner { platform: Option, } -#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)] -pub enum Platform { - Name(String), - Cfg(CfgExpr), -} - #[derive(Serialize)] struct SerializedDependency<'a> { name: &'a str, @@ -457,46 +449,3 @@ impl Dependency { } } } - -impl Platform { - pub fn matches(&self, name: &str, cfg: &[Cfg]) -> bool { - match *self { - Platform::Name(ref p) => p == name, - Platform::Cfg(ref p) => p.matches(cfg), - } - } -} - -impl ser::Serialize for Platform { - fn serialize(&self, s: S) -> Result - where - S: ser::Serializer, - { - self.to_string().serialize(s) - } -} - -impl FromStr for Platform { - type Err = failure::Error; - - fn from_str(s: &str) -> CargoResult { - if s.starts_with("cfg(") && s.ends_with(')') { - let s = &s[4..s.len() - 1]; - let p = s.parse().map(Platform::Cfg).chain_err(|| { - failure::format_err!("failed to parse `{}` as a cfg expression", s) - })?; - Ok(p) - } else { - Ok(Platform::Name(s.to_string())) - } - } -} - -impl fmt::Display for Platform { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - Platform::Name(ref n) => n.fmt(f), - Platform::Cfg(ref e) => write!(f, "cfg({})", e), - } - } -} diff --git a/src/cargo/core/features.rs b/src/cargo/core/features.rs index 9fd8161ce35..b910eb72661 100644 --- a/src/cargo/core/features.rs +++ b/src/cargo/core/features.rs @@ -21,7 +21,7 @@ //! 3. To actually perform the feature gate, you'll want to have code that looks //! like: //! -//! ```rust,ignore +//! ```rust,compile_fail //! use core::{Feature, Features}; //! //! 
let feature = Feature::launch_into_space(); diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index 27b9a0585eb..2aed55806a9 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -13,7 +13,7 @@ use crate::util::CargoResult; use crate::util::Graph; use super::dep_cache::RegistryQueryer; -use super::types::{ConflictMap, FeaturesSet, ResolveOpts}; +use super::types::{ConflictMap, FeaturesMap, ResolveOpts}; pub use super::encode::Metadata; pub use super::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; @@ -27,7 +27,7 @@ pub use super::resolve::Resolve; pub struct Context { pub activations: Activations, /// list the features that are activated for each package - pub resolve_features: im_rc::HashMap, + pub resolve_features: im_rc::HashMap, /// get the package that will be linking to a native library by its links attribute pub links: im_rc::HashMap, /// for each package the list of names it can see, @@ -165,9 +165,10 @@ impl Context { let has_default_feature = summary.features().contains_key("default"); Ok(match self.resolve_features.get(&id) { Some(prev) => { - opts.features.is_subset(prev) + // opts.features.keys().is_subset(prev.keys()) does not work for BTreeMap::Keys + opts.features.keys().filter(|k| !prev.contains_key(k.as_str())).next().is_none() && (!opts.uses_default_features - || prev.contains("default") + || prev.contains_key("default") || !has_default_feature) } None => { diff --git a/src/cargo/core/resolver/dep_cache.rs b/src/cargo/core/resolver/dep_cache.rs index e20a78a66ae..e9c49cf3e75 100644 --- a/src/cargo/core/resolver/dep_cache.rs +++ b/src/cargo/core/resolver/dep_cache.rs @@ -10,7 +10,7 @@ //! 
This module impl that cache in all the gory details use std::cmp::Ordering; -use std::collections::{BTreeSet, HashMap, HashSet}; +use std::collections::{BTreeMap, HashMap, HashSet}; use std::rc::Rc; use log::debug; @@ -18,8 +18,9 @@ use log::debug; use crate::core::interning::InternedString; use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec, Registry, Summary}; use crate::util::errors::CargoResult; +use crate::util::Platform; -use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesSet}; +use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesMap}; use crate::core::resolver::{ActivateResult, ResolveOpts}; pub struct RegistryQueryer<'a> { @@ -35,7 +36,7 @@ pub struct RegistryQueryer<'a> { /// a cache of `Dependency`s that are required for a `Summary` summary_cache: HashMap< (Option, Summary, ResolveOpts), - Rc<(HashSet, Rc>)>, + Rc<(HashMap>, Rc>)>, >, /// all the cases we ended up using a supplied replacement used_replacements: HashMap, @@ -200,7 +201,7 @@ impl<'a> RegistryQueryer<'a> { parent: Option, candidate: &Summary, opts: &ResolveOpts, - ) -> ActivateResult, Rc>)>> { + ) -> ActivateResult>, Rc>)>> { // if we have calculated a result before, then we can just return it, // as it is a "pure" query of its arguments. if let Some(out) = self @@ -248,7 +249,7 @@ pub fn resolve_features<'b>( parent: Option, s: &'b Summary, opts: &'b ResolveOpts, -) -> ActivateResult<(HashSet, Vec<(Dependency, FeaturesSet)>)> { +) -> ActivateResult<(HashMap>, Vec<(Dependency, FeaturesMap)>)> { // First, filter by dev-dependencies. 
let deps = s.dependencies(); let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps); @@ -256,7 +257,7 @@ pub fn resolve_features<'b>( let reqs = build_requirements(s, opts)?; let mut ret = Vec::new(); let mut used_features = HashSet::new(); - let default_dep = (false, BTreeSet::new()); + let default_dep = (false, BTreeMap::new()); // Next, collect all actually enabled dependencies and their features. for dep in deps { @@ -292,12 +293,12 @@ pub fn resolve_features<'b>( }); } let mut base = base.1.clone(); - base.extend(dep.features().iter()); + base.extend(dep.features().iter().map(|f| (f.clone(), dep.platform().map(|p| p.clone())))); for feature in base.iter() { - if feature.contains('/') { + if feature.0.contains('/') { return Err(failure::format_err!( "feature names may not contain slashes: `{}`", - feature + feature.0 ) .into()); } @@ -339,23 +340,24 @@ fn build_requirements<'a, 'b: 'a>( opts: &'b ResolveOpts, ) -> CargoResult> { let mut reqs = Requirements::new(s); + let empty_ftrs = (None, vec![]); if opts.all_features { - for key in s.features().keys() { - reqs.require_feature(*key)?; + for (key,val) in s.features() { + reqs.require_feature(*key, val.0.as_ref())?; } for dep in s.dependencies().iter().filter(|d| d.is_optional()) { - reqs.require_dependency(dep.name_in_toml()); + reqs.require_dependency(dep.name_in_toml(), dep.platform()); } } else { - for &f in opts.features.iter() { - reqs.require_value(&FeatureValue::new(f, s))?; + for (f, _) in opts.features.iter() { + reqs.require_value(&FeatureValue::new(*f, s), s.features().get(f).unwrap_or(&empty_ftrs).0.as_ref())?; } } if opts.uses_default_features { if s.features().contains_key("default") { - reqs.require_feature(InternedString::new("default"))?; + reqs.require_feature(InternedString::new("default"), s.features().get("default").unwrap_or(&empty_ftrs).0.as_ref())?; } } @@ -369,12 +371,12 @@ struct Requirements<'a> { // specified set of features enabled. 
The boolean indicates whether this // package was specifically requested (rather than just requesting features // *within* this package). - deps: HashMap)>, + deps: HashMap>)>, // The used features set is the set of features which this local package had // enabled, which is later used when compiling to instruct the code what // features were enabled. - used: HashSet, - visited: HashSet, + used: HashMap>, + visited: HashMap>, } impl Requirements<'_> { @@ -382,16 +384,16 @@ impl Requirements<'_> { Requirements { summary, deps: HashMap::new(), - used: HashSet::new(), - visited: HashSet::new(), + used: HashMap::new(), + visited: HashMap::new(), } } - fn into_used(self) -> HashSet { + fn into_used(self) -> HashMap> { self.used } - fn require_crate_feature(&mut self, package: InternedString, feat: InternedString) { + fn require_crate_feature(&mut self, package: InternedString, feat: InternedString, platform: Option<&Platform>) { // If `package` is indeed an optional dependency then we activate the // feature named `package`, but otherwise if `package` is a required // dependency then there's no feature associated with it. 
@@ -402,41 +404,42 @@ impl Requirements<'_> { .find(|p| p.name_in_toml() == package) { if dep.is_optional() { - self.used.insert(package); + self.used.insert(package, platform.cloned()); } } self.deps .entry(package) - .or_insert((false, BTreeSet::new())) + .or_insert((false, BTreeMap::new())) .1 - .insert(feat); + .insert(feat, platform.cloned()); } - fn seen(&mut self, feat: InternedString) -> bool { - if self.visited.insert(feat) { - self.used.insert(feat); + fn seen(&mut self, feat: InternedString, platform: Option<&Platform>) -> bool { + if self.visited.insert(feat, platform.cloned()).is_none() { + self.used.insert(feat, platform.cloned()); false } else { true } } - fn require_dependency(&mut self, pkg: InternedString) { - if self.seen(pkg) { + fn require_dependency(&mut self, pkg: InternedString, platform: Option<&Platform>) { + if self.seen(pkg, platform) { return; } - self.deps.entry(pkg).or_insert((false, BTreeSet::new())).0 = true; + self.deps.entry(pkg).or_insert((false, BTreeMap::new())).0 = true; } - fn require_feature(&mut self, feat: InternedString) -> CargoResult<()> { - if feat.is_empty() || self.seen(feat) { + fn require_feature(&mut self, feat: InternedString, platform: Option<&Platform>) -> CargoResult<()> { + if feat.is_empty() || self.seen(feat, platform) { return Ok(()); } - for fv in self + let feature = self .summary .features() .get(feat.as_str()) - .expect("must be a valid feature") + .expect("must be a valid feature"); + for fv in feature.1.as_slice() { match *fv { FeatureValue::Feature(ref dep_feat) if **dep_feat == *feat => failure::bail!( @@ -445,17 +448,17 @@ impl Requirements<'_> { ), _ => {} } - self.require_value(fv)?; + self.require_value(fv, feature.0.as_ref())?; } Ok(()) } - fn require_value(&mut self, fv: &FeatureValue) -> CargoResult<()> { + fn require_value(&mut self, fv: &FeatureValue, platform: Option<&Platform>) -> CargoResult<()> { match fv { - FeatureValue::Feature(feat) => self.require_feature(*feat)?, - 
FeatureValue::Crate(dep) => self.require_dependency(*dep), + FeatureValue::Feature(feat) => self.require_feature(*feat, self.summary.features().get(feat).expect("must be a valid feature").0.as_ref())?, + FeatureValue::Crate(dep) => self.require_dependency(*dep, platform), FeatureValue::CrateFeature(dep, dep_feat) => { - self.require_crate_feature(*dep, *dep_feat) + self.require_crate_feature(*dep, *dep_feat, platform) } }; Ok(()) diff --git a/src/cargo/core/resolver/errors.rs b/src/cargo/core/resolver/errors.rs index e67ec9ede01..6d56cb33ca8 100644 --- a/src/cargo/core/resolver/errors.rs +++ b/src/cargo/core/resolver/errors.rs @@ -203,100 +203,115 @@ pub(super) fn activation_error( }; candidates.sort_unstable_by(|a, b| b.version().cmp(a.version())); - let mut msg = if !candidates.is_empty() { - let versions = { - let mut versions = candidates - .iter() - .take(3) - .map(|cand| cand.version().to_string()) - .collect::>(); + let mut msg = + if !candidates.is_empty() { + let versions = { + let mut versions = candidates + .iter() + .take(3) + .map(|cand| cand.version().to_string()) + .collect::>(); + + if candidates.len() > 3 { + versions.push("...".into()); + } + + versions.join(", ") + }; + + let mut msg = format!( + "failed to select a version for the requirement `{} = \"{}\"`\n \ + candidate versions found which didn't match: {}\n \ + location searched: {}\n", + dep.package_name(), + dep.version_req(), + versions, + registry.describe_source(dep.source_id()), + ); + msg.push_str("required by "); + msg.push_str(&describe_path( + &cx.parents.path_to_bottom(&parent.package_id()), + )); - if candidates.len() > 3 { - versions.push("...".into()); + // If we have a path dependency with a locked version, then this may + // indicate that we updated a sub-package and forgot to run `cargo + // update`. In this case try to print a helpful error! 
+ if dep.source_id().is_path() && dep.version_req().to_string().starts_with('=') { + msg.push_str( + "\nconsider running `cargo update` to update \ + a path dependency's locked version", + ); } - versions.join(", ") - }; - - let mut msg = format!( - "failed to select a version for the requirement `{} = \"{}\"`\n \ - candidate versions found which didn't match: {}\n \ - location searched: {}\n", - dep.package_name(), - dep.version_req(), - versions, - registry.describe_source(dep.source_id()), - ); - msg.push_str("required by "); - msg.push_str(&describe_path( - &cx.parents.path_to_bottom(&parent.package_id()), - )); - - // If we have a path dependency with a locked version, then this may - // indicate that we updated a sub-package and forgot to run `cargo - // update`. In this case try to print a helpful error! - if dep.source_id().is_path() && dep.version_req().to_string().starts_with('=') { - msg.push_str( - "\nconsider running `cargo update` to update \ - a path dependency's locked version", - ); - } - - if registry.is_replaced(dep.source_id()) { - msg.push_str("\nperhaps a crate was updated and forgotten to be re-vendored?"); - } + if registry.is_replaced(dep.source_id()) { + msg.push_str("\nperhaps a crate was updated and forgotten to be re-vendored?"); + } - msg - } else { - // Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing` - // was meant. So we try asking the registry for a `fuzzy` search for suggestions. 
- let mut candidates = Vec::new(); - if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.name()), true) { - return to_resolve_err(e); - }; - candidates.sort_unstable(); - candidates.dedup(); - let mut candidates: Vec<_> = candidates - .iter() - .map(|n| (lev_distance(&*new_dep.package_name(), &*n), n)) - .filter(|&(d, _)| d < 4) - .collect(); - candidates.sort_by_key(|o| o.0); - let mut msg = format!( - "no matching package named `{}` found\n\ - location searched: {}\n", - dep.package_name(), - dep.source_id() - ); - if !candidates.is_empty() { - let mut names = candidates + msg + } else { + // Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing` + // was meant. So we try asking the registry for a `fuzzy` search for suggestions. + let mut candidates = Vec::new(); + if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.clone()), true) { + return to_resolve_err(e); + }; + candidates.sort_unstable_by(|a, b| a.name().cmp(&b.name())); + candidates.dedup_by(|a, b| a.name() == b.name()); + let mut candidates: Vec<_> = candidates .iter() - .take(3) - .map(|c| c.1.as_str()) - .collect::>(); - - if candidates.len() > 3 { - names.push("..."); + .map(|n| (lev_distance(&*new_dep.package_name(), &*n.name()), n)) + .filter(|&(d, _)| d < 4) + .collect(); + candidates.sort_by_key(|o| o.0); + let mut msg = format!( + "no matching package named `{}` found\n\ + location searched: {}\n", + dep.package_name(), + dep.source_id() + ); + if !candidates.is_empty() { + // If dependency package name is equal to the name of the candidate here + // it may be a prerelease package which hasn't been specified correctly + if dep.package_name() == candidates[0].1.name() + && candidates[0].1.package_id().version().is_prerelease() + { + msg.push_str("prerelease package needs to be specified explicitly\n"); + msg.push_str(&format!( + "{name} = {{ version = \"{version}\" }}", + name = candidates[0].1.name(), + version = candidates[0].1.package_id().version() 
+ )); + } else { + let mut names = candidates + .iter() + .take(3) + .map(|c| c.1.name().as_str()) + .collect::>(); + + if candidates.len() > 3 { + names.push("..."); + } + + msg.push_str("perhaps you meant: "); + msg.push_str(&names.iter().enumerate().fold( + String::default(), + |acc, (i, el)| match i { + 0 => acc + el, + i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el, + _ => acc + ", " + el, + }, + )); + } + + msg.push_str("\n"); } - - msg.push_str("perhaps you meant: "); - msg.push_str(&names.iter().enumerate().fold( - String::default(), - |acc, (i, el)| match i { - 0 => acc + el, - i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el, - _ => acc + ", " + el, - }, + msg.push_str("required by "); + msg.push_str(&describe_path( + &cx.parents.path_to_bottom(&parent.package_id()), )); - msg.push_str("\n"); - } - msg.push_str("required by "); - msg.push_str(&describe_path( - &cx.parents.path_to_bottom(&parent.package_id()), - )); - msg - }; + msg + }; if let Some(config) = config { if config.offline() { diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 4aaa7eeafed..3ffe0cfadd1 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -63,7 +63,7 @@ use crate::util::profile; use self::context::Context; use self::dep_cache::RegistryQueryer; use self::types::{ConflictMap, ConflictReason, DepsFrame}; -use self::types::{FeaturesSet, RcVecIter, RemainingDeps, ResolverProgress}; +use self::types::{FeaturesMap, RcVecIter, RemainingDeps, ResolverProgress}; pub use self::encode::Metadata; pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; @@ -146,7 +146,7 @@ pub fn resolve( cx.resolve_replacements(®istry), cx.resolve_features .iter() - .map(|(k, v)| (*k, v.iter().map(|x| x.to_string()).collect())) + .map(|(k, v)| (*k, v.iter().map(|(f,p)| (f.to_string(), p.clone())).collect())) .collect(), cksums, BTreeMap::new(), @@ -691,7 +691,7 @@ fn 
activate( .entry(candidate.package_id()) .or_insert_with(Rc::default), ) - .extend(used_features); + .extend(used_features.clone()); } let frame = DepsFrame { @@ -709,7 +709,7 @@ struct BacktrackFrame { remaining_candidates: RemainingCandidates, parent: Summary, dep: Dependency, - features: FeaturesSet, + features: FeaturesMap, conflicting_activations: ConflictMap, } diff --git a/src/cargo/core/resolver/resolve.rs b/src/cargo/core/resolver/resolve.rs index 9ced48f4d67..7f210a1e812 100644 --- a/src/cargo/core/resolver/resolve.rs +++ b/src/cargo/core/resolver/resolve.rs @@ -8,7 +8,7 @@ use url::Url; use crate::core::dependency::Kind; use crate::core::{Dependency, PackageId, PackageIdSpec, Summary, Target}; use crate::util::errors::CargoResult; -use crate::util::Graph; +use crate::util::{Graph, Platform}; use super::encode::Metadata; @@ -27,12 +27,12 @@ pub struct Resolve { replacements: HashMap, /// Inverted version of `replacements`. reverse_replacements: HashMap, - /// An empty `HashSet` to avoid creating a new `HashSet` for every package + /// An empty `HashMap` to avoid creating a new `HashMap` for every package /// that does not have any features, and to avoid using `Option` to /// simplify the API. - empty_features: HashSet, + empty_features: HashMap>, /// Features enabled for a given package. - features: HashMap>, + features: HashMap>>, /// Checksum for each package. A SHA256 hash of the `.crate` file used to /// validate the correct crate file is used. This is `None` for sources /// that do not use `.crate` files, like path or git dependencies. 
@@ -71,7 +71,7 @@ impl Resolve { pub fn new( graph: Graph>, replacements: HashMap, - features: HashMap>, + features: HashMap>>, checksums: HashMap>, metadata: Metadata, unused_patches: Vec, @@ -101,7 +101,7 @@ impl Resolve { checksums, metadata, unused_patches, - empty_features: HashSet::new(), + empty_features: HashMap::new(), reverse_replacements, public_dependencies, version, @@ -269,7 +269,7 @@ unable to verify that `{0}` is the same as when the lockfile was generated &self.replacements } - pub fn features(&self, pkg: PackageId) -> &HashSet { + pub fn features(&self, pkg: PackageId) -> &HashMap> { self.features.get(&pkg).unwrap_or(&self.empty_features) } @@ -281,7 +281,7 @@ unable to verify that `{0}` is the same as when the lockfile was generated } pub fn features_sorted(&self, pkg: PackageId) -> Vec<&str> { - let mut v = Vec::from_iter(self.features(pkg).iter().map(|s| s.as_ref())); + let mut v = Vec::from_iter(self.features(pkg).iter().map(|(s, _)| s.as_ref())); v.sort_unstable(); v } diff --git a/src/cargo/core/resolver/types.rs b/src/cargo/core/resolver/types.rs index 881869ef16f..02f078460d9 100644 --- a/src/cargo/core/resolver/types.rs +++ b/src/cargo/core/resolver/types.rs @@ -1,5 +1,5 @@ use std::cmp::Ordering; -use std::collections::{BTreeMap, BTreeSet}; +use std::collections::BTreeMap; use std::ops::Range; use std::rc::Rc; use std::time::{Duration, Instant}; @@ -7,7 +7,7 @@ use std::time::{Duration, Instant}; use crate::core::interning::InternedString; use crate::core::{Dependency, PackageId, Summary}; use crate::util::errors::CargoResult; -use crate::util::Config; +use crate::util::{Config, Platform}; use im_rc; @@ -89,7 +89,7 @@ impl ResolverProgress { } } -/// The preferred way to store the set of activated features for a package. +/// The preferred way to store the map of activated feature-platform pairs for a package. 
/// This is sorted so that it impls Hash, and owns its contents, /// needed so it can be part of the key for caching in the `DepsCache`. /// It is also cloned often as part of `Context`, hence the `RC`. @@ -97,7 +97,7 @@ impl ResolverProgress { /// but this can change with improvements to std, im, or llvm. /// Using a consistent type for this allows us to use the highly /// optimized comparison operators like `is_subset` at the interfaces. -pub type FeaturesSet = Rc>; +pub type FeaturesMap = Rc>>; /// Options for how the resolve should work. #[derive(Clone, Debug, Eq, PartialEq, Hash)] @@ -107,7 +107,7 @@ pub struct ResolveOpts { /// This may be set to `false` by things like `cargo install` or `-Z avoid-dev-deps`. pub dev_deps: bool, /// Set of features to enable (`--features=…`). - pub features: FeaturesSet, + pub features: FeaturesMap, /// Indicates *all* features should be enabled (`--all-features`). pub all_features: bool, /// Include the `default` feature (`--no-default-features` sets this false). @@ -119,7 +119,7 @@ impl ResolveOpts { pub fn everything() -> ResolveOpts { ResolveOpts { dev_deps: true, - features: Rc::new(BTreeSet::new()), + features: Rc::new(BTreeMap::new()), all_features: true, uses_default_features: true, } @@ -139,14 +139,14 @@ impl ResolveOpts { } } - fn split_features(features: &[String]) -> BTreeSet { + fn split_features(features: &[String]) -> BTreeMap> { features .iter() .flat_map(|s| s.split_whitespace()) .flat_map(|s| s.split(',')) .filter(|s| !s.is_empty()) - .map(InternedString::new) - .collect::>() + .map(|s| (InternedString::new(s), None)) + .collect::>>() } } @@ -253,7 +253,7 @@ impl RemainingDeps { /// Information about the dependencies for a crate, a tuple of: /// /// (dependency info, candidates, features activated) -pub type DepInfo = (Dependency, Rc>, FeaturesSet); +pub type DepInfo = (Dependency, Rc>, FeaturesMap); /// All possible reasons that a package might fail to activate. 
/// diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs index fb52b9179d7..0dbfd2456b3 100644 --- a/src/cargo/core/summary.rs +++ b/src/cargo/core/summary.rs @@ -11,7 +11,7 @@ use crate::core::interning::InternedString; use crate::core::{Dependency, PackageId, SourceId}; use semver::Version; -use crate::util::CargoResult; +use crate::util::{CargoResult, Platform}; /// Subset of a `Manifest`. Contains only the most important information about /// a package. @@ -36,7 +36,7 @@ impl Summary { pub fn new( pkg_id: PackageId, dependencies: Vec, - features: &BTreeMap>>, + features: &BTreeMap, Vec>)>, links: Option>, namespaced_features: bool, ) -> CargoResult @@ -149,7 +149,7 @@ impl Hash for Summary { // Checks features for errors, bailing out a CargoResult:Err if invalid, // and creates FeatureValues for each feature. fn build_feature_map( - features: &BTreeMap>>, + features: &BTreeMap, Vec>)>, dependencies: &[Dependency], namespaced: bool, ) -> CargoResult @@ -201,7 +201,7 @@ where }; let mut values = vec![]; - for dep in list { + for dep in list.1.as_slice() { let val = FeatureValue::build( InternedString::new(dep.as_ref()), |fs| features.contains_key(fs.as_str()), @@ -239,7 +239,7 @@ where // we don't have to do so here. 
(&Feature(feat), _, true) => { if namespaced && !features.contains_key(&*feat) { - map.insert(feat, vec![FeatureValue::Crate(feat)]); + map.insert(feat, (list.0.clone(), vec![FeatureValue::Crate(feat)])); } } // If features are namespaced and the value is not defined as a feature @@ -337,7 +337,10 @@ where ) } - map.insert(InternedString::new(feature.borrow()), values); + map.insert( + InternedString::new(feature.borrow()), + (list.0.clone(), values), + ); } Ok(map) } @@ -416,4 +419,5 @@ impl Serialize for FeatureValue { } } -pub type FeatureMap = BTreeMap>; +pub type FeatureMap = BTreeMap, Vec)>; +pub type RefFeatureMap<'a> = BTreeMap; diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index c68d4fc1b1d..653b1a6e8e0 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -36,7 +36,7 @@ use crate::core::{Package, Target}; use crate::core::{PackageId, PackageIdSpec, TargetKind, Workspace}; use crate::ops; use crate::util::config::Config; -use crate::util::{closest_msg, profile, CargoResult}; +use crate::util::{closest_msg, profile, CargoResult, Platform}; /// Contains information about how a package should be compiled. #[derive(Debug)] @@ -791,7 +791,7 @@ fn generate_targets<'a>( let features = features_map .entry(pkg) .or_insert_with(|| resolve_all_features(resolve, pkg.package_id())); - rf.iter().filter(|f| !features.contains(*f)).collect() + rf.iter().filter(|f| !features.contains_key(*f)).collect() } None => Vec::new(), }; @@ -821,14 +821,14 @@ fn generate_targets<'a>( fn resolve_all_features( resolve_with_overrides: &Resolve, package_id: PackageId, -) -> HashSet { +) -> HashMap> { let mut features = resolve_with_overrides.features(package_id).clone(); // Include features enabled for use by dependencies so targets can also use them with the // required-features field when deciding whether to be built or skipped. 
for (dep, _) in resolve_with_overrides.deps(package_id) { for feature in resolve_with_overrides.features(dep) { - features.insert(dep.name().to_string() + "/" + feature); + features.insert(dep.name().to_string() + "/" + feature.0, feature.1.clone()); } } diff --git a/src/cargo/ops/fix.rs b/src/cargo/ops/fix.rs index 1ac31974b72..a43c68687b7 100644 --- a/src/cargo/ops/fix.rs +++ b/src/cargo/ops/fix.rs @@ -621,7 +621,7 @@ impl FixArgs { ret.enabled_edition = Some(s[prefix.len()..].to_string()); continue; } - if s.starts_with("--error-format=") || s.starts_with("--json-rendered=") { + if s.starts_with("--error-format=") || s.starts_with("--json=") { // Cargo may add error-format in some cases, but `cargo // fix` wants to add its own. continue; diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs index 5ecdaf6f53c..20d7aa01ea8 100644 --- a/src/cargo/ops/registry.rs +++ b/src/cargo/ops/registry.rs @@ -237,7 +237,7 @@ fn transmit( .map(|(feat, values)| { ( feat.to_string(), - values.iter().map(|fv| fv.to_string(summary)).collect(), + values.1.iter().map(|fv| fv.to_string(&summary)).collect(), ) }) .collect::>>(); diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index af6d458c345..db8e7d99876 100644 --- a/src/cargo/sources/path.rs +++ b/src/cargo/sources/path.rs @@ -219,9 +219,17 @@ impl<'cfg> PathSource<'cfg> { // the untracked files are often part of a build and may become relevant // as part of a future commit. let index_files = index.iter().map(|entry| { - use libgit2_sys::GIT_FILEMODE_COMMIT; - let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32; - (join(root, &entry.path), Some(is_dir)) + use libgit2_sys::{GIT_FILEMODE_COMMIT, GIT_FILEMODE_LINK}; + // ``is_dir`` is an optimization to avoid calling + // ``fs::metadata`` on every file. + let is_dir = if entry.mode == GIT_FILEMODE_LINK as u32 { + // Let the code below figure out if this symbolic link points + // to a directory or not. 
+ None + } else { + Some(entry.mode == GIT_FILEMODE_COMMIT as u32) + }; + (join(root, &entry.path), is_dir) }); let mut opts = git2::StatusOptions::new(); opts.include_untracked(true); diff --git a/src/cargo/sources/registry/index.rs b/src/cargo/sources/registry/index.rs index 67116074cb7..57d24a8f0aa 100644 --- a/src/cargo/sources/registry/index.rs +++ b/src/cargo/sources/registry/index.rs @@ -722,7 +722,11 @@ impl IndexSummary { .into_iter() .map(|dep| dep.into_dep(source_id)) .collect::>>()?; - let mut summary = Summary::new(pkgid, deps, &features, links, false)?; + let ftrs = features + .iter() + .map(|(k, v)| (k.clone(), (None, v.clone()))) + .collect(); + let mut summary = Summary::new(pkgid, deps, &ftrs, links, false)?; summary.set_checksum(cksum.clone()); Ok(IndexSummary { summary, diff --git a/src/cargo/util/cfg.rs b/src/cargo/util/cfg.rs index 4c4ad232df4..4826c3b4bce 100644 --- a/src/cargo/util/cfg.rs +++ b/src/cargo/util/cfg.rs @@ -2,6 +2,9 @@ use std::fmt; use std::iter; use std::str::{self, FromStr}; +use serde::ser; + +use crate::util::errors::CargoResultExt; use crate::util::CargoResult; #[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)] @@ -28,6 +31,12 @@ enum Token<'a> { String(&'a str), } +#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)] +pub enum Platform { + Name(String), + Cfg(CfgExpr), +} + struct Tokenizer<'a> { s: iter::Peekable>, orig: &'a str, @@ -276,3 +285,46 @@ impl<'a> Token<'a> { } } } + +impl Platform { + pub fn matches(&self, name: &str, cfg: &[Cfg]) -> bool { + match *self { + Platform::Name(ref p) => p == name, + Platform::Cfg(ref p) => p.matches(cfg), + } + } +} + +impl ser::Serialize for Platform { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + self.to_string().serialize(s) + } +} + +impl FromStr for Platform { + type Err = failure::Error; + + fn from_str(s: &str) -> CargoResult { + if s.starts_with("cfg(") && s.ends_with(')') { + let s = &s[4..s.len() - 1]; + let p = 
s.parse().map(Platform::Cfg).chain_err(|| { + failure::format_err!("failed to parse `{}` as a cfg expression", s) + })?; + Ok(p) + } else { + Ok(Platform::Name(s.to_string())) + } + } +} + +impl fmt::Display for Platform { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + Platform::Name(ref n) => n.fmt(f), + Platform::Cfg(ref e) => write!(f, "cfg({})", e), + } + } +} diff --git a/src/cargo/util/mod.rs b/src/cargo/util/mod.rs index 50fbd8c2b1b..424b525f4cc 100644 --- a/src/cargo/util/mod.rs +++ b/src/cargo/util/mod.rs @@ -1,6 +1,6 @@ use std::time::Duration; -pub use self::cfg::{Cfg, CfgExpr}; +pub use self::cfg::{Cfg, CfgExpr, Platform}; pub use self::config::{homedir, Config, ConfigValue}; pub use self::dependency_queue::DependencyQueue; pub use self::diagnostic_server::RustfixDiagnosticServer; diff --git a/src/cargo/util/network.rs b/src/cargo/util/network.rs index 2873dea253b..c0d5b2a5ae9 100644 --- a/src/cargo/util/network.rs +++ b/src/cargo/util/network.rs @@ -73,9 +73,12 @@ fn maybe_spurious(err: &Error) -> bool { /// /// # Examples /// -/// ```ignore -/// use util::network; -/// cargo_result = network::with_retry(&config, || something.download()); +/// ``` +/// # use crate::cargo::util::{CargoResult, Config}; +/// # let download_something = || return Ok(()); +/// # let config = Config::default().unwrap(); +/// use cargo::util::network; +/// let cargo_result = network::with_retry(&config, || download_something()); /// ``` pub fn with_retry(config: &Config, mut callback: F) -> CargoResult where diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs index e2232827885..3633ff25b13 100644 --- a/src/cargo/util/toml/mod.rs +++ b/src/cargo/util/toml/mod.rs @@ -13,7 +13,7 @@ use serde::ser; use serde::{Deserialize, Serialize}; use url::Url; -use crate::core::dependency::{Kind, Platform}; +use crate::core::dependency::Kind; use crate::core::manifest::{LibKind, ManifestMetadata, TargetSourcePath, Warnings}; use 
crate::core::profiles::Profiles; use crate::core::{Dependency, Manifest, PackageId, Summary, Target}; @@ -21,7 +21,7 @@ use crate::core::{Edition, EitherManifest, Feature, Features, VirtualManifest}; use crate::core::{GitReference, PackageIdSpec, SourceId, WorkspaceConfig, WorkspaceRootConfig}; use crate::sources::{CRATES_IO_INDEX, CRATES_IO_REGISTRY}; use crate::util::errors::{CargoResult, CargoResultExt, ManifestError}; -use crate::util::{self, paths, validate_package_name, Config, IntoUrl}; +use crate::util::{self, paths, validate_package_name, Config, IntoUrl, Platform}; mod targets; use self::targets::targets; @@ -754,6 +754,7 @@ impl TomlManifest { Ok(( k.clone(), TomlPlatform { + features: v.features.clone(), dependencies: map_deps(config, v.dependencies.as_ref())?, dev_dependencies: map_deps( config, @@ -892,6 +893,7 @@ impl TomlManifest { } let mut deps = Vec::new(); + let mut ftrs = BTreeMap::new(); let replace; let patch; @@ -924,6 +926,21 @@ impl TomlManifest { Ok(()) } + fn process_features( + ftrs: &mut BTreeMap, Vec)>, + new_ftrs: Option<&BTreeMap>>, + platform: Option<&Platform>, + ) -> CargoResult<()> { + let features = match new_ftrs { + Some(features) => features, + None => return Ok(()), + }; + for (n, v) in features.iter() { + ftrs.insert(n.clone(), (platform.cloned(), v.clone())); + } + + Ok(()) + } // Collect the dependencies. 
process_dependencies(&mut cx, me.dependencies.as_ref(), None)?; @@ -937,6 +954,7 @@ impl TomlManifest { .as_ref() .or_else(|| me.build_dependencies2.as_ref()); process_dependencies(&mut cx, build_deps, Some(Kind::Build))?; + process_features(&mut ftrs, me.features.as_ref(), None)?; for (name, platform) in me.target.iter().flat_map(|t| t) { cx.platform = Some(name.parse()?); @@ -951,6 +969,7 @@ impl TomlManifest { .as_ref() .or_else(|| platform.dev_dependencies2.as_ref()); process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?; + process_features(&mut ftrs, platform.features.as_ref(), cx.platform.as_ref())?; } replace = me.replace(&mut cx)?; @@ -982,14 +1001,7 @@ impl TomlManifest { let summary = Summary::new( pkgid, deps, - &me.features - .as_ref() - .map(|x| { - x.iter() - .map(|(k, v)| (k.as_str(), v.iter().collect())) - .collect() - }) - .unwrap_or_else(BTreeMap::new), + &ftrs, project.links.as_ref().map(|x| x.as_str()), project.namespaced_features.unwrap_or(false), )?; @@ -1543,6 +1555,7 @@ impl ser::Serialize for PathValue { /// Corresponds to a `target` entry, but `TomlTarget` is already used. 
#[derive(Serialize, Deserialize, Debug)] struct TomlPlatform { + features: Option>>, dependencies: Option>, #[serde(rename = "build-dependencies")] build_dependencies: Option>, diff --git a/tests/testsuite/bench.rs b/tests/testsuite/bench.rs index 42f4415279d..35488793671 100644 --- a/tests/testsuite/bench.rs +++ b/tests/testsuite/bench.rs @@ -58,7 +58,7 @@ fn bench_bench_implicit() { .file( "src/main.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { } @@ -364,7 +364,7 @@ fn bench_with_lib_dep() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; /// @@ -432,7 +432,7 @@ fn bench_with_deep_lib_dep() { .file( "src/lib.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate foo; #[cfg(test)] @@ -448,7 +448,7 @@ fn bench_with_deep_lib_dep() { .file( "src/lib.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; @@ -495,7 +495,7 @@ fn external_bench_explicit() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; pub fn get_hello() -> &'static str { "Hello" } @@ -541,7 +541,7 @@ fn external_bench_implicit() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; @@ -760,7 +760,7 @@ fn lib_bin_same_name() { .file( "src/lib.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; #[bench] fn lib_bench(_b: &mut test::Bencher) {} @@ -769,7 +769,7 @@ fn lib_bin_same_name() { .file( "src/main.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[allow(unused_extern_crates)] extern crate foo; #[cfg(test)] @@ -804,7 +804,7 @@ fn lib_with_standard_name() { .file( "src/lib.rs", " - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; @@ -919,7 +919,7 @@ fn 
bench_dylib() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] extern crate bar as the_bar; #[cfg(test)] extern crate test; @@ -1061,7 +1061,7 @@ fn bench_with_examples() { .file( "src/lib.rs", r#" - #![cfg_attr(test, feature(test))] + #![feature(test)] #[cfg(test)] extern crate test; #[cfg(test)] diff --git a/tests/testsuite/build.rs b/tests/testsuite/build.rs index 59ed086708d..7c6323425aa 100644 --- a/tests/testsuite/build.rs +++ b/tests/testsuite/build.rs @@ -4,11 +4,10 @@ use std::io::prelude::*; use crate::support::paths::{root, CargoPathExt}; use crate::support::registry::Package; -use crate::support::ProjectBuilder; use crate::support::{ - basic_bin_manifest, basic_lib_manifest, basic_manifest, rustc_host, sleep_ms, + basic_bin_manifest, basic_lib_manifest, basic_manifest, main_file, project, rustc_host, + sleep_ms, symlink_supported, Execs, ProjectBuilder, }; -use crate::support::{main_file, project, Execs}; use cargo::util::paths::dylib_path_envvar; #[cargo_test] @@ -1495,9 +1494,12 @@ package `test v0.0.0 ([CWD])`", } #[cargo_test] +/// Make sure broken symlinks don't break the build +/// +/// This test requires you to be able to make symlinks. +/// For windows, this may require you to enable developer mode. fn ignore_broken_symlinks() { - // windows and symlinks don't currently agree that well - if cfg!(windows) { + if !symlink_supported() { return; } diff --git a/tests/testsuite/build_script.rs b/tests/testsuite/build_script.rs index 0952033bacc..d26509e2954 100644 --- a/tests/testsuite/build_script.rs +++ b/tests/testsuite/build_script.rs @@ -2155,6 +2155,11 @@ fn flags_go_into_tests() { #[cargo_test] fn diamond_passes_args_only_once() { + // FIXME: when pipelining rides to stable, enable this test on all channels. + if !crate::support::is_nightly() { + return; + } + let p = project() .file( "Cargo.toml", @@ -2229,7 +2234,7 @@ fn diamond_passes_args_only_once() { [COMPILING] a v0.5.0 ([..] 
[RUNNING] `rustc [..]` [COMPILING] foo v0.5.0 ([..] -[RUNNING] `[..]rlib -L native=test` +[RUNNING] `[..]rmeta -L native=test` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", ) diff --git a/tests/testsuite/cache_messages.rs b/tests/testsuite/cache_messages.rs index b283a285f4e..2582d191810 100644 --- a/tests/testsuite/cache_messages.rs +++ b/tests/testsuite/cache_messages.rs @@ -54,6 +54,52 @@ fn simple() { assert!(cargo_output2.stdout.is_empty()); } +// same as `simple`, except everything is using the short format +#[cargo_test] +fn simple_short() { + if !is_nightly() { + // --json-rendered is unstable + return; + } + let p = project() + .file( + "src/lib.rs", + " + fn a() {} + fn b() {} + ", + ) + .build(); + + let agnostic_path = Path::new("src").join("lib.rs"); + let agnostic_path_s = agnostic_path.to_str().unwrap(); + + let rustc_output = process("rustc") + .cwd(p.root()) + .args(&["--crate-type=lib", agnostic_path_s, "--error-format=short"]) + .exec_with_output() + .expect("rustc to run"); + + assert!(rustc_output.stdout.is_empty()); + assert!(rustc_output.status.success()); + + let cargo_output1 = p + .cargo("check -Zcache-messages -q --color=never --message-format=short") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo to run"); + assert_eq!(as_str(&rustc_output.stderr), as_str(&cargo_output1.stderr)); + // assert!(cargo_output1.stdout.is_empty()); + let cargo_output2 = p + .cargo("check -Zcache-messages -q --message-format=short") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo to run"); + println!("{}", String::from_utf8_lossy(&cargo_output2.stdout)); + assert_eq!(as_str(&rustc_output.stderr), as_str(&cargo_output2.stderr)); + assert!(cargo_output2.stdout.is_empty()); +} + #[cargo_test] fn color() { if !is_nightly() { @@ -334,15 +380,3 @@ fn very_verbose() { .with_stderr_contains("[..]not_used[..]") .run(); } - -#[cargo_test] -fn short_incompatible() { - let p = project().file("src/lib.rs", 
"").build(); - p.cargo("check -Zcache-messages --message-format=short") - .masquerade_as_nightly_cargo() - .with_stderr( - "[ERROR] currently `--message-format short` is incompatible with cached output", - ) - .with_status(101) - .run(); -} diff --git a/tests/testsuite/cfg_features.rs b/tests/testsuite/cfg_features.rs new file mode 100644 index 00000000000..e54d0749b7b --- /dev/null +++ b/tests/testsuite/cfg_features.rs @@ -0,0 +1,363 @@ +use crate::support::project; + +#[cargo_test] +fn syntax() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.features] + b = [] + [target.'cfg(windows)'.features] + b = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn bb() {} + "#, + ) + .build(); + p.cargo("build") + .with_stderr( + "\ +[COMPILING] a v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn include_by_param() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.features] + b = [] + [target.'cfg(windows)'.features] + c = [] + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "b")] + pub const BB: usize = 0; + #[cfg(feature = "c")] + pub const BB: usize = 1; + + pub fn bb() -> Result<(), ()> { if BB > 0 { Ok(()) } else { Err(()) } } + "#, + ) + .build(); + p.cargo(format!("build --features {}", if cfg!(unix) { "b" } else { "c" }).as_str()) + .with_stderr( + "\ +[COMPILING] a v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn dont_include_by_platform() { + let other_family = if cfg!(unix) { "windows" } else { "unix" }; + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg({})'.features] + b = [] + "#, + other_family + ), + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "b")] + pub const BB: usize = 0; + + pub fn bb() { let _ = BB; } + "#, + ) + .build(); + p.cargo("build --features b -vv") + .with_status(101) + .with_stderr_contains( + "\ + error[E0425]: cannot find value `BB` in this scope", + ) + .run(); +} + +#[cargo_test] +fn dont_include_by_param() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.features] + b = [] + [target.'cfg(windows)'.features] + c = [] + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "b")] + pub const BB: usize = 0; + #[cfg(feature = "c")] + pub const BB: usize = 1; + + pub fn bb() -> Result<(), ()> { if BB > 0 { Ok(()) } else { Err(()) } } + "#, + ) + .build(); + p.cargo("build -v") + .with_status(101) + .with_stderr_contains( + "\ + error[E0425]: cannot find value `BB` in this scope", + ) + .run(); +} + +#[cargo_test] +fn dont_include_default() { + let other_family = if cfg!(unix) { "windows" } else { "unix" }; + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg({})'.features] + b = [] + + [features] + default = ["b"] + "#, + other_family + ), + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "b")] + pub const BB: usize = 0; + + pub fn bb() { let _ = BB; } + "#, + ) + .build(); + p.cargo("build -v") + .with_status(101) + .with_stderr_contains( + "\ + error[E0425]: cannot find value `BB` in this scope", + ) + .run(); +} + +#[cargo_test] +fn transitive() { + #[cfg(target_os = "macos")] + let config = "target_os = \"macos\""; + #[cfg(target_os = 
"windows")] + let config = "target_os = \"windows\""; + #[cfg(all(not(target_os = "macos"), not(target_os = "windows")))] + let config = "unix"; + + let p = project() + .no_manifest() + // root depends on a and c="1.1.0" + .file( + "root/Cargo.toml", + r#" + [package] + name = "root" + version = "0.0.1" + authors = [] + + [dependencies] + a = { version = "*", path = "../a" } + c = { version = "1.1.0", path = "../c1" } + "#, + ) + .file( + "root/src/main.rs", + r#" + fn main() { + println!("Hello, world!"); + } + "#, + ) + // a depends on b and on OSX depends on b's flag maybe + .file( + "a/Cargo.toml", + &format!( + r#" + [package] + name = "a" + version = "0.1.0" + + [lib] + name = "a" + + [target.'cfg(not({}))'.dependencies] + b = {{ version = "*", path = "../b" }} + + [target.'cfg({})'.dependencies] + b = {{ version = "*", path = "../b", features = ["maybe"] }} + "#, + config, + config, + ) + ) + .file( + "a/src/lib.rs", + r#" + "#, + ) + // b depends on c="=1.0.0" if maybe is active. + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.1.0" + + [dependencies] + c = { version = "1.0.0", path = "../c0", optional = true } + + [features] + maybe = ["c"] + "#, + ) + .file( + "b/src/lib.rs", + r#" + #[cfg(feature = "maybe")] + pub fn maybe() { + c::cee(); + } + "#, + ) + // c 1.0.0 + .file( + "c0/Cargo.toml", + r#" + [package] + name = "c" + version = "1.0.0" + + [lib] + name = "c" + + [dependencies] + "#, + ) + .file( + "c0/src/lib.rs", + r#" + pub fn cee() {} + "#, + ) + // c 1.1.0 + .file( + "c1/Cargo.toml", + r#" + [package] + name = "c" + version = "1.1.0" + + [lib] + name = "c" + + [dependencies] + "#, + ) + .file( + "c1/src/lib.rs", + r#" + "#, + ) + .build(); + + p.cargo("build").cwd("root") + .with_stderr( + "\ +[COMPILING] c v1.0.0 ([..]) +[COMPILING] c v1.1.0 ([..]) +[COMPILING] b v0.1.0 ([..]) +[COMPILING] a v0.1.0 ([..]) +[COMPILING] root v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +// https://github.com/rust-lang/cargo/issues/5313 +#[cargo_test] +#[cfg(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu"))] +fn cfg_looks_at_rustflags_for_target() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(with_b)'.features] + b = [] + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(with_b)] + pub const BB: usize = 0; + + fn main() { let _ = BB; } + "#, + ) + .build(); + + p.cargo("build --target x86_64-unknown-linux-gnu") + .env("RUSTFLAGS", "--cfg with_b") + .run(); +} diff --git a/tests/testsuite/dep_info.rs b/tests/testsuite/dep_info.rs index dce5c4025d1..439621e42e9 100644 --- a/tests/testsuite/dep_info.rs +++ b/tests/testsuite/dep_info.rs @@ -511,6 +511,6 @@ fn canonical_path() { assert_deps_contains( &p, "target/debug/.fingerprint/foo-*/dep-lib-foo-*", - &[(1, "src/lib.rs"), (2, "debug/deps/libregdep-*.rlib")], + &[(1, "src/lib.rs"), (2, "debug/deps/libregdep-*.rmeta")], ); } diff --git a/tests/testsuite/main.rs b/tests/testsuite/main.rs index 618c92ceb23..a8a2d7cc309 100644 --- a/tests/testsuite/main.rs +++ b/tests/testsuite/main.rs @@ -27,6 +27,7 @@ mod cargo_alias_config; mod cargo_command; mod cargo_features; mod cfg; +mod cfg_features; mod check; mod clean; mod clippy; diff --git a/tests/testsuite/metadata.rs b/tests/testsuite/metadata.rs index 10f5636c67d..33f67344ac4 100644 --- a/tests/testsuite/metadata.rs +++ b/tests/testsuite/metadata.rs @@ -217,10 +217,19 @@ optional_feat = [] ], "features": { "default": [ + null, + [ "default_feat" + ] ], - "default_feat": [], - "optional_feat": [] + "default_feat": [ + null, + [] + ], + "optional_feat": [ + null, + [] + ] }, "manifest_path": "[..]Cargo.toml", "metadata": null diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs index 92be2b2eee2..6a070ea0acf 100644 --- a/tests/testsuite/package.rs +++ b/tests/testsuite/package.rs @@ -3,11 +3,11 @@ use std::fs::File; 
use std::io::prelude::*; use std::path::Path; -use crate::support::cargo_process; use crate::support::paths::CargoPathExt; use crate::support::registry::Package; use crate::support::{ - basic_manifest, git, path2url, paths, project, publish::validate_crate_contents, registry, + basic_manifest, cargo_process, git, path2url, paths, project, publish::validate_crate_contents, + registry, symlink_supported, }; use git2; @@ -504,6 +504,56 @@ fn package_git_submodule() { .run(); } +#[cargo_test] +/// Tests if a symlink to a git submodule is properly handled. +/// +/// This test requires you to be able to make symlinks. +/// For windows, this may require you to enable developer mode. +fn package_symlink_to_submodule() { + #[cfg(unix)] + use std::os::unix::fs::symlink; + #[cfg(windows)] + use std::os::windows::fs::symlink_dir as symlink; + + if !symlink_supported() { + return; + } + + let project = git::new("foo", |project| { + project.file("src/lib.rs", "pub fn foo() {}") + }) + .unwrap(); + + let library = git::new("submodule", |library| { + library.no_manifest().file("Makefile", "all:") + }) + .unwrap(); + + let repository = git2::Repository::open(&project.root()).unwrap(); + let url = path2url(library.root()).to_string(); + git::add_submodule(&repository, &url, Path::new("submodule")); + t!(symlink( + &project.root().join("submodule"), + &project.root().join("submodule-link") + )); + git::add(&repository); + git::commit(&repository); + + let repository = git2::Repository::open(&project.root().join("submodule")).unwrap(); + repository + .reset( + &repository.revparse_single("HEAD").unwrap(), + git2::ResetType::Hard, + None, + ) + .unwrap(); + + project + .cargo("package --no-verify -v") + .with_stderr_contains("[ARCHIVING] submodule/Makefile") + .run(); +} + #[cargo_test] fn no_duplicates_from_modified_tracked_files() { let root = paths::root().join("all"); @@ -660,9 +710,19 @@ See [..] 
}

#[cargo_test]
-#[cfg(unix)]
+/// Tests if a broken symlink is properly handled when packaging.
+///
+/// This test requires you to be able to make symlinks.
+/// For windows, this may require you to enable developer mode.
 fn broken_symlink() {
-    use std::os::unix::fs;
+    #[cfg(unix)]
+    use std::os::unix::fs::symlink;
+    #[cfg(windows)]
+    use std::os::windows::fs::symlink_dir as symlink;
+
+    if !symlink_supported() {
+        return;
+    }

     let p = project()
         .file(
@@ -681,7 +741,7 @@ fn broken_symlink() {
         )
         .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
         .build();
-    t!(fs::symlink("nowhere", &p.root().join("src/foo.rs")));
+    t!(symlink("nowhere", &p.root().join("src/foo.rs")));

     p.cargo("package -v")
         .with_status(101)
@@ -699,6 +759,26 @@ Caused by:
         .run();
 }

+#[cargo_test]
+/// Tests if a symlink to a directory is properly included.
+///
+/// This test requires you to be able to make symlinks.
+/// For windows, this may require you to enable developer mode.
+fn package_symlink_to_dir() {
+    if !symlink_supported() {
+        return;
+    }
+
+    project()
+        .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .file("bla/Makefile", "all:")
+        .symlink_dir("bla", "foo")
+        .build()
+        .cargo("package -v")
+        .with_stderr_contains("[ARCHIVING] foo/Makefile")
+        .run();
+}
+
 #[cargo_test]
 fn do_not_package_if_repository_is_dirty() {
     let p = project().build();
diff --git a/tests/testsuite/profile_overrides.rs b/tests/testsuite/profile_overrides.rs
index 8445460b571..17b3c25c18b 100644
--- a/tests/testsuite/profile_overrides.rs
+++ b/tests/testsuite/profile_overrides.rs
@@ -321,17 +321,17 @@ fn profile_override_hierarchy() {
     p.cargo("build -v").masquerade_as_nightly_cargo().with_stderr_unordered("\
[COMPILING] m3 [..]
[COMPILING] dep [..]
-[RUNNING] `rustc --crate-name m3 m3/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=4 [..]
-[RUNNING] `rustc --crate-name dep [..]dep/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=3 [..]
-[RUNNING] `rustc --crate-name m3 m3/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=1 [..] -[RUNNING] `rustc --crate-name build_script_build m1/build.rs --color never --crate-type bin --emit=[..]link -C codegen-units=4 [..] +[RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=4 [..] +[RUNNING] `rustc --crate-name dep [..]dep/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=3 [..] +[RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=1 [..] +[RUNNING] `rustc --crate-name build_script_build m1/build.rs [..] --crate-type bin --emit=[..]link -C codegen-units=4 [..] [COMPILING] m2 [..] -[RUNNING] `rustc --crate-name build_script_build m2/build.rs --color never --crate-type bin --emit=[..]link -C codegen-units=2 [..] +[RUNNING] `rustc --crate-name build_script_build m2/build.rs [..] --crate-type bin --emit=[..]link -C codegen-units=2 [..] [RUNNING] `[..]/m1-[..]/build-script-build` [RUNNING] `[..]/m2-[..]/build-script-build` -[RUNNING] `rustc --crate-name m2 m2/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=2 [..] +[RUNNING] `rustc --crate-name m2 m2/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=2 [..] [COMPILING] m1 [..] -[RUNNING] `rustc --crate-name m1 m1/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=1 [..] +[RUNNING] `rustc --crate-name m1 m1/src/lib.rs [..] --crate-type lib --emit=[..]link -C codegen-units=1 [..] [FINISHED] dev [unoptimized + debuginfo] [..] 
", ) diff --git a/tests/testsuite/registry.rs b/tests/testsuite/registry.rs index 9fb92d80f65..2db759ac1e2 100644 --- a/tests/testsuite/registry.rs +++ b/tests/testsuite/registry.rs @@ -1395,6 +1395,55 @@ fn use_semver() { p.cargo("build").run(); } +#[cargo_test] +fn use_semver_package_incorrectly() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.1-alpha.0" + authors = [] + "#, + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.1.0" + authors = [] + + [dependencies] + a = { version = "^0.1", path = "../a" } + "#, + ) + .file("a/src/main.rs", "fn main() {}") + .file("b/src/main.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: no matching package named `a` found +location searched: [..] +prerelease package needs to be specified explicitly +a = { version = \"0.1.1-alpha.0\" } +required by package `b v0.1.0 ([..])` +", + ) + .run(); +} + #[cargo_test] fn only_download_relevant() { let p = project() diff --git a/tests/testsuite/rustc_info_cache.rs b/tests/testsuite/rustc_info_cache.rs index 51dc4a42881..ceed53ee3a8 100644 --- a/tests/testsuite/rustc_info_cache.rs +++ b/tests/testsuite/rustc_info_cache.rs @@ -4,6 +4,11 @@ use std::env; #[cargo_test] fn rustc_info_cache() { + // FIXME: when pipelining rides to stable, enable this test on all channels. + if !crate::support::is_nightly() { + return; + } + let p = project() .file("src/main.rs", r#"fn main() { println!("hello"); }"#) .build(); diff --git a/tests/testsuite/rustdoc.rs b/tests/testsuite/rustdoc.rs index 6525054444f..195b47c0342 100644 --- a/tests/testsuite/rustdoc.rs +++ b/tests/testsuite/rustdoc.rs @@ -10,6 +10,7 @@ fn rustdoc_simple() { [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] 
\ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] ", @@ -27,6 +28,7 @@ fn rustdoc_args() { [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] \ --cfg=foo \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] @@ -66,6 +68,7 @@ fn rustdoc_foo_with_bar_dependency() { [DOCUMENTING] foo v0.0.1 ([CWD]) [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] \ --cfg=foo \ -L dependency=[CWD]/target/debug/deps \ --extern [..]` @@ -104,6 +107,7 @@ fn rustdoc_only_bar_dependency() { [DOCUMENTING] bar v0.0.1 ([..]) [RUNNING] `rustdoc --crate-name bar [..]bar/src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] \ --cfg=foo \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] @@ -125,6 +129,7 @@ fn rustdoc_same_name_documents_lib() { [DOCUMENTING] foo v0.0.1 ([..]) [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ -o [CWD]/target/doc \ + [..] \ --cfg=foo \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..] @@ -168,6 +173,7 @@ fn rustdoc_target() { [RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ --target x86_64-unknown-linux-gnu \ -o [CWD]/target/x86_64-unknown-linux-gnu/doc \ + [..] \ -L dependency=[CWD]/target/x86_64-unknown-linux-gnu/debug/deps \ -L dependency=[CWD]/target/debug/deps` [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", diff --git a/tests/testsuite/small_fd_limits.rs b/tests/testsuite/small_fd_limits.rs index 27558a8657f..f0ca6988e32 100644 --- a/tests/testsuite/small_fd_limits.rs +++ b/tests/testsuite/small_fd_limits.rs @@ -98,9 +98,6 @@ fn use_git_gc() { } #[cargo_test] -// it looks like this test passes on some windows machines but not others, -// notably not on AppVeyor's machines. Sounds like another but for another day. 
-#[cfg_attr(windows, ignore)] fn avoid_using_git() { let path = env::var_os("PATH").unwrap_or_default(); let mut paths = env::split_paths(&path).collect::>(); diff --git a/tests/testsuite/support/mod.rs b/tests/testsuite/support/mod.rs index 01abfdba550..f1da02e255c 100644 --- a/tests/testsuite/support/mod.rs +++ b/tests/testsuite/support/mod.rs @@ -178,11 +178,24 @@ impl FileBuilder { struct SymlinkBuilder { dst: PathBuf, src: PathBuf, + src_is_dir: bool, } impl SymlinkBuilder { pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { - SymlinkBuilder { dst, src } + SymlinkBuilder { + dst, + src, + src_is_dir: false, + } + } + + pub fn new_dir(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { + SymlinkBuilder { + dst, + src, + src_is_dir: true, + } } #[cfg(unix)] @@ -194,7 +207,11 @@ impl SymlinkBuilder { #[cfg(windows)] fn mk(&self) { self.dirname().mkdir_p(); - t!(os::windows::fs::symlink_file(&self.dst, &self.src)); + if self.src_is_dir { + t!(os::windows::fs::symlink_dir(&self.dst, &self.src)); + } else { + t!(os::windows::fs::symlink_file(&self.dst, &self.src)); + } } fn dirname(&self) -> &Path { @@ -252,7 +269,7 @@ impl ProjectBuilder { .push(FileBuilder::new(self.root.root().join(path), body)); } - /// Adds a symlink to the project. + /// Adds a symlink to a file to the project. 
pub fn symlink>(mut self, dst: T, src: T) -> Self {
         self.symlinks.push(SymlinkBuilder::new(
             self.root.root().join(dst),
@@ -261,6 +278,15 @@ impl ProjectBuilder {
         self
     }

+    /// Adds a symlink to a directory to the project.
+    pub fn symlink_dir>(mut self, dst: T, src: T) -> Self {
+        self.symlinks.push(SymlinkBuilder::new_dir(
+            self.root.root().join(dst),
+            self.root.root().join(src),
+        ));
+        self
+    }
+
     pub fn no_manifest(mut self) -> Self {
         self.no_manifest = true;
         self
diff --git a/tests/testsuite/support/paths.rs b/tests/testsuite/support/paths.rs
index 7dfb65c69a3..bf3c763de87 100644
--- a/tests/testsuite/support/paths.rs
+++ b/tests/testsuite/support/paths.rs
@@ -153,7 +153,7 @@ impl CargoPathExt for Path {
     where
         F: Fn(i64, u32) -> ((i64, u32)),
     {
-        let stat = t!(path.metadata());
+        let stat = t!(path.symlink_metadata());
         let mtime = FileTime::from_last_modification_time(&stat);