From 54d738b00affacce24375656c966ca3e1efcdc6b Mon Sep 17 00:00:00 2001
From: Alex Crichton
Date: Tue, 9 Feb 2016 15:42:17 -0800
Subject: [PATCH] Implement top-level overrides

This commit implements top-level overrides which are encoded directly in the
manifest itself. This style of override is distinct from the existing `paths`
support in `.cargo/config` in two important ways:

* Top-level overrides are intended to be checked in and shared amongst all
  developers of a project.
* Top-level overrides are reflected in `Cargo.lock`.

The second point is crucial here, as it ensures that an override on one
machine behaves the same as an override on another machine. This solves many
long-standing problems with `paths`-based overrides, which suffer from some
level of nondeterminism because they are not encoded in the lock file.

Syntactically, an override looks like:

```toml
[replace]
"libc:0.2.0" = { git = 'https://github.com/my-username/libc', branch = '0.2-fork' }
```

This declaration indicates that whenever resolution would otherwise encounter
the `libc` package at version 0.2.0 from crates.io, it should instead be
replaced with the custom git dependency on the specified branch. The key
"libc:0.2.0" here is a package id specification, which allows selecting
specific parts of a dependency graph. For example, two packages with the same
name coming from distinct locations can be told apart, as can multiple
versions of one crate in a dependency graph. The replacement dependency uses
the same syntax as the `[dependencies]` section of Cargo.toml.

One of the major uses of this syntax will be using a temporary fork of a crate
while its changes are pushed upstream to the original repository. This avoids
the need to change intermediate projects immediately, and once the fixes have
landed upstream the `[replace]` section can simply be removed from
`Cargo.toml`.

There are also two crucial restrictions on overrides:

* A crate with the name `foo` can only be overridden with packages also named
  `foo`.
* A crate can only be overridden with a crate of the exact same version.

A consequence of these restrictions is that crates.io cannot be used to
replace anything from crates.io. There is only one version of something on
crates.io, so there is nothing else to replace it with (name/version form a
unique key).
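As a hedged illustration of the two points above, a specification may also
carry a source URL to disambiguate same-named packages, and an active override
shows up as a `replace` key on the replaced package's entry in `Cargo.lock`.
The snippet below is a sketch only: the index URL and the exact form of the
`replace` string are illustrative, not verbatim Cargo output.

```toml
# Cargo.toml: a spec qualified with a source URL pins down *which*
# `libc:0.2.0` is being replaced (URL shown for illustration).
[replace]
"https://github.com/rust-lang/crates.io-index#libc:0.2.0" = { git = 'https://github.com/my-username/libc', branch = '0.2-fork' }

# Cargo.lock: the replaced package's entry records the override, roughly:
[[package]]
name = "libc"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
replace = "libc 0.2.0 (git+https://github.com/my-username/libc?branch=0.2-fork)"
```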
Closes #942 --- src/cargo/core/manifest.rs | 11 +- src/cargo/core/registry.rs | 14 +- src/cargo/core/resolver/encode.rs | 64 ++- src/cargo/core/resolver/mod.rs | 250 ++++++++--- src/cargo/ops/cargo_generate_lockfile.rs | 6 +- src/cargo/ops/cargo_output_metadata.rs | 4 +- src/cargo/ops/cargo_rustc/context.rs | 5 +- src/cargo/ops/lockfile.rs | 2 + src/cargo/ops/resolve.rs | 62 +-- src/cargo/util/toml.rs | 84 +++- tests/resolve.rs | 2 +- tests/test_cargo_overrides.rs | 528 +++++++++++++++++++++++ tests/tests.rs | 1 + 13 files changed, 878 insertions(+), 155 deletions(-) create mode 100644 tests/test_cargo_overrides.rs diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index 5a4a48182b4..a988460f676 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -1,11 +1,10 @@ -use std::default::Default; use std::fmt; use std::path::{PathBuf, Path}; use semver::Version; use rustc_serialize::{Encoder, Encodable}; -use core::{Dependency, PackageId, Summary}; +use core::{Dependency, PackageId, PackageIdSpec, Summary}; use core::package_id::Metadata; use util::{CargoResult, human}; @@ -20,7 +19,8 @@ pub struct Manifest { include: Vec, metadata: ManifestMetadata, profiles: Profiles, - publish: bool + publish: bool, + replace: Vec<(PackageIdSpec, Dependency)>, } /// General metadata about a package which is just blindly uploaded to the @@ -165,7 +165,8 @@ impl Manifest { links: Option, metadata: ManifestMetadata, profiles: Profiles, - publish: bool) -> Manifest { + publish: bool, + replace: Vec<(PackageIdSpec, Dependency)>) -> Manifest { Manifest { summary: summary, targets: targets, @@ -176,6 +177,7 @@ impl Manifest { metadata: metadata, profiles: profiles, publish: publish, + replace: replace, } } @@ -191,6 +193,7 @@ impl Manifest { pub fn warnings(&self) -> &[String] { &self.warnings } pub fn profiles(&self) -> &Profiles { &self.profiles } pub fn publish(&self) -> bool { self.publish } + pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { &self.replace } pub fn links(&self) -> Option<&str> { self.links.as_ref().map(|s| &s[..]) } diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index c005703860a..33a7fea3137 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -79,7 +79,7 @@ impl<'cfg> PackageRegistry<'cfg> { PackageRegistry { sources: SourceMap::new(), source_ids: HashMap::new(), - overrides: vec![], + overrides: Vec::new(), config: config, locked: HashMap::new(), } @@ -270,18 +270,16 @@ impl<'cfg> PackageRegistry<'cfg> { impl<'cfg> Registry for PackageRegistry<'cfg> { fn query(&mut self, dep: &Dependency) -> CargoResult> { - let overrides = try!(self.query_overrides(dep)); + let overrides = try!(self.query_overrides(&dep)); let ret = if overrides.is_empty() { // Ensure the requested source_id is loaded try!(self.ensure_loaded(dep.source_id(), Kind::Normal)); - let mut ret = Vec::new(); - for (id, src) in self.sources.sources_mut() { - if id == dep.source_id() { - ret.extend(try!(src.query(dep)).into_iter()); - } + + match self.sources.get_mut(dep.source_id()) { + Some(src) => try!(src.query(&dep)), + None => Vec::new(), } - ret } else { overrides }; diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index c2d187c3f09..f81d620938b 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -26,18 +26,23 @@ impl EncodableResolve { let mut g = Graph::new(); let mut tmp = HashMap::new(); + let mut replacements = HashMap::new(); let packages = Vec::new(); let 
packages = self.package.as_ref().unwrap_or(&packages); - let root = try!(to_package_id(&self.root.name, - &self.root.version, - self.root.source.as_ref(), - default, &path_deps)); - let ids = try!(packages.iter().map(|p| { - to_package_id(&p.name, &p.version, p.source.as_ref(), + let id2pkgid = |id: &EncodablePackageId| { + to_package_id(&id.name, &id.version, id.source.as_ref(), default, &path_deps) - }).collect::>>()); + }; + let dep2pkgid = |dep: &EncodableDependency| { + to_package_id(&dep.name, &dep.version, dep.source.as_ref(), + default, &path_deps) + }; + + let root = try!(dep2pkgid(&self.root)); + let ids = try!(packages.iter().map(&dep2pkgid) + .collect::>>()); { let mut register_pkg = |pkgid: &PackageId| { @@ -57,16 +62,22 @@ impl EncodableResolve { { let mut add_dependencies = |id: &PackageId, pkg: &EncodableDependency| -> CargoResult<()> { + if let Some(ref replace) = pkg.replace { + let replace = try!(id2pkgid(replace)); + let replace_precise = tmp.get(&replace).map(|p| { + replace.with_precise(p.clone()) + }).unwrap_or(replace); + replacements.insert(id.clone(), replace_precise); + assert!(pkg.dependencies.is_none()); + return Ok(()) + } + let deps = match pkg.dependencies { Some(ref deps) => deps, None => return Ok(()), }; for edge in deps.iter() { - let to_depend_on = try!(to_package_id(&edge.name, - &edge.version, - edge.source.as_ref(), - default, - &path_deps)); + let to_depend_on = try!(id2pkgid(edge)); let precise_pkgid = tmp.get(&to_depend_on) .map(|p| to_depend_on.with_precise(p.clone())) @@ -87,6 +98,7 @@ impl EncodableResolve { root: root, features: HashMap::new(), metadata: self.metadata.clone(), + replacements: replacements, }) } } @@ -136,7 +148,8 @@ pub struct EncodableDependency { name: String, version: String, source: Option, - dependencies: Option> + dependencies: Option>, + replace: Option, } #[derive(Debug, PartialOrd, Ord, PartialEq, Eq)] @@ -186,24 +199,32 @@ impl Encodable for Resolve { let encodable = ids.iter().filter_map(|&id| { if self.root == *id { return None; } - Some(encodable_resolve_node(id, &self.graph)) + Some(encodable_resolve_node(id, self)) }).collect::>(); EncodableResolve { package: Some(encodable), - root: encodable_resolve_node(&self.root, &self.graph), + root: encodable_resolve_node(&self.root, self), metadata: self.metadata.clone(), }.encode(s) } } -fn encodable_resolve_node(id: &PackageId, graph: &Graph) +fn encodable_resolve_node(id: &PackageId, resolve: &Resolve) -> EncodableDependency { - let deps = graph.edges(id).map(|edge| { - let mut deps = edge.map(encodable_package_id).collect::>(); - deps.sort(); - deps - }); + let (replace, deps) = match resolve.replacement(id) { + Some(id) => { + (Some(encodable_package_id(id)), None) + } + None => { + let mut deps = resolve.graph.edges(id) + .into_iter().flat_map(|a| a) + .map(encodable_package_id) + .collect::>(); + deps.sort(); + (None, Some(deps)) + } + }; let source = if id.source_id().is_path() { None @@ -216,6 +237,7 @@ fn encodable_resolve_node(id: &PackageId, graph: &Graph) version: id.version().to_string(), source: source, dependencies: deps, + replace: replace, } } diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index f1f2e100a82..be690ba4305 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -6,8 +6,8 @@ //! which is more worried about discovering crates from various sources, this //! module just uses the Registry trait as a source to learn about crates from. //! -//! 
Actually solving a constraint graph is an NP-hard problem. This algorithm -//! is basically a nice heuristic to make sure we get roughly the best answer +//! Actually solving a constraint graph is an NP-hard problem. This algorithm +//! is basically a nice heuristic to make sure we get roughly the best answer //! most of the time. The constraints that we're working with are: //! //! 1. Each crate can have any number of dependencies. Each dependency can @@ -57,6 +57,7 @@ use core::{PackageId, Registry, SourceId, Summary, Dependency}; use core::PackageIdSpec; use util::{CargoResult, Graph, human, CargoError}; use util::profile; +use util::ChainError; use util::graph::{Nodes, Edges}; pub use self::encode::{EncodableResolve, EncodableDependency, EncodablePackageId}; @@ -72,11 +73,21 @@ mod encode; #[derive(PartialEq, Eq, Clone)] pub struct Resolve { graph: Graph, + replacements: HashMap, features: HashMap>, root: PackageId, metadata: Option, } +pub struct Deps<'a> { + edges: Option>, + resolve: &'a Resolve, +} + +pub struct DepsNotReplaced<'a> { + edges: Option>, +} + #[derive(Clone, Copy)] pub enum Method<'a> { Everything, @@ -90,18 +101,30 @@ pub enum Method<'a> { // Err(..) == standard transient error (e.g. I/O error) // Ok(Err(..)) == resolve error, but is human readable // Ok(Ok(..)) == success in resolving -type ResolveResult = CargoResult>>; +type ResolveResult<'a> = CargoResult>>>; // Information about the dependencies for a crate, a tuple of: // // (dependency info, candidates, features activated) -type DepInfo = (Dependency, Vec>, Vec); +type DepInfo = (Dependency, Vec, Vec); + +#[derive(Clone)] +struct Candidate { + summary: Rc, + replace: Option>, +} impl Resolve { fn new(root: PackageId) -> Resolve { let mut g = Graph::new(); g.add(root.clone(), &[]); - Resolve { graph: g, root: root, features: HashMap::new(), metadata: None } + Resolve { + graph: g, + root: root, + replacements: HashMap::new(), + features: HashMap::new(), + metadata: None, + } } pub fn copy_metadata(&mut self, other: &Resolve) { @@ -114,8 +137,20 @@ impl Resolve { pub fn root(&self) -> &PackageId { &self.root } - pub fn deps(&self, pkg: &PackageId) -> Option> { - self.graph.edges(pkg) + pub fn deps(&self, pkg: &PackageId) -> Deps { + Deps { edges: self.graph.edges(pkg), resolve: self } + } + + pub fn deps_not_replaced(&self, pkg: &PackageId) -> DepsNotReplaced { + DepsNotReplaced { edges: self.graph.edges(pkg) } + } + + pub fn replacement(&self, pkg: &PackageId) -> Option<&PackageId> { + self.replacements.get(pkg) + } + + pub fn replacements(&self) -> &HashMap { + &self.replacements } pub fn features(&self, pkg: &PackageId) -> Option<&HashSet> { @@ -138,14 +173,35 @@ impl fmt::Debug for Resolve { } } +impl<'a> Iterator for Deps<'a> { + type Item = &'a PackageId; + + fn next(&mut self) -> Option<&'a PackageId> { + self.edges.as_mut() + .and_then(|e| e.next()) + .map(|id| self.resolve.replacement(id).unwrap_or(id)) + } +} + +impl<'a> Iterator for DepsNotReplaced<'a> { + type Item = &'a PackageId; + + fn next(&mut self) -> Option<&'a PackageId> { + self.edges.as_mut().and_then(|e| e.next()) + } +} + #[derive(Clone)] -struct Context { +struct Context<'a> { activations: HashMap<(String, SourceId), Vec>>, resolve: Resolve, + replacements: &'a [(PackageIdSpec, Dependency)], } /// Builds the list of all packages required to build the first argument. 
-pub fn resolve(summary: &Summary, method: &Method, +pub fn resolve(summary: &Summary, + method: &Method, + replacements: &[(PackageIdSpec, Dependency)], registry: &mut Registry) -> CargoResult { trace!("resolve; summary={}", summary.package_id()); let summary = Rc::new(summary.clone()); @@ -153,6 +209,7 @@ pub fn resolve(summary: &Summary, method: &Method, let cx = Context { resolve: Resolve::new(summary.package_id().clone()), activations: HashMap::new(), + replacements: replacements, }; let _p = profile::start(format!("resolving: {}", summary.package_id())); let cx = try!(activate_deps_loop(cx, registry, summary, method)); @@ -160,33 +217,49 @@ pub fn resolve(summary: &Summary, method: &Method, Ok(cx.resolve) } -/// Attempts to activate the summary `parent` in the context `cx`. +/// Attempts to activate the summary `candidate` in the context `cx`. /// /// This function will pull dependency summaries from the registry provided, and /// the dependencies of the package will be determined by the `method` provided. -/// If `parent` was activated, this function returns the dependency frame to +/// If `candidate` was activated, this function returns the dependency frame to /// iterate through next. fn activate(cx: &mut Context, registry: &mut Registry, - parent: Rc, + parent: Option<&Rc>, + candidate: Candidate, method: &Method) -> CargoResult> { - // Dependency graphs are required to be a DAG, so we keep a set of - // packages we're visiting and bail if we hit a dupe. - let id = parent.package_id().clone(); + if let Some(parent) = parent { + cx.resolve.graph.link(parent.package_id().clone(), + candidate.summary.package_id().clone()); + } - // If we're already activated, then that was easy! - if cx.flag_activated(&parent, method) { + if cx.flag_activated(&candidate.summary, method) { return Ok(None); } - trace!("activating {}", parent.package_id()); - let deps = try!(cx.build_deps(registry, &parent, method)); + let candidate = match candidate.replace { + Some(replace) => { + cx.resolve.replacements.insert(candidate.summary.package_id().clone(), + replace.package_id().clone()); + if cx.flag_activated(&replace, method) { + return Ok(None); + } + trace!("activating {} (replacing {})", replace.package_id(), + candidate.summary.package_id()); + replace + } + None => { + trace!("activating {}", candidate.summary.package_id()); + candidate.summary + } + }; + + let deps = try!(cx.build_deps(registry, &candidate, method)); - Ok(Some(DepsFrame{ - parent: parent, + Ok(Some(DepsFrame { + parent: candidate, remaining_siblings: RcVecIter::new(deps), - id: id, })) } @@ -229,7 +302,6 @@ impl Iterator for RcVecIter where T: Clone { struct DepsFrame { parent: Rc, remaining_siblings: RcVecIter, - id: PackageId, } impl DepsFrame { @@ -269,10 +341,10 @@ impl Ord for DepsFrame { } } -struct BacktrackFrame { - context_backup: Context, +struct BacktrackFrame<'a> { + context_backup: Context<'a>, deps_backup: BinaryHeap, - remaining_candidates: RcVecIter>, + remaining_candidates: RcVecIter, parent: Rc, dep: Dependency, features: Vec, @@ -283,10 +355,10 @@ struct BacktrackFrame { /// /// If all dependencies can be activated and resolved to a version in the /// dependency graph, cx.resolve is returned. 
-fn activate_deps_loop(mut cx: Context, - registry: &mut Registry, - top: Rc, - top_method: &Method) -> CargoResult { +fn activate_deps_loop<'a>(mut cx: Context<'a>, + registry: &mut Registry, + top: Rc, + top_method: &Method) -> CargoResult> { // Note that a `BinaryHeap` is used for the remaining dependencies that need // activation. This heap is sorted such that the "largest value" is the most // constrained dependency, or the one with the least candidates. @@ -296,7 +368,9 @@ fn activate_deps_loop(mut cx: Context, // use (those with more candidates). let mut backtrack_stack = Vec::new(); let mut remaining_deps = BinaryHeap::new(); - remaining_deps.extend(try!(activate(&mut cx, registry, top, &top_method))); + remaining_deps.extend(try!(activate(&mut cx, registry, None, + Candidate { summary: top, replace: None }, + &top_method))); // Main resolution loop, this is the workhorse of the resolution algorithm. // @@ -338,9 +412,9 @@ fn activate_deps_loop(mut cx: Context, // the left-most nonzero digit is the same they're considered // compatible. candidates.iter().filter(|&b| { - prev_active.iter().any(|a| a == b) || + prev_active.iter().any(|a| *a == b.summary) || prev_active.iter().all(|a| { - !compatible(a.version(), b.version()) + !compatible(a.version(), b.summary.version()) }) }).cloned().collect() }; @@ -401,10 +475,8 @@ fn activate_deps_loop(mut cx: Context, uses_default_features: dep.uses_default_features(), }; trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(), - candidate.version()); - cx.resolve.graph.link(parent.package_id().clone(), - candidate.package_id().clone()); - remaining_deps.extend(try!(activate(&mut cx, registry, + candidate.summary.version()); + remaining_deps.extend(try!(activate(&mut cx, registry, Some(&parent), candidate, &method))); } trace!("resolved: {:?}", cx.resolve); @@ -414,13 +486,13 @@ fn activate_deps_loop(mut cx: Context, // Searches up `backtrack_stack` until it finds a dependency with remaining // candidates. Resets `cx` and `remaining_deps` to that level and returns the // next candidate. If all candidates have been exhausted, returns None. -fn find_candidate(backtrack_stack: &mut Vec, - cx: &mut Context, - remaining_deps: &mut BinaryHeap, - parent: &mut Rc, - cur: &mut usize, - dep: &mut Dependency, - features: &mut Vec) -> Option> { +fn find_candidate<'a>(backtrack_stack: &mut Vec>, + cx: &mut Context<'a>, + remaining_deps: &mut BinaryHeap, + parent: &mut Rc, + cur: &mut usize, + dep: &mut Dependency, + features: &mut Vec) -> Option { while let Some(mut frame) = backtrack_stack.pop() { if let Some((_, candidate)) = frame.remaining_candidates.next() { *cx = frame.context_backup.clone(); @@ -441,7 +513,7 @@ fn activation_error(cx: &Context, parent: &Summary, dep: &Dependency, prev_active: &[Rc], - candidates: &[Rc]) -> Box { + candidates: &[Candidate]) -> Box { if candidates.len() > 0 { let mut msg = format!("failed to select a version for `{}` \ (required by `{}`):\n\ @@ -469,7 +541,7 @@ fn activation_error(cx: &Context, msg.push_str(&format!("\n possible versions to select: {}", candidates.iter() - .map(|v| v.version()) + .map(|v| v.summary.version()) .map(|v| v.to_string()) .collect::>() .join(", "))); @@ -625,7 +697,7 @@ fn build_features(s: &Summary, method: &Method) } } -impl Context { +impl<'a> Context<'a> { // Activate this summary by inserting it into our list of known activations. // // Returns if this summary with the given method is already activated. 
@@ -659,24 +731,24 @@ impl Context { } } - fn build_deps(&mut self, registry: &mut Registry, - parent: &Summary, + fn build_deps(&mut self, + registry: &mut Registry, + candidate: &Summary, method: &Method) -> CargoResult> { // First, figure out our set of dependencies based on the requsted set // of features. This also calculates what features we're going to enable // for our own dependencies. - let deps = try!(self.resolve_features(parent, method)); + let deps = try!(self.resolve_features(candidate, method)); // Next, transform all dependencies into a list of possible candidates // which can satisfy that dependency. let mut deps = try!(deps.into_iter().map(|(dep, features)| { - let mut candidates = try!(registry.query(&dep)); + let mut candidates = try!(self.query(registry, &dep)); // When we attempt versions for a package, we'll want to start at // the maximum version and work our way down. candidates.sort_by(|a, b| { - b.version().cmp(a.version()) + b.summary.version().cmp(a.summary.version()) }); - let candidates = candidates.into_iter().map(Rc::new).collect(); Ok((dep, candidates, features)) }).collect::>>()); @@ -684,30 +756,82 @@ impl Context { // dependencies with more candidates. This way if the dependency with // only one candidate can't be resolved we don't have to do a bunch of // work before we figure that out. - deps.sort_by(|&(_, ref a, _), &(_, ref b, _)| { - a.len().cmp(&b.len()) - }); + deps.sort_by_key(|&(_, ref a, _)| a.len()); Ok(deps) } + /// Queries the `registry` to return a list of candidates for `dep`. + /// + /// This method is the location where overrides are taken into account. If + /// any candidates are returned which match an override then the override is + /// applied by performing a second query for what the override should + /// return. + fn query(&self, + registry: &mut Registry, + dep: &Dependency) -> CargoResult> { + let summaries = try!(registry.query(dep)); + summaries.into_iter().map(Rc::new).map(|summary| { + let mut replace = None; + let mut matched_spec = None; + for &(ref spec, ref dep) in self.replacements.iter() { + if !spec.matches(summary.package_id()) { + continue + } + + if replace.is_some() { + bail!("overlapping replacement specifications found:\n\n \ + * {}\n * {}\n\nboth specifications match: {}", + matched_spec.unwrap(), spec, summary.package_id()); + } + + let mut summaries = try!(registry.query(dep)).into_iter(); + let s = try!(summaries.next().chain_error(|| { + human(format!("no matching package for override `{}` found\n\ + location searched: {}\n\ + version required: {}", + spec, dep.source_id(), dep.version_req())) + })); + let summaries = summaries.collect::>(); + if summaries.len() > 0 { + let bullets = summaries.iter().map(|s| { + format!(" * {}", s.package_id()) + }).collect::>(); + bail!("the replacement specification `{}` matched \ + multiple packages:\n * {}\n{}", spec, + s.package_id(), bullets.join("\n")); + } + + // The dependency should be hard-coded to have the same name and + // an exact version requirement, so both of these assertions + // should never fail. 
+ assert_eq!(s.version(), summary.version()); + assert_eq!(s.name(), summary.name()); + + replace = Some(Rc::new(s)); + matched_spec = Some(spec.clone()); + } + Ok(Candidate { summary: summary, replace: replace }) + }).collect() + } + fn prev_active(&self, dep: &Dependency) -> &[Rc] { let key = (dep.name().to_string(), dep.source_id().clone()); self.activations.get(&key).map(|v| &v[..]).unwrap_or(&[]) } - fn resolve_features(&mut self, parent: &Summary, method: &Method) - -> CargoResult)>> { + fn resolve_features(&mut self, candidate: &Summary, method: &Method) + -> CargoResult)>> { let dev_deps = match *method { Method::Everything => true, Method::Required { dev_deps, .. } => dev_deps, }; // First, filter by dev-dependencies - let deps = parent.dependencies(); + let deps = candidate.dependencies(); let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps); - let (mut feature_deps, used_features) = try!(build_features(parent, + let (mut feature_deps, used_features) = try!(build_features(candidate, method)); let mut ret = Vec::new(); @@ -739,13 +863,13 @@ impl Context { if !unknown.is_empty() { let features = unknown.join(", "); bail!("Package `{}` does not have these features: `{}`", - parent.package_id(), features) + candidate.package_id(), features) } } // Record what list of features is active for this package. if !used_features.is_empty() { - let pkgid = parent.package_id(); + let pkgid = candidate.package_id(); self.resolve.features.entry(pkgid.clone()) .or_insert(HashSet::new()) .extend(used_features); @@ -787,7 +911,7 @@ fn check_cycles(cx: &Context) -> CargoResult<()> { // dependencies. if checked.insert(id) { let summary = summaries[id]; - for dep in resolve.deps(id).into_iter().flat_map(|a| a) { + for dep in resolve.deps(id) { let is_transitive = summary.dependencies().iter().any(|d| { d.matches_id(dep) && d.is_transitive() }); diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs index 1b8bb0087f0..2d0fa803db2 100644 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -115,10 +115,8 @@ pub fn update_lockfile(manifest_path: &Path, return } set.insert(dep); - if let Some(deps) = resolve.deps(dep) { - for dep in deps { - fill_with_deps(resolve, dep, set, visited); - } + for dep in resolve.deps(dep) { + fill_with_deps(resolve, dep, set, visited); } } diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs index 79b308053d4..d5088bab2a1 100644 --- a/src/cargo/ops/cargo_output_metadata.rs +++ b/src/cargo/ops/cargo_output_metadata.rs @@ -92,9 +92,7 @@ impl Encodable for MetadataResolve { nodes: resolve.iter().map(|id| { Node { id: id, - dependencies: resolve.deps(id) - .map(|it| it.collect()) - .unwrap_or(Vec::new()), + dependencies: resolve.deps(id).collect(), } }).collect(), }; diff --git a/src/cargo/ops/cargo_rustc/context.rs b/src/cargo/ops/cargo_rustc/context.rs index b400745644c..824ba134457 100644 --- a/src/cargo/ops/cargo_rustc/context.rs +++ b/src/cargo/ops/cargo_rustc/context.rs @@ -351,7 +351,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { } let id = unit.pkg.package_id(); - let deps = self.resolve.deps(id).into_iter().flat_map(|a| a); + let deps = self.resolve.deps(id); let mut ret = try!(deps.filter(|dep| { unit.pkg.dependencies().iter().filter(|d| { d.name() == dep.name() @@ -479,8 +479,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { /// Returns the dependencies necessary to document a package fn doc_deps(&self, unit: &Unit<'a>) -> CargoResult>> { - let deps 
= self.resolve.deps(unit.pkg.package_id()).into_iter(); - let deps = deps.flat_map(|a| a).filter(|dep| { + let deps = self.resolve.deps(unit.pkg.package_id()).filter(|dep| { unit.pkg.dependencies().iter().filter(|d| { d.name() == dep.name() }).any(|dep| { diff --git a/src/cargo/ops/lockfile.rs b/src/cargo/ops/lockfile.rs index e959203726f..71b07bfe89a 100644 --- a/src/cargo/ops/lockfile.rs +++ b/src/cargo/ops/lockfile.rs @@ -115,6 +115,8 @@ fn emit_package(dep: &toml::Table, out: &mut String) { out.push_str("]\n"); } out.push_str("\n"); + } else if dep.contains_key("replace") { + out.push_str(&format!("replace = {}\n\n", lookup(dep, "replace"))); } } diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index f5925a46c50..a33fd1b9cd0 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -40,7 +40,6 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry, previous: Option<&'a Resolve>, to_avoid: Option<&HashSet<&'a PackageId>>) -> CargoResult { - try!(registry.add_sources(&[package.package_id().source_id() .clone()])); @@ -52,20 +51,14 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry, // TODO: This seems like a hokey reason to single out the registry as being // different let mut to_avoid_sources = HashSet::new(); - match to_avoid { - Some(set) => { - for package_id in set.iter() { - let source = package_id.source_id(); - if !source.is_registry() { - to_avoid_sources.insert(source); - } - } - } - None => {} + if let Some(to_avoid) = to_avoid { + to_avoid_sources.extend(to_avoid.iter() + .map(|p| p.source_id()) + .filter(|s| !s.is_registry())); } let summary = package.summary().clone(); - let summary = match previous { + let (summary, replace) = match previous { Some(r) => { // In the case where a previous instance of resolve is available, we // want to lock as many packages as possible to the previous version @@ -89,31 +82,44 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry, // to the previously resolved version if the dependency listed // still matches the locked version. 
for node in r.iter().filter(|p| keep(p, to_avoid, &to_avoid_sources)) { - let deps = r.deps(node).into_iter().flat_map(|i| i) + let deps = r.deps_not_replaced(node) .filter(|p| keep(p, to_avoid, &to_avoid_sources)) .cloned().collect(); registry.register_lock(node.clone(), deps); } - let map = r.deps(r.root()).into_iter().flat_map(|i| i).filter(|p| { - keep(p, to_avoid, &to_avoid_sources) - }).map(|d| { - (d.name(), d) - }).collect::>(); - summary.map_dependencies(|d| { - match map.get(d.name()) { - Some(&lock) if d.matches_id(lock) => d.lock_to(lock), - _ => d, + let summary = { + let map = r.deps_not_replaced(r.root()).filter(|p| { + keep(p, to_avoid, &to_avoid_sources) + }).map(|d| { + (d.name(), d) + }).collect::>(); + + summary.map_dependencies(|dep| { + match map.get(dep.name()) { + Some(&lock) if dep.matches_id(lock) => dep.lock_to(lock), + _ => dep, + } + }) + }; + let replace = package.manifest().replace(); + let replace = replace.iter().map(|&(ref spec, ref dep)| { + for (key, val) in r.replacements().iter() { + if spec.matches(key) && dep.matches_id(val) { + return (spec.clone(), dep.clone().lock_to(val)) + } } - }) + (spec.clone(), dep.clone()) + }).collect::>(); + (summary, replace) } - None => summary, + None => (summary, package.manifest().replace().to_owned()), }; - let mut resolved = try!(resolver::resolve(&summary, &method, registry)); - match previous { - Some(r) => resolved.copy_metadata(r), - None => {} + let mut resolved = try!(resolver::resolve(&summary, &method, &replace, + registry)); + if let Some(previous) = previous { + resolved.copy_metadata(previous); } return Ok(resolved); diff --git a/src/cargo/util/toml.rs b/src/cargo/util/toml.rs index 17f043cab15..dcaed14762f 100644 --- a/src/cargo/util/toml.rs +++ b/src/cargo/util/toml.rs @@ -6,10 +6,10 @@ use std::path::{Path, PathBuf}; use std::str; use toml; -use semver; +use semver::{self, VersionReq}; use rustc_serialize::{Decodable, Decoder}; -use core::{SourceId, Profiles}; +use core::{SourceId, Profiles, PackageIdSpec}; use core::{Summary, Manifest, Target, Dependency, DependencyInner, PackageId, GitReference}; use core::dependency::{Kind, Platform}; @@ -220,6 +220,7 @@ pub struct TomlManifest { build_dependencies: Option>, features: Option>>, target: Option>, + replace: Option>, } #[derive(RustcDecodable, Clone, Default)] @@ -503,6 +504,7 @@ impl TomlManifest { } let mut deps = Vec::new(); + let mut replace = Vec::new(); { @@ -538,6 +540,36 @@ impl TomlManifest { Some(Kind::Development))); } } + + if let Some(ref map) = self.replace { + for (spec, replacement) in map { + let spec = try!(PackageIdSpec::parse(spec)); + + let version_specified = match *replacement { + TomlDependency::Detailed(ref d) => d.version.is_some(), + TomlDependency::Simple(..) 
=> true, + }; + if version_specified { + bail!("replacements cannot specify a version \ + requirement, but found one for `{}`", spec); + } + + let dep = try!(replacement.to_dependency(spec.name(), + &mut cx, + None)); + let dep = { + let version = try!(spec.version().chain_error(|| { + human(format!("replacements must specify a version \ + to replace, but `{}` does not", + spec)) + })); + let req = VersionReq::exact(version); + dep.clone_inner().set_version_req(req) + .into_dependency() + }; + replace.push((spec, dep)); + } + } } let exclude = project.exclude.clone().unwrap_or(Vec::new()); @@ -566,7 +598,8 @@ impl TomlManifest { project.links.clone(), metadata, profiles, - publish); + publish, + replace); if project.license_file.is_some() && project.license.is_some() { manifest.add_warning(format!("only one of `license` or \ `license-file` is necessary")); @@ -660,16 +693,13 @@ fn validate_bench_name(target: &TomlTarget) -> CargoResult<()> { } } -fn process_dependencies(cx: &mut Context, - new_deps: Option<&HashMap>, - kind: Option) - -> CargoResult<()> { - let dependencies = match new_deps { - Some(ref dependencies) => dependencies, - None => return Ok(()) - }; - for (n, v) in dependencies.iter() { - let details = match *v { +impl TomlDependency { + fn to_dependency(&self, + name: &str, + cx: &mut Context, + kind: Option) + -> CargoResult { + let details = match *self { TomlDependency::Simple(ref version) => { let mut d: DetailedTomlDependency = Default::default(); d.version = Some(version.clone()); @@ -680,11 +710,11 @@ fn process_dependencies(cx: &mut Context, if details.version.is_none() && details.path.is_none() && details.git.is_none() { - cx.warnings.push(format!("dependency ({}) specified \ - without providing a local path, Git \ - repository, or version to use. This will \ - be considered an error in future \ - versions", n)); + let msg = format!("dependency ({}) specified without \ + providing a local path, Git repository, or \ + version to use. 
This will be considered an \ + error in future versions", name); + cx.warnings.push(msg); } let reference = details.branch.clone().map(GitReference::Branch) @@ -725,7 +755,7 @@ fn process_dependencies(cx: &mut Context, }.unwrap_or(try!(SourceId::for_central(cx.config))); let version = details.version.as_ref().map(|v| &v[..]); - let mut dep = try!(DependencyInner::parse(&n, version, &new_source_id)); + let mut dep = try!(DependencyInner::parse(name, version, &new_source_id)); dep = dep.set_features(details.features.unwrap_or(Vec::new())) .set_default_features(details.default_features.unwrap_or(true)) .set_optional(details.optional.unwrap_or(false)) @@ -733,7 +763,21 @@ fn process_dependencies(cx: &mut Context, if let Some(kind) = kind { dep = dep.set_kind(kind); } - cx.deps.push(dep.into_dependency()); + Ok(dep.into_dependency()) + } +} + +fn process_dependencies(cx: &mut Context, + new_deps: Option<&HashMap>, + kind: Option) + -> CargoResult<()> { + let dependencies = match new_deps { + Some(ref dependencies) => dependencies, + None => return Ok(()) + }; + for (n, v) in dependencies.iter() { + let dep = try!(v.to_dependency(n, cx, kind)); + cx.deps.push(dep); } Ok(()) diff --git a/tests/resolve.rs b/tests/resolve.rs index 933996a37a7..14ff5ea9a84 100644 --- a/tests/resolve.rs +++ b/tests/resolve.rs @@ -18,7 +18,7 @@ fn resolve(pkg: PackageId, deps: Vec, -> CargoResult> { let summary = Summary::new(pkg, deps, HashMap::new()).unwrap(); let method = Method::Everything; - Ok(try!(resolver::resolve(&summary, &method, registry)).iter().map(|p| { + Ok(try!(resolver::resolve(&summary, &method, &[], registry)).iter().map(|p| { p.clone() }).collect()) } diff --git a/tests/test_cargo_overrides.rs b/tests/test_cargo_overrides.rs new file mode 100644 index 00000000000..0dac5e62e2e --- /dev/null +++ b/tests/test_cargo_overrides.rs @@ -0,0 +1,528 @@ +use hamcrest::assert_that; + +use support::registry::{registry, Package}; +use support::{execs, project, UPDATING, DOWNLOADING, COMPILING}; +use support::git; +use support::paths; + +fn setup() {} + +test!(override_simple { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}"); + foo.build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", " + extern crate foo; + pub fn bar() { + foo::foo(); + } + "); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stdout(&format!("\ +{updating} registry `file://[..]` +{updating} git repository `[..]` +{compiling} foo v0.1.0 (file://[..]) +{compiling} local v0.0.1 (file://[..]) +", + updating = UPDATING, compiling = COMPILING))); +}); + +test!(missing_version { + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + foo = { git = 'https://example.com' } + "#) + .file("src/lib.rs", ""); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements must specify a version to replace, but `foo` does not +")); +}); + +test!(different_version { + Package::new("foo", "0.2.0").publish(); + Package::new("foo", "0.1.0").publish(); + + 
let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = "0.2.0" + "#) + .file("src/lib.rs", ""); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements cannot specify a version requirement, but found one for [..] +")); +}); + +test!(transitive { + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.2.0") + .dep("foo", "0.1.0") + .file("src/lib.rs", "extern crate foo; fn bar() { foo::foo(); }") + .publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}"); + foo.build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.2.0" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", ""); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stdout(&format!("\ +{updating} registry `file://[..]` +{updating} git repository `[..]` +{downloading} bar v0.2.0 (registry [..]) +{compiling} foo v0.1.0 (file://[..]) +{compiling} bar v0.2.0 (registry [..]) +{compiling} local v0.0.1 (file://[..]) +", + updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING))); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +}); + +test!(persists_across_rebuilds { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}"); + foo.build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", " + extern crate foo; + pub fn bar() { + foo::foo(); + } + "); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stdout(&format!("\ +{updating} registry `file://[..]` +{updating} git repository `file://[..]` +{compiling} foo v0.1.0 (file://[..]) +{compiling} local v0.0.1 (file://[..]) +", + updating = UPDATING, compiling = COMPILING))); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stdout("")); +}); + +test!(replace_registry_with_path { + Package::new("foo", "0.1.0").publish(); + + project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = { path = "../foo" } + "#) + .file("src/lib.rs", " + extern crate foo; + pub fn bar() { + foo::foo(); + } + "); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stdout(&format!("\ +{updating} registry `file://[..]` +{compiling} foo v0.1.0 (file://[..]) +{compiling} local v0.0.1 (file://[..]) +", + compiling = COMPILING, updating = UPDATING))); +}); + +test!(use_a_spec_to_select { + Package::new("foo", "0.1.1") + .file("src/lib.rs", "pub fn foo1() {}") + .publish(); + Package::new("foo", "0.2.0").publish(); + 
Package::new("bar", "0.1.1") + .dep("foo", "0.2") + .file("src/lib.rs", " + extern crate foo; + pub fn bar() { foo::foo3(); } + ") + .publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo3() {}"); + foo.build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + foo = "0.1" + + [replace] + "foo:0.2.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", " + extern crate foo; + extern crate bar; + + fn local() { + foo::foo1(); + bar::bar(); + } + "); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stdout(&format!("\ +{updating} registry `file://[..]` +{updating} git repository `[..]` +{downloading} [..] +{downloading} [..] +{compiling} [..] +{compiling} [..] +{compiling} [..] +{compiling} local v0.0.1 (file://[..]) +", + updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING))); +}); + +test!(override_adds_some_deps { + Package::new("foo", "0.1.1").publish(); + Package::new("bar", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1" + "#) + .file("src/lib.rs", ""); + foo.build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + + [replace] + "bar:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", ""); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stdout(&format!("\ +{updating} registry `file://[..]` +{updating} git repository `[..]` +{downloading} foo v0.1.1 (registry [..]) +{compiling} foo v0.1.1 (registry [..]) +{compiling} bar v0.1.0 ([..]) +{compiling} local v0.0.1 (file://[..]) +", + updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING))); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + + Package::new("foo", "0.1.2").publish(); + assert_that(p.cargo("update").arg("-p").arg(&format!("{}#bar", foo.url())), + execs().with_status(0).with_stdout(&format!("\ +{updating} git repository `file://[..]` +", updating = UPDATING))); + assert_that(p.cargo("update").arg("-p").arg(&format!("{}#bar", registry())), + execs().with_status(0).with_stdout(&format!("\ +{updating} registry `file://[..]` +", updating = UPDATING))); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +}); + +test!(locked_means_locked_yes_no_seriously_i_mean_locked { + // this in theory exercises #2041 + Package::new("foo", "0.1.0").publish(); + Package::new("foo", "0.2.0").publish(); + Package::new("bar", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/lib.rs", ""); + foo.build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + bar = "0.1" + + [replace] + "bar:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", ""); + + assert_that(p.cargo_process("build"), + execs().with_status(0)); + + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + 
assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); +}); + +test!(override_wrong_name { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", ""); + foo.build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", ""); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +error: no matching package for override `foo:0.1.0` found +location searched: file://[..] +version required: = 0.1.0 +")); +}); + +test!(override_with_nothing { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("src/lib.rs", ""); + foo.build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + + [replace] + "foo:0.1.0" = {{ git = '{}' }} + "#, foo.url())) + .file("src/lib.rs", ""); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +error: Unable to update file://[..] + +Caused by: + Could not find Cargo.toml in `[..]` +")); +}); + +test!(override_wrong_version { + let p = project("local") + .file("Cargo.toml", r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [replace] + "foo:0.1.0" = { git = 'https://example.com', version = '0.2.0' } + "#) + .file("src/lib.rs", ""); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements cannot specify a version requirement, but found one for `foo:0.1.0` +")); +}); + +test!(multiple_specs { + Package::new("foo", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}"); + foo.build(); + + let p = project("local") + .file("Cargo.toml", &format!(r#" + [package] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + + [replace] + "foo:0.1.0" = {{ git = '{0}' }} + "{1}#foo:0.1.0" = {{ git = '{0}' }} + "#, foo.url(), registry())) + .file("src/lib.rs", ""); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +error: overlapping replacement specifications found: + + * [..] + * [..] + +both specifications match: foo v0.1.0 ([..]) +")); +}); diff --git a/tests/tests.rs b/tests/tests.rs index ecd4ade3320..b94c24ec4dc 100644 --- a/tests/tests.rs +++ b/tests/tests.rs @@ -58,6 +58,7 @@ mod test_cargo_metadata; mod test_cargo_new; mod test_cargo_package; mod test_cargo_profiles; +mod test_cargo_overrides; mod test_cargo_publish; mod test_cargo_read_manifest; mod test_cargo_registry;