feat: constraining dependencies #124

Open · wants to merge 1 commit into base: dev

2 changes: 1 addition & 1 deletion examples/caching_dependency_provider.rs
@@ -51,7 +51,7 @@ impl<P: Package, VS: VersionSet, DP: DependencyProvider<P, VS>> DependencyProvid
let dependencies = self.remote_dependencies.get_dependencies(package, version);
match dependencies {
Ok(Dependencies::Known(dependencies)) => {
cache.add_dependencies(
cache.add_requirements(
package.clone(),
version.clone(),
dependencies.clone(),
24 changes: 18 additions & 6 deletions src/internal/core.rs
@@ -15,6 +15,7 @@ use crate::internal::partial_solution::{DecisionLevel, PartialSolution};
use crate::internal::small_vec::SmallVec;
use crate::package::Package;
use crate::report::DerivationTree;
use crate::solver::Requirement;
use crate::type_aliases::{DependencyConstraints, Map};
use crate::version_set::VersionSet;

@@ -75,14 +76,25 @@ impl<P: Package, VS: VersionSet> State<P, VS> {
&mut self,
package: P,
version: VS::V,
deps: &DependencyConstraints<P, VS>,
deps: &DependencyConstraints<P, Requirement<VS>>,
) -> std::ops::Range<IncompId<P, VS>> {
// Create incompatibilities and allocate them in the store.
let new_incompats_id_range = self
.incompatibility_store
.alloc_iter(deps.iter().map(|dep| {
Incompatibility::from_dependency(package.clone(), version.clone(), dep)
}));
let new_incompats_id_range =
self.incompatibility_store
.alloc_iter(deps.iter().map(|(dep, requirement)| match requirement {
Member: I'd suggest renaming dep to p or pkg here to fit with the type annotation.

Requirement::Required(range) => Incompatibility::from_required_dependency(
package.clone(),
version.clone(),
(dep, range),
),
Requirement::Constrained(range) => {
Incompatibility::from_constrained_dependency(
package.clone(),
version.clone(),
(dep, range),
)
}
}));
// Merge the newly created incompatibilities with the older ones.
for id in IncompId::range_to_iter(new_incompats_id_range.clone()) {
self.merge_incompatibility(id);
15 changes: 14 additions & 1 deletion src/internal/incompatibility.rs
@@ -105,7 +105,7 @@ impl<P: Package, VS: VersionSet> Incompatibility<P, VS> {
}

/// Build an incompatibility from a given dependency.
pub fn from_dependency(package: P, version: VS::V, dep: (&P, &VS)) -> Self {
pub fn from_required_dependency(package: P, version: VS::V, dep: (&P, &VS)) -> Self {
let set1 = VS::singleton(version);
let (p2, set2) = dep;
Self {
@@ -117,6 +117,19 @@ impl<P: Package, VS: VersionSet> Incompatibility<P, VS> {
}
}

/// Build an incompatibility from a given constraint.
pub fn from_constrained_dependency(package: P, version: VS::V, dep: (&P, &VS)) -> Self {
let set1 = VS::singleton(version);
let (p2, set2) = dep;
Self {
package_terms: SmallMap::Two([
(package.clone(), Term::Positive(set1.clone())),
(p2.clone(), Term::Positive(set2.complement())),
]),
kind: Kind::FromDependencyOf(package, set1, p2.clone(), set2.clone()),
Member: Currently, whether it is constrained or required does not appear in the kind at all. This does not affect the correctness of the algorithm, as kind is only used for error messages. Do we think it's important to have this distinction available when generating error messages?

  • Yes, you can't understand the proof argument created by this algorithm unless you know what the inputs are. Therefore we should add a boolean or enum to Kind::FromDependency to distinguish which kind of requirement it was.
  • No, they are not significantly different. If a resolver uses both of them and wants to distinguish them, it can look up this dependency in its source of truth and figure out which kind it had generated.

Also, is FromDependencyOf still the correct terminology, or is FromRequirementOf better?

}
}
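
The two constructors differ only in the term recorded for the dependency package: the required form (body collapsed in this diff) follows the usual PubGrub "a depends on b" shape with a negative term for the dependency, while the constrained form above records Term::Positive(set2.complement()), meaning the dependency may stay out of the solution entirely, but if it is picked it must not land outside set2. As a purely illustrative sketch of the first option in the comment above (the RequirementKind name and the extra field on Kind are hypothetical, not part of this PR):

```rust
// Hypothetical sketch (not in this PR): record which flavor of requirement
// produced the incompatibility so error reports can phrase it correctly.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RequirementKind {
    /// "package depends on dep": dep must be selected, inside the given set.
    Required,
    /// "package constrains dep": dep may be absent, but if selected it must
    /// stay inside the given set.
    Constrained,
}

// Kind::FromDependencyOf(P, VS, P, VS) would then gain a RequirementKind field,
// with from_required_dependency and from_constrained_dependency passing
// Required and Constrained respectively, letting the report module say
// "depends on" vs. "constrains". The second option in the comment avoids
// touching Kind at all and leaves the lookup to the dependency provider.
```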

/// Prior cause of two incompatibilities using the rule of resolution.
pub fn prior_cause(
incompat: Id<Self>,
102 changes: 91 additions & 11 deletions src/solver.rs
@@ -166,7 +166,10 @@ pub fn resolve<P: Package, VS: VersionSet>(
version: v.clone(),
});
}
if let Some((dependent, _)) = x.iter().find(|(_, r)| r == &&VS::empty()) {
if let Some((dependent, _)) = x
.iter()
.find(|(_, r)| matches!(r, Requirement::Required(r) if r == &VS::empty()))
{
return Err(PubGrubError::DependencyOnTheEmptySet {
package: p.clone(),
version: v.clone(),
@@ -215,7 +218,17 @@ pub enum Dependencies<P: Package, VS: VersionSet> {
/// Package dependencies are unavailable.
Unknown,
/// Container for all available package versions.
Known(DependencyConstraints<P, VS>),
Known(DependencyConstraints<P, Requirement<VS>>),
}

/// An enum that defines the type of dependency.
#[derive(Clone, Debug)]
pub enum Requirement<VS: VersionSet> {
Member: This will need #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))], and it's going to need to be even a little more complicated than that, because it needs to deserialize the existing test files and benchmarks, which do not have a marker for which kind it is.

Member: Or we could decide to break compatibility of the serialized data? Or start defining a serialized format more in line with the internal representation of incompatibilities (a list of pairs of package and +/- version set), which is not going to change soon, since changing it would mean this is not a "pubgrub" algorithm anymore? Then we would have manually written serializers and deserializers.

/// A dependency that is resolved as part of the solution.
Required(VS),

/// Constrains the version of package but does not require inclusion of the package.
Constrained(VS),
}
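
On the serialization question above, one possible direction (a sketch only; the RequirementRepr name is invented here, and whether the existing ron test files deserialize cleanly through an untagged enum would need to be verified) is a wire representation in which a bare version set keeps meaning a required dependency:

```rust
// Sketch of a backwards-compatible wire format; not code from this PR.
// A bare version set deserializes as `Required`, so existing test files and
// benchmarks (which carry no marker) keep working, while constrained entries
// get an explicit tag.
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(untagged)]
pub enum RequirementRepr<VS> {
    /// Legacy form: just the version set.
    Required(VS),
    /// New form with an explicit marker.
    Constrained { constrained: VS },
}
```

The alternative raised in the second comment, a serialized format closer to the internal incompatibility representation, would sidestep untagged deserialization entirely at the cost of hand-written Serialize/Deserialize impls.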

/// Trait that allows the algorithm to retrieve available packages and their dependencies.
@@ -311,7 +324,7 @@ where
)]
#[cfg_attr(feature = "serde", serde(transparent))]
pub struct OfflineDependencyProvider<P: Package, VS: VersionSet> {
dependencies: Map<P, BTreeMap<VS::V, DependencyConstraints<P, VS>>>,
dependencies: Map<P, BTreeMap<VS::V, DependencyConstraints<P, Requirement<VS>>>>,
}

impl<P: Package, VS: VersionSet> OfflineDependencyProvider<P, VS> {
@@ -324,10 +337,11 @@ impl<P: Package, VS: VersionSet> OfflineDependencyProvider<P, VS> {

/// Registers the dependencies of a package and version pair.
/// Dependencies must be added with a single call to
/// [add_dependencies](OfflineDependencyProvider::add_dependencies).
/// All subsequent calls to
/// [add_dependencies](OfflineDependencyProvider::add_dependencies) for a given
/// package version pair will replace the dependencies by the new ones.
/// [add_dependencies](OfflineDependencyProvider::add_dependencies),
/// [add_constraints](OfflineDependencyProvider::add_constraints), or
/// [add_requirements](OfflineDependencyProvider::add_requirements).
/// All subsequent calls to any of those functions for a given package version pair will replace the
/// dependencies by the new ones.
///
/// The API does not allow to add dependencies one at a time to uphold an assumption that
/// [OfflineDependencyProvider.get_dependencies(p, v)](OfflineDependencyProvider::get_dependencies)
@@ -338,14 +352,76 @@ impl<P: Package, VS: VersionSet> OfflineDependencyProvider<P, VS> {
version: impl Into<VS::V>,
dependencies: I,
) {
let v = version.into();
let entries = self
.dependencies
.entry(package)
.or_default()
.entry(v)
.or_default();
for (dep, range) in dependencies.into_iter() {
entries.insert(dep, Requirement::Required(range));
}
}

/// Registers the constraints of a package and version pair. The constrained packages may
/// *not* end up in the final solution, but if one of them is included (through another
/// package, for example), it must satisfy the version set given in this call.
/// Dependencies must be added with a single call to
/// [add_dependencies](OfflineDependencyProvider::add_dependencies),
/// [add_constraints](OfflineDependencyProvider::add_constraints), or
/// [add_requirements](OfflineDependencyProvider::add_requirements).
/// All subsequent calls to any of those functions for a given package version pair will replace the
/// dependencies by the new ones.
///
/// The API does not allow to add dependencies one at a time to uphold an assumption that
/// [OfflineDependencyProvider.get_dependencies(p, v)](OfflineDependencyProvider::get_dependencies)
/// provides all dependencies of a given package (p) and version (v) pair.
pub fn add_constraints<I: IntoIterator<Item = (P, VS)>>(
&mut self,
package: P,
version: impl Into<VS::V>,
dependencies: I,
) {
let v = version.into();
let entries = self
.dependencies
.entry(package)
.or_default()
.entry(v)
.or_default();
for (package, constraint) in dependencies.into_iter() {
entries.insert(package, Requirement::Constrained(constraint));
}
}

/// Registers the dependencies of a package and version pair.
/// Dependencies must be added with a single call to
/// [add_dependencies](OfflineDependencyProvider::add_dependencies),
/// [add_constraints](OfflineDependencyProvider::add_constraints), or
/// [add_requirements](OfflineDependencyProvider::add_requirements).
/// All subsequent calls to any of those functions for a given package version pair will replace the
Member: This is no longer true. At a minimum these comments need to be updated. These calls now append items to an existing list. @mpizenberg, do you have a strong preference for the all-at-once API?

Member: Indeed, I'm not a big fan of making it easy for people to make mistakes. I'd say we probably make add_dependencies and add_constraints private or remove them altogether, and only keep the add_requirements function. And the new add_requirements function should use = instead of insert. It doesn't make sense to have both a required dep and a constraint for the same dependency package.

/// dependencies by the new ones.
///
/// The API does not allow to add dependencies one at a time to uphold an assumption that
/// [OfflineDependencyProvider.get_dependencies(p, v)](OfflineDependencyProvider::get_dependencies)
/// provides all dependencies of a given package (p) and version (v) pair.
pub fn add_requirements<I: IntoIterator<Item = (P, Requirement<VS>)>>(
&mut self,
package: P,
version: impl Into<VS::V>,
dependencies: I,
) {
let v = version.into();
let entries = self
.dependencies
.entry(package)
.or_default()
.entry(v)
.or_default();
for (package, req) in dependencies.into_iter() {
entries.insert(package, req);
}
}
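
For illustration only (the package names, versions, and ranges below are invented, and the review comments above leave open whether add_constraints stays public), the API added in this PR would be exercised roughly like this:

```rust
use pubgrub::range::Range;
use pubgrub::solver::{resolve, OfflineDependencyProvider, Requirement};
use pubgrub::version::NumberVersion;

fn main() {
    let mut provider = OfflineDependencyProvider::<&str, Range<NumberVersion>>::new();

    // "root" requires "a" (any version) and merely constrains "b" to < 5:
    // "b" only enters the solution if some other package requires it.
    provider.add_requirements(
        "root",
        1,
        vec![
            ("a", Requirement::Required(Range::full())),
            ("b", Requirement::Constrained(Range::strictly_lower_than(5))),
        ],
    );
    provider.add_dependencies("a", 1, vec![]);
    provider.add_dependencies("b", 3, vec![]);

    let solution = resolve(&provider, "root", 1).unwrap();
    // The constraint alone does not pull "b" into the solution.
    assert!(!solution.contains_key("b"));
}
```

The asserted behaviour follows from the constrained incompatibility built in src/internal/incompatibility.rs: it forbids "b" outside the given range but never forces "b" to be selected.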

/// Lists packages that have been saved.
@@ -361,7 +437,11 @@ impl<P: Package, VS: VersionSet> OfflineDependencyProvider<P, VS> {

/// Lists dependencies of a given package and version.
/// Returns [None] if no information is available regarding that package and version pair.
fn dependencies(&self, package: &P, version: &VS::V) -> Option<DependencyConstraints<P, VS>> {
fn dependencies(
&self,
package: &P,
version: &VS::V,
) -> Option<DependencyConstraints<P, Requirement<VS>>> {
self.dependencies.get(package)?.get(version).cloned()
}
}
73 changes: 44 additions & 29 deletions tests/proptest.rs
@@ -1,5 +1,6 @@
// SPDX-License-Identifier: MPL-2.0

use proptest::bool::weighted;
use std::{collections::BTreeSet as Set, error::Error};

use pubgrub::error::PubGrubError;
@@ -8,7 +9,7 @@ use pubgrub::range::Range;
use pubgrub::report::{DefaultStringReporter, Reporter};
use pubgrub::solver::{
choose_package_with_fewest_versions, resolve, Dependencies, DependencyProvider,
OfflineDependencyProvider,
OfflineDependencyProvider, Requirement,
};
use pubgrub::version::{NumberVersion, SemanticVersion};
use pubgrub::version_set::VersionSet;
@@ -146,7 +147,12 @@ pub fn registry_strategy<N: Package + Ord>(
let max_deps = max_versions * (max_crates * (max_crates - 1)) / shrinkage;

let raw_version_range = (any::<Index>(), any::<Index>());
let raw_dependency = (any::<Index>(), any::<Index>(), raw_version_range);
// weighted(0.9) flips a biased coin: ~90% of generated dependency edges become
// Requirement::Required, the remaining ~10% Requirement::Constrained.
let raw_dependency = (
any::<Index>(),
any::<Index>(),
weighted(0.9),
raw_version_range,
);

fn order_index(a: Index, b: Index, size: usize) -> (usize, usize) {
use std::cmp::{max, min};
@@ -169,20 +175,22 @@ pub fn registry_strategy<N: Package + Ord>(
)
.prop_map(
move |(crate_vers_by_name, raw_dependencies, reverse_alphabetical, complicated_len)| {
let mut list_of_pkgid: Vec<(
(N, NumberVersion),
Option<Vec<(N, Requirement<NumVS>)>>,
)> = crate_vers_by_name
.iter()
.flat_map(|(name, vers)| {
vers.iter().map(move |x| {
(
(name.clone(), NumberVersion::from(x.0)),
if x.1 { Some(vec![]) } else { None },
)
})
})
.collect();
let len_all_pkgid = list_of_pkgid.len();
for (a, b, (c, d)) in raw_dependencies {
for (a, b, required, (c, d)) in raw_dependencies {
let (a, b) = order_index(a, b, len_all_pkgid);
let (a, b) = if reverse_alphabetical { (b, a) } else { (a, b) };
let ((dep_name, _), _) = list_of_pkgid[a].to_owned();
@@ -194,18 +202,23 @@ pub fn registry_strategy<N: Package + Ord>(
let (c, d) = order_index(c, d, s.len());

if let (_, Some(deps)) = &mut list_of_pkgid[b] {
let range = if c == 0 && d == s_last_index {
Range::full()
} else if c == 0 {
Range::strictly_lower_than(s[d].0 + 1)
} else if d == s_last_index {
Range::higher_than(s[c].0)
} else if c == d {
Range::singleton(s[c].0)
} else {
Range::between(s[c].0, s[d].0 + 1)
};
deps.push((
dep_name,
if required {
Requirement::Required(range)
} else {
Requirement::Constrained(range)
},
))
}
@@ -224,10 +237,12 @@ pub fn registry_strategy<N: Package + Ord>(
.collect();

for ((name, ver), deps) in list_of_pkgid {
dependency_provider.add_dependencies(
dependency_provider.add_requirements(
name,
ver,
deps.unwrap_or_else(|| vec![(bad_name.clone(), Range::full())]),
deps.unwrap_or_else(|| {
vec![(bad_name.clone(), Requirement::Required(Range::full()))]
}),
);
}

@@ -374,7 +389,7 @@ proptest! {
.versions(package)
.unwrap().collect();
let version = version_idx.get(&versions);
let dependencies: Vec<(u16, NumVS)> = match dependency_provider
let dependencies: Vec<_> = match dependency_provider
.get_dependencies(package, version)
.unwrap()
{
@@ -383,7 +398,7 @@ proptest! {
};
if !dependencies.is_empty() {
let dependency = dep_idx.get(&dependencies).0;
removed_provider.add_dependencies(
removed_provider.add_requirements(
**package,
**version,
dependencies.into_iter().filter(|x| x.0 != dependency),
@@ -442,7 +457,7 @@ proptest! {
Dependencies::Unknown => panic!(),
Dependencies::Known(deps) => deps,
};
smaller_dependency_provider.add_dependencies(n, v, deps)
smaller_dependency_provider.add_requirements(n, v, deps)
}
}
prop_assert!(
@@ -464,7 +479,7 @@ proptest! {
Dependencies::Unknown => panic!(),
Dependencies::Known(deps) => deps,
};
smaller_dependency_provider.add_dependencies(n, v, deps)
smaller_dependency_provider.add_requirements(n, v, deps)
}
}
prop_assert!(