diff --git a/src/cli/add.rs b/src/cli/add.rs
index 2a8c1f6a6..221e30216 100644
--- a/src/cli/add.rs
+++ b/src/cli/add.rs
@@ -1,25 +1,24 @@
-use crate::environment::{update_prefix, verify_prefix_location_unchanged};
-use crate::prefix::Prefix;
-use crate::project::{DependencyType, SpecType};
 use crate::{
     consts,
-    lock_file::{load_lock_file, update_lock_file},
-    project::python::PyPiRequirement,
-    project::Project,
+    environment::{get_up_to_date_prefix, verify_prefix_location_unchanged, LockFileUsage},
+    project::{python::PyPiRequirement, DependencyType, Project, SpecType},
 };
 use clap::Parser;
 use indexmap::IndexMap;
 use itertools::Itertools;
+
 use miette::{IntoDiagnostic, WrapErr};
-use rattler_conda_types::version_spec::{LogicalOperator, RangeOperator};
 use rattler_conda_types::{
+    version_spec::{LogicalOperator, RangeOperator},
     MatchSpec, NamelessMatchSpec, PackageName, Platform, Version, VersionSpec,
 };
 use rattler_repodata_gateway::sparse::SparseRepoData;
 use rattler_solve::{resolvo, SolverImpl};
-use std::collections::{HashMap, HashSet};
-use std::path::PathBuf;
-use std::str::FromStr;
+use std::{
+    collections::{HashMap, HashSet},
+    path::PathBuf,
+    str::FromStr,
+};
 
 /// Adds a dependency to the project
 #[derive(Parser, Debug, Default)]
@@ -217,8 +216,13 @@ pub async fn add_pypi_specs_to_project(
             }
         }
     }
+    let lock_file_usage = if no_update_lockfile {
+        LockFileUsage::Frozen
+    } else {
+        LockFileUsage::Update
+    };
 
-    update_environment(project, None, no_install, no_update_lockfile).await?;
+    get_up_to_date_prefix(project, lock_file_usage, no_install, None).await?;
 
     project.save()?;
 
@@ -317,60 +321,17 @@ pub async fn add_conda_specs_to_project(
             }
         }
     }
-    project.save()?;
-
-    update_environment(
-        project,
-        Some(sparse_repo_data),
-        no_install,
-        no_update_lockfile,
-    )
-    .await?;
-
-    Ok(())
-}
-
-/// Updates the lock file and potentially the prefix to get an up-to-date environment.
-///
-/// We are using this function instead of [`crate::environment::get_up_to_date_prefix`] because we want to be able to
-/// specify if we do not want to update the prefix. Also we know the lock file needs to be updated so `--frozen` and `--locked`
-/// make no sense in this scenario.
-///
-/// Essentially, other than that it does almost the same thing
-async fn update_environment(
-    project: &Project,
-    sparse_repo_data: Option<Vec<SparseRepoData>>,
-    no_install: bool,
-    no_update_lockfile: bool,
-) -> miette::Result<()> {
-    // Update the lock file
-    let lock_file = if !no_update_lockfile {
-        Some(update_lock_file(project, load_lock_file(project).await?, sparse_repo_data).await?)
+    let lock_file_usage = if no_update_lockfile {
+        LockFileUsage::Frozen
     } else {
-        None
+        LockFileUsage::Update
     };
+    get_up_to_date_prefix(project, lock_file_usage, no_install, Some(sparse_repo_data)).await?;
+    project.save()?;
 
-    if let Some(lock_file) = lock_file {
-        if !no_install {
-            crate::environment::sanity_check_project(project)?;
-
-            // Get the currently installed packages
-            let prefix = Prefix::new(project.environment_dir())?;
-            let installed_packages = prefix.find_installed_packages(None).await?;
-
-            // Update the prefix
-            update_prefix(
-                project.pypi_package_db()?,
-                &prefix,
-                installed_packages,
-                &lock_file,
-                Platform::current(),
-            )
-            .await?;
-        }
-    }
     Ok(())
 }
+
 /// Given several specs determines the highest installable version for them.
 pub fn determine_best_version(
     project: &Project,
diff --git a/src/cli/install.rs b/src/cli/install.rs
index a7225035e..f72c4f3ab 100644
--- a/src/cli/install.rs
+++ b/src/cli/install.rs
@@ -17,7 +17,7 @@ pub struct Args {
 pub async fn execute(args: Args) -> miette::Result<()> {
     let project = Project::load_or_else_discover(args.manifest_path.as_deref())?;
 
-    get_up_to_date_prefix(&project, args.lock_file_usage.into()).await?;
+    get_up_to_date_prefix(&project, args.lock_file_usage.into(), false, None).await?;
 
     // Emit success
     eprintln!(
diff --git a/src/cli/project/channel/add.rs b/src/cli/project/channel/add.rs
index b42f61804..7dd22f9f0 100644
--- a/src/cli/project/channel/add.rs
+++ b/src/cli/project/channel/add.rs
@@ -1,11 +1,11 @@
-use crate::environment::update_prefix;
-use crate::lock_file::{load_lock_file, update_lock_file};
-use crate::prefix::Prefix;
+use crate::environment::{get_up_to_date_prefix, LockFileUsage};
+use crate::lock_file::load_lock_file;
+
 use crate::Project;
 use clap::Parser;
 use itertools::Itertools;
 use miette::IntoDiagnostic;
-use rattler_conda_types::{Channel, ChannelConfig, Platform};
+use rattler_conda_types::{Channel, ChannelConfig};
 
 #[derive(Parser, Debug, Default)]
 pub struct Args {
@@ -44,34 +44,15 @@ pub async fn execute(mut project: Project, args: Args) -> miette::Result<()> {
     }
 
     // Load the existing lock-file
-    let lock_file = load_lock_file(&project).await?;
+    let _lock_file = load_lock_file(&project).await?;
 
     // Add the channels to the lock-file
     project
         .manifest
         .add_channels(missing_channels.iter().map(|(name, _channel)| name))?;
 
-    // Try to update the lock-file with the new channels
-    let lock_file = update_lock_file(&project, lock_file, None).await?;
+    get_up_to_date_prefix(&project, LockFileUsage::Update, args.no_install, None).await?;
     project.save()?;
-
-    // Update the installation if needed
-    if !args.no_install {
-        // Get the currently installed packages
-        let prefix = Prefix::new(project.environment_dir())?;
-        let installed_packages = prefix.find_installed_packages(None).await?;
-
-        // Update the prefix
-        update_prefix(
-            project.pypi_package_db()?,
-            &prefix,
-            installed_packages,
-            &lock_file,
-            Platform::current(),
-        )
-        .await?;
-    }
-
     // Report back to the user
     for (name, channel) in missing_channels {
         eprintln!(
diff --git a/src/cli/project/channel/remove.rs b/src/cli/project/channel/remove.rs
index 8fa432b22..6fd5fa06d 100644
--- a/src/cli/project/channel/remove.rs
+++ b/src/cli/project/channel/remove.rs
@@ -1,11 +1,10 @@
-use crate::environment::update_prefix;
-use crate::lock_file::{load_lock_file, update_lock_file};
-use crate::prefix::Prefix;
+use crate::environment::{get_up_to_date_prefix, LockFileUsage};
+
 use crate::Project;
 use clap::Parser;
 use itertools::Itertools;
 use miette::IntoDiagnostic;
-use rattler_conda_types::{Channel, ChannelConfig, Platform};
+use rattler_conda_types::{Channel, ChannelConfig};
 
 #[derive(Parser, Debug, Default)]
 pub struct Args {
@@ -43,35 +42,15 @@ pub async fn execute(mut project: Project, args: Args) -> miette::Result<()> {
         return Ok(());
     }
 
-    // Load the existing lock-file
-    let lock_file = load_lock_file(&project).await?;
-
     // Remove the channels from the manifest
     project
         .manifest
         .remove_channels(channels_to_remove.iter().map(|(name, _channel)| name))?;
 
     // Try to update the lock-file without the removed channels
-    let lock_file = update_lock_file(&project, lock_file, None).await?;
+    get_up_to_date_prefix(&project, LockFileUsage::Update, args.no_install, None).await?;
     project.save()?;
-    // Update the installation if needed
-    if !args.no_install {
-        // Get the currently installed packages
-        let prefix = Prefix::new(project.environment_dir())?;
-        let installed_packages = prefix.find_installed_packages(None).await?;
-
-        // Update the prefix
-        update_prefix(
-            project.pypi_package_db()?,
-            &prefix,
-            installed_packages,
-            &lock_file,
-            Platform::current(),
-        )
-        .await?;
-    }
-
     // Report back to the user
     for (name, channel) in channels_to_remove {
         eprintln!(
diff --git a/src/cli/project/platform/add.rs b/src/cli/project/platform/add.rs
index 4d57e07c6..dca4f2ab3 100644
--- a/src/cli/project/platform/add.rs
+++ b/src/cli/project/platform/add.rs
@@ -1,8 +1,6 @@
 use std::str::FromStr;
 
-use crate::environment::update_prefix;
-use crate::lock_file::{load_lock_file, update_lock_file};
-use crate::prefix::Prefix;
+use crate::environment::{get_up_to_date_prefix, LockFileUsage};
 use crate::Project;
 use clap::Parser;
 use itertools::Itertools;
@@ -42,33 +40,13 @@ pub async fn execute(mut project: Project, args: Args) -> miette::Result<()> {
         return Ok(());
     }
 
-    // Load the existing lock-file
-    let lock_file = load_lock_file(&project).await?;
-
     // Add the platforms to the lock-file
     project.manifest.add_platforms(missing_platforms.iter())?;
 
-    // Try to update the lock-file with the new channels
-    let lock_file = update_lock_file(&project, lock_file, None).await?;
+    get_up_to_date_prefix(&project, LockFileUsage::Update, args.no_install, None).await?;
     project.save()?;
-
-    // Update the installation if needed
-    if !args.no_install {
-        // Get the currently installed packages
-        let prefix = Prefix::new(project.environment_dir())?;
-        let installed_packages = prefix.find_installed_packages(None).await?;
-
-        // Update the prefix
-        update_prefix(
-            project.pypi_package_db()?,
-            &prefix,
-            installed_packages,
-            &lock_file,
-            Platform::current(),
-        )
-        .await?;
-    }
-
     // Report back to the user
     for platform in missing_platforms {
         eprintln!(
diff --git a/src/cli/project/platform/remove.rs b/src/cli/project/platform/remove.rs
index b754a1a47..97c4fc37a 100644
--- a/src/cli/project/platform/remove.rs
+++ b/src/cli/project/platform/remove.rs
@@ -1,6 +1,5 @@
-use crate::environment::update_prefix;
-use crate::lock_file::{load_lock_file, update_lock_file};
-use crate::prefix::Prefix;
+use crate::environment::{get_up_to_date_prefix, LockFileUsage};
+
 use crate::Project;
 use clap::Parser;
 use itertools::Itertools;
@@ -41,35 +40,14 @@ pub async fn execute(mut project: Project, args: Args) -> miette::Result<()> {
         return Ok(());
     }
 
-    // Load the existing lock-file
-    let lock_file = load_lock_file(&project).await?;
-
     // Remove the platform(s) from the manifest
     project
         .manifest
         .remove_platforms(platforms_to_remove.iter().map(|p| p.to_string()))?;
 
-    // Try to update the lock-file without the removed platform(s)
-    let lock_file = update_lock_file(&project, lock_file, None).await?;
+    get_up_to_date_prefix(&project, LockFileUsage::Update, args.no_install, None).await?;
     project.save()?;
-
-    // Update the installation if needed
-    if !args.no_install {
-        // Get the currently installed packages
-        let prefix = Prefix::new(project.environment_dir())?;
-        let installed_packages = prefix.find_installed_packages(None).await?;
-
-        // Update the prefix
-        update_prefix(
-            project.pypi_package_db()?,
-            &prefix,
-            installed_packages,
-            &lock_file,
-            Platform::current(),
-        )
-        .await?;
-    }
-
     // Report back to the user
     for platform in platforms_to_remove {
         eprintln!(
diff --git a/src/cli/remove.rs b/src/cli/remove.rs
index 56d0d209b..6da6eadc9 100644
--- a/src/cli/remove.rs
+++ b/src/cli/remove.rs
@@ -57,7 +57,7 @@ pub async fn execute(args: Args) -> miette::Result<()> {
     project.save()?;
 
     // updating prefix after removing from toml
-    let _ = get_up_to_date_prefix(&project, LockFileUsage::Update).await?;
+    let _ = get_up_to_date_prefix(&project, LockFileUsage::Update, false, None).await?;
 
     for (removed, spec) in results.iter().flatten() {
         let table_name = if let Some(p) = &args.platform {
diff --git a/src/cli/run.rs b/src/cli/run.rs
index 5b4fcee3c..7405276d5 100644
--- a/src/cli/run.rs
+++ b/src/cli/run.rs
@@ -163,7 +163,7 @@ pub async fn get_task_env(
     lock_file_usage: LockFileUsage,
 ) -> miette::Result<HashMap<String, String>> {
     // Get the prefix which we can then activate.
-    let prefix = get_up_to_date_prefix(project, lock_file_usage).await?;
+    let prefix = get_up_to_date_prefix(project, lock_file_usage, false, None).await?;
 
     // Get environment variables from the activation
     let activation_env = run_activation_async(project, prefix).await?;
diff --git a/src/cli/shell.rs b/src/cli/shell.rs
index d0238d3d7..2731b5db6 100644
--- a/src/cli/shell.rs
+++ b/src/cli/shell.rs
@@ -203,7 +203,7 @@ pub async fn get_shell_env(
     lock_file_usage: LockFileUsage,
 ) -> miette::Result<HashMap<String, String>> {
     // Get the prefix which we can then activate.
-    let prefix = get_up_to_date_prefix(project, lock_file_usage).await?;
+    let prefix = get_up_to_date_prefix(project, lock_file_usage, false, None).await?;
 
     // Get environment variables from the activation
     let activation_env = run_activation_async(project, prefix).await?;
diff --git a/src/environment.rs b/src/environment.rs
index 3a6cc0bbf..ad76e8b1f 100644
--- a/src/environment.rs
+++ b/src/environment.rs
@@ -5,9 +5,10 @@ use crate::{
 use miette::{Context, IntoDiagnostic, LabeledSpan};
 
 use crate::lock_file::lock_file_satisfies_project;
-use rattler::install::Transaction;
-use rattler_conda_types::{Platform, PrefixRecord};
+use rattler::install::{PythonInfo, Transaction};
+use rattler_conda_types::{Platform, PrefixRecord, RepoDataRecord};
 use rattler_lock::CondaLock;
+use rattler_repodata_gateway::sparse::SparseRepoData;
 use rip::index::PackageDb;
 use std::{io::ErrorKind, path::Path};
 
@@ -83,7 +84,7 @@ pub fn sanity_check_project(project: &Project) -> miette::Result<()> {
 }
 
 /// Specifies how the lock-file should be updated.
-#[derive(Debug, Default)]
+#[derive(Debug, Default, PartialEq, Eq, Copy, Clone)]
 pub enum LockFileUsage {
     /// Update the lock-file if it is out of date.
     #[default]
@@ -94,11 +95,36 @@ pub enum LockFileUsage {
     Frozen,
 }
 
+impl LockFileUsage {
+    /// Returns true if the lock-file should be updated if it is out of date.
+    pub fn allows_lock_file_updates(self) -> bool {
+        match self {
+            LockFileUsage::Update => true,
+            LockFileUsage::Locked | LockFileUsage::Frozen => false,
+        }
+    }
+
+    /// Returns true if the lock-file should be checked if it is out of date.
+    pub fn should_check_if_out_of_date(self) -> bool {
+        match self {
+            LockFileUsage::Update | LockFileUsage::Locked => true,
+            LockFileUsage::Frozen => false,
+        }
+    }
+}
+
 /// Returns the prefix associated with the given environment. If the prefix doesn't exist or is not
 /// up to date it is updated.
+///
+/// The `sparse_repo_data` is used when the lock-file is updated. We pass it into this function to
+/// make sure the data is not loaded twice since the repodata takes up a lot of memory and takes a
+/// while to load. If `sparse_repo_data` is `None` it will be downloaded. If the lock-file is not
+/// updated, the `sparse_repo_data` is ignored.
 pub async fn get_up_to_date_prefix(
     project: &Project,
     usage: LockFileUsage,
+    no_install: bool,
+    sparse_repo_data: Option<Vec<SparseRepoData>>,
 ) -> miette::Result<Prefix> {
     // Make sure the project is in a sane state
     sanity_check_project(project)?;
@@ -110,50 +136,130 @@ pub async fn get_up_to_date_prefix(
         tokio::spawn(async move { prefix.find_installed_packages(None).await })
     };
 
-    // Update the lock-file if it is out of date.
-    if matches!(usage, LockFileUsage::Frozen) && !project.lock_file_path().is_file() {
-        miette::bail!("No lockfile available, can't do a frozen installation.");
+    // If there is no lock-file and we are also not allowed to update it, we can bail immediately.
+    if !project.lock_file_path().is_file() && !usage.allows_lock_file_updates() {
+        miette::bail!("no lockfile available, can't do a frozen installation.");
     }
 
+    // Load the lock-file into memory.
     let mut lock_file = lock_file::load_lock_file(project).await?;
-    let up_to_date = lock_file_satisfies_project(project, &lock_file)?;
 
-    match usage {
-        LockFileUsage::Update => {
-            if !up_to_date {
-                lock_file = lock_file::update_lock_file(project, lock_file, None).await?
-            }
+    // Check if the lock-file is up to date, but only if the current usage allows it.
+    let update_lock_file = if usage.should_check_if_out_of_date()
+        && !lock_file_satisfies_project(project, &lock_file)?
+    {
+        if !usage.allows_lock_file_updates() {
+            miette::bail!("lockfile not up-to-date with the project");
         }
-        LockFileUsage::Locked => {
-            if !up_to_date {
-                miette::bail!("Lockfile not up-to-date with the project");
-            }
+        true
+    } else {
+        false
+    };
+
+    // First lock and install the conda environment
+    // After which we should have a usable prefix to use for pypi resolution.
+    if update_lock_file {
+        lock_file = lock_file::update_lock_file_conda(project, lock_file, sparse_repo_data).await?;
+    }
+
+    let python_status = if !no_install {
+        update_prefix_conda(
+            &prefix,
+            installed_packages_future.await.into_diagnostic()??,
+            &lock_file,
+            Platform::current(),
+        )
+        .await?
+    } else {
+        // We don't know and it won't matter because we won't install pypi either
+        PythonStatus::DoesNotExist
+    };
+
+    if project.has_pypi_dependencies() {
+        if update_lock_file {
+            lock_file = lock_file::update_lock_file_for_pypi(project, lock_file).await?;
+        }
+
+        if !no_install {
+            // Then update the pypi packages.
+            update_prefix_pypi(
+                &prefix,
+                Platform::current(),
+                project.pypi_package_db()?,
+                &lock_file,
+                &python_status,
+            )
+            .await?;
         }
-        // Dont update the lock-file, dont check it
-        LockFileUsage::Frozen => {}
     }
 
-    // Update the environment
-    update_prefix(
-        project.pypi_package_db()?,
-        &prefix,
-        installed_packages_future.await.into_diagnostic()??,
-        &lock_file,
-        Platform::current(),
+    Ok(prefix)
+}
+
+pub async fn update_prefix_pypi(
+    prefix: &Prefix,
+    platform: Platform,
+    package_db: &PackageDb,
+    lock_file: &CondaLock,
+    status: &PythonStatus,
+) -> miette::Result<()> {
+    // Remove python packages from a previous python distribution if the python version changed.
+    install_pypi::remove_old_python_distributions(prefix, platform, status)?;
+
+    // Install and/or remove python packages
+    progress::await_in_progress(
+        "updating python packages",
+        install_pypi::update_python_distributions(package_db, prefix, lock_file, platform, status),
     )
-    .await?;
+    .await
+}
 
-    Ok(prefix)
+#[derive(Clone)]
+pub enum PythonStatus {
+    /// The python interpreter changed from `old` to `new`.
+    Changed { old: PythonInfo, new: PythonInfo },
+
+    /// The python interpreter remained the same.
+    Unchanged(PythonInfo),
+
+    /// The python interpreter was removed from the environment
+    Removed { old: PythonInfo },
+
+    /// The python interpreter was added to the environment
+    Added { new: PythonInfo },
+
+    /// There is no python interpreter in the environment.
+    DoesNotExist,
+}
+
+impl PythonStatus {
+    /// Determine the [`PythonStatus`] from a [`Transaction`].
+    pub fn from_transaction(transaction: &Transaction<PrefixRecord, RepoDataRecord>) -> Self {
+        match (
+            transaction.current_python_info.as_ref(),
+            transaction.python_info.as_ref(),
+        ) {
+            (Some(old), Some(new)) if old.short_version != new.short_version => {
+                PythonStatus::Changed {
+                    old: old.clone(),
+                    new: new.clone(),
+                }
+            }
+            (Some(_), Some(new)) => PythonStatus::Unchanged(new.clone()),
+            (None, Some(new)) => PythonStatus::Added { new: new.clone() },
+            (Some(old), None) => PythonStatus::Removed { old: old.clone() },
+            (None, None) => PythonStatus::DoesNotExist,
+        }
+    }
 }
 
 /// Updates the environment to contain the packages from the specified lock-file
-pub async fn update_prefix(
-    package_db: &PackageDb,
+pub async fn update_prefix_conda(
     prefix: &Prefix,
     installed_packages: Vec<PrefixRecord>,
     lock_file: &CondaLock,
     platform: Platform,
-) -> miette::Result<()> {
+) -> miette::Result<PythonStatus> {
     // Construct a transaction to bring the environment up to date with the lock-file content
     let desired_conda_packages = lock_file
         .get_conda_packages_by_platform(platform)
@@ -178,22 +284,6 @@ pub async fn update_prefix(
         .await?;
     }
 
-    // Remove python packages from a previous python distribution if the python version changed.
-    install_pypi::remove_old_python_distributions(prefix, platform, &transaction)?;
-
-    // Install and/or remove python packages
-    progress::await_in_progress(
-        "updating python packages",
-        install_pypi::update_python_distributions(
-            package_db,
-            prefix,
-            lock_file,
-            platform,
-            &transaction,
-        ),
-    )
-    .await?;
-
     // Mark the location of the prefix
     create_prefix_location_file(
         &prefix
@@ -204,5 +294,6 @@ pub async fn update_prefix(
     )
     .with_context(|| "failed to create prefix location file.".to_string())?;
 
-    Ok(())
+    // Determine if the python version changed.
+    Ok(PythonStatus::from_transaction(&transaction))
 }
diff --git a/src/install_pypi.rs b/src/install_pypi.rs
index 73d3a27c9..6d3c6beb3 100644
--- a/src/install_pypi.rs
+++ b/src/install_pypi.rs
@@ -1,3 +1,4 @@
+use crate::environment::PythonStatus;
 use crate::prefix::Prefix;
 use crate::progress;
 use crate::progress::ProgressBarMessageFormatter;
@@ -6,8 +7,8 @@ use indexmap::IndexSet;
 use indicatif::ProgressBar;
 use itertools::Itertools;
 use miette::{IntoDiagnostic, WrapErr};
-use rattler::install::Transaction;
-use rattler_conda_types::{Platform, PrefixRecord, RepoDataRecord};
+
+use rattler_conda_types::Platform;
 use rattler_lock::{CondaLock, LockedDependency};
 use rip::artifacts::wheel::{InstallPaths, UnpackWheelOptions};
 use rip::artifacts::Wheel;
@@ -32,22 +33,25 @@ pub async fn update_python_distributions(
     prefix: &Prefix,
     lock_file: &CondaLock,
     platform: Platform,
-    transaction: &Transaction<PrefixRecord, RepoDataRecord>,
+    status: &PythonStatus,
 ) -> miette::Result<()> {
-    // Get the python info from the transaction
-    let Some(python_info) = transaction.python_info.as_ref() else {
-        return Ok(());
+    let python_info = match status {
+        PythonStatus::Changed { new, .. }
+        | PythonStatus::Unchanged(new)
+        | PythonStatus::Added { new } => new,
+        PythonStatus::Removed { .. } | PythonStatus::DoesNotExist => {
+            // No python interpreter in the environment, so there is nothing to do here.
+            return Ok(());
+        }
     };
 
     // Determine where packages would have been installed
-    let install_paths = InstallPaths::for_venv(
-        (
-            python_info.short_version.0 as u32,
-            python_info.short_version.1 as u32,
-            0,
-        ),
-        platform.is_windows(),
+    let python_version = (
+        python_info.short_version.0 as u32,
+        python_info.short_version.1 as u32,
+        0,
     );
+    let install_paths = InstallPaths::for_venv(python_version, platform.is_windows());
 
     // Determine the current python distributions in those locations
     let current_python_packages = find_distributions_in_venv(prefix.root(), &install_paths)
@@ -89,7 +93,7 @@ pub async fn update_python_distributions(
     let package_install_pb = install_python_distributions(
         prefix,
         install_paths,
-        &prefix.root().join(&python_info.path),
+        &prefix.root().join(python_info.path()),
         package_stream,
     )
     .await?;
@@ -297,22 +301,20 @@ fn stream_python_artifacts<'a>(
 pub fn remove_old_python_distributions(
     prefix: &Prefix,
     platform: Platform,
-    transaction: &Transaction<PrefixRecord, RepoDataRecord>,
+    python_changed: &PythonStatus,
 ) -> miette::Result<()> {
-    // Determine if the current distribution is the same as the desired distribution.
-    let Some(previous_python_installation) = transaction.current_python_info.as_ref() else {
-        return Ok(());
+    // If the python version didn't change, there is nothing to do here.
+    let python_version = match python_changed {
+        PythonStatus::Removed { old } | PythonStatus::Changed { old, .. } => old,
+        PythonStatus::Added { .. } | PythonStatus::DoesNotExist | PythonStatus::Unchanged(_) => {
+            return Ok(())
+        }
     };
 
-    if Some(previous_python_installation.short_version)
-        == transaction.python_info.as_ref().map(|p| p.short_version)
-    {
-        return Ok(());
-    }
-
-    // Determine the current python distributions in its install locations
+    // Get the interpreter version from the info
     let python_version = (
-        previous_python_installation.short_version.0 as u32,
-        previous_python_installation.short_version.1 as u32,
+        python_version.short_version.0 as u32,
+        python_version.short_version.1 as u32,
         0,
     );
     let install_paths = InstallPaths::for_venv(python_version, platform.is_windows());
diff --git a/src/lock_file/mod.rs b/src/lock_file/mod.rs
index bc3bba5db..335b53361 100644
--- a/src/lock_file/mod.rs
+++ b/src/lock_file/mod.rs
@@ -7,7 +7,7 @@ use crate::{progress, Project};
 use futures::TryStreamExt;
 use futures::{stream, StreamExt};
 use indicatif::ProgressBar;
-use itertools::Itertools;
+use itertools::{izip, Itertools};
 use miette::{Context, IntoDiagnostic};
 use rattler_conda_types::{
     GenericVirtualPackage, MatchSpec, PackageName, Platform, RepoDataRecord,
@@ -17,7 +17,7 @@ use rattler_lock::{
         CondaLockedDependencyBuilder, LockFileBuilder, LockedPackagesBuilder,
         PypiLockedDependencyBuilder,
     },
-    CondaLock, PackageHashes,
+    CondaLock, LockedDependencyKind, PackageHashes,
 };
 use rattler_repodata_gateway::sparse::SparseRepoData;
 use rattler_solve::{resolvo, SolverImpl};
@@ -39,8 +39,36 @@ pub async fn load_lock_file(project: &Project) -> miette::Result<CondaLock> {
         .unwrap_or_else(|e| Err(e).into_diagnostic())
 }
 
-/// Updates the lock file for a project.
-pub async fn update_lock_file(
+fn main_progress_bar(num_bars: u64, message: &'static str) -> ProgressBar {
+    let multi_progress = progress::global_multi_progress();
+    let top_level_progress = multi_progress.add(ProgressBar::new(num_bars));
+    top_level_progress.set_style(progress::long_running_progress_style());
+    top_level_progress.set_message(message);
+    top_level_progress.enable_steady_tick(Duration::from_millis(50));
+    top_level_progress
+}
+
+fn platform_solve_bars(platforms: &[Platform]) -> Vec<ProgressBar> {
+    platforms
+        .iter()
+        .map(|platform| {
+            let pb =
+                progress::global_multi_progress().add(ProgressBar::new(platforms.len() as u64));
+            pb.set_style(
+                indicatif::ProgressStyle::with_template(&format!(
+                    " {:<9} ..",
+                    platform.to_string(),
+                ))
+                .unwrap(),
+            );
+            pb.enable_steady_tick(Duration::from_millis(100));
+            pb
+        })
+        .collect_vec()
+}
+
+/// Updates the lock file for conda dependencies for the specified project.
+pub async fn update_lock_file_conda(
     project: &Project,
     existing_lock_file: CondaLock,
     repodata: Option<Vec<SparseRepoData>>,
@@ -56,11 +84,10 @@ pub async fn update_lock_file(
         .into();
 
     // Construct a progress bar
-    let multi_progress = progress::global_multi_progress();
-    let top_level_progress = multi_progress.add(ProgressBar::new(platforms.len() as u64));
-    top_level_progress.set_style(progress::long_running_progress_style());
-    top_level_progress.set_message("solving dependencies");
-    top_level_progress.enable_steady_tick(Duration::from_millis(50));
+    let _top_level_progress =
+        main_progress_bar(platforms.len() as u64, "resolving conda dependencies");
+    // Create progress bars for each platform
+    let solve_bars = platform_solve_bars(platforms);
 
     // Construct a conda lock file
     let channels = project
         .channels()
         .iter()
         .map(|channel| rattler_lock::Channel::from(channel.base_url().to_string()));
 
-    // Create progress bars for each platform
-    let solve_bars = platforms
-        .iter()
-        .map(|platform| {
-            let pb =
-                progress::global_multi_progress().add(ProgressBar::new(platforms.len() as u64));
-            pb.set_style(
-                indicatif::ProgressStyle::with_template(&format!(
-                    " {:<9} ..",
-                    platform.to_string(),
-                ))
-                .unwrap(),
-            );
-            pb.enable_steady_tick(Duration::from_millis(100));
-            pb
-        })
-        .collect_vec();
-
-    // Solve each platform concurrently
-    let num_concurrent = if project.has_pypi_dependencies() {
-        // HACK: There is a bug in rip that causes a dead-lock when solving multiple environments
-        // at the same time. So if there are pypi dependencies we limit the number of concurrent
-        // solves to 1.
-        1
-    } else {
-        // By default we solve 2 platforms concurrently. Could probably do more but solving takes
-        // a significant amount of memory.
-        2
-    };
 
     let result: miette::Result<Vec<_>> =
         stream::iter(platforms.iter().zip(solve_bars.iter().cloned()))
             .map(|(platform, pb)| {
@@ -134,7 +132,7 @@ pub async fn update_lock_file(
                     Ok(result)
                 }
             })
-            .buffer_unordered(num_concurrent)
+            .buffer_unordered(2)
             .try_collect()
             .await;
 
@@ -157,6 +155,150 @@ pub async fn update_lock_file(
     Ok(conda_lock)
 }
 
+pub async fn update_lock_file_for_pypi(
+    project: &Project,
+    lock_for_conda: CondaLock,
+) -> miette::Result<CondaLock> {
+    let platforms = project.platforms();
+    let _top_level_progress =
+        main_progress_bar(platforms.len() as u64, "resolving pypi dependencies");
+    let solve_bars = platform_solve_bars(platforms);
+
+    let records = platforms
+        .iter()
+        .map(|plat| lock_for_conda.get_conda_packages_by_platform(*plat));
+
+    let result: miette::Result<Vec<_>> =
+        stream::iter(izip!(platforms.iter(), solve_bars.iter().cloned(), records))
+            .map(|(platform, pb, records)| {
+                pb.reset_elapsed();
+                pb.set_style(
+                    indicatif::ProgressStyle::with_template(&format!(
+                        " {{spinner:.dim}} {:<9} [{{elapsed_precise}}] {{msg:.dim}}",
+                        platform.to_string(),
+                    ))
+                    .unwrap(),
+                );
+
+                async move {
+                    let locked_packages = LockedPackagesBuilder::new(*platform);
+                    let result = resolve_pypi(
+                        project,
+                        &records.into_diagnostic()?,
+                        locked_packages,
+                        *platform,
+                        &pb,
+                    )
+                    .await?;
+
+                    pb.set_style(
+                        indicatif::ProgressStyle::with_template(&format!(
+                            " {} {:<9} [{{elapsed_precise}}]",
+                            console::style(console::Emoji("✔", "↳")).green(),
+                            platform.to_string(),
+                        ))
+                        .unwrap(),
+                    );
+                    pb.finish();
+
+                    Ok(result)
+                }
+            })
+            // TODO: Hack to ensure we do not encounter file-locking issues in windows, should look at a better solution
+            .buffer_unordered(1)
+            .try_collect()
+            .await;
+
+    // Clear all progress bars
+    for bar in solve_bars {
+        bar.finish_and_clear();
+    }
+
+    let channels = project
+        .channels()
+        .iter()
+        .map(|channel| rattler_lock::Channel::from(channel.base_url().to_string()));
+    let mut builder = LockFileBuilder::new(channels, platforms.iter().cloned(), vec![]);
+    for locked_packages in result? {
+        builder = builder.add_locked_packages(locked_packages);
+    }
+    let conda_lock_pypi_only = builder.build().into_diagnostic()?;
+
+    // TODO: think of a better way to do this
+    // Seeing as we are not using the content-hash anyways this seems to be fine
+    let latest_lock = CondaLock {
+        metadata: lock_for_conda.metadata,
+        package: conda_lock_pypi_only
+            .package
+            .into_iter()
+            .chain(
+                lock_for_conda
+                    .package
+                    .into_iter()
+                    .filter(|p| matches!(p.kind, LockedDependencyKind::Conda(_))),
+            )
+            .collect(),
+    };
+
+    // Write the conda lock to disk
+    latest_lock
+        .to_path(&project.lock_file_path())
+        .into_diagnostic()?;
+
+    Ok(latest_lock)
+}
+
+async fn resolve_pypi(
+    project: &Project,
+    records: &[RepoDataRecord],
+    mut locked_packages: LockedPackagesBuilder,
+    platform: Platform,
+    pb: &ProgressBar,
+) -> miette::Result<LockedPackagesBuilder> {
+    // Solve python packages
+    pb.set_message("resolving python");
+    let python_artifacts = pypi::resolve_dependencies(project, platform, records).await?;
+
+    // Clear message
+    pb.set_message("");
+
+    // Add pip packages
+    for python_artifact in python_artifacts {
+        let (artifact, metadata) = project
+            .pypi_package_db()?
+            .get_metadata(&python_artifact.artifacts, None)
+            .await
+            .expect("failed to get metadata for a package for which we have already fetched metadata during solving.")
+            .expect("no metadata for a package for which we have already fetched metadata during solving.");
+
+        let locked_package = PypiLockedDependencyBuilder {
+            name: python_artifact.name.to_string(),
+            version: python_artifact.version.to_string(),
+            requires_dist: metadata
+                .requires_dist
+                .into_iter()
+                .map(|r| r.to_string())
+                .collect(),
+            requires_python: metadata.requires_python.map(|r| r.to_string()),
+            extras: python_artifact
+                .extras
+                .into_iter()
+                .map(|e| e.as_str().to_string())
+                .collect(),
+            url: artifact.url.clone(),
+            hash: artifact
+                .hashes
+                .as_ref()
+                .and_then(|hash| PackageHashes::from_hashes(None, hash.sha256)),
+            source: None,
+            build: None,
+        };
+
+        locked_packages.add_locked_package(locked_package)
+    }
+    Ok(locked_packages)
+}
+
 async fn resolve_platform(
     project: &Project,
     existing_lock_file: &CondaLock,
@@ -198,56 +340,18 @@ async fn resolve_platform(
     )
     .await?;
 
-    // Solve python packages
-    pb.set_message("resolving python");
-    let python_artifacts = pypi::resolve_pypi_dependencies(project, platform, &mut records).await?;
-
-    // Clear message
-    pb.set_message("");
+    // Add purl's for the conda packages that are also available as pypi packages
+    pypi::amend_pypi_purls(&mut records).await?;
 
     // Update lock file
     let mut locked_packages = LockedPackagesBuilder::new(platform);
 
     // Add conda packages
-    for record in records {
+    for record in records.iter() {
        let locked_package = CondaLockedDependencyBuilder::try_from(record).into_diagnostic()?;
        locked_packages.add_locked_package(locked_package);
     }
 
-    // Add pip packages
-    for python_artifact in python_artifacts {
-        let (artifact, metadata) = project
-            .pypi_package_db()?
-            .get_metadata(&python_artifact.artifacts, None)
-            .await
-            .expect("failed to get metadata for a package for which we have already fetched metadata during solving.")
-            .expect("no metadata for a package for which we have already fetched metadata during solving.");
-
-        let locked_package = PypiLockedDependencyBuilder {
-            name: python_artifact.name.to_string(),
-            version: python_artifact.version.to_string(),
-            requires_dist: metadata
-                .requires_dist
-                .into_iter()
-                .map(|r| r.to_string())
-                .collect(),
-            requires_python: metadata.requires_python.map(|r| r.to_string()),
-            extras: python_artifact
-                .extras
-                .into_iter()
-                .map(|e| e.as_str().to_string())
-                .collect(),
-            url: artifact.url.clone(),
-            hash: artifact
-                .hashes
-                .as_ref()
-                .and_then(|hash| PackageHashes::from_hashes(None, hash.sha256)),
-            source: None,
-            build: None,
-        };
-
-        locked_packages.add_locked_package(locked_package)
-    }
     Ok(locked_packages)
 }
diff --git a/src/lock_file/pypi.rs b/src/lock_file/pypi.rs
index 8cd6c6640..ccc4318de 100644
--- a/src/lock_file/pypi.rs
+++ b/src/lock_file/pypi.rs
@@ -14,22 +14,16 @@ use rip::resolve::{resolve, PinnedPackage, ResolveOptions, SDistResolution};
 use std::{collections::HashMap, str::FromStr, vec};
 
 /// Resolve python packages for the specified project.
-pub async fn resolve_pypi_dependencies<'p>(
+pub async fn resolve_dependencies<'p>(
     project: &'p Project,
     platform: Platform,
-    conda_packages: &mut [RepoDataRecord],
+    conda_packages: &[RepoDataRecord],
 ) -> miette::Result<Vec<PinnedPackage<'p>>> {
     let dependencies = project.pypi_dependencies(platform);
     if dependencies.is_empty() {
         return Ok(vec![]);
     }
 
-    // Amend the records with pypi purls if they are not present yet.
-    let conda_forge_mapping = pypi_name_mapping::conda_pypi_name_mapping().await?;
-    for record in conda_packages.iter_mut() {
-        pypi_name_mapping::amend_pypi_purls(record, conda_forge_mapping)?;
-    }
-
     // Determine the python packages that are installed by the conda packages
     let conda_python_packages =
         package_identifier::PypiPackageIdentifier::from_records(conda_packages)
@@ -100,6 +94,15 @@ pub async fn resolve_pypi_dependencies<'p>(
     Ok(result)
 }
 
+/// Amend the records with pypi purls if they are not present yet.
+pub async fn amend_pypi_purls(conda_packages: &mut [RepoDataRecord]) -> miette::Result<()> {
+    let conda_forge_mapping = pypi_name_mapping::conda_pypi_name_mapping().await?;
+    for record in conda_packages.iter_mut() {
+        pypi_name_mapping::amend_pypi_purls(record, conda_forge_mapping)?;
+    }
+    Ok(())
+}
+
 /// Returns true if the specified record refers to a version/variant of python.
 pub fn is_python_record(record: &RepoDataRecord) -> bool {
     package_name_is_python(&record.package_record.name)
diff --git a/tests/project_tests.rs b/tests/project_tests.rs
index f1edad5d3..aae914442 100644
--- a/tests/project_tests.rs
+++ b/tests/project_tests.rs
@@ -39,7 +39,7 @@ async fn add_channel() {
     // Our channel should be in the list of channels
     let local_channel = Channel::from_str(
-        &Url::from_directory_path(additional_channel_dir.path()).unwrap(),
+        Url::from_directory_path(additional_channel_dir.path()).unwrap(),
         &ChannelConfig::default(),
     )
    .unwrap();
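
Reviewer note (not part of the patch): the sketch below mirrors the call pattern that add.rs uses after this change, mapping a "leave the lock-file alone" flag onto LockFileUsage and passing it to the reworked get_up_to_date_prefix. It assumes the crate items shown in the diff above (Project, LockFileUsage, get_up_to_date_prefix); the function name apply_manifest_change and the no_update_lockfile/no_install parameters are illustrative only.

// Sketch only: intended usage of the new get_up_to_date_prefix signature.
async fn apply_manifest_change(
    project: &mut Project,
    no_update_lockfile: bool,
    no_install: bool,
) -> miette::Result<()> {
    // A "don't touch the lock-file" flag maps to Frozen; otherwise allow updates.
    let lock_file_usage = if no_update_lockfile {
        LockFileUsage::Frozen
    } else {
        LockFileUsage::Update
    };

    // Passing `None` for the sparse repodata means it is only downloaded if the
    // lock-file actually has to be re-solved; `no_install` skips touching the prefix.
    get_up_to_date_prefix(project, lock_file_usage, no_install, None).await?;
    project.save()?;
    Ok(())
}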