From bc782d825bb393acdc3df73a0cff9ab3b13c5d40 Mon Sep 17 00:00:00 2001
From: Michael Weiss
Date: Tue, 11 Jul 2023 12:25:01 +0200
Subject: [PATCH] Reformat the entire codebase using rustfmt

This is just the result of running `cargo fmt`. Unfortunately, quite a few
changes were necessary since `rustfmt` hadn't been used for quite a while.
We'll add a CI check to enforce that the code is properly formatted.

Signed-off-by: Michael Weiss
---
 build.rs                            |  15 +-
 src/cli.rs                          |  22 +-
 src/commands/build.rs               |  82 +++--
 src/commands/db.rs                  | 174 +++++---
 src/commands/endpoint.rs            | 285 ++++++++++-----
 src/commands/endpoint_container.rs  | 514 ++++++++++++++++------------
 src/commands/find_artifact.rs       |  50 ++-
 src/commands/find_pkg.rs            |   2 +-
 src/commands/metrics.rs             | 124 +++++--
 src/commands/release.rs             | 133 ++++---
 src/commands/source/download.rs     | 102 +++---
 src/commands/source/mod.rs          |  23 +-
 src/commands/tree_of.rs             |   9 +-
 src/commands/util.rs                |  24 +-
 src/commands/what_depends.rs        |   5 +-
 src/config/endpoint_config.rs       |   1 -
 src/config/not_validated.rs         |   5 +-
 src/consts.rs                       |   6 +-
 src/db/connection.rs                |  29 +-
 src/db/find_artifacts.rs            | 114 +++---
 src/db/models/endpoint.rs           |  24 +-
 src/db/models/githash.rs            |   5 +-
 src/db/models/image.rs              |  10 +-
 src/db/models/job.rs                |  16 +-
 src/db/models/package.rs            |  15 +-
 src/db/models/release_store.rs      |   9 +-
 src/db/models/submit.rs             |   7 +-
 src/endpoint/configured.rs          | 190 ++++++----
 src/endpoint/mod.rs                 |   1 -
 src/endpoint/scheduler.rs           | 152 +++++---
 src/endpoint/util.rs                |   4 +-
 src/filestore/path.rs               |  17 +-
 src/filestore/staging.rs            |   4 +-
 src/job/dag.rs                      |  34 +-
 src/job/runnable.rs                 |  28 +-
 src/log/parser.rs                   |  14 +-
 src/main.rs                         |  73 ++--
 src/orchestrator/mod.rs             |   1 -
 src/orchestrator/orchestrator.rs    | 278 ++++++++++-----
 src/orchestrator/util.rs            |   6 +-
 src/package/dag.rs                  | 135 +++++---
 src/package/dependency/build.rs     |  63 ++--
 src/package/dependency/condition.rs | 165 +++++----
 src/package/dependency/runtime.rs   |  77 +++--
 src/package/package.rs              |  67 +++-
 src/package/script.rs               |  16 +-
 src/package/source.rs               |  19 +-
 src/package/version.rs              |  45 +--
 src/repository/fs/element.rs        |   3 +-
 src/repository/fs/mod.rs            |   1 -
 src/repository/fs/path.rs           |   5 +-
 src/repository/fs/representation.rs | 199 ++++++-----
 src/repository/mod.rs               |   1 -
 src/repository/repository.rs        |   5 +-
 src/source/mod.rs                   |   5 +-
 src/ui/mod.rs                       |   2 +-
 src/ui/package.rs                   |  50 ++-
 src/util/filters.rs                 |   2 +-
 src/util/mod.rs                     |   1 -
 src/util/progress.rs                |   7 +-
 60 files changed, 2194 insertions(+), 1281 deletions(-)

diff --git a/build.rs b/build.rs
index 225b7522..be99008c 100644
--- a/build.rs
+++ b/build.rs
@@ -1,15 +1,14 @@
 use anyhow::Result;
-use vergen::EmitBuilder;
 use std::error::Error;
-
+use vergen::EmitBuilder;
 
 fn main() -> Result<(), Box<dyn Error>> {
     EmitBuilder::builder()
-    .build_timestamp()
-    .cargo_debug()
-    .git_sha(false)
-    .git_commit_timestamp()
-    .git_describe(true, true, None)
-    .emit()?;
+        .build_timestamp()
+        .cargo_debug()
+        .git_sha(false)
+        .git_commit_timestamp()
+        .git_describe(true, true, None)
+        .emit()?;
     Ok(())
 }

diff --git a/src/cli.rs b/src/cli.rs
index 3c5f0de4..28b15ea2 100644
--- a/src/cli.rs
+++ b/src/cli.rs
@@ -12,10 +12,10 @@ use std::path::PathBuf;
 use std::str::FromStr;
 
 use clap::crate_authors;
-use clap::Command;
 use clap::Arg;
 use clap::ArgAction;
 use clap::ArgGroup;
+use clap::Command;
 
 use tracing::{debug, error};
 
@@ -1246,7 +1246,8 @@ fn arg_older_than_date(about: &str) -> Arg {
         .long("older-than")
         .value_name("DATE")
         .help(about.to_owned())
-        .long_help(r#"
+        .long_help(
+            r#"
 DATE can be a freeform date, for
example '2h'
 It can also be a exact date: '2020-01-01 00:12:45'
 If the hour-minute-second part is omitted, " 00:00:00" is appended automatically.
@@ -1264,7 +1265,8 @@ fn arg_older_than_date(about: &str) -> Arg {
     months, month, M -- defined as 30.44 days
     years, year, y -- defined as 365.25 days
 
-        "#)
+        "#,
+        )
         .value_parser(parse_date_from_string)
 }
 
@@ -1274,7 +1276,8 @@ fn arg_newer_than_date(about: &str) -> Arg {
         .long("newer-than")
         .value_name("DATE")
         .help(about.to_owned())
-        .long_help(r#"
+        .long_help(
+            r#"
 DATE can be a freeform date, for example '2h'
 It can also be a exact date: '2020-01-01 00:12:45'
 If the hour-minute-second part is omitted, " 00:00:00" is appended automatically.
@@ -1292,7 +1295,8 @@ fn arg_newer_than_date(about: &str) -> Arg {
     months, month, M -- defined as 30.44 days
     years, year, y -- defined as 365.25 days
 
-        "#)
+        "#,
+        )
         .value_parser(parse_date_from_string)
 }
 
@@ -1315,11 +1319,15 @@ fn parse_date_from_string(s: &str) -> std::result::Result<String, String> {
 }
 
 fn parse_usize(s: &str) -> std::result::Result<String, String> {
-    usize::from_str(s) .map_err(|e| e.to_string()).map(|_| s.to_owned())
+    usize::from_str(s)
+        .map_err(|e| e.to_string())
+        .map(|_| s.to_owned())
 }
 
 fn parse_u64(s: &str) -> std::result::Result<String, String> {
-    u64::from_str(s).map_err(|e| e.to_string()).map(|_| s.to_owned())
+    u64::from_str(s)
+        .map_err(|e| e.to_string())
+        .map(|_| s.to_owned())
 }
 
 #[cfg(test)]
diff --git a/src/commands/build.rs b/src/commands/build.rs
index 2bc88b4c..3a69f95c 100644
--- a/src/commands/build.rs
+++ b/src/commands/build.rs
@@ -22,36 +22,36 @@ use anyhow::Error;
 use anyhow::Result;
 use clap::ArgMatches;
 use colored::Colorize;
+use diesel::r2d2::ConnectionManager;
+use diesel::r2d2::Pool;
 use diesel::ExpressionMethods;
 use diesel::PgConnection;
 use diesel::QueryDsl;
 use diesel::RunQueryDsl;
-use diesel::r2d2::ConnectionManager;
-use diesel::r2d2::Pool;
 use itertools::Itertools;
-use tracing::{debug, info, trace, warn};
 use tokio::sync::RwLock;
 use tokio_stream::StreamExt;
+use tracing::{debug, info, trace, warn};
 use uuid::Uuid;
 
 use crate::config::*;
+use crate::filestore::path::StoreRoot;
 use crate::filestore::ReleaseStore;
 use crate::filestore::StagingStore;
-use crate::filestore::path::StoreRoot;
 use crate::job::JobResource;
 use crate::log::LogItem;
 use crate::orchestrator::OrchestratorSetup;
+use crate::package::condition::ConditionData;
 use crate::package::Dag;
 use crate::package::PackageName;
 use crate::package::PackageVersion;
 use crate::package::Shebang;
-use crate::package::condition::ConditionData;
 use crate::repository::Repository;
 use crate::schema;
 use crate::source::SourceCache;
-use crate::util::EnvironmentVariableName;
 use crate::util::docker::ImageName;
 use crate::util::progress::ProgressBars;
+use crate::util::EnvironmentVariableName;
 
 /// Implementation of the "build" subcommand
 #[allow(clippy::too_many_arguments)]
@@ -91,9 +91,20 @@ pub async fn build(
         .any(|img| image_name == img.name)
     {
         return Err(anyhow!(
-            "Requested build image {} is not in the configured images", image_name
+            "Requested build image {} is not in the configured images",
+            image_name
         ))
-        .with_context(|| anyhow!("Available images: {}", config.docker().images().iter().map(|img| img.name.clone()).join(", ")))
+        .with_context(|| {
+            anyhow!(
+                "Available images: {}",
+                config
+                    .docker()
+                    .images()
+                    .iter()
+                    .map(|img| img.name.clone())
+                    .join(", ")
+            )
+        })
         .with_context(|| anyhow!("Image present verification failed"))
        .map_err(Error::from);
     }
@@ -111,7 +122,14 @@
crate::endpoint::EndpointConfiguration::builder() .endpoint_name(ep_name.clone()) .endpoint(ep_cfg.clone()) - .required_images(config.docker().images().iter().map(|img| img.name.clone()).collect::>()) + .required_images( + config + .docker() + .images() + .iter() + .map(|img| img.name.clone()) + .collect::>(), + ) .required_docker_versions(config.docker().docker_versions().clone()) .required_docker_api_versions(config.docker().docker_api_versions().clone()) .build() @@ -145,7 +163,10 @@ pub async fn build( .collect::>>()?; let packages = if let Some(pvers) = pvers { - debug!("Searching for package with version: '{}' '{}'", pname, pvers); + debug!( + "Searching for package with version: '{}' '{}'", + pname, pvers + ); repo.find(&pname, &pvers) } else { debug!("Searching for package by name: '{}'", pname); @@ -176,9 +197,11 @@ pub async fn build( debug!("Loading release directory: {}", p_str); let r = ReleaseStore::load(StoreRoot::new(p.clone())?, &bar_release_loading); if r.is_ok() { - bar_release_loading.finish_with_message(format!("Loaded releases in {p_str} successfully")); + bar_release_loading + .finish_with_message(format!("Loaded releases in {p_str} successfully")); } else { - bar_release_loading.finish_with_message(format!("Failed to load releases in {p_str}")); + bar_release_loading + .finish_with_message(format!("Failed to load releases in {p_str}")); } r.map(Arc::new) }) @@ -187,13 +210,16 @@ pub async fn build( let (staging_store, staging_dir, submit_id) = { let bar_staging_loading = progressbars.bar()?; - let (submit_id, p) = if let Some(staging_dir) = matches.get_one::("staging_dir").map(PathBuf::from) { + let (submit_id, p) = if let Some(staging_dir) = + matches.get_one::("staging_dir").map(PathBuf::from) + { info!( "Setting staging dir to {} for this run", staging_dir.display() ); - let uuid = staging_dir.file_name() + let uuid = staging_dir + .file_name() .ok_or_else(|| anyhow!("Seems not to be a directory: {}", staging_dir.display()))? .to_owned() .into_string() @@ -224,7 +250,9 @@ pub async fn build( } else { bar_staging_loading.finish_with_message("Failed to load staging"); } - r.map(RwLock::new).map(Arc::new).map(|store| (store, p, submit_id))? + r.map(RwLock::new) + .map(Arc::new) + .map(|store| (store, p, submit_id))? 
}; let dag = { @@ -234,7 +262,12 @@ pub async fn build( env: &additional_env, }; - let dag = Dag::for_root_package(package.clone(), &repo, Some(&bar_tree_building), &condition_data)?; + let dag = Dag::for_root_package( + package.clone(), + &repo, + Some(&bar_tree_building), + &condition_data, + )?; bar_tree_building.finish_with_message("Finished loading Dag"); dag }; @@ -298,7 +331,8 @@ pub async fn build( trace!("Setting up database jobs for Package, GitHash, Image"); let db_package = async { Package::create_or_fetch(&mut database_pool.get().unwrap(), package) }; - let db_githash = async { GitHash::create_or_fetch(&mut database_pool.get().unwrap(), &hash_str) }; + let db_githash = + async { GitHash::create_or_fetch(&mut database_pool.get().unwrap(), &hash_str) }; let db_image = async { Image::create_or_fetch(&mut database_pool.get().unwrap(), &image_name) }; let db_envs = async { additional_env @@ -347,15 +381,19 @@ pub async fn build( writeln!(outlock, "Starting submit: {}", mkgreen(&submit_id))?; writeln!(outlock, "Started at: {}", mkgreen(&now))?; writeln!(outlock, "On Image: {}", mkgreen(&db_image.name))?; - writeln!(outlock, "For Package: {p} {v}", + writeln!( + outlock, + "For Package: {p} {v}", p = mkgreen(&db_package.name), - v = mkgreen(&db_package.version))?; + v = mkgreen(&db_package.version) + )?; writeln!(outlock, "On repo hash: {}", mkgreen(&db_githash.hash))?; } trace!("Setting up job sets"); let resources: Vec = additional_env.into_iter().map(JobResource::from).collect(); - let jobdag = crate::job::Dag::from_package_dag(dag, shebang, image_name, phases.clone(), resources); + let jobdag = + crate::job::Dag::from_package_dag(dag, shebang, image_name, phases.clone(), resources); trace!("Setting up job sets finished successfully"); trace!("Setting up Orchestrator"); @@ -408,7 +446,8 @@ pub async fn build( writeln!( outlock, "Last {} lines of Job {}", - number_log_lines, job_uuid.to_string().red() + number_log_lines, + job_uuid.to_string().red() )?; writeln!( outlock, @@ -432,7 +471,6 @@ pub async fn build( error_catched = true; } - line_item.display().map(|d| d.to_string()) }) .collect::>>()?; diff --git a/src/commands/db.rs b/src/commands/db.rs index e5187146..cdc45f7d 100644 --- a/src/commands/db.rs +++ b/src/commands/db.rs @@ -16,10 +16,10 @@ use std::path::PathBuf; use std::process::Command; use std::str::FromStr; +use anyhow::anyhow; use anyhow::Context; use anyhow::Error; use anyhow::Result; -use anyhow::anyhow; use clap::ArgMatches; use colored::Colorize; use diesel::BelongingToDsl; @@ -191,11 +191,7 @@ fn artifacts(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<( let rel = rel .map(|r| r.release_date.to_string()) .unwrap_or_else(|| String::from("no")); - vec![ - artifact.path, - rel, - job.uuid.to_string(), - ] + vec![artifact.path, rel, job.uuid.to_string()] }) .collect::>(); @@ -255,7 +251,8 @@ fn images(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> /// Implementation of the "db submit" subcommand fn submit(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> { let mut conn = conn_cfg.establish_connection()?; - let submit_id = matches.get_one::("submit") + let submit_id = matches + .get_one::("submit") .map(|s| uuid::Uuid::from_str(s.as_ref())) .transpose() .context("Parsing submit UUID")? 
@@ -294,7 +291,9 @@ fn submit(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> let out = std::io::stdout(); let mut outlock = out.lock(); - indoc::writedoc!(outlock, r#" + indoc::writedoc!( + outlock, + r#" Submit {submit_id} Date: {submit_dt} Commit: {submit_commit} @@ -313,8 +312,20 @@ fn submit(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> n_jobs_err = jobs_err.to_string().red(), )?; - let header = crate::commands::util::mk_header(["Job", "Success", "Package", "Version", "Container", "Endpoint", "Image"].to_vec()); - let data = jobs.iter() + let header = crate::commands::util::mk_header( + [ + "Job", + "Success", + "Package", + "Version", + "Container", + "Endpoint", + "Image", + ] + .to_vec(), + ); + let data = jobs + .iter() .map(|job| { let image = models::Image::fetch_for_job(&mut conn, job)? .ok_or_else(|| anyhow!("Image for job {} not found", job.uuid))?; @@ -344,13 +355,23 @@ fn submit(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> /// Implementation of the "db submits" subcommand fn submits(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> { let csv = matches.get_flag("csv"); - let limit = matches.get_one::("limit").map(|s| s.parse::()).transpose()?; - let hdrs = crate::commands::util::mk_header(vec!["Time", "UUID", "For Package", "For Package Version"]); + let limit = matches + .get_one::("limit") + .map(|s| s.parse::()) + .transpose()?; + let hdrs = crate::commands::util::mk_header(vec![ + "Time", + "UUID", + "For Package", + "For Package Version", + ]); let mut conn = conn_cfg.establish_connection()?; let query = schema::submits::table .order_by(schema::submits::id.desc()) // required for the --limit implementation - .inner_join(schema::githashes::table.on(schema::submits::repo_hash_id.eq(schema::githashes::id))) + .inner_join( + schema::githashes::table.on(schema::submits::repo_hash_id.eq(schema::githashes::id)), + ) .inner_join(schema::images::table) .into_boxed(); @@ -377,7 +398,9 @@ fn submits(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> // Get all submits which included the package, but were not necessarily made _for_ the package let query = query .inner_join(schema::jobs::table) - .inner_join(schema::packages::table.on(schema::jobs::package_id.eq(schema::packages::id))) + .inner_join( + schema::packages::table.on(schema::jobs::package_id.eq(schema::packages::id)), + ) .filter(schema::packages::name.eq(&pkgname)); let query = if let Some(limit) = limit { @@ -392,7 +415,8 @@ fn submits(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> schema::submits::table .order_by(schema::submits::id.desc()) // required for the --limit implementation .inner_join({ - schema::packages::table.on(schema::submits::requested_package_id.eq(schema::packages::id)) + schema::packages::table + .on(schema::submits::requested_package_id.eq(schema::packages::id)) }) .filter(schema::submits::id.eq_any(submit_ids)) .select((schema::submits::all_columns, schema::packages::all_columns)) @@ -401,7 +425,8 @@ fn submits(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> // Get all submits _for_ the package let query = query .inner_join({ - schema::packages::table.on(schema::submits::requested_package_id.eq(schema::packages::id)) + schema::packages::table + .on(schema::submits::requested_package_id.eq(schema::packages::id)) }) .filter(schema::packages::dsl::name.eq(&pkgname)); @@ -415,14 +440,17 @@ fn submits(conn_cfg: DbConnectionConfig<'_>, matches: 
&ArgMatches) -> Result<()> } else if let Some(limit) = limit { query .inner_join({ - schema::packages::table.on(schema::submits::requested_package_id.eq(schema::packages::id)) + schema::packages::table + .on(schema::submits::requested_package_id.eq(schema::packages::id)) }) .select((schema::submits::all_columns, schema::packages::all_columns)) .limit(limit) .load::<(models::Submit, models::Package)>(&mut conn)? } else { - query.inner_join({ - schema::packages::table.on(schema::submits::requested_package_id.eq(schema::packages::id)) + query + .inner_join({ + schema::packages::table + .on(schema::submits::requested_package_id.eq(schema::packages::id)) }) .select((schema::submits::all_columns, schema::packages::all_columns)) .load::<(models::Submit, models::Package)>(&mut conn)? @@ -438,7 +466,11 @@ fn submits(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> ] }; - let data = submits.into_iter().rev().map(submit_to_vec).collect::>(); + let data = submits + .into_iter() + .rev() + .map(submit_to_vec) + .collect::>(); if data.is_empty() { info!("No submits in database"); @@ -450,17 +482,14 @@ fn submits(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> } /// Implementation of the "db jobs" subcommand -fn jobs(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgMatches) -> Result<()> { +fn jobs( + conn_cfg: DbConnectionConfig<'_>, + config: &Configuration, + matches: &ArgMatches, +) -> Result<()> { let csv = matches.get_flag("csv"); let hdrs = crate::commands::util::mk_header(vec![ - "Submit", - "Job", - "Time", - "Host", - "Ok?", - "Package", - "Version", - "Distro", + "Submit", "Job", "Time", "Host", "Ok?", "Package", "Version", "Distro", ]); let mut conn = conn_cfg.establish_connection()?; let older_than_filter = get_date_filter("older_than", matches)?; @@ -473,7 +502,11 @@ fn jobs(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgM .inner_join(schema::images::table) .into_boxed(); - if let Some(submit_uuid) = matches.get_one::("submit_uuid").map(|s| uuid::Uuid::parse_str(s.as_ref())).transpose()? { + if let Some(submit_uuid) = matches + .get_one::("submit_uuid") + .map(|s| uuid::Uuid::parse_str(s.as_ref())) + .transpose()? + { sel = sel.filter(schema::submits::uuid.eq(submit_uuid)) } @@ -481,19 +514,27 @@ fn jobs(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgM // // If we get a filter for environment on CLI, we fetch all job ids that are associated with the // passed environment variables and make `sel` filter for those. - if let Some((name, val)) = matches.get_one::("env_filter").map(|s| crate::util::env::parse_to_env(s.as_ref())).transpose()? { + if let Some((name, val)) = matches + .get_one::("env_filter") + .map(|s| crate::util::env::parse_to_env(s.as_ref())) + .transpose()? 
+ { debug!("Filtering for ENV: {} = {}", name, val); let jids = schema::envvars::table .filter({ use crate::diesel::BoolExpressionMethods; - schema::envvars::dsl::name.eq(name.as_ref()) + schema::envvars::dsl::name + .eq(name.as_ref()) .and(schema::envvars::dsl::value.eq(val)) }) .inner_join(schema::job_envs::table) .select(schema::job_envs::job_id) .load::(&mut conn)?; - debug!("Filtering for these IDs (because of env filter): {:?}", jids); + debug!( + "Filtering for these IDs (because of env filter): {:?}", + jids + ); sel = sel.filter(schema::jobs::dsl::id.eq_any(jids)); } @@ -505,7 +546,11 @@ fn jobs(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgM sel = sel.filter(schema::submits::dsl::submit_time.gt(datetime)) } - if let Some(limit) = matches.get_one::("limit").map(|s| s.parse::()).transpose()? { + if let Some(limit) = matches + .get_one::("limit") + .map(|s| s.parse::()) + .transpose()? + { sel = sel.limit(limit) } @@ -524,7 +569,13 @@ fn jobs(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgM let data = sel .order_by(schema::jobs::id.desc()) // required for the --limit implementation - .load::<(models::Job, models::Submit, models::Endpoint, models::Package, models::Image)>(&mut conn)? + .load::<( + models::Job, + models::Submit, + models::Endpoint, + models::Package, + models::Image, + )>(&mut conn)? .into_iter() .rev() // required for the --limit implementation .map(|(job, submit, ep, package, image)| { @@ -542,7 +593,10 @@ fn jobs(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgM success, package.name, package.version, - image_short_name_map.get(&image_name).unwrap_or(&image_name).to_string(), + image_short_name_map + .get(&image_name) + .unwrap_or(&image_name) + .to_string(), ]) }) .collect::>>()?; @@ -557,7 +611,11 @@ fn jobs(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgM } /// Implementation of the "db job" subcommand -fn job(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgMatches) -> Result<()> { +fn job( + conn_cfg: DbConnectionConfig<'_>, + config: &Configuration, + matches: &ArgMatches, +) -> Result<()> { let script_highlight = !matches.get_flag("no_script_highlight"); let script_line_numbers = !matches.get_flag("no_script_line_numbers"); let configured_theme = config.script_highlight_theme(); @@ -748,23 +806,33 @@ fn log_of(conn_cfg: DbConnectionConfig<'_>, matches: &ArgMatches) -> Result<()> .map_err(Error::from) .and_then(|s| crate::log::ParsedLog::from_str(&s))? 
.into_iter() - .map(|line| line.display().and_then(|d| writeln!(lock, "{d}").map_err(Error::from))) + .map(|line| { + line.display() + .and_then(|d| writeln!(lock, "{d}").map_err(Error::from)) + }) .collect::>>() .map(|_| ()) } /// Implementation of the "db releases" subcommand -fn releases(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: &ArgMatches) -> Result<()> { +fn releases( + conn_cfg: DbConnectionConfig<'_>, + config: &Configuration, + matches: &ArgMatches, +) -> Result<()> { let csv = matches.get_flag("csv"); let mut conn = conn_cfg.establish_connection()?; let header = crate::commands::util::mk_header(["Package", "Version", "Date", "Path"].to_vec()); let mut query = schema::jobs::table .inner_join(schema::packages::table) .inner_join(schema::artifacts::table) - .inner_join(schema::releases::table - .on(schema::releases::artifact_id.eq(schema::artifacts::id))) - .inner_join(schema::release_stores::table - .on(schema::release_stores::id.eq(schema::releases::release_store_id))) + .inner_join( + schema::releases::table.on(schema::releases::artifact_id.eq(schema::artifacts::id)), + ) + .inner_join( + schema::release_stores::table + .on(schema::release_stores::id.eq(schema::releases::release_store_id)), + ) .order_by(schema::packages::dsl::name.asc()) .then_order_by(schema::packages::dsl::version.asc()) .then_order_by(schema::releases::release_date.asc()) @@ -794,10 +862,18 @@ fn releases(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: & let rst = schema::release_stores::all_columns; (art, pac, rel, rst) }) - .load::<(models::Artifact, models::Package, models::Release, models::ReleaseStore)>(&mut conn)? + .load::<( + models::Artifact, + models::Package, + models::Release, + models::ReleaseStore, + )>(&mut conn)? 
.into_iter() .filter_map(|(art, pack, rel, rstore)| { - let p = config.releases_directory().join(rstore.store_name).join(art.path); + let p = config + .releases_directory() + .join(rstore.store_name) + .join(art.path); if p.is_file() { Some(vec![ @@ -807,7 +883,12 @@ fn releases(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: & p.display().to_string(), ]) } else { - warn!("Released file for {} {} not found: {}", pack.name, pack.version, p.display()); + warn!( + "Released file for {} {} not found: {}", + pack.name, + pack.version, + p.display() + ); None } }) @@ -822,4 +903,3 @@ fn releases(conn_cfg: DbConnectionConfig<'_>, config: &Configuration, matches: & fn is_job_successfull(job: &models::Job) -> Result> { crate::log::ParsedLog::from_str(&job.log_text).map(|pl| pl.is_successfull().to_bool()) } - diff --git a/src/commands/endpoint.rs b/src/commands/endpoint.rs index 33823e35..a9de4323 100644 --- a/src/commands/endpoint.rs +++ b/src/commands/endpoint.rs @@ -16,28 +16,33 @@ use std::ops::Deref; use std::str::FromStr; use std::sync::Arc; -use anyhow::Error; +use anyhow::anyhow; use anyhow::Context; +use anyhow::Error; use anyhow::Result; -use anyhow::anyhow; use clap::ArgMatches; -use tracing::{debug, info, trace}; use itertools::Itertools; use tokio_stream::StreamExt; +use tracing::{debug, info, trace}; use crate::config::Configuration; use crate::config::EndpointName; -use crate::util::progress::ProgressBars; use crate::endpoint::Endpoint; +use crate::util::progress::ProgressBars; -pub async fn endpoint(matches: &ArgMatches, config: &Configuration, progress_generator: ProgressBars) -> Result<()> { +pub async fn endpoint( + matches: &ArgMatches, + config: &Configuration, + progress_generator: ProgressBars, +) -> Result<()> { let endpoint_names = matches .get_one::("endpoint_name") .map(|s| s.to_owned()) .map(EndpointName::from) .map(|ep| vec![ep]) .unwrap_or_else(|| { - config.docker() + config + .docker() .endpoints() .iter() .map(|(ep_name, _)| ep_name) @@ -47,8 +52,12 @@ pub async fn endpoint(matches: &ArgMatches, config: &Configuration, progress_gen match matches.subcommand() { Some(("ping", matches)) => ping(endpoint_names, matches, config, progress_generator).await, - Some(("stats", matches)) => stats(endpoint_names, matches, config, progress_generator).await, - Some(("container", matches)) => crate::commands::endpoint_container::container(endpoint_names, matches, config).await, + Some(("stats", matches)) => { + stats(endpoint_names, matches, config, progress_generator).await + } + Some(("container", matches)) => { + crate::commands::endpoint_container::container(endpoint_names, matches, config).await + } Some(("containers", matches)) => containers(endpoint_names, matches, config).await, Some(("images", matches)) => images(endpoint_names, matches, config).await, Some((other, _)) => Err(anyhow!("Unknown subcommand: {}", other)), @@ -56,13 +65,22 @@ pub async fn endpoint(matches: &ArgMatches, config: &Configuration, progress_gen } } -async fn ping(endpoint_names: Vec, +async fn ping( + endpoint_names: Vec, matches: &ArgMatches, config: &Configuration, - progress_generator: ProgressBars + progress_generator: ProgressBars, ) -> Result<()> { - let n_pings = matches.get_one::("ping_n").map(|s| s.parse::()).transpose()?.unwrap(); // safe by clap - let sleep = matches.get_one::("ping_sleep").map(|s| s.parse::()).transpose()?.unwrap(); // safe by clap + let n_pings = matches + .get_one::("ping_n") + .map(|s| s.parse::()) + .transpose()? 
+ .unwrap(); // safe by clap + let sleep = matches + .get_one::("ping_sleep") + .map(|s| s.parse::()) + .transpose()? + .unwrap(); // safe by clap let endpoints = connect_to_endpoints(config, &endpoint_names).await?; let multibar = Arc::new({ let mp = indicatif::MultiProgress::new(); @@ -90,7 +108,7 @@ async fn ping(endpoint_names: Vec, bar.inc(1); if let Err(e) = r { bar.finish_with_message(format!("Pinging {} failed", endpoint.name())); - return Err(e) + return Err(e); } tokio::time::sleep(tokio::time::Duration::from_secs(sleep)).await; @@ -105,10 +123,11 @@ async fn ping(endpoint_names: Vec, .await } -async fn stats(endpoint_names: Vec, +async fn stats( + endpoint_names: Vec, matches: &ArgMatches, config: &Configuration, - progress_generator: ProgressBars + progress_generator: ProgressBars, ) -> Result<()> { let csv = matches.get_flag("csv"); let endpoints = connect_to_endpoints(config, &endpoint_names).await?; @@ -116,17 +135,20 @@ async fn stats(endpoint_names: Vec, bar.set_length(endpoint_names.len() as u64); bar.set_message("Fetching stats"); - let hdr = crate::commands::util::mk_header([ - "Name", - "Containers", - "Images", - "Kernel", - "Memory", - "Memory limit", - "Cores", - "OS", - "System Time", - ].to_vec()); + let hdr = crate::commands::util::mk_header( + [ + "Name", + "Containers", + "Images", + "Kernel", + "Memory", + "Memory limit", + "Cores", + "OS", + "System Time", + ] + .to_vec(), + ); let data = endpoints .into_iter() @@ -165,8 +187,8 @@ async fn stats(endpoint_names: Vec, crate::commands::util::display_data(hdr, data, csv) } - -async fn containers(endpoint_names: Vec, +async fn containers( + endpoint_names: Vec, matches: &ArgMatches, config: &Configuration, ) -> Result<()> { @@ -180,7 +202,8 @@ async fn containers(endpoint_names: Vec, } } -async fn containers_list(endpoint_names: Vec, +async fn containers_list( + endpoint_names: Vec, matches: &ArgMatches, config: &Configuration, ) -> Result<()> { @@ -189,19 +212,17 @@ async fn containers_list(endpoint_names: Vec, let older_than_filter = crate::commands::util::get_date_filter("older_than", matches)?; let newer_than_filter = crate::commands::util::get_date_filter("newer_than", matches)?; let csv = matches.get_flag("csv"); - let hdr = crate::commands::util::mk_header([ - "Endpoint", - "Container id", - "Image", - "Created", - "Status", - ].to_vec()); + let hdr = crate::commands::util::mk_header( + ["Endpoint", "Container id", "Image", "Created", "Status"].to_vec(), + ); let data = connect_to_endpoints(config, &endpoint_names) .await? 
.into_iter() .map(|ep| async move { - ep.container_stats().await.map(|stats| (ep.name().clone(), stats)) + ep.container_stats() + .await + .map(|stats| (ep.name().clone(), stats)) }) .collect::>() .collect::>>() @@ -213,8 +234,18 @@ async fn containers_list(endpoint_names: Vec, .into_iter() .filter(|stat| list_stopped || stat.state != "exited") .filter(|stat| filter_image.map(|fim| *fim == stat.image).unwrap_or(true)) - .filter(|stat| older_than_filter.as_ref().map(|time| time > &stat.created).unwrap_or(true)) - .filter(|stat| newer_than_filter.as_ref().map(|time| time < &stat.created).unwrap_or(true)) + .filter(|stat| { + older_than_filter + .as_ref() + .map(|time| time > &stat.created) + .unwrap_or(true) + }) + .filter(|stat| { + newer_than_filter + .as_ref() + .map(|time| time < &stat.created) + .unwrap_or(true) + }) .map(|stat| { vec![ endpoint_name.as_ref().to_owned(), @@ -231,7 +262,8 @@ async fn containers_list(endpoint_names: Vec, crate::commands::util::display_data(hdr, data, csv) } -async fn containers_prune(endpoint_names: Vec, +async fn containers_prune( + endpoint_names: Vec, matches: &ArgMatches, config: &Configuration, ) -> Result<()> { @@ -242,12 +274,23 @@ async fn containers_prune(endpoint_names: Vec, .await? .into_iter() .map(move |ep| async move { - let stats = ep.container_stats() + let stats = ep + .container_stats() .await? .into_iter() .filter(|stat| stat.state == "exited") - .filter(|stat| older_than_filter.as_ref().map(|time| time > &stat.created).unwrap_or(true)) - .filter(|stat| newer_than_filter.as_ref().map(|time| time < &stat.created).unwrap_or(true)) + .filter(|stat| { + older_than_filter + .as_ref() + .map(|time| time > &stat.created) + .unwrap_or(true) + }) + .filter(|stat| { + newer_than_filter + .as_ref() + .map(|time| time < &stat.created) + .unwrap_or(true) + }) .map(|stat| (ep.clone(), stat)) .collect::>(); Ok(stats) @@ -256,12 +299,16 @@ async fn containers_prune(endpoint_names: Vec, .collect::>>() .await?; - let prompt = format!("Really delete {} Containers?", stats.iter().flatten().count()); + let prompt = format!( + "Really delete {} Containers?", + stats.iter().flatten().count() + ); if !dialoguer::Confirm::new().with_prompt(prompt).interact()? { - return Ok(()) + return Ok(()); } - stats.into_iter() + stats + .into_iter() .flat_map(Vec::into_iter) .map(|(ep, stat)| async move { ep.get_container_by_id(&stat.id) @@ -276,11 +323,15 @@ async fn containers_prune(endpoint_names: Vec, .await } -async fn containers_top(endpoint_names: Vec, +async fn containers_top( + endpoint_names: Vec, matches: &ArgMatches, config: &Configuration, ) -> Result<()> { - let limit = matches.get_one::("limit").map(|s| usize::from_str(s.as_ref())).transpose()?; + let limit = matches + .get_one::("limit") + .map(|s| usize::from_str(s.as_ref())) + .transpose()?; let older_than_filter = crate::commands::util::get_date_filter("older_than", matches)?; let newer_than_filter = crate::commands::util::get_date_filter("newer_than", matches)?; let csv = matches.get_flag("csv"); @@ -290,13 +341,24 @@ async fn containers_top(endpoint_names: Vec, .into_iter() .inspect(|ep| trace!("Fetching stats for endpoint: {}", ep.name())) .map(move |ep| async move { - let stats = ep.container_stats() + let stats = ep + .container_stats() .await? 
.into_iter() .inspect(|stat| trace!("Fetching stats for container: {}", stat.id)) .filter(|stat| stat.state == "running") - .filter(|stat| older_than_filter.as_ref().map(|time| time > &stat.created).unwrap_or(true)) - .filter(|stat| newer_than_filter.as_ref().map(|time| time < &stat.created).unwrap_or(true)) + .filter(|stat| { + older_than_filter + .as_ref() + .map(|time| time > &stat.created) + .unwrap_or(true) + }) + .filter(|stat| { + newer_than_filter + .as_ref() + .map(|time| time < &stat.created) + .unwrap_or(true) + }) .map(|stat| (ep.clone(), stat)) .collect::>(); Ok(stats) @@ -329,7 +391,8 @@ async fn containers_top(endpoint_names: Vec, top.processes }; - let hm = top.titles + let hm = top + .titles .into_iter() .zip(processes.into_iter()) .collect::>>(); @@ -339,44 +402,48 @@ async fn containers_top(endpoint_names: Vec, let hdr = crate::commands::util::mk_header({ std::iter::once("Container ID") - .chain({ - data.values() - .flat_map(|hm| hm.keys()) - .map(|s| s.deref()) - }) + .chain(data.values().flat_map(|hm| hm.keys()).map(|s| s.deref())) .collect::>() .into_iter() .unique() .collect() }); - let data = data.into_iter() + let data = data + .into_iter() .flat_map(|(container_id, top_hm)| { - top_hm.values() - .map(|t| std::iter::once(container_id.clone()).chain(t.iter().map(String::clone)).collect()) + top_hm + .values() + .map(|t| { + std::iter::once(container_id.clone()) + .chain(t.iter().map(String::clone)) + .collect() + }) .collect::>>() }) - // ugly hack to bring order to the galaxy - .sorted_by(|v1, v2| if let (Some(f1), Some(f2)) = (v1.iter().next(), v2.iter().next()) { - f1.cmp(f2) - } else { - std::cmp::Ordering::Less + .sorted_by(|v1, v2| { + if let (Some(f1), Some(f2)) = (v1.iter().next(), v2.iter().next()) { + f1.cmp(f2) + } else { + std::cmp::Ordering::Less + } }) .collect::>>(); crate::commands::util::display_data(hdr, data, csv) } - -async fn containers_stop(endpoint_names: Vec, +async fn containers_stop( + endpoint_names: Vec, matches: &ArgMatches, config: &Configuration, ) -> Result<()> { let older_than_filter = crate::commands::util::get_date_filter("older_than", matches)?; let newer_than_filter = crate::commands::util::get_date_filter("newer_than", matches)?; - let stop_timeout = matches.get_one::("timeout") + let stop_timeout = matches + .get_one::("timeout") .map(|s| s.parse::()) .transpose()? .map(std::time::Duration::from_secs); @@ -385,12 +452,23 @@ async fn containers_stop(endpoint_names: Vec, .await? .into_iter() .map(move |ep| async move { - let stats = ep.container_stats() + let stats = ep + .container_stats() .await? .into_iter() .filter(|stat| stat.state == "exited") - .filter(|stat| older_than_filter.as_ref().map(|time| time > &stat.created).unwrap_or(true)) - .filter(|stat| newer_than_filter.as_ref().map(|time| time < &stat.created).unwrap_or(true)) + .filter(|stat| { + older_than_filter + .as_ref() + .map(|time| time > &stat.created) + .unwrap_or(true) + }) + .filter(|stat| { + newer_than_filter + .as_ref() + .map(|time| time < &stat.created) + .unwrap_or(true) + }) .map(|stat| (ep.clone(), stat)) .collect::>(); Ok(stats) @@ -401,10 +479,11 @@ async fn containers_stop(endpoint_names: Vec, let prompt = format!("Really stop {} Containers?", stats.iter().flatten().count()); if !dialoguer::Confirm::new().with_prompt(prompt).interact()? 
{ - return Ok(()) + return Ok(()); } - stats.into_iter() + stats + .into_iter() .flat_map(Vec::into_iter) .map(|(ep, stat)| async move { ep.get_container_by_id(&stat.id) @@ -419,8 +498,8 @@ async fn containers_stop(endpoint_names: Vec, .await } - -async fn images(endpoint_names: Vec, +async fn images( + endpoint_names: Vec, matches: &ArgMatches, config: &Configuration, ) -> Result<()> { @@ -432,7 +511,8 @@ async fn images(endpoint_names: Vec, } } -async fn images_list(endpoint_names: Vec, +async fn images_list( + endpoint_names: Vec, _matches: &ArgMatches, config: &Configuration, ) -> Result<()> { @@ -450,17 +530,24 @@ async fn images_list(endpoint_names: Vec, let mut lock = out.lock(); iter.try_for_each(|img| { - writeln!(lock, "{created} {id}", created = img.created(), id = { - if let Some(tags)= img.tags() { - tags.join(", ") - } else { - img.id().clone() + writeln!( + lock, + "{created} {id}", + created = img.created(), + id = { + if let Some(tags) = img.tags() { + tags.join(", ") + } else { + img.id().clone() + } } - }).map_err(Error::from) + ) + .map_err(Error::from) }) } -async fn images_present(endpoint_names: Vec, +async fn images_present( + endpoint_names: Vec, _matches: &ArgMatches, config: &Configuration, ) -> Result<()> { @@ -468,10 +555,12 @@ async fn images_present(endpoint_names: Vec, let eps = connect_to_endpoints(config, &endpoint_names).await?; - let ep_names_to_images = eps.iter() + let ep_names_to_images = eps + .iter() .map(|ep| async move { ep.images(None).await.map(|imgs| { - let img_tags = imgs.filter_map(|img| img.tags().clone().map(Vec::into_iter)) + let img_tags = imgs + .filter_map(|img| img.tags().clone().map(Vec::into_iter)) .flatten() .map(ImageName::from) .collect(); @@ -491,7 +580,8 @@ async fn images_present(endpoint_names: Vec, ep_names_to_images .iter() .try_for_each(|(ep_name, ep_imgs)| { - config.docker() + config + .docker() .images() .iter() .map(|config_img| (ep_imgs.contains(&config_img.name), &config_img.name)) @@ -507,7 +597,10 @@ async fn images_present(endpoint_names: Vec, /// Helper function to connect to all endpoints from the configuration, that appear (by name) in /// the `endpoint_names` list -pub(super) async fn connect_to_endpoints(config: &Configuration, endpoint_names: &[EndpointName]) -> Result>> { +pub(super) async fn connect_to_endpoints( + config: &Configuration, + endpoint_names: &[EndpointName], +) -> Result>> { let endpoint_configurations = config .docker() .endpoints() @@ -517,7 +610,14 @@ pub(super) async fn connect_to_endpoints(config: &Configuration, endpoint_names: crate::endpoint::EndpointConfiguration::builder() .endpoint_name(ep_name.clone()) .endpoint(ep_cfg.clone()) - .required_images(config.docker().images().iter().map(|img| img.name.clone()).collect::>()) + .required_images( + config + .docker() + .images() + .iter() + .map(|img| img.name.clone()) + .collect::>(), + ) .required_docker_versions(config.docker().docker_versions().clone()) .required_docker_api_versions(config.docker().docker_api_versions().clone()) .build() @@ -525,9 +625,14 @@ pub(super) async fn connect_to_endpoints(config: &Configuration, endpoint_names: .collect::>(); info!("Endpoint config build"); - info!("Connecting to {n} endpoints: {eps}", + info!( + "Connecting to {n} endpoints: {eps}", n = endpoint_configurations.len(), - eps = endpoint_configurations.iter().map(|epc| epc.endpoint_name()).join(", ")); + eps = endpoint_configurations + .iter() + .map(|epc| epc.endpoint_name()) + .join(", ") + ); 
crate::endpoint::util::setup_endpoints(endpoint_configurations).await } diff --git a/src/commands/endpoint_container.rs b/src/commands/endpoint_container.rs index 11b93546..b7c6882e 100644 --- a/src/commands/endpoint_container.rs +++ b/src/commands/endpoint_container.rs @@ -12,23 +12,26 @@ use std::borrow::Cow; +use anyhow::anyhow; use anyhow::Error; use anyhow::Result; -use anyhow::anyhow; use clap::ArgMatches; -use tokio_stream::StreamExt; use shiplift::Container; +use tokio_stream::StreamExt; use crate::config::Configuration; use crate::config::EndpointName; -pub async fn container(endpoint_names: Vec, +pub async fn container( + endpoint_names: Vec, matches: &ArgMatches, config: &Configuration, ) -> Result<()> { let container_id = matches.get_one::("container_id").unwrap(); - let endpoints = crate::commands::endpoint::connect_to_endpoints(config, &endpoint_names).await?; - let relevant_endpoints = endpoints.into_iter() + let endpoints = + crate::commands::endpoint::connect_to_endpoints(config, &endpoint_names).await?; + let relevant_endpoints = endpoints + .into_iter() .map(|ep| async { ep.has_container_with_id(container_id) .await @@ -38,31 +41,35 @@ pub async fn container(endpoint_names: Vec, .collect::>>() .await? .into_iter() - .filter_map(|tpl| { - if tpl.1 { - Some(tpl.0) - } else { - None - } - }) + .filter_map(|tpl| if tpl.1 { Some(tpl.0) } else { None }) .collect::>(); if relevant_endpoints.len() > 1 { - return Err(anyhow!("Found more than one container for id {}", container_id)) + return Err(anyhow!( + "Found more than one container for id {}", + container_id + )); } - let relevant_endpoint = relevant_endpoints.get(0).ok_or_else(|| { - anyhow!("Found no container for id {}", container_id) - })?; + let relevant_endpoint = relevant_endpoints + .get(0) + .ok_or_else(|| anyhow!("Found no container for id {}", container_id))?; - let container = relevant_endpoint.get_container_by_id(container_id) + let container = relevant_endpoint + .get_container_by_id(container_id) .await? - .ok_or_else(|| anyhow!("Cannot find container {} on {}", container_id, relevant_endpoint.name()))?; + .ok_or_else(|| { + anyhow!( + "Cannot find container {} on {}", + container_id, + relevant_endpoint.name() + ) + })?; let confirm = |prompt: String| dialoguer::Confirm::new().with_prompt(prompt).interact(); match matches.subcommand() { - Some(("top", matches)) => top(matches, container).await, + Some(("top", matches)) => top(matches, container).await, Some(("kill", matches)) => { confirm({ if let Some(sig) = matches.get_one::("signal") { @@ -73,36 +80,44 @@ pub async fn container(endpoint_names: Vec, })?; kill(matches, container).await - }, + } Some(("delete", _)) => { if confirm(format!("Really delete {container_id}?"))? { delete(container).await } else { Ok(()) } - }, - Some(("start", _)) => { + } + Some(("start", _)) => { if confirm(format!("Really start {container_id}?"))? { start(container).await } else { Ok(()) } - }, + } Some(("stop", matches)) => { if confirm(format!("Really stop {container_id}?"))? { stop(matches, container).await } else { Ok(()) } - }, + } Some(("exec", matches)) => { - let commands = matches.get_many::("commands").unwrap().map(AsRef::as_ref).collect::>(); - if confirm(format!("Really run '{}' in {}?", commands.join(" "), container_id))? { + let commands = matches + .get_many::("commands") + .unwrap() + .map(AsRef::as_ref) + .collect::>(); + if confirm(format!( + "Really run '{}' in {}?", + commands.join(" "), + container_id + ))? 
{ exec(matches, container).await } else { Ok(()) } - }, + } Some(("inspect", _)) => inspect(container).await, Some((other, _)) => Err(anyhow!("Unknown subcommand: {}", other)), None => Err(anyhow!("No subcommand")), @@ -117,7 +132,10 @@ async fn top(matches: &ArgMatches, container: Container<'_>) -> Result<()> { async fn kill(matches: &ArgMatches, container: Container<'_>) -> Result<()> { let signal = matches.get_one::("signal"); - container.kill(signal.map(|s| s.as_ref())).await.map_err(Error::from) + container + .kill(signal.map(|s| s.as_ref())) + .await + .map_err(Error::from) } async fn delete(container: Container<'_>) -> Result<()> { @@ -129,40 +147,46 @@ async fn start(container: Container<'_>) -> Result<()> { } async fn stop(matches: &ArgMatches, container: Container<'_>) -> Result<()> { - container.stop({ - matches - .get_one::("timeout") - .map(|s| s.parse::()) - .transpose()? - .map(std::time::Duration::from_secs) - }) - .await - .map_err(Error::from) + container + .stop({ + matches + .get_one::("timeout") + .map(|s| s.parse::()) + .transpose()? + .map(std::time::Duration::from_secs) + }) + .await + .map_err(Error::from) } async fn exec(matches: &ArgMatches, container: Container<'_>) -> Result<()> { - use std::io::Write; use futures::TryStreamExt; + use std::io::Write; let execopts = shiplift::builder::ExecContainerOptions::builder() .cmd({ - matches.get_many::("commands").unwrap().map(AsRef::as_ref).collect::>() + matches + .get_many::("commands") + .unwrap() + .map(AsRef::as_ref) + .collect::>() }) .attach_stdout(true) .attach_stderr(true) .build(); - container.exec(&execopts) + container + .exec(&execopts) .map_err(Error::from) .try_for_each(|chunk| async { match chunk { shiplift::tty::TtyChunk::StdIn(_) => Err(anyhow!("Cannot handle STDIN TTY chunk")), shiplift::tty::TtyChunk::StdOut(v) => { std::io::stdout().write(&v).map_err(Error::from).map(|_| ()) - }, + } shiplift::tty::TtyChunk::StdErr(v) => { std::io::stderr().write(&v).map_err(Error::from).map(|_| ()) - }, + } } }) .await @@ -178,8 +202,8 @@ async fn exec(matches: &ArgMatches, container: Container<'_>) -> Result<()> { // This is the most ugly function of the whole codebase. As ugly as it is: It is simply printing // things, nothing here is too complex code-wise (except some nested formatting stuff...) 
async fn inspect(container: Container<'_>) -> Result<()> { - use std::io::Write; use itertools::Itertools; + use std::io::Write; let d = container.inspect().await?; @@ -206,7 +230,11 @@ async fn inspect(container: Container<'_>) -> Result<()> { .unwrap_or_else(|| Cow::from("None")) } - writeln!(std::io::stdout(), "{}", indoc::formatdoc!(r#" + writeln!( + std::io::stdout(), + "{}", + indoc::formatdoc!( + r#" Container: {container_id} app_armor_profile: {app_armor_profile} @@ -277,141 +305,176 @@ async fn inspect(container: Container<'_>) -> Result<()> { status: {state_status} mounts: {mounts} "#, - - container_id = container.id(), - - app_armor_profile = d.app_armor_profile, - args = d.args.iter().join(", "), - - config_attach_stderr = d.config.attach_stderr.to_string(), - config_attach_stdin = d.config.attach_stdin.to_string(), - config_attach_stdout = d.config.attach_stdout.to_string(), - config_cmd = option_vec(d.config.cmd.as_ref()), - config_domainname = d.config.domainname, - config_entrypoint = option_vec(d.config.entrypoint.as_ref()), - config_env = option_vec_nl(d.config.env.as_ref(), 8), - config_exposed_ports = { - d.config.exposed_ports.map(|hm| { - let s = hm.iter() - .map(|(k, v_hm)| { - format!("{:ind$}{k}:\n{hm}", - "", ind = 8, - k = k, - hm = v_hm.iter() - .map(|(k, v)| format!("{:ind$}{k}: {v}", "", ind = 12, k = k, v = v)) + container_id = container.id(), + app_armor_profile = d.app_armor_profile, + args = d.args.iter().join(", "), + config_attach_stderr = d.config.attach_stderr.to_string(), + config_attach_stdin = d.config.attach_stdin.to_string(), + config_attach_stdout = d.config.attach_stdout.to_string(), + config_cmd = option_vec(d.config.cmd.as_ref()), + config_domainname = d.config.domainname, + config_entrypoint = option_vec(d.config.entrypoint.as_ref()), + config_env = option_vec_nl(d.config.env.as_ref(), 8), + config_exposed_ports = { + d.config + .exposed_ports + .map(|hm| { + let s = hm + .iter() + .map(|(k, v_hm)| { + format!( + "{:ind$}{k}:\n{hm}", + "", + ind = 8, + k = k, + hm = v_hm + .iter() + .map(|(k, v)| format!( + "{:ind$}{k}: {v}", + "", + ind = 12, + k = k, + v = v + )) + .collect::>() + .join("\n") + ) + }) .collect::>() - .join("\n") - ) - }) - .collect::>() - .join("\n"); + .join("\n"); - format!("\n{s}") - }) - .unwrap_or_else(|| String::from("None")) - }, - config_hostname = d.config.hostname, - config_image = d.config.image, - config_labels = { - d.config.labels - .map(|hm| { - let s = hm.iter() - .map(|(k, v)| format!("{:ind$}{k}: {v}", "", ind = 8, k = k, v = v)) - .collect::>() - .join("\n"); - format!("\n{s}") - }) - .unwrap_or_else(|| String::from("None")) - }, - config_on_build = option_vec(d.config.on_build.as_ref()), - config_open_stdin = d.config.open_stdin.to_string(), - config_stdin_once = d.config.stdin_once.to_string(), - config_tty = d.config.tty.to_string(), - config_user = d.config.user, - config_working_dir = d.config.working_dir, - - created = d.created.to_string(), - driver = d.driver, - - host_config_cgroup_parent = option_tostr(d.host_config.cgroup_parent.as_ref()), - host_config_container_id_file = d.host_config.container_id_file, - host_config_cpu_shares = option_tostr(d.host_config.cpu_shares.as_ref()), - host_config_cpuset_cpus = option_tostr(d.host_config.cpuset_cpus.as_ref()), - host_config_memory = option_tostr(d.host_config.memory.as_ref()), - host_config_memory_swap = option_tostr(d.host_config.memory_swap.as_ref()), - host_config_network_mode = d.host_config.network_mode, - host_config_pid_mode = 
option_tostr(d.host_config.pid_mode.as_ref()), - host_config_port_bindings = { - d.host_config.port_bindings - .map(|hm| { - let s = hm.iter() - .map(|(k, v)| { - let v = v.iter() - .map(|hm| { - hm.iter() - .map(|(k, v)| { - format!("{:ind$}{k}: {v}", "", ind = 12, k = k, v = v) + format!("\n{s}") + }) + .unwrap_or_else(|| String::from("None")) + }, + config_hostname = d.config.hostname, + config_image = d.config.image, + config_labels = { + d.config + .labels + .map(|hm| { + let s = hm + .iter() + .map(|(k, v)| format!("{:ind$}{k}: {v}", "", ind = 8, k = k, v = v)) + .collect::>() + .join("\n"); + format!("\n{s}") + }) + .unwrap_or_else(|| String::from("None")) + }, + config_on_build = option_vec(d.config.on_build.as_ref()), + config_open_stdin = d.config.open_stdin.to_string(), + config_stdin_once = d.config.stdin_once.to_string(), + config_tty = d.config.tty.to_string(), + config_user = d.config.user, + config_working_dir = d.config.working_dir, + created = d.created.to_string(), + driver = d.driver, + host_config_cgroup_parent = option_tostr(d.host_config.cgroup_parent.as_ref()), + host_config_container_id_file = d.host_config.container_id_file, + host_config_cpu_shares = option_tostr(d.host_config.cpu_shares.as_ref()), + host_config_cpuset_cpus = option_tostr(d.host_config.cpuset_cpus.as_ref()), + host_config_memory = option_tostr(d.host_config.memory.as_ref()), + host_config_memory_swap = option_tostr(d.host_config.memory_swap.as_ref()), + host_config_network_mode = d.host_config.network_mode, + host_config_pid_mode = option_tostr(d.host_config.pid_mode.as_ref()), + host_config_port_bindings = { + d.host_config + .port_bindings + .map(|hm| { + let s = hm + .iter() + .map(|(k, v)| { + let v = v + .iter() + .map(|hm| { + hm.iter() + .map(|(k, v)| { + format!( + "{:ind$}{k}: {v}", + "", + ind = 12, + k = k, + v = v + ) + }) + .collect::>() + .join("\n") }) .collect::>() - .join("\n") + .join("\n"); + + format!("{:ind$}{k}: \n{v}", "", ind = 8, k = k, v = v) }) .collect::>() .join("\n"); + format!("\n{s}") + }) + .unwrap_or_else(|| String::from("None")) + }, + host_config_privileged = d.host_config.privileged.to_string(), + host_config_publish_all_ports = d.host_config.publish_all_ports.to_string(), + host_config_readonly_rootfs = option_tostr(d.host_config.readonly_rootfs.as_ref()), + hostname_path = d.hostname_path, + hosts_path = d.hosts_path, + log_path = d.log_path, + id = d.id, + image = d.image, + mount_label = d.mount_label, + name = d.name, + network_settings_bridge = d.network_settings.bridge, + network_settings_gateway = d.network_settings.gateway, + network_settings_ip_address = d.network_settings.ip_address, + network_settings_ip_prefix_len = d.network_settings.ip_prefix_len.to_string(), + network_settings_mac_address = d.network_settings.mac_address, + network_settings_ports = { + d.network_settings + .ports + .map(|hm| { + let s = hm + .iter() + .map(|(k, v)| { + let v = v + .as_ref() + .map(|v| { + v.iter() + .map(|hm| { + let s = hm + .iter() + .map(|(k, v)| { + format!( + "{:ind$}{k}: {v}", + "", + ind = 12, + k = k, + v = v + ) + }) + .collect::>() + .join("\n"); + format!("\n{s}") + }) + .collect::>() + .join("\n") + }) + .unwrap_or_else(|| String::from("None")); - format!("{:ind$}{k}: \n{v}", "", ind = 8, k = k, v = v) + format!("{:ind$}{k}: \n{v}", "", ind = 8, k = k, v = v) + }) + .collect::>() + .join("\n"); + format!("\n{s}") }) - .collect::>() - .join("\n"); - format!("\n{s}") - }) - .unwrap_or_else(|| String::from("None")) - }, - host_config_privileged = 
d.host_config.privileged.to_string(), - host_config_publish_all_ports = d.host_config.publish_all_ports.to_string(), - host_config_readonly_rootfs = option_tostr(d.host_config.readonly_rootfs.as_ref()), - - hostname_path = d.hostname_path, - hosts_path = d.hosts_path, - log_path = d.log_path, - id = d.id, - image = d.image, - mount_label = d.mount_label, - name = d.name, - - network_settings_bridge = d.network_settings.bridge, - network_settings_gateway = d.network_settings.gateway, - network_settings_ip_address = d.network_settings.ip_address, - network_settings_ip_prefix_len = d.network_settings.ip_prefix_len.to_string(), - network_settings_mac_address = d.network_settings.mac_address, - network_settings_ports = { - d.network_settings.ports - .map(|hm| { - let s = hm.iter() + .unwrap_or_else(|| String::from("None")) + }, + network_settings_networks = { + let s = d + .network_settings + .networks + .iter() .map(|(k, v)| { - let v = v.as_ref().map(|v| { - v.iter() - .map(|hm| { - let s = hm.iter() - .map(|(k, v)| format!("{:ind$}{k}: {v}", "", ind = 12, k = k, v = v)) - .collect::>() - .join("\n"); - format!("\n{s}") - }) - .collect::>() - .join("\n") - }).unwrap_or_else(|| String::from("None")); - - format!("{:ind$}{k}: \n{v}", "", ind = 8, k = k, v = v) - }) - .collect::>() - .join("\n"); - format!("\n{s}") - }) - .unwrap_or_else(|| String::from("None")) - }, - network_settings_networks = { - let s = d.network_settings.networks.iter().map(|(k, v)| { - indoc::formatdoc!(r#" + indoc::formatdoc!( + r#" {k}: network_id: {network_id} endpoint_id: {endpoint_id} @@ -423,67 +486,68 @@ async fn inspect(container: Container<'_>) -> Result<()> { global_ipv6_prefix_len: {global_ipv6_prefix_len} mac_address: {mac_address} "#, - k = k, - network_id = v.network_id, - endpoint_id = v.endpoint_id, - gateway = v.gateway, - ip_address = v.ip_address, - ip_prefix_len = v.ip_prefix_len, - ipv6_gateway = v.ipv6_gateway, - global_ipv6_address = v.global_ipv6_address, - global_ipv6_prefix_len = v.global_ipv6_prefix_len.to_string(), - mac_address = v.mac_address, - ) - .lines() - .map(|s| format!("{:ind$}{s}", "", ind = 8, s = s)) - .join("\n") - }) - .collect::>() - .join("\n"); - - format!("\n{s}") - }, - - path = d.path, - process_label = d.process_label, - resolv_conf_path = d.resolv_conf_path, - restart_count = d.restart_count.to_string(), - - state_error = d.state.error, - state_exit_code = d.state.exit_code.to_string(), - state_finished_at = d.state.finished_at.to_string(), - state_oom_killed = d.state.oom_killed.to_string(), - state_paused = d.state.paused.to_string(), - state_pid = d.state.pid.to_string(), - state_restarting = d.state.restarting.to_string(), - state_running = d.state.running.to_string(), - state_started_at = d.state.started_at.to_string(), - state_status = d.state.status, - - mounts = { - let s = d.mounts.iter() - .map(|mount| { - indoc::formatdoc!(r#" + k = k, + network_id = v.network_id, + endpoint_id = v.endpoint_id, + gateway = v.gateway, + ip_address = v.ip_address, + ip_prefix_len = v.ip_prefix_len, + ipv6_gateway = v.ipv6_gateway, + global_ipv6_address = v.global_ipv6_address, + global_ipv6_prefix_len = v.global_ipv6_prefix_len.to_string(), + mac_address = v.mac_address, + ) + .lines() + .map(|s| format!("{:ind$}{s}", "", ind = 8, s = s)) + .join("\n") + }) + .collect::>() + .join("\n"); + + format!("\n{s}") + }, + path = d.path, + process_label = d.process_label, + resolv_conf_path = d.resolv_conf_path, + restart_count = d.restart_count.to_string(), + state_error = 
d.state.error, + state_exit_code = d.state.exit_code.to_string(), + state_finished_at = d.state.finished_at.to_string(), + state_oom_killed = d.state.oom_killed.to_string(), + state_paused = d.state.paused.to_string(), + state_pid = d.state.pid.to_string(), + state_restarting = d.state.restarting.to_string(), + state_running = d.state.running.to_string(), + state_started_at = d.state.started_at.to_string(), + state_status = d.state.status, + mounts = { + let s = d + .mounts + .iter() + .map(|mount| { + indoc::formatdoc!( + r#" source: {source} destination: {destination} mode: {mode} rw: {rw} "#, - source = mount.source, - destination = mount.destination, - mode = mount.mode, - rw = mount.rw.to_string() - ) - .lines() - .map(|s| format!("{:ind$}{s}", "", ind = 4, s = s)) - .join("\n") - }) - .collect::>() - .join("\n"); + source = mount.source, + destination = mount.destination, + mode = mount.mode, + rw = mount.rw.to_string() + ) + .lines() + .map(|s| format!("{:ind$}{s}", "", ind = 4, s = s)) + .join("\n") + }) + .collect::>() + .join("\n"); - format!("\n{s}") - } - )).map_err(Error::from) + format!("\n{s}") + } + ) + ) + .map_err(Error::from) } - diff --git a/src/commands/find_artifact.rs b/src/commands/find_artifact.rs index 700fef8d..b34a01b4 100644 --- a/src/commands/find_artifact.rs +++ b/src/commands/find_artifact.rs @@ -10,32 +10,38 @@ //! Implementation of the 'find-artifact' subcommand -use std::path::PathBuf; +use std::convert::TryFrom; use std::io::Write; +use std::path::PathBuf; use std::sync::Arc; -use std::convert::TryFrom; use anyhow::Context; use anyhow::Error; use anyhow::Result; use clap::ArgMatches; -use diesel::PgConnection; use diesel::r2d2::ConnectionManager; use diesel::r2d2::Pool; +use diesel::PgConnection; use itertools::Itertools; use tracing::{debug, trace}; use crate::config::Configuration; +use crate::filestore::path::StoreRoot; use crate::filestore::ReleaseStore; use crate::filestore::StagingStore; -use crate::filestore::path::StoreRoot; use crate::package::PackageVersionConstraint; use crate::repository::Repository; -use crate::util::progress::ProgressBars; use crate::util::docker::ImageName; +use crate::util::progress::ProgressBars; /// Implementation of the "find_artifact" subcommand -pub async fn find_artifact(matches: &ArgMatches, config: &Configuration, progressbars: ProgressBars, repo: Repository, database_pool: Pool>) -> Result<()> { +pub async fn find_artifact( + matches: &ArgMatches, + config: &Configuration, + progressbars: ProgressBars, + repo: Repository, + database_pool: Pool>, +) -> Result<()> { let package_name_regex = crate::commands::util::mk_package_name_regex({ matches.get_one::("package_name_regex").unwrap() // safe by clap })?; @@ -48,16 +54,25 @@ pub async fn find_artifact(matches: &ArgMatches, config: &Configuration, progres .context("Parsing package version constraint") .context("A valid package version constraint looks like this: '=1.0.0'")?; - let env_filter = matches.get_many::("env_filter") - .map(|vals| vals.map(AsRef::as_ref).map(crate::util::env::parse_to_env).collect::>>()) + let env_filter = matches + .get_many::("env_filter") + .map(|vals| { + vals.map(AsRef::as_ref) + .map(crate::util::env::parse_to_env) + .collect::>>() + }) .transpose()? 
.unwrap_or_default(); - let image_name = matches.get_one::("image") + let image_name = matches + .get_one::("image") .map(|s| s.to_owned()) .map(ImageName::from); - debug!("Finding artifacts for '{:?}' '{:?}'", package_name_regex, package_version_constraint); + debug!( + "Finding artifacts for '{:?}' '{:?}'", + package_name_regex, package_version_constraint + ); let release_stores = config .release_stores() @@ -70,16 +85,19 @@ pub async fn find_artifact(matches: &ArgMatches, config: &Configuration, progres debug!("Loading release directory: {}", p_str); let r = ReleaseStore::load(StoreRoot::new(p.clone())?, &bar_release_loading); if r.is_ok() { - bar_release_loading.finish_with_message(format!("Loaded releases in {p_str} successfully")); + bar_release_loading + .finish_with_message(format!("Loaded releases in {p_str} successfully")); } else { - bar_release_loading.finish_with_message(format!("Failed to load releases in {p_str}")); + bar_release_loading + .finish_with_message(format!("Failed to load releases in {p_str}")); } r.map(Arc::new) }) .collect::>>()?; - let staging_store = if let Some(p) = matches.get_one::("staging_dir").map(PathBuf::from) { + let staging_store = if let Some(p) = matches.get_one::("staging_dir").map(PathBuf::from) + { let bar_staging_loading = progressbars.bar()?; if !p.is_dir() { @@ -121,7 +139,8 @@ pub async fn find_artifact(matches: &ArgMatches, config: &Configuration, progres .build() .run()?; - pathes.iter() + pathes + .iter() .map(|tpl| (tpl.0.joined(), tpl.1)) .sorted_by(|tpla, tplb| { use std::cmp::Ordering; @@ -151,7 +170,8 @@ pub async fn find_artifact(matches: &ArgMatches, config: &Configuration, progres writeln!(std::io::stdout(), "[{}] {}", time, path.display()) } else { writeln!(std::io::stdout(), "[unknown] {}", path.display()) - }.map_err(Error::from) + } + .map_err(Error::from) }) }) .inspect(|r| trace!("Query resulted in: {:?}", r)) diff --git a/src/commands/find_pkg.rs b/src/commands/find_pkg.rs index d272ea88..d5d03ec6 100644 --- a/src/commands/find_pkg.rs +++ b/src/commands/find_pkg.rs @@ -15,9 +15,9 @@ use std::convert::TryFrom; use anyhow::Context; use anyhow::Result; use clap::ArgMatches; -use tracing::trace; use futures::stream::StreamExt; use futures::stream::TryStreamExt; +use tracing::trace; use crate::config::Configuration; use crate::package::PackageVersionConstraint; diff --git a/src/commands/metrics.rs b/src/commands/metrics.rs index 441b8bc1..a610aa56 100644 --- a/src/commands/metrics.rs +++ b/src/commands/metrics.rs @@ -10,16 +10,16 @@ //! 
Implementation of the 'metrics' subcommand

-use std::path::Path;
 use std::io::Write;
+use std::path::Path;

 use anyhow::Error;
 use anyhow::Result;
+use diesel::r2d2::ConnectionManager;
+use diesel::r2d2::Pool;
 use diesel::PgConnection;
 use diesel::QueryDsl;
 use diesel::RunQueryDsl;
-use diesel::r2d2::ConnectionManager;
-use diesel::r2d2::Pool;
 use walkdir::WalkDir;

 use crate::config::Configuration;
@@ -46,16 +46,56 @@ pub async fn metrics(
         })
         .count();

-    let n_artifacts = async { crate::schema::artifacts::table.count().get_result::<i64>(&mut pool.get().unwrap()) };
-    let n_endpoints = async { crate::schema::endpoints::table.count().get_result::<i64>(&mut pool.get().unwrap()) };
-    let n_envvars = async { crate::schema::envvars::table.count().get_result::<i64>(&mut pool.get().unwrap()) };
-    let n_githashes = async { crate::schema::githashes::table.count().get_result::<i64>(&mut pool.get().unwrap()) };
-    let n_images = async { crate::schema::images::table.count().get_result::<i64>(&mut pool.get().unwrap()) };
-    let n_jobs = async { crate::schema::jobs::table.count().get_result::<i64>(&mut pool.get().unwrap()) };
-    let n_packages = async { crate::schema::packages::table.count().get_result::<i64>(&mut pool.get().unwrap()) };
-    let n_releasestores = async { crate::schema::release_stores::table.count().get_result::<i64>(&mut pool.get().unwrap()) };
-    let n_releases = async { crate::schema::releases::table.count().get_result::<i64>(&mut pool.get().unwrap()) };
-    let n_submits = async { crate::schema::submits::table.count().get_result::<i64>(&mut pool.get().unwrap()) };
+    let n_artifacts = async {
+        crate::schema::artifacts::table
+            .count()
+            .get_result::<i64>(&mut pool.get().unwrap())
+    };
+    let n_endpoints = async {
+        crate::schema::endpoints::table
+            .count()
+            .get_result::<i64>(&mut pool.get().unwrap())
+    };
+    let n_envvars = async {
+        crate::schema::envvars::table
+            .count()
+            .get_result::<i64>(&mut pool.get().unwrap())
+    };
+    let n_githashes = async {
+        crate::schema::githashes::table
+            .count()
+            .get_result::<i64>(&mut pool.get().unwrap())
+    };
+    let n_images = async {
+        crate::schema::images::table
+            .count()
+            .get_result::<i64>(&mut pool.get().unwrap())
+    };
+    let n_jobs = async {
+        crate::schema::jobs::table
+            .count()
+            .get_result::<i64>(&mut pool.get().unwrap())
+    };
+    let n_packages = async {
+        crate::schema::packages::table
+            .count()
+            .get_result::<i64>(&mut pool.get().unwrap())
+    };
+    let n_releasestores = async {
+        crate::schema::release_stores::table
+            .count()
+            .get_result::<i64>(&mut pool.get().unwrap())
+    };
+    let n_releases = async {
+        crate::schema::releases::table
+            .count()
+            .get_result::<i64>(&mut pool.get().unwrap())
+    };
+    let n_submits = async {
+        crate::schema::submits::table
+            .count()
+            .get_result::<i64>(&mut pool.get().unwrap())
+    };

     let (
         n_artifacts,
@@ -68,9 +108,24 @@
         n_releasestores,
         n_releases,
         n_submits,
-    ) = tokio::try_join!(n_artifacts, n_endpoints, n_envvars, n_githashes, n_images, n_jobs, n_packages, n_releasestores, n_releases, n_submits)?;
+    ) = tokio::try_join!(
+        n_artifacts,
+        n_endpoints,
+        n_envvars,
+        n_githashes,
+        n_images,
+        n_jobs,
+        n_packages,
+        n_releasestores,
+        n_releases,
+        n_submits
+    )?;

-    write!(out, "{}", indoc::formatdoc!(r#"
+    write!(
+        out,
+        "{}",
+        indoc::formatdoc!(
+            r#"
         Butido release {release}

         {configured_endpoints} Configured endpoints
@@ -92,23 +147,24 @@
         {n_releases} releases in database
         {n_submits} submits in database
         "#,
-        release = clap::crate_version!(),
-        configured_endpoints = config.docker().endpoints().len(),
-        configured_images =
config.docker().images().len(), - configured_release_stores = config.release_stores().len(), - configured_phases = config.available_phases().len(), - nfiles = nfiles, - repo_packages = repo.packages().count(), - n_artifacts = n_artifacts, - n_endpoints = n_endpoints, - n_envvars = n_envvars, - n_githashes = n_githashes, - n_images = n_images, - n_jobs = n_jobs, - n_packages = n_packages, - n_releasestores = n_releasestores, - n_releases = n_releases, - n_submits = n_submits, - )).map_err(Error::from) + release = clap::crate_version!(), + configured_endpoints = config.docker().endpoints().len(), + configured_images = config.docker().images().len(), + configured_release_stores = config.release_stores().len(), + configured_phases = config.available_phases().len(), + nfiles = nfiles, + repo_packages = repo.packages().count(), + n_artifacts = n_artifacts, + n_endpoints = n_endpoints, + n_envvars = n_envvars, + n_githashes = n_githashes, + n_images = n_images, + n_jobs = n_jobs, + n_packages = n_packages, + n_releasestores = n_releasestores, + n_releases = n_releases, + n_submits = n_submits, + ) + ) + .map_err(Error::from) } - diff --git a/src/commands/release.rs b/src/commands/release.rs index 804ba040..69c951eb 100644 --- a/src/commands/release.rs +++ b/src/commands/release.rs @@ -19,9 +19,9 @@ use anyhow::Error; use anyhow::Result; use clap::ArgMatches; use diesel::prelude::*; -use tracing::{debug, error, info, trace}; -use tokio_stream::StreamExt; use resiter::AndThen; +use tokio_stream::StreamExt; +use tracing::{debug, error, info, trace}; use crate::config::Configuration; use crate::db::models as dbmodels; @@ -34,14 +34,13 @@ pub async fn release( matches: &ArgMatches, ) -> Result<()> { match matches.subcommand() { - Some(("new", matches)) => new_release(db_connection_config, config, matches).await, - Some(("rm", matches)) => rm_release(db_connection_config, config, matches).await, + Some(("new", matches)) => new_release(db_connection_config, config, matches).await, + Some(("rm", matches)) => rm_release(db_connection_config, config, matches).await, Some((other, _matches)) => Err(anyhow!("Unknown subcommand: {}", other)), None => Err(anyhow!("Missing subcommand")), } } - async fn new_release( db_connection_config: DbConnectionConfig<'_>, config: &Configuration, @@ -136,16 +135,21 @@ async fn new_release( let staging_base: &PathBuf = &config.staging_directory().join(submit.uuid.to_string()); - let release_store = crate::db::models::ReleaseStore::create(&mut pool.get().unwrap(), release_store_name)?; + let release_store = + crate::db::models::ReleaseStore::create(&mut pool.get().unwrap(), release_store_name)?; let do_update = matches.get_flag("package_do_update"); let interactive = !matches.get_flag("noninteractive"); let now = chrono::offset::Local::now().naive_local(); - let any_err = arts.into_iter() + let any_err = arts + .into_iter() .map(|art| async { let art = art; // ensure it is moved let art_path = staging_base.join(&art.path); - let dest_path = config.releases_directory().join(release_store_name).join(&art.path); + let dest_path = config + .releases_directory() + .join(release_store_name) + .join(&art.path); debug!( "Trying to release {} to {}", art_path.display(), @@ -162,27 +166,51 @@ async fn new_release( if dest_path.exists() && !do_update { return Err(anyhow!("Does already exist: {}", dest_path.display())); } else if dest_path.exists() && do_update { - writeln!(std::io::stderr(), "Going to update: {}", dest_path.display())?; - if interactive && 
!dialoguer::Confirm::new().with_prompt("Continue?").interact()? { - return Err(anyhow!("Does already exist: {} and update was denied", dest_path.display())); + writeln!( + std::io::stderr(), + "Going to update: {}", + dest_path.display() + )?; + if interactive + && !dialoguer::Confirm::new() + .with_prompt("Continue?") + .interact()? + { + return Err(anyhow!( + "Does already exist: {} and update was denied", + dest_path.display() + )); } } if dest_path.exists() { - debug!("Removing {} before writing new file to this path", dest_path.display()); - tokio::fs::remove_file(&dest_path) - .await - .with_context(|| anyhow!("Removing {} before writing new file to this path", dest_path.display()))?; + debug!( + "Removing {} before writing new file to this path", + dest_path.display() + ); + tokio::fs::remove_file(&dest_path).await.with_context(|| { + anyhow!( + "Removing {} before writing new file to this path", + dest_path.display() + ) + })?; } // else !dest_path.exists() tokio::fs::copy(&art_path, &dest_path) .await - .with_context(|| anyhow!("Copying {} to {}", art_path.display(), dest_path.display())) + .with_context(|| { + anyhow!("Copying {} to {}", art_path.display(), dest_path.display()) + }) .map_err(Error::from) .and_then(|_| { debug!("Updating {:?} to set released = true", art); - let rel = crate::db::models::Release::create(&mut pool.get().unwrap(), &art, &now, &release_store)?; + let rel = crate::db::models::Release::create( + &mut pool.get().unwrap(), + &art, + &now, + &release_store, + )?; debug!("Release object = {:?}", rel); Ok(dest_path) }) @@ -224,7 +252,10 @@ pub async fn rm_release( )); } if !config.release_stores().contains(release_store_name) { - return Err(anyhow!("Unknown release store name: {}", release_store_name)) + return Err(anyhow!( + "Unknown release store name: {}", + release_store_name + )); } let pname = matches.get_one::("package_name").unwrap(); // safe by clap @@ -233,29 +264,54 @@ pub async fn rm_release( let mut conn = db_connection_config.establish_connection()?; - let (release, artifact) = crate::schema::jobs::table - .inner_join(crate::schema::packages::table) - .inner_join(crate::schema::artifacts::table) - .inner_join(crate::schema::releases::table - .on(crate::schema::releases::artifact_id.eq(crate::schema::artifacts::id))) - .inner_join(crate::schema::release_stores::table - .on(crate::schema::release_stores::id.eq(crate::schema::releases::release_store_id))) - .filter(crate::schema::packages::dsl::name.eq(&pname) - .and(crate::schema::packages::dsl::version.eq(&pvers))) - .filter(crate::schema::release_stores::dsl::store_name.eq(&release_store_name)) - .order(crate::schema::releases::dsl::release_date.desc()) - .select((crate::schema::releases::all_columns, crate::schema::artifacts::all_columns)) - .first::<(crate::db::models::Release, crate::db::models::Artifact)>(&mut conn)?; - - let artifact_path = config.releases_directory().join(release_store_name).join(&artifact.path); + let (release, artifact) = + crate::schema::jobs::table + .inner_join(crate::schema::packages::table) + .inner_join(crate::schema::artifacts::table) + .inner_join( + crate::schema::releases::table + .on(crate::schema::releases::artifact_id.eq(crate::schema::artifacts::id)), + ) + .inner_join(crate::schema::release_stores::table.on( + crate::schema::release_stores::id.eq(crate::schema::releases::release_store_id), + )) + .filter( + crate::schema::packages::dsl::name + .eq(&pname) + .and(crate::schema::packages::dsl::version.eq(&pvers)), + ) + 
.filter(crate::schema::release_stores::dsl::store_name.eq(&release_store_name)) + .order(crate::schema::releases::dsl::release_date.desc()) + .select(( + crate::schema::releases::all_columns, + crate::schema::artifacts::all_columns, + )) + .first::<(crate::db::models::Release, crate::db::models::Artifact)>(&mut conn)?; + + let artifact_path = config + .releases_directory() + .join(release_store_name) + .join(&artifact.path); if !artifact_path.is_file() { - return Err(anyhow!("Not a file: {}", artifact_path.display())) + return Err(anyhow!("Not a file: {}", artifact_path.display())); } - writeln!(std::io::stderr(), "Going to delete: {}", artifact_path.display())?; - writeln!(std::io::stderr(), "Going to remove from database: Release with ID {} from {}", release.id, release.release_date)?; - if !dialoguer::Confirm::new().with_prompt("Continue?").interact()? { - return Ok(()) + writeln!( + std::io::stderr(), + "Going to delete: {}", + artifact_path.display() + )?; + writeln!( + std::io::stderr(), + "Going to remove from database: Release with ID {} from {}", + release.id, + release.release_date + )?; + if !dialoguer::Confirm::new() + .with_prompt("Continue?") + .interact()? + { + return Ok(()); } tokio::fs::remove_file(&artifact_path).await?; @@ -266,4 +322,3 @@ pub async fn rm_release( Ok(()) } - diff --git a/src/commands/source/download.rs b/src/commands/source/download.rs index 9fbb5aa8..ca211e01 100644 --- a/src/commands/source/download.rs +++ b/src/commands/source/download.rs @@ -12,15 +12,15 @@ use std::convert::TryFrom; use std::path::PathBuf; use std::sync::Arc; +use anyhow::anyhow; use anyhow::Context; use anyhow::Error; use anyhow::Result; -use anyhow::anyhow; use clap::ArgMatches; -use tracing::{debug, trace}; use tokio::io::AsyncWriteExt; use tokio::sync::Mutex; use tokio_stream::StreamExt; +use tracing::{debug, trace}; use crate::config::*; use crate::package::PackageName; @@ -56,7 +56,7 @@ impl ProgressWrapper { finished_downloads: 0, current_bytes: 0, sum_bytes: 0, - bar: Arc::new(Mutex::new(bar)) + bar: Arc::new(Mutex::new(bar)), } } @@ -94,16 +94,26 @@ impl ProgressWrapper { async fn success(&self) { let bar = self.bar.lock().await; - bar.finish_with_message(format!("Succeeded {}/{} downloads", self.finished_downloads, self.download_count)); + bar.finish_with_message(format!( + "Succeeded {}/{} downloads", + self.finished_downloads, self.download_count + )); } async fn error(&self) { let bar = self.bar.lock().await; - bar.finish_with_message(format!("At least one download of {} failed", self.download_count)); + bar.finish_with_message(format!( + "At least one download of {} failed", + self.download_count + )); } } -async fn perform_download(source: &SourceEntry, progress: Arc>, timeout: Option) -> Result<()> { +async fn perform_download( + source: &SourceEntry, + progress: Arc>, + timeout: Option, +) -> Result<()> { trace!("Creating: {:?}", source); let file = source.create().await.with_context(|| { anyhow!( @@ -113,8 +123,8 @@ async fn perform_download(source: &SourceEntry, progress: Arc resp, - Err(e) => { - return Err(e).with_context(|| anyhow!("Downloading '{}'", source.url())) - } + Err(e) => return Err(e).with_context(|| anyhow!("Downloading '{}'", source.url())), }; - progress.lock() + progress + .lock() .await .inc_download_bytes(response.content_length().unwrap_or(0)) .await; @@ -143,25 +155,15 @@ async fn perform_download(source: &SourceEntry, progress: Arc Result<()> { let force = matches.get_flag("force"); - let timeout = matches.get_one::("timeout") + let timeout 
= matches + .get_one::("timeout") .map(|s| s.parse::()) .transpose() .context("Parsing timeout argument to integer")?; @@ -186,13 +189,16 @@ pub async fn download( .map(PackageVersionConstraint::try_from) .transpose()?; - let matching_regexp = matches.get_one::("matching") + let matching_regexp = matches + .get_one::("matching") .map(|s| crate::commands::util::mk_package_name_regex(s.as_ref())) .transpose()?; let progressbar = Arc::new(Mutex::new(ProgressWrapper::new(progressbars.bar()?))); - let download_sema = Arc::new(tokio::sync::Semaphore::new(NUMBER_OF_MAX_CONCURRENT_DOWNLOADS)); + let download_sema = Arc::new(tokio::sync::Semaphore::new( + NUMBER_OF_MAX_CONCURRENT_DOWNLOADS, + )); let mut r = repo.packages() .filter(|p| { @@ -208,24 +214,25 @@ pub async fn download( } }).peekable(); - // check if the iterator is empty - if r.peek().is_none() { - let pname = matches.get_one::("package_name"); - let pvers = matches.get_one::("package_version"); - let matching_regexp = matches.get_one::("matching"); + // check if the iterator is empty + if r.peek().is_none() { + let pname = matches.get_one::("package_name"); + let pvers = matches.get_one::("package_version"); + let matching_regexp = matches.get_one::("matching"); - match (pname, pvers, matching_regexp) { - (Some(pname), None, None) => return Err(anyhow!("{} not found", pname)), - (Some(pname), Some(vers), None) => return Err(anyhow!("{} {} not found", pname, vers)), - (None, None, Some(regex)) => return Err(anyhow!("{} regex not found", regex)), + match (pname, pvers, matching_regexp) { + (Some(pname), None, None) => return Err(anyhow!("{} not found", pname)), + (Some(pname), Some(vers), None) => return Err(anyhow!("{} {} not found", pname, vers)), + (None, None, Some(regex)) => return Err(anyhow!("{} regex not found", regex)), - (_, _, _) => { - panic!("This should not be possible, either we select packages by name and (optionally) version, or by regex.") - }, + (_, _, _) => { + panic!("This should not be possible, either we select packages by name and (optionally) version, or by regex.") } } + } - let r = r.flat_map(|p| { + let r = r + .flat_map(|p| { sc.sources_for(p).into_iter().map(|source| { let download_sema = download_sema.clone(); let progressbar = progressbar.clone(); @@ -243,7 +250,9 @@ pub async fn download( if source_path_exists && !force { Err(anyhow!("Source exists: {}", source.path().display())) } else { - if source_path_exists /* && force is implied by 'if' above*/ { + if source_path_exists + /* && force is implied by 'if' above*/ + { source.remove_file().await?; } @@ -274,4 +283,3 @@ pub async fn download( debug!("r = {:?}", r); r } - diff --git a/src/commands/source/mod.rs b/src/commands/source/mod.rs index 775207e2..0e2eb3a5 100644 --- a/src/commands/source/mod.rs +++ b/src/commands/source/mod.rs @@ -14,14 +14,14 @@ use std::convert::TryFrom; use std::io::Write; use std::path::PathBuf; +use anyhow::anyhow; use anyhow::Context; use anyhow::Error; use anyhow::Result; -use anyhow::anyhow; use clap::ArgMatches; use colored::Colorize; -use tracing::{info, trace}; use tokio_stream::StreamExt; +use tracing::{info, trace}; use crate::config::*; use crate::package::Package; @@ -44,7 +44,9 @@ pub async fn source( Some(("verify", matches)) => verify(matches, config, repo, progressbars).await, Some(("list-missing", matches)) => list_missing(matches, config, repo).await, Some(("url", matches)) => url(matches, repo).await, - Some(("download", matches)) => crate::commands::source::download::download(matches, config, repo, 
progressbars).await,
+        Some(("download", matches)) => {
+            crate::commands::source::download::download(matches, config, repo, progressbars).await
+        }
         Some(("of", matches)) => of(matches, config, repo).await,
         Some((other, _)) => Err(anyhow!("Unknown subcommand: {}", other)),
         None => Err(anyhow!("No subcommand")),
@@ -68,7 +70,8 @@ pub async fn verify(
         .map(PackageVersionConstraint::try_from)
         .transpose()?;

-    let matching_regexp = matches.get_one::<String>("matching")
+    let matching_regexp = matches
+        .get_one::<String>("matching")
         .map(|s| crate::commands::util::mk_package_name_regex(s.as_ref()))
         .transpose()?;

@@ -107,7 +110,8 @@ where
     bar.set_message("Verifying sources");
     bar.set_length(sources.len() as u64);

-    let results = sources.into_iter()
+    let results = sources
+        .into_iter()
         .map(|src| (bar.clone(), src))
         .map(|(bar, source)| async move {
             trace!("Verifying: {}", source.path().display());
@@ -219,11 +223,7 @@ pub async fn url(matches: &ArgMatches, repo: Repository) -> Result<()> {
         })
 }

-async fn of(
-    matches: &ArgMatches,
-    config: &Configuration,
-    repo: Repository,
-) -> Result<()> {
+async fn of(matches: &ArgMatches, config: &Configuration, repo: Repository) -> Result<()> {
     let cache = PathBuf::from(config.source_cache_root());
     let sc = SourceCache::new(cache);
     let pname = matches
@@ -245,7 +245,8 @@ async fn of(
                 .unwrap_or(true)
         })
         .map(|p| {
-            let pathes = sc.sources_for(p)
+            let pathes = sc
+                .sources_for(p)
                 .into_iter()
                 .map(|source| source.path())
                 .collect::<Vec<_>>();
diff --git a/src/commands/tree_of.rs b/src/commands/tree_of.rs
index 5f4c77c9..53e67780 100644
--- a/src/commands/tree_of.rs
+++ b/src/commands/tree_of.rs
@@ -17,19 +17,16 @@
 use anyhow::Result;
 use clap::ArgMatches;
 use resiter::AndThen;

+use crate::package::condition::ConditionData;
 use crate::package::Dag;
 use crate::package::PackageName;
 use crate::package::PackageVersionConstraint;
-use crate::package::condition::ConditionData;
 use crate::repository::Repository;
-use crate::util::EnvironmentVariableName;
 use crate::util::docker::ImageName;
+use crate::util::EnvironmentVariableName;

 /// Implementation of the "tree_of" subcommand
-pub async fn tree_of(
-    matches: &ArgMatches,
-    repo: Repository,
-) -> Result<()> {
+pub async fn tree_of(matches: &ArgMatches, repo: Repository) -> Result<()> {
     let pname = matches
         .get_one::<String>("package_name")
         .map(|s| s.to_owned())
diff --git a/src/commands/util.rs b/src/commands/util.rs
index 918cd19d..4e2a0488 100644
--- a/src/commands/util.rs
+++ b/src/commands/util.rs
@@ -10,19 +10,19 @@
 //! Utility module for subcommand implementation helpers

-use std::io::Write;
 use std::fmt::Display;
+use std::io::Write;
 use std::path::Path;

+use anyhow::anyhow;
 use anyhow::Context;
 use anyhow::Error;
 use anyhow::Result;
-use anyhow::anyhow;
 use clap::ArgMatches;
 use itertools::Itertools;
-use tracing::{error, info, trace};
 use regex::Regex;
 use tokio_stream::StreamExt;
+use tracing::{error, info, trace};

 use crate::config::*;
 use crate::package::Package;
@@ -183,7 +183,7 @@ pub fn display_data(
     csv: bool,
 ) -> Result<()> {
     if data.is_empty() {
-        return Ok(())
+        return Ok(());
     }

     if csv {
@@ -204,9 +204,10 @@
             .and_then(|text| writeln!(lock, "{text}").map_err(Error::from))
     } else if atty::is(atty::Stream::Stdout) {
         let mut ascii_table = ascii_table::AsciiTable::default();
-        ascii_table.set_max_width(terminal_size::terminal_size()
-            .map(|tpl| tpl.0 .0 as usize) // an ugly interface indeed!
- .unwrap_or(80) + ascii_table.set_max_width( + terminal_size::terminal_size() + .map(|tpl| tpl.0 .0 as usize) // an ugly interface indeed! + .unwrap_or(80), ); headers.into_iter().enumerate().for_each(|(i, c)| { @@ -225,8 +226,12 @@ pub fn display_data( } } -pub fn get_date_filter(name: &str, matches: &ArgMatches) -> Result>> { - matches.get_one::(name) +pub fn get_date_filter( + name: &str, + matches: &ArgMatches, +) -> Result>> { + matches + .get_one::(name) .map(|s| { trace!("Parsing duration: '{}'", s); humantime::parse_duration(s) @@ -257,4 +262,3 @@ pub fn get_date_filter(name: &str, matches: &ArgMatches) -> Result { impl<'a> std::fmt::Debug for DbConnectionConfig<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { - write!(f, "postgres://{user}:PASSWORD@{host}:{port}/{name}?connect_timeout={timeout}", + write!( + f, + "postgres://{user}:PASSWORD@{host}:{port}/{name}?connect_timeout={timeout}", host = self.database_host, port = self.database_port, user = self.database_user, @@ -56,21 +58,29 @@ impl<'a> std::fmt::Debug for DbConnectionConfig<'a> { impl<'a> DbConnectionConfig<'a> { pub fn parse(config: &'a Configuration, cli: &'a ArgMatches) -> Result> { Ok(DbConnectionConfig { - database_host: cli.get_one::("database_host").unwrap_or_else(|| config.database_host()), + database_host: cli + .get_one::("database_host") + .unwrap_or_else(|| config.database_host()), database_port: { cli.get_one::("database_port") .map(|s| s.parse::()) .transpose()? .unwrap_or_else(|| *config.database_port()) }, - database_user: cli.get_one::("database_user").unwrap_or_else(|| config.database_user()), - database_password: cli.get_one::("database_password").unwrap_or_else(|| config.database_password()), - database_name: cli.get_one::("database_name").unwrap_or_else(|| config.database_name()), + database_user: cli + .get_one::("database_user") + .unwrap_or_else(|| config.database_user()), + database_password: cli + .get_one::("database_password") + .unwrap_or_else(|| config.database_password()), + database_name: cli + .get_one::("database_name") + .unwrap_or_else(|| config.database_name()), database_connection_timeout: { cli.get_one::("database_connection_timeout") .map(|s| s.parse::()) .transpose()? 
- .unwrap_or_else( || { + .unwrap_or_else(|| { // hardcoded default of 30 seconds database timeout config.database_connection_timeout().unwrap_or(30) }) @@ -96,13 +106,14 @@ impl<'a> DbConnectionConfig<'a> { } pub fn establish_pool(self) -> Result>> { - debug!("Trying to create a connection pool for database: {:?}", self); + debug!( + "Trying to create a connection pool for database: {:?}", + self + ); let manager = ConnectionManager::::new(self.get_database_uri()); Pool::builder() .min_idle(Some(1)) .build(manager) .map_err(Error::from) } - } - diff --git a/src/db/find_artifacts.rs b/src/db/find_artifacts.rs index f421d345..433570d6 100644 --- a/src/db/find_artifacts.rs +++ b/src/db/find_artifacts.rs @@ -14,17 +14,17 @@ use std::sync::Arc; use anyhow::Result; use chrono::NaiveDateTime; +use diesel::r2d2::ConnectionManager; +use diesel::r2d2::Pool; use diesel::BoolExpressionMethods; use diesel::ExpressionMethods; use diesel::JoinOnDsl; use diesel::PgConnection; use diesel::QueryDsl; use diesel::RunQueryDsl; -use diesel::r2d2::ConnectionManager; -use diesel::r2d2::Pool; -use tracing::{debug, trace}; use resiter::AndThen; use resiter::FilterMap; +use tracing::{debug, trace}; use crate::config::Configuration; use crate::db::models as dbmodels; @@ -36,8 +36,8 @@ use crate::package::Package; use crate::package::ScriptBuilder; use crate::package::Shebang; use crate::schema; -use crate::util::EnvironmentVariableName; use crate::util::docker::ImageName; +use crate::util::EnvironmentVariableName; /// Find an artifact by a job description /// @@ -99,23 +99,25 @@ impl<'a> FindArtifacts<'a> { let mut query = schema::packages::table .filter({ // The package with pkg.name() and pkg.version() - let package_name_filter = schema::packages::name.eq(self.package.name().as_ref() as &str); + let package_name_filter = + schema::packages::name.eq(self.package.name().as_ref() as &str); let package_version_filter = schema::packages::version.eq(self.package.version().as_ref() as &str); package_name_filter.and(package_version_filter) }) - // TODO: Only select from submits where the submit contained jobs that are in the // dependencies of `pkg`. 
.inner_join(schema::jobs::table.inner_join(schema::submits::table)) .inner_join(schema::artifacts::table.on(schema::jobs::id.eq(schema::artifacts::job_id))) - // TODO: We do not yet have a method to "left join" properly, because diesel only has // left_outer_join (left_join is an alias) // So do not include release dates here, for now //.left_outer_join(schema::releases::table.on(schema::releases::artifact_id.eq(schema::artifacts::id))) - .inner_join(schema::images::table.on(schema::submits::requested_image_id.eq(schema::images::id))) + .inner_join( + schema::images::table + .on(schema::submits::requested_image_id.eq(schema::images::id)), + ) .into_boxed(); if let Some(allowed_images) = self.package.allowed_images() { @@ -179,12 +181,14 @@ impl<'a> FindArtifacts<'a> { .collect(); trace!("The job we found had env: {:?}", job_env); - let envs_equal = environments_equal(&job_env, package_environment.as_ref(), self.env_filter); + let envs_equal = + environments_equal(&job_env, package_environment.as_ref(), self.env_filter); trace!("environments where equal = {}", envs_equal); Ok((tpl.0, envs_equal)) }) - .filter(|r| match r { // the actual filtering from above - Err(_) => true, + .filter(|r| match r { + // the actual filtering from above + Err(_) => true, Ok((_, bl)) => *bl, }) .and_then_ok(|(art, _)| { @@ -204,7 +208,7 @@ impl<'a> FindArtifacts<'a> { ); if let Some(art) = staging.get(&artpath) { trace!("Found in staging: {:?}", art); - return staging.root_path().join(art).map(|p| p.map(|p| (p, ndt))) + return staging.root_path().join(art).map(|p| p.map(|p| (p, ndt))); } } @@ -214,11 +218,17 @@ impl<'a> FindArtifacts<'a> { for release_store in self.release_stores { if let Some(art) = release_store.get(&artpath) { trace!("Found in release: {:?}", art); - return release_store.root_path().join(art).map(|p| p.map(|p| (p, ndt))) + return release_store + .root_path() + .join(art) + .map(|p| p.map(|p| (p, ndt))); } } - trace!("Found no release for artifact {:?} in any release store", artpath.display()); + trace!( + "Found no release for artifact {:?} in any release store", + artpath.display() + ); Ok(None) }) .filter_map_ok(|opt| opt) @@ -226,46 +236,56 @@ impl<'a> FindArtifacts<'a> { } } - -fn environments_equal(job_env: &[(String, String)], pkg_env: Option<&HashMap>, add_env: &[(EnvironmentVariableName, String)]) -> bool { +fn environments_equal( + job_env: &[(String, String)], + pkg_env: Option<&HashMap>, + add_env: &[(EnvironmentVariableName, String)], +) -> bool { use std::ops::Deref; - let job_envs_all_found = || job_env.iter() - .map(|(key, value)| (EnvironmentVariableName::from(key.deref()), value)) - .all(|(key, value)| { + let job_envs_all_found = || { + job_env + .iter() + .map(|(key, value)| (EnvironmentVariableName::from(key.deref()), value)) + .all(|(key, value)| { + // check whether pair is in pkg_env + let is_in_pkg_env = || { + pkg_env + .as_ref() + .map(|hm| { + if let Some(val) = hm.get(&key) { + value == val + } else { + false + } + }) + .unwrap_or(false) + }; + + // check whether pair is in add_env + let is_in_add_env = || add_env.iter().any(|(k, v)| *k == key && v == value); + + let r = is_in_pkg_env() || is_in_add_env(); + trace!("Job Env ({}, {}) found: {}", key, value, r); + r + }) + }; - // check whether pair is in pkg_env - let is_in_pkg_env = || pkg_env.as_ref() - .map(|hm| { - if let Some(val) = hm.get(&key) { - value == val - } else { - false - } + let pkg_envs_all_found = || { + pkg_env + .map(|hm| { + hm.iter().all(|(k, v)| { + 
job_env.contains(&(k.as_ref().to_string(), v.to_string())) // TODO: do not allocate }) - .unwrap_or(false); - - // check whether pair is in add_env - let is_in_add_env = || add_env.iter().any(|(k, v)| *k == key && v == value); - - let r = is_in_pkg_env() || is_in_add_env(); - trace!("Job Env ({}, {}) found: {}", key, value, r); - r - }); - - let pkg_envs_all_found = || pkg_env.map(|hm| { - hm.iter() - .all(|(k, v)| { - job_env.contains(&(k.as_ref().to_string(), v.to_string())) // TODO: do not allocate }) - }) - .unwrap_or(true); + .unwrap_or(true) + }; - let add_envs_all_found = || add_env.iter() - .all(|(k, v)| { + let add_envs_all_found = || { + add_env.iter().all(|(k, v)| { job_env.contains(&(k.as_ref().to_string(), v.to_string())) // TODO: do not allocate - }); + }) + }; job_envs_all_found() && pkg_envs_all_found() && add_envs_all_found() } - diff --git a/src/db/models/endpoint.rs b/src/db/models/endpoint.rs index 9da34be8..4a26cb73 100644 --- a/src/db/models/endpoint.rs +++ b/src/db/models/endpoint.rs @@ -30,8 +30,13 @@ struct NewEndpoint<'a> { } impl Endpoint { - pub fn create_or_fetch(database_connection: &mut PgConnection, ep_name: &EndpointName) -> Result { - let new_ep = NewEndpoint { name: ep_name.as_ref() }; + pub fn create_or_fetch( + database_connection: &mut PgConnection, + ep_name: &EndpointName, + ) -> Result { + let new_ep = NewEndpoint { + name: ep_name.as_ref(), + }; database_connection.transaction::<_, Error, _>(|conn| { diesel::insert_into(endpoints::table) @@ -46,12 +51,21 @@ impl Endpoint { }) } - pub fn fetch_for_job(database_connection: &mut PgConnection, j: &crate::db::models::Job) -> Result> { + pub fn fetch_for_job( + database_connection: &mut PgConnection, + j: &crate::db::models::Job, + ) -> Result> { Self::fetch_by_id(database_connection, j.endpoint_id) } - pub fn fetch_by_id(database_connection: &mut PgConnection, eid: i32) -> Result> { - match dsl::endpoints.filter(id.eq(eid)).first::(database_connection) { + pub fn fetch_by_id( + database_connection: &mut PgConnection, + eid: i32, + ) -> Result> { + match dsl::endpoints + .filter(id.eq(eid)) + .first::(database_connection) + { Err(diesel::result::Error::NotFound) => Ok(None), Err(e) => Err(Error::from(e)), Ok(e) => Ok(Some(e)), diff --git a/src/db/models/githash.rs b/src/db/models/githash.rs index 77b9dfd0..d0dc43bb 100644 --- a/src/db/models/githash.rs +++ b/src/db/models/githash.rs @@ -30,7 +30,10 @@ struct NewGitHash<'a> { } impl GitHash { - pub fn create_or_fetch(database_connection: &mut PgConnection, githash: &str) -> Result { + pub fn create_or_fetch( + database_connection: &mut PgConnection, + githash: &str, + ) -> Result { let new_hash = NewGitHash { hash: githash }; database_connection.transaction::<_, Error, _>(|conn| { diff --git a/src/db/models/image.rs b/src/db/models/image.rs index 35d4bce3..a701e6c5 100644 --- a/src/db/models/image.rs +++ b/src/db/models/image.rs @@ -51,12 +51,18 @@ impl Image { }) } - pub fn fetch_for_job(database_connection: &mut PgConnection, j: &crate::db::models::Job) -> Result> { + pub fn fetch_for_job( + database_connection: &mut PgConnection, + j: &crate::db::models::Job, + ) -> Result> { Self::fetch_by_id(database_connection, j.image_id) } pub fn fetch_by_id(database_connection: &mut PgConnection, iid: i32) -> Result> { - match dsl::images.filter(id.eq(iid)).first::(database_connection) { + match dsl::images + .filter(id.eq(iid)) + .first::(database_connection) + { Err(diesel::result::Error::NotFound) => Ok(None), Err(e) => Err(Error::from(e)), Ok(i) => 
Ok(Some(i)),
diff --git a/src/db/models/job.rs b/src/db/models/job.rs
index 1f7f15bf..21d4c9be 100644
--- a/src/db/models/job.rs
+++ b/src/db/models/job.rs
@@ -8,8 +8,8 @@
 // SPDX-License-Identifier: EPL-2.0
 //

-use anyhow::Error;
 use anyhow::Context;
+use anyhow::Error;
 use anyhow::Result;
 use diesel::prelude::*;
 use diesel::PgConnection;
@@ -81,12 +81,13 @@ impl Job {
             .values(&new_job)
             .on_conflict_do_nothing();

-        trace!("Query = {}", diesel::debug_query::<diesel::pg::Pg, _>(&query));
+        trace!(
+            "Query = {}",
+            diesel::debug_query::<diesel::pg::Pg, _>(&query)
+        );

         database_connection.transaction::<_, Error, _>(|conn| {
-            query
-                .execute(conn)
-                .context("Creating job in database")?;
+            query.execute(conn).context("Creating job in database")?;

             dsl::jobs
                 .filter(uuid.eq(job_uuid))
@@ -96,7 +97,10 @@
         })
     }

-    pub fn env(&self, database_connection: &mut PgConnection) -> Result<Vec<EnvVar>> {
+    pub fn env(
+        &self,
+        database_connection: &mut PgConnection,
+    ) -> Result<Vec<EnvVar>> {
         use crate::schema;

         schema::job_envs::table
diff --git a/src/db/models/package.rs b/src/db/models/package.rs
index b029200d..4a1d6921 100644
--- a/src/db/models/package.rs
+++ b/src/db/models/package.rs
@@ -60,12 +60,21 @@ impl Package {
         })
     }

-    pub fn fetch_for_job(database_connection: &mut PgConnection, j: &crate::db::models::Job) -> Result<Option<Package>> {
+    pub fn fetch_for_job(
+        database_connection: &mut PgConnection,
+        j: &crate::db::models::Job,
+    ) -> Result<Option<Package>> {
         Self::fetch_by_id(database_connection, j.package_id)
     }

-    pub fn fetch_by_id(database_connection: &mut PgConnection, pid: i32) -> Result<Option<Package>> {
-        match dsl::packages.filter(id.eq(pid)).first::<Package>(database_connection) {
+    pub fn fetch_by_id(
+        database_connection: &mut PgConnection,
+        pid: i32,
+    ) -> Result<Option<Package>> {
+        match dsl::packages
+            .filter(id.eq(pid))
+            .first::<Package>(database_connection)
+        {
             Err(diesel::result::Error::NotFound) => Ok(None),
             Err(e) => Err(Error::from(e)),
             Ok(p) => Ok(Some(p)),
diff --git a/src/db/models/release_store.rs b/src/db/models/release_store.rs
index 3df33a2c..f64e31de 100644
--- a/src/db/models/release_store.rs
+++ b/src/db/models/release_store.rs
@@ -16,8 +16,8 @@
 use diesel::PgConnection;
 use diesel::QueryDsl;
 use diesel::RunQueryDsl;

-use crate::schema::release_stores;
 use crate::schema;
+use crate::schema::release_stores;

 #[derive(Debug, Identifiable, Queryable)]
 #[diesel(table_name = release_stores)]
@@ -29,14 +29,12 @@ pub struct ReleaseStore {
 #[derive(Insertable)]
 #[diesel(table_name = release_stores)]
 struct NewReleaseStore<'a> {
-    pub store_name : &'a str,
+    pub store_name: &'a str,
 }

 impl ReleaseStore {
     pub fn create(database_connection: &mut PgConnection, name: &str) -> Result<ReleaseStore> {
-        let new_relstore = NewReleaseStore {
-            store_name: name,
-        };
+        let new_relstore = NewReleaseStore { store_name: name };

         database_connection.transaction::<_, Error, _>(|conn| {
             diesel::insert_into(schema::release_stores::table)
@@ -51,4 +49,3 @@
         })
     }
 }
-
diff --git a/src/db/models/submit.rs b/src/db/models/submit.rs
index 4d5aff63..101bb405 100644
--- a/src/db/models/submit.rs
+++ b/src/db/models/submit.rs
@@ -64,10 +64,8 @@ impl Submit {
         database_connection.transaction::<_, Error, _>(|conn| {
             diesel::insert_into(submits::table)
                 .values(&new_submit)
-
                 // required because if we re-use the staging store, we do not create a new UUID but re-use the old one
                 .on_conflict_do_nothing()
-
                 .execute(conn)
                 .context("Inserting new submit into submits table")?;

@@ -75,7 +73,10 @@
         })
     }

-    pub fn with_id(database_connection: &mut PgConnection, submit_id: &::uuid::Uuid) -> Result<Submit> {
+    pub fn
with_id( + database_connection: &mut PgConnection, + submit_id: &::uuid::Uuid, + ) -> Result { dsl::submits .filter(submits::uuid.eq(submit_id)) .first::(database_connection) diff --git a/src/endpoint/configured.rs b/src/endpoint/configured.rs index 7f725c59..16fa0189 100644 --- a/src/endpoint/configured.rs +++ b/src/endpoint/configured.rs @@ -13,31 +13,31 @@ use std::path::PathBuf; use std::str::FromStr; use std::sync::Arc; +use anyhow::anyhow; use anyhow::Context; use anyhow::Error; use anyhow::Result; -use anyhow::anyhow; use futures::FutureExt; use getset::{CopyGetters, Getters}; -use tracing::{trace, debug}; use result_inspect::ResultInspect; use shiplift::Container; use shiplift::Docker; use shiplift::ExecContainerOptions; -use tokio::sync::RwLock; use tokio::sync::mpsc::UnboundedSender; +use tokio::sync::RwLock; use tokio_stream::StreamExt; +use tracing::{debug, trace}; use typed_builder::TypedBuilder; use crate::config::EndpointName; use crate::endpoint::EndpointConfiguration; +use crate::filestore::path::ArtifactPath; use crate::filestore::ReleaseStore; use crate::filestore::StagingStore; -use crate::filestore::path::ArtifactPath; use crate::job::JobResource; use crate::job::RunnableJob; -use crate::log::LogItem; use crate::log::buffer_stream_to_line_stream; +use crate::log::LogItem; use crate::package::Script; use crate::util::docker::ContainerHash; use crate::util::docker::ImageName; @@ -71,13 +71,14 @@ impl Debug for Endpoint { impl Endpoint { pub(super) async fn setup(epc: EndpointConfiguration) -> Result { - let ep = Endpoint::setup_endpoint(epc.endpoint_name(), epc.endpoint()).with_context(|| { - anyhow!( - "Setting up endpoint: {} -> {}", - epc.endpoint_name(), - epc.endpoint().uri() - ) - })?; + let ep = + Endpoint::setup_endpoint(epc.endpoint_name(), epc.endpoint()).with_context(|| { + anyhow!( + "Setting up endpoint: {} -> {}", + epc.endpoint_name(), + epc.endpoint().uri() + ) + })?; let versions_compat = Endpoint::check_version_compat(epc.required_docker_versions().as_ref(), &ep); @@ -245,7 +246,12 @@ impl Endpoint { pub fn utilization(&self) -> f64 { let max_jobs = self.num_max_jobs() as f64; let run_jobs = self.running_jobs() as f64; - trace!("utilization of {}: 100.0 / {} * {}", self.name(), max_jobs, run_jobs); + trace!( + "utilization of {}: 100.0 / {} * {}", + self.name(), + max_jobs, + run_jobs + ); 100.0 / max_jobs * run_jobs } @@ -267,17 +273,12 @@ impl Endpoint { .containers() .list({ &shiplift::builder::ContainerListOptions::builder() - .all() - .build() + .all() + .build() }) .await .map_err(Error::from) - .map(|containers| { - containers - .into_iter() - .map(ContainerStat::from) - .collect() - }) + .map(|containers| containers.into_iter().map(ContainerStat::from).collect()) } pub async fn has_container_with_id(&self, id: &str) -> Result { @@ -401,7 +402,9 @@ pub struct EndpointHandle(Arc); impl EndpointHandle { pub fn new(ep: Arc) -> Self { - let res = ep.running_jobs.fetch_add(1, std::sync::atomic::Ordering::Relaxed); + let res = ep + .running_jobs + .fetch_add(1, std::sync::atomic::Ordering::Relaxed); trace!("Endpoint {} has one job more: {}", ep.name(), res + 1); EndpointHandle(ep) } @@ -409,7 +412,10 @@ impl EndpointHandle { impl Drop for EndpointHandle { fn drop(&mut self) { - let res = self.0.running_jobs.fetch_sub(1, std::sync::atomic::Ordering::Relaxed); + let res = self + .0 + .running_jobs + .fetch_sub(1, std::sync::atomic::Ordering::Relaxed); trace!("Endpoint {} has one job less: {}", self.0.name(), res - 1); } } @@ -422,7 +428,6 @@ impl 
std::ops::Deref for EndpointHandle { } } - #[derive(Getters)] pub struct PreparedContainer<'a> { endpoint: &'a Endpoint, @@ -503,7 +508,8 @@ impl<'a> PreparedContainer<'a> { let builder_opts = { let mut builder_opts = shiplift::ContainerOptions::builder(job.image().as_ref()); - let container_name = format!("butido-{package}-{version}-{id}", + let container_name = format!( + "butido-{package}-{version}-{id}", package = job.package().name().as_ref(), version = job.package().version().as_ref(), id = job.uuid() @@ -527,7 +533,12 @@ impl<'a> PreparedContainer<'a> { .containers() .create(&builder_opts) .await - .with_context(|| anyhow!("Creating container with builder options = {:?}", builder_opts)) + .with_context(|| { + anyhow!( + "Creating container with builder options = {:?}", + builder_opts + ) + }) .with_context(|| anyhow!("Creating container on '{}'", endpoint.name))?; trace!("Create info = {:?}", create_info); Ok(create_info) @@ -572,16 +583,34 @@ impl<'a> PreparedContainer<'a> { .with_context(|| anyhow!("Reading file {}", source_path.display()))?; drop(entry); - container.copy_file_into(destination, &buf) + container + .copy_file_into(destination, &buf) .await - .inspect(|_| trace!("Successfully copied source {} to container {}", source_path.display(), container.id())) - .with_context(|| anyhow!("Failed to copy source {} to container {}", source_path.display(), container.id())) + .inspect(|_| { + trace!( + "Successfully copied source {} to container {}", + source_path.display(), + container.id() + ) + }) + .with_context(|| { + anyhow!( + "Failed to copy source {} to container {}", + source_path.display(), + container.id() + ) + }) .map_err(Error::from) }) .collect::>() .collect::>() .await - .inspect(|_| trace!("Successfully copied sources to container {}", container.id())) + .inspect(|_| { + trace!( + "Successfully copied sources to container {}", + container.id() + ) + }) .with_context(|| anyhow!("Copying sources to container {}", container.id())) .map_err(Error::from) } @@ -592,13 +621,20 @@ impl<'a> PreparedContainer<'a> { ) -> Result<()> { use tokio::io::AsyncReadExt; - debug!("Copying patches to container: {:?}", job.package().patches()); + debug!( + "Copying patches to container: {:?}", + job.package().patches() + ); job.package() .patches() .iter() .map(|patch| async move { let destination = PathBuf::from(crate::consts::PATCH_DIR_PATH).join(patch); - trace!("Copying patch {} to container at {}", patch.display(), destination.display()); + trace!( + "Copying patch {} to container at {}", + patch.display(), + destination.display() + ); let mut buf = vec![]; tokio::fs::OpenOptions::new() @@ -614,11 +650,18 @@ impl<'a> PreparedContainer<'a> { .await .with_context(|| anyhow!("Reading file {}", patch.display()))?; - container.copy_file_into(destination, &buf) + container + .copy_file_into(destination, &buf) .await .map_err(Error::from) .inspect(|_| trace!("Copying patch {} successfull", patch.display())) - .with_context(|| anyhow!("Copying patch {} to container {}", patch.display(), container.id())) + .with_context(|| { + anyhow!( + "Copying patch {} to container {}", + patch.display(), + container.id() + ) + }) .map_err(Error::from) }) .collect::>() @@ -636,7 +679,8 @@ impl<'a> PreparedContainer<'a> { staging_store: Arc>, release_stores: &[Arc], ) -> Result<()> { - let stream = job.resources() + let stream = job + .resources() .iter() .filter_map(JobResource::artifact) .cloned() @@ -650,7 +694,8 @@ impl<'a> PreparedContainer<'a> { container.id() ) })?; - let destination = 
PathBuf::from(crate::consts::INPUTS_DIR_PATH).join(artifact_file_name); + let destination = + PathBuf::from(crate::consts::INPUTS_DIR_PATH).join(artifact_file_name); trace!( "Copying {} to container: {}:{}", art.display(), @@ -658,9 +703,9 @@ impl<'a> PreparedContainer<'a> { destination.display() ); let staging_read = staging_store.read().await; - let buf = match staging_read.root_path().join(&art)? { + let buf = match staging_read.root_path().join(&art)? { Some(fp) => fp, - None => { + None => { // TODO: Optimize. // I know this is not nice, but it works for now. let mut found = None; @@ -670,16 +715,22 @@ impl<'a> PreparedContainer<'a> { Ok(Some(path)) => { found = Some(path); break; - }, + } Err(e) => { - trace!("Failed to join '{:?}' + '{:?}'", release_store.root_path(), art.display()); - return Err(e) - }, - Ok(None) => continue, + trace!( + "Failed to join '{:?}' + '{:?}'", + release_store.root_path(), + art.display() + ); + return Err(e); + } + Ok(None) => continue, } } - found.ok_or_else(|| anyhow!("Not found in staging or release store: {:?}", art))? - }, + found.ok_or_else(|| { + anyhow!("Not found in staging or release store: {:?}", art) + })? + } } .read() .await @@ -716,7 +767,12 @@ impl<'a> PreparedContainer<'a> { stream .collect::>>() .await - .inspect(|_| trace!("Successfully copied all artifacts to the container {}", container.id())) + .inspect(|_| { + trace!( + "Successfully copied all artifacts to the container {}", + container.id() + ) + }) .with_context(|| anyhow!("Copying artifacts to container {}", container.id())) .map_err(Error::from) .map(|_| ()) @@ -781,8 +837,12 @@ impl<'a> StartedContainer<'a> { .build(); trace!("Exec options = {:?}", exec_opts); - trace!("Moving logs to log sink for container {}", self.create_info.id); - let stream = self.endpoint + trace!( + "Moving logs to log sink for container {}", + self.create_info.id + ); + let stream = self + .endpoint .docker .containers() .get(&self.create_info.id) @@ -805,16 +865,14 @@ impl<'a> StartedContainer<'a> { ) }) .and_then(|l| { - crate::log::parser() - .parse(l.as_bytes()) - .with_context(|| { - anyhow!( - "Parsing log from {}:{}: {:?}", - self.endpoint.name, - self.create_info.id, - l - ) - }) + crate::log::parser().parse(l.as_bytes()).with_context(|| { + anyhow!( + "Parsing log from {}:{}: {:?}", + self.endpoint.name, + self.create_info.id, + l + ) + }) }) .and_then(|item| { let exited_successfully = match item { @@ -884,10 +942,16 @@ impl<'a> ExecutedContainer<'a> { &self.script } - pub async fn finalize(self, staging_store: Arc>) -> Result { + pub async fn finalize( + self, + staging_store: Arc>, + ) -> Result { let (exit_info, artifacts) = match self.exit_info { Some((false, msg)) => { - let err = anyhow!("Error during container run: '{msg}'", msg = msg.as_deref().unwrap_or("")); + let err = anyhow!( + "Error during container run: '{msg}'", + msg = msg.as_deref().unwrap_or("") + ); // error because the container errored (Err(err), vec![]) @@ -896,7 +960,11 @@ impl<'a> ExecutedContainer<'a> { Some((true, _)) | None => { let container = self.endpoint.docker.containers().get(&self.create_info.id); - trace!("Fetching {} from container {}", crate::consts::OUTPUTS_DIR_PATH, self.create_info.id); + trace!( + "Fetching {} from container {}", + crate::consts::OUTPUTS_DIR_PATH, + self.create_info.id + ); let tar_stream = container .copy_from(&PathBuf::from(crate::consts::OUTPUTS_DIR_PATH)) .map(|item| { diff --git a/src/endpoint/mod.rs b/src/endpoint/mod.rs index ef7439f7..dfa9bc14 100644 --- 
a/src/endpoint/mod.rs +++ b/src/endpoint/mod.rs @@ -18,4 +18,3 @@ mod configured; pub use configured::*; pub mod util; - diff --git a/src/endpoint/scheduler.rs b/src/endpoint/scheduler.rs index 9a02fc2e..8f55132d 100644 --- a/src/endpoint/scheduler.rs +++ b/src/endpoint/scheduler.rs @@ -16,21 +16,21 @@ use anyhow::Context; use anyhow::Error; use anyhow::Result; use colored::Colorize; -use diesel::PgConnection; use diesel::r2d2::ConnectionManager; use diesel::r2d2::Pool; +use diesel::PgConnection; use indicatif::ProgressBar; use itertools::Itertools; -use tracing::trace; use tokio::io::AsyncWriteExt; -use tokio::sync::RwLock; use tokio::sync::mpsc::UnboundedReceiver; +use tokio::sync::RwLock; +use tracing::trace; use uuid::Uuid; use crate::db::models as dbmodels; use crate::endpoint::Endpoint; -use crate::endpoint::EndpointHandle; use crate::endpoint::EndpointConfiguration; +use crate::endpoint::EndpointHandle; use crate::filestore::ArtifactPath; use crate::filestore::ReleaseStore; use crate::filestore::StagingStore; @@ -74,7 +74,11 @@ impl EndpointScheduler { /// # Warning /// /// This function blocks as long as there is no free endpoint available! - pub async fn schedule_job(&self, job: RunnableJob, bar: indicatif::ProgressBar) -> Result { + pub async fn schedule_job( + &self, + job: RunnableJob, + bar: indicatif::ProgressBar, + ) -> Result { let endpoint = self.select_free_endpoint().await?; Ok(JobHandle { @@ -94,13 +98,20 @@ impl EndpointScheduler { let ep = self .endpoints .iter() - .filter(|ep| { // filter out all running containers where the number of max jobs is reached + .filter(|ep| { + // filter out all running containers where the number of max jobs is reached let r = ep.running_jobs() < ep.num_max_jobs(); - trace!("Endpoint {} considered for scheduling job: {}", ep.name(), r); + trace!( + "Endpoint {} considered for scheduling job: {}", + ep.name(), + r + ); r }) .sorted_by(|ep1, ep2| { - ep1.utilization().partial_cmp(&ep2.utilization()).unwrap_or(std::cmp::Ordering::Equal) + ep1.utilization() + .partial_cmp(&ep2.utilization()) + .unwrap_or(std::cmp::Ordering::Equal) }) .next(); @@ -136,14 +147,26 @@ impl JobHandle { let (log_sender, log_receiver) = tokio::sync::mpsc::unbounded_channel::(); let endpoint_uri = self.endpoint.uri().clone(); let endpoint_name = self.endpoint.name().clone(); - let endpoint = dbmodels::Endpoint::create_or_fetch(&mut self.db.get().unwrap(), self.endpoint.name())?; - let package = dbmodels::Package::create_or_fetch(&mut self.db.get().unwrap(), self.job.package())?; - let image = dbmodels::Image::create_or_fetch(&mut self.db.get().unwrap(), self.job.image())?; + let endpoint = + dbmodels::Endpoint::create_or_fetch(&mut self.db.get().unwrap(), self.endpoint.name())?; + let package = + dbmodels::Package::create_or_fetch(&mut self.db.get().unwrap(), self.job.package())?; + let image = + dbmodels::Image::create_or_fetch(&mut self.db.get().unwrap(), self.job.image())?; let envs = self.create_env_in_db()?; let job_id = *self.job.uuid(); - trace!("Running on Job {} on Endpoint {}", job_id, self.endpoint.name()); - let prepared_container = self.endpoint - .prepare_container(&self.job, self.staging_store.clone(), self.release_stores.clone()) + trace!( + "Running on Job {} on Endpoint {}", + job_id, + self.endpoint.name() + ); + let prepared_container = self + .endpoint + .prepare_container( + &self.job, + self.staging_store.clone(), + self.release_stores.clone(), + ) .await?; let container_id = prepared_container.create_info().id.clone(); let running_container = 
prepared_container @@ -174,7 +197,8 @@ impl JobHandle { drop(self.bar); let (run_container, logres) = tokio::join!(running_container, logres); - let log = logres.with_context(|| anyhow!("Collecting logs for job on '{}'", endpoint_name))?; + let log = + logres.with_context(|| anyhow!("Collecting logs for job on '{}'", endpoint_name))?; let run_container = run_container .with_context(|| anyhow!("Running container {} failed", container_id)) .with_context(|| { @@ -202,8 +226,14 @@ impl JobHandle { trace!("DB: Job entry for job {} created: {}", job.uuid, job.id); for env in envs { - dbmodels::JobEnv::create(&mut self.db.get().unwrap(), &job, &env) - .with_context(|| format!("Creating Environment Variable mapping for Job: {}", job.uuid))?; + dbmodels::JobEnv::create(&mut self.db.get().unwrap(), &job, &env).with_context( + || { + format!( + "Creating Environment Variable mapping for Job: {}", + job.uuid + ) + }, + )?; } let res: crate::endpoint::FinalizedContainer = run_container @@ -239,7 +269,7 @@ impl JobHandle { trace!("Error was returned from script"); return Ok({ res.map(|_| vec![]) // to have the proper type, will never be executed - }) + }); } // Have to do it the ugly way here because of borrowing semantics @@ -259,7 +289,13 @@ impl JobHandle { } /// Helper to create an error object with a nice message. - fn create_job_run_error(job_id: &Uuid, package_name: &str, package_version: &str, endpoint_uri: &str, container_id: &str) -> Error { + fn create_job_run_error( + job_id: &Uuid, + package_name: &str, + package_version: &str, + endpoint_uri: &str, + container_id: &str, + ) -> Error { anyhow!(indoc::formatdoc!( r#"Error while running job for {package_name} {package_version} with id: @@ -276,9 +312,10 @@ impl JobHandle { job_id = job_id.to_string().red(), package_name = package_name.to_string().red(), package_version = package_version.to_string().red(), - - docker_connect_string = format!("docker --host {endpoint_uri} exec -it {container_id} /bin/bash" - ).yellow().bold(), + docker_connect_string = + format!("docker --host {endpoint_uri} exec -it {container_id} /bin/bash") + .yellow() + .bold(), )) } @@ -295,7 +332,9 @@ impl JobHandle { .inspect(|(k, v)| { trace!("Creating environment variable in database: {} = {}", k, v) }) - .map(|(k, v)| dbmodels::EnvVar::create_or_fetch(&mut self.db.get().unwrap(), k, v)) + .map(|(k, v)| { + dbmodels::EnvVar::create_or_fetch(&mut self.db.get().unwrap(), k, v) + }) .collect::>>() }) .transpose()? @@ -310,7 +349,9 @@ impl JobHandle { .inspect(|(k, v)| { trace!("Creating environment variable in database: {} = {}", k, v) }) - .map(|(k, v)| dbmodels::EnvVar::create_or_fetch(&mut self.db.get().unwrap(), k, v)) + .map(|(k, v)| { + dbmodels::EnvVar::create_or_fetch(&mut self.db.get().unwrap(), k, v) + }) }) .collect() } @@ -335,7 +376,8 @@ impl<'a> LogReceiver<'a> { // Reserve a reasonable amount of elements. accu.reserve(4096); - let mut logfile = self.get_logfile() + let mut logfile = self + .get_logfile() .await .transpose() .context("Getting Logfile")?; @@ -353,15 +395,16 @@ impl<'a> LogReceiver<'a> { // Timeout for receiving from the log receiver channel // This way we can update (`tick()`) the progress bar and show the user that things are // happening, even if there was no log output for several seconds. 
- let logitem = match tokio::time::timeout(timeout_duration, self.log_receiver.recv()).await { - Err(_ /* elapsed */) => { - self.bar.tick(); // just ping the progressbar here - continue - }, + let logitem = + match tokio::time::timeout(timeout_duration, self.log_receiver.recv()).await { + Err(_ /* elapsed */) => { + self.bar.tick(); // just ping the progressbar here + continue; + } - Ok(None) => break, // if the log is empty, we're done - Ok(Some(logitem)) => logitem, - }; + Ok(None) => break, // if the log is empty, we're done + Ok(Some(logitem)) => logitem, + }; if let Some(lf) = logfile.as_mut() { lf.write_all(logitem.display()?.to_string().as_bytes()) @@ -381,14 +424,23 @@ impl<'a> LogReceiver<'a> { trace!("Setting bar phase to {}", phasename); self.bar.set_message(format!( "[{}/{} {} {} {}]: Phase: {}", - self.endpoint_name, self.container_id_chrs, self.job.uuid(), self.package_name, self.package_version, phasename + self.endpoint_name, + self.container_id_chrs, + self.job.uuid(), + self.package_name, + self.package_version, + phasename )); } LogItem::State(Ok(())) => { trace!("Setting bar state to Ok"); self.bar.set_message(format!( "[{}/{} {} {} {}]: State Ok", - self.endpoint_name, self.container_id_chrs, self.job.uuid(), self.package_name, self.package_version + self.endpoint_name, + self.container_id_chrs, + self.job.uuid(), + self.package_name, + self.package_version )); success = Some(true); } @@ -396,7 +448,12 @@ impl<'a> LogReceiver<'a> { trace!("Setting bar state to Err: {}", e); self.bar.set_message(format!( "[{}/{} {} {} {}]: State Err: {}", - self.endpoint_name, self.container_id_chrs, self.job.uuid(), self.package_name, self.package_version, e + self.endpoint_name, + self.container_id_chrs, + self.job.uuid(), + self.package_name, + self.package_version, + e )); success = Some(false); } @@ -408,15 +465,27 @@ impl<'a> LogReceiver<'a> { let finish_msg = match success { Some(true) => format!( "[{}/{} {} {} {}]: finished successfully", - self.endpoint_name, self.container_id_chrs, self.job.uuid(), self.package_name, self.package_version + self.endpoint_name, + self.container_id_chrs, + self.job.uuid(), + self.package_name, + self.package_version ), Some(false) => format!( "[{}/{} {} {} {}]: finished with error", - self.endpoint_name, self.container_id_chrs, self.job.uuid(), self.package_name, self.package_version + self.endpoint_name, + self.container_id_chrs, + self.job.uuid(), + self.package_name, + self.package_version ), None => format!( "[{}/{} {} {} {}]: finished", - self.endpoint_name, self.container_id_chrs, self.job.uuid(), self.package_name, self.package_version + self.endpoint_name, + self.container_id_chrs, + self.job.uuid(), + self.package_name, + self.package_version ), }; self.bar.finish_with_message(finish_msg); @@ -438,7 +507,10 @@ impl<'a> LogReceiver<'a> { Some({ let path = log_dir.join(format!( "{}-{}-{}-{}.log", - self.package_name, self.package_version, self.job.image(), self.job.uuid() + self.package_name, + self.package_version, + self.job.image(), + self.job.uuid() )); tokio::fs::OpenOptions::new() .create(true) diff --git a/src/endpoint/util.rs b/src/endpoint/util.rs index 6bdb30ac..f5d02916 100644 --- a/src/endpoint/util.rs +++ b/src/endpoint/util.rs @@ -21,10 +21,8 @@ pub async fn setup_endpoints(endpoints: Vec) -> Result(&self, mut ar: tar::Archive) -> Result> + pub(in crate::filestore) fn unpack_archive_here( + &self, + mut ar: tar::Archive, + ) -> Result> where R: std::io::Read, { @@ -132,9 +136,7 @@ impl StoreRoot { let unpack_dest = 
self.0.join(&path); trace!("Unpack to = '{:?}'", unpack_dest); - entry.unpack(unpack_dest) - .map(|_| path) - .map_err(Error::from) + entry.unpack(unpack_dest).map(|_| path).map_err(Error::from) }) .collect::>>() } @@ -185,7 +187,6 @@ impl AsRef for ArtifactPath { pub struct FullArtifactPath<'a>(&'a StoreRoot, &'a ArtifactPath); impl<'a> FullArtifactPath<'a> { - pub fn is_in_staging_store(&self, store: &StagingStore) -> bool { store.0.root_path() == self.0 } diff --git a/src/filestore/staging.rs b/src/filestore/staging.rs index 82e6a557..d422c118 100644 --- a/src/filestore/staging.rs +++ b/src/filestore/staging.rs @@ -10,14 +10,14 @@ use std::fmt::Debug; +use anyhow::anyhow; use anyhow::Context; use anyhow::Error; use anyhow::Result; -use anyhow::anyhow; use futures::stream::Stream; use indicatif::ProgressBar; -use tracing::trace; use result_inspect::ResultInspect; +use tracing::trace; use crate::filestore::path::ArtifactPath; use crate::filestore::path::StoreRoot; diff --git a/src/job/dag.rs b/src/job/dag.rs index 6eb30abe..cd7fc62e 100644 --- a/src/job/dag.rs +++ b/src/job/dag.rs @@ -50,27 +50,22 @@ impl Dag { } pub fn iter(&'_ self) -> impl Iterator + '_ { - self.dag - .graph() - .node_indices() - .map(move |idx| { - let job = self.dag.graph().node_weight(idx).unwrap(); // TODO - let children = self.dag.children(idx); - let children_uuids = children.iter(&self.dag) - .filter_map(|(_, node_idx)| { - self.dag.graph().node_weight(node_idx) - }) - .map(Job::uuid) - .cloned() - .collect(); + self.dag.graph().node_indices().map(move |idx| { + let job = self.dag.graph().node_weight(idx).unwrap(); // TODO + let children = self.dag.children(idx); + let children_uuids = children + .iter(&self.dag) + .filter_map(|(_, node_idx)| self.dag.graph().node_weight(node_idx)) + .map(Job::uuid) + .cloned() + .collect(); - JobDefinition { - job, - dependencies: children_uuids - } - }) + JobDefinition { + job, + dependencies: children_uuids, + } + }) } - } #[derive(Debug)] @@ -78,4 +73,3 @@ pub struct JobDefinition<'a> { pub job: &'a Job, pub dependencies: Vec, } - diff --git a/src/job/runnable.rs b/src/job/runnable.rs index 4a167dc0..de06da4c 100644 --- a/src/job/runnable.rs +++ b/src/job/runnable.rs @@ -24,8 +24,8 @@ use crate::package::Script; use crate::package::ScriptBuilder; use crate::source::SourceCache; use crate::source::SourceEntry; -use crate::util::EnvironmentVariableName; use crate::util::docker::ImageName; +use crate::util::EnvironmentVariableName; /// A job configuration that can be run. All inputs are clear here. #[derive(Debug, Getters)] @@ -75,7 +75,11 @@ impl RunnableJob { .chain(git_commit_env.as_ref().into_iter().map(|(k, v)| (k, v))) .inspect(|(name, _)| debug!("Checking: {}", name)) .try_for_each(|(name, _)| { - trace!("{:?} contains? {:?}", config.containers().allowed_env(), name); + trace!( + "{:?} contains? 
{:?}", + config.containers().allowed_env(), + name + ); if !config.containers().allowed_env().contains(name) { Err(anyhow!("Environment variable name not allowed: {}", name)) } else { @@ -130,17 +134,13 @@ impl RunnableJob { } pub fn environment(&self) -> impl Iterator { - self.resources - .iter() - .filter_map(|r| r.env()) - .chain({ - self.package() - .environment() - .as_ref() - .map(|hm| hm.iter()) - .into_iter() - .flatten() - }) + self.resources.iter().filter_map(|r| r.env()).chain({ + self.package() + .environment() + .as_ref() + .map(|hm| hm.iter()) + .into_iter() + .flatten() + }) } - } diff --git a/src/log/parser.rs b/src/log/parser.rs index c865c729..2a54aedb 100644 --- a/src/log/parser.rs +++ b/src/log/parser.rs @@ -43,14 +43,14 @@ impl std::fmt::Debug for ParsedLog { writeln!(f, "ParsedLog [")?; for (i, line) in self.0.iter().enumerate() { match line { - LogItem::Line(l) => { + LogItem::Line(l) => { let s = std::str::from_utf8(l).unwrap_or("ERROR UTF8 ENCODING"); writeln!(f, "[{i}] Line('{s}')")? - }, - LogItem::Progress(u) => writeln!(f, "[{i}] Progress({u})")?, + } + LogItem::Progress(u) => writeln!(f, "[{i}] Progress({u})")?, LogItem::CurrentPhase(s) => writeln!(f, "[{i}] Phase({s})")?, - LogItem::State(Ok(_)) => writeln!(f, "[{i}] State::OK")?, - LogItem::State(Err(_)) => writeln!(f, "[{i}] State::Err")?, + LogItem::State(Ok(_)) => writeln!(f, "[{i}] State::OK")?, + LogItem::State(Err(_)) => writeln!(f, "[{i}] State::Err")?, } } @@ -74,7 +74,7 @@ impl FromStr for ParsedLog { pub enum JobResult { Success, Errored, - Unknown + Unknown, } impl JobResult { @@ -93,7 +93,7 @@ impl ParsedLog { .iter() .rev() .filter_map(|line| match line { - LogItem::State(Ok(_)) => Some(JobResult::Success), + LogItem::State(Ok(_)) => Some(JobResult::Success), LogItem::State(Err(_)) => Some(JobResult::Errored), _ => None, }) diff --git a/src/main.rs b/src/main.rs index 35306558..868cdefd 100644 --- a/src/main.rs +++ b/src/main.rs @@ -39,7 +39,7 @@ unused_must_use, unused_mut, unused_parens, - while_true, + while_true )] #![allow(macro_use_extern_crate)] #![allow(unstable_name_collisions)] // TODO: Remove me with the next rustc update (probably) @@ -54,10 +54,10 @@ use anyhow::anyhow; use anyhow::Context; use anyhow::Error; use anyhow::Result; +use aquamarine as _; use clap::ArgMatches; use logcrate::debug; -use logcrate::error; -use aquamarine as _; // doc-helper crate +use logcrate::error; // doc-helper crate mod cli; mod commands; @@ -81,7 +81,7 @@ use crate::repository::Repository; use crate::util::progress::ProgressBars; use indoc::concatdoc; -pub const VERSION_LONG: &str = concatdoc!{" +pub const VERSION_LONG: &str = concatdoc! {" butido ", env!("VERGEN_GIT_DESCRIBE"), " Git SHA: ", env!("VERGEN_GIT_SHA"), " Git Commit Timestamp: ", env!("VERGEN_GIT_COMMIT_TIMESTAMP"), " @@ -113,52 +113,56 @@ async fn main() -> Result<()> { std::process::exit(0); } - let repo = git2::Repository::open(PathBuf::from(".")) - .map_err(|e| match e.code() { - git2::ErrorCode::NotFound => { - eprintln!("Butido must be executed in the top-level of the git repository"); - std::process::exit(1) - }, - _ => Error::from(e), - })?; + let repo = git2::Repository::open(PathBuf::from(".")).map_err(|e| match e.code() { + git2::ErrorCode::NotFound => { + eprintln!("Butido must be executed in the top-level of the git repository"); + std::process::exit(1) + } + _ => Error::from(e), + })?; let repo_path = repo .workdir() .ok_or_else(|| anyhow!("Not a repository with working directory. 
Cannot do my job!"))?; let mut config = ::config::Config::default(); - config.merge(::config::File::from(repo_path.join("config.toml")).required(true)) + config + .merge(::config::File::from(repo_path.join("config.toml")).required(true)) .context("Failed to load config.toml from repository")?; { let xdg = xdg::BaseDirectories::with_prefix("butido")?; let xdg_config_file = xdg.find_config_file("config.toml"); if let Some(xdg_config) = xdg_config_file { - debug!("Configuration file found with XDG: {}", xdg_config.display()); - config.merge(::config::File::from(xdg_config).required(false)) + debug!( + "Configuration file found with XDG: {}", + xdg_config.display() + ); + config + .merge(::config::File::from(xdg_config).required(false)) .context("Failed to load config.toml from XDG configuration directory")?; } else { - debug!("No configuration file found with XDG: {}", xdg.get_config_home().display()); + debug!( + "No configuration file found with XDG: {}", + xdg.get_config_home().display() + ); } } config.merge(::config::Environment::with_prefix("BUTIDO"))?; - let config = config.try_into::() + let config = config + .try_into::() .context("Failed to load Configuration object")? .validate() .context("Failed to validate configuration")?; let hide_bars = cli.get_flag("hide_bars") || crate::util::stdout_is_pipe(); - let progressbars = ProgressBars::setup( - config.progress_format().clone(), - hide_bars, - ); + let progressbars = ProgressBars::setup(config.progress_format().clone(), hide_bars); let load_repo = || -> Result { let bar = progressbars.bar()?; - let repo = Repository::load(repo_path, &bar) - .context("Loading the repository")?; + let repo = Repository::load(repo_path, &bar).context("Loading the repository")?; bar.finish_with_message("Repository loading finished"); Ok(repo) }; @@ -262,21 +266,19 @@ async fn main() -> Result<()> { .context("metrics command failed")? } - Some(("endpoint", matches)) => { - crate::commands::endpoint(matches, &config, progressbars) - .await - .context("endpoint command failed")? 
- }, + Some(("endpoint", matches)) => crate::commands::endpoint(matches, &config, progressbars) + .await + .context("endpoint command failed")?, Some((other, _)) => { error!("Unknown subcommand: {}", other); error!("Use --help to find available subcommands"); - return Err(anyhow!("Unknown subcommand: {}", other)) - }, + return Err(anyhow!("Unknown subcommand: {}", other)); + } None => { error!("No subcommand."); error!("Use --help to find available subcommands"); - return Err(anyhow!("No subcommand")) - }, + return Err(anyhow!("No subcommand")); + } } Ok(()) @@ -288,7 +290,12 @@ fn generate_completions(matches: &ArgMatches) { fn print_completions(shell: Shell, cmd: &mut clap::Command) { eprintln!("Generating shell completions for {shell}..."); - generate(shell, cmd, cmd.get_name().to_string(), &mut std::io::stdout()); + generate( + shell, + cmd, + cmd.get_name().to_string(), + &mut std::io::stdout(), + ); } // src/cli.rs enforces that `shell` is set to a valid `Shell` so this is always true: diff --git a/src/orchestrator/mod.rs b/src/orchestrator/mod.rs index 4d2ee5c0..c6217164 100644 --- a/src/orchestrator/mod.rs +++ b/src/orchestrator/mod.rs @@ -13,4 +13,3 @@ mod orchestrator; pub use orchestrator::*; mod util; - diff --git a/src/orchestrator/orchestrator.rs b/src/orchestrator/orchestrator.rs index 3c6e79ee..a0e7fc18 100644 --- a/src/orchestrator/orchestrator.rs +++ b/src/orchestrator/orchestrator.rs @@ -16,22 +16,22 @@ use std::path::PathBuf; use std::sync::Arc; use std::sync::Mutex; -use anyhow::Error; +use anyhow::anyhow; use anyhow::Context; +use anyhow::Error; use anyhow::Result; -use anyhow::anyhow; -use diesel::PgConnection; use diesel::r2d2::ConnectionManager; use diesel::r2d2::Pool; +use diesel::PgConnection; use git2::Repository; use indicatif::ProgressBar; use itertools::Itertools; -use tracing::{debug, trace, error}; use resiter::FilterMap; -use tokio::sync::RwLock; use tokio::sync::mpsc::Receiver; use tokio::sync::mpsc::Sender; +use tokio::sync::RwLock; use tokio_stream::StreamExt; +use tracing::{debug, error, trace}; use typed_builder::TypedBuilder; use uuid::Uuid; @@ -47,8 +47,8 @@ use crate::job::JobDefinition; use crate::job::RunnableJob; use crate::orchestrator::util::*; use crate::source::SourceCache; -use crate::util::EnvironmentVariableName; use crate::util::progress::ProgressBars; +use crate::util::EnvironmentVariableName; #[cfg_attr(doc, aquamarine::aquamarine)] /// The Orchestrator @@ -251,7 +251,7 @@ impl ProducedArtifact { } impl Borrow for ProducedArtifact { - fn borrow(&self) -> &ArtifactPath { + fn borrow(&self) -> &ArtifactPath { match self { ProducedArtifact::Built(a) => a, ProducedArtifact::Reused(a) => a, @@ -281,9 +281,7 @@ impl<'a> Orchestrator<'a> { .git_author() .as_ref() .map(|varname| -> Result<_> { - let username = self.repository - .config()? 
- .get_string("user.name")?; + let username = self.repository.config()?.get_string("user.name")?; Ok((varname.clone(), username)) }) @@ -311,7 +309,8 @@ impl<'a> Orchestrator<'a> { // This is an Option<> because we need to set it later and the root of the tree needs a // special handling, as this very function will wait on a receiver that gets the results // of the root task - let jobs: Vec<(Receiver, TaskPreparation, Sender, _)> = self.jobdag + let jobs: Vec<(Receiver, TaskPreparation, Sender, _)> = self + .jobdag .iter() .map(|jobdef| { // We initialize the channel with 100 elements here, as there is unlikely a task @@ -319,7 +318,10 @@ impl<'a> Orchestrator<'a> { // Either way, this might be increased in future. let (sender, receiver) = tokio::sync::mpsc::channel(100); - trace!("Creating TaskPreparation object for job {}", jobdef.job.uuid()); + trace!( + "Creating TaskPreparation object for job {}", + jobdef.job.uuid() + ); let bar = self.progress_generator.bar()?; let bar = multibar.add(bar); bar.set_length(100); @@ -337,7 +339,12 @@ impl<'a> Orchestrator<'a> { database: self.database.clone(), }; - Ok((receiver, tp, sender, std::cell::RefCell::new(None as Option>>))) + Ok(( + receiver, + tp, + sender, + std::cell::RefCell::new(None as Option>>), + )) }) .collect::>>()?; @@ -354,9 +361,9 @@ impl<'a> Orchestrator<'a> { for job in jobs.iter() { if let Some(mut v) = job.3.borrow_mut().as_mut() { v.extend({ - jobs.iter() - .filter(|j| j.1.jobdef.dependencies.contains(job.1.jobdef.job.uuid())) - .map(|j| j.2.clone()) + jobs.iter() + .filter(|j| j.1.jobdef.dependencies.contains(job.1.jobdef.job.uuid())) + .map(|j| j.2.clone()) }); continue; @@ -364,13 +371,16 @@ impl<'a> Orchestrator<'a> { // else, but not in else {} because of borrowing *job.3.borrow_mut() = { - let depending_on_job = jobs.iter() + let depending_on_job = jobs + .iter() .filter(|j| j.1.jobdef.dependencies.contains(job.1.jobdef.job.uuid())) .map(|j| { if j.1.jobdef.job.uuid() == job.1.jobdef.job.uuid() { - Err(anyhow!("Package does depend on itself: {} {}", - job.1.jobdef.job.package().name(), - job.1.jobdef.job.package().version())) + Err(anyhow!( + "Package does depend on itself: {} {}", + job.1.jobdef.job.package().name(), + job.1.jobdef.job.package().version() + )) } else { Ok(j) } @@ -378,7 +388,11 @@ impl<'a> Orchestrator<'a> { .map_ok(|j| j.2.clone()) .collect::>>>()?; - trace!("{:?} is depending on {}", depending_on_job, job.1.jobdef.job.uuid()); + trace!( + "{:?} is depending on {}", + depending_on_job, + job.1.jobdef.job.uuid() + ); if depending_on_job.is_empty() { None } else { @@ -394,7 +408,8 @@ impl<'a> Orchestrator<'a> { // By that property, we can find the root task. // // Here, we copy its uuid, because we need it later. 
- let root_job_id = jobs.iter() + let root_job_id = jobs + .iter() .find(|j| j.3.borrow().is_none()) .map(|j| j.1.jobdef.job.uuid()) .ok_or_else(|| anyhow!("Failed to find root task"))?; @@ -415,7 +430,10 @@ impl<'a> Orchestrator<'a> { .map(|prep| { trace!("Creating JobTask for = {}", prep.1.jobdef.job.uuid()); // the sender is set or we need to use the root sender - let sender = prep.3.into_inner().unwrap_or_else(|| vec![root_sender.clone()]); + let sender = prep + .3 + .into_inner() + .unwrap_or_else(|| vec![root_sender.clone()]); JobTask::new(prep.0, prep.1, sender) }) .inspect(|task| trace!("Running: {}", task.jobdef.job.uuid())) @@ -426,15 +444,16 @@ impl<'a> Orchestrator<'a> { running_jobs.collect::>().await?; trace!("All jobs finished"); match root_receiver.recv().await { - None => Err(anyhow!("No result received...")), + None => Err(anyhow!("No result received...")), Some(Ok(results)) => { - let results = results.into_iter() + let results = results + .into_iter() .flat_map(|tpl| tpl.1.into_iter()) .map(ProducedArtifact::unpack) .collect(); Ok((results, HashMap::with_capacity(0))) - }, - Some(Err(errors)) => Ok((vec![], errors)), + } + Some(Err(errors)) => Ok((vec![], errors)), } } } @@ -484,7 +503,6 @@ struct JobTask<'a> { sender: Vec>, } - /// Implement Drop to close the progress bar /// /// This implementation is a bit of a hack. @@ -513,19 +531,26 @@ impl<'a> Drop for JobTask<'a> { "error on other task" }; - self.bar.finish_with_message(format!("[{} {} {}] Stopped, {msg}", + self.bar.finish_with_message(format!( + "[{} {} {}] Stopped, {msg}", self.jobdef.job.uuid(), self.jobdef.job.package().name(), self.jobdef.job.package().version(), - msg = errmsg)); + msg = errmsg + )); } } } impl<'a> JobTask<'a> { - fn new(receiver: Receiver, prep: TaskPreparation<'a>, sender: Vec>) -> Self { + fn new( + receiver: Receiver, + prep: TaskPreparation<'a>, + sender: Vec>, + ) -> Self { let bar = prep.bar.clone(); - bar.set_message(format!("[{} {} {}]: Booting", + bar.set_message(format!( + "[{} {} {}]: Booting", prep.jobdef.job.uuid(), prep.jobdef.job.package().name(), prep.jobdef.job.package().version() @@ -555,58 +580,84 @@ impl<'a> JobTask<'a> { /// returned successfully. async fn run(mut self) -> Result<()> { debug!("[{}]: Running", self.jobdef.job.uuid()); - debug!("[{}]: Waiting for dependencies = {:?}", self.jobdef.job.uuid(), { - self.jobdef.dependencies.iter().map(|u| u.to_string()).collect::>() - }); + debug!( + "[{}]: Waiting for dependencies = {:?}", + self.jobdef.job.uuid(), + { + self.jobdef + .dependencies + .iter() + .map(|u| u.to_string()) + .collect::>() + } + ); let dep_len = self.jobdef.dependencies.len(); // A list of job run results from dependencies that were received from the tasks for the // dependencies - let mut received_dependencies: HashMap> = HashMap::with_capacity(dep_len); + let mut received_dependencies: HashMap> = + HashMap::with_capacity(dep_len); // A list of errors that were received from the tasks for the dependencies let mut received_errors: HashMap = HashMap::with_capacity(dep_len); // Helper function to check whether all UUIDs are in a list of UUIDs let all_dependencies_are_in = |dependency_uuids: &[Uuid], list: &HashMap>| { - dependency_uuids.iter().all(|dependency_uuid| { - list.keys().any(|id| id == dependency_uuid) - }) + dependency_uuids + .iter() + .all(|dependency_uuid| list.keys().any(|id| id == dependency_uuid)) }; // as long as the job definition lists dependencies that are not in the received_dependencies list... 
while !all_dependencies_are_in(&self.jobdef.dependencies, &received_dependencies) { // Update the status bar message self.bar.set_message({ - format!("[{} {} {}]: Waiting ({}/{})...", + format!( + "[{} {} {}]: Waiting ({}/{})...", self.jobdef.job.uuid(), self.jobdef.job.package().name(), self.jobdef.job.package().version(), - received_dependencies.iter().filter(|(rd_uuid, _)| self.jobdef.dependencies.contains(rd_uuid)).count(), - dep_len) + received_dependencies + .iter() + .filter(|(rd_uuid, _)| self.jobdef.dependencies.contains(rd_uuid)) + .count(), + dep_len + ) }); trace!("[{}]: Updated bar", self.jobdef.job.uuid()); trace!("[{}]: receiving...", self.jobdef.job.uuid()); // receive from the receiver - let continue_receiving = self.perform_receive(&mut received_dependencies, &mut received_errors).await?; + let continue_receiving = self + .perform_receive(&mut received_dependencies, &mut received_errors) + .await?; - trace!("[{}]: Received errors = {}", self.jobdef.job.uuid(), received_errors.display_error_map()); + trace!( + "[{}]: Received errors = {}", + self.jobdef.job.uuid(), + received_errors.display_error_map() + ); // if there are any errors from child tasks if !received_errors.is_empty() { // send them to the parent,... // // We only send to one parent, because it doesn't matter // And we know that we have at least one sender - error!("[{}]: Received errors = {}", self.jobdef.job.uuid(), received_errors.display_error_map()); + error!( + "[{}]: Received errors = {}", + self.jobdef.job.uuid(), + received_errors.display_error_map() + ); self.sender[0].send(Err(received_errors)).await; // ... and stop operation, because the whole tree will fail anyways. - self.bar.finish_with_message(format!("[{} {} {}] Stopping, errors from child received", + self.bar.finish_with_message(format!( + "[{} {} {}] Stopping, errors from child received", self.jobdef.job.uuid(), self.jobdef.job.package().name(), - self.jobdef.job.package().version())); - return Ok(()) + self.jobdef.job.package().version() + )); + return Ok(()); } if !continue_receiving { @@ -616,7 +667,8 @@ impl<'a> JobTask<'a> { // Check if any of the received dependencies was built (and not reused). // If any dependency was built, we need to build as well. - let any_dependency_was_built = received_dependencies.values() + let any_dependency_was_built = received_dependencies + .values() .flat_map(|v| v.iter()) .any(ProducedArtifact::was_build); @@ -631,7 +683,10 @@ impl<'a> JobTask<'a> { // This is because we do not have access to the commandline-passed (additional) // environment variables at this point. But using the JobResource::env() variables // works as well. - let additional_env = self.jobdef.job.resources() + let additional_env = self + .jobdef + .job + .resources() .iter() .filter_map(crate::job::JobResource::env) .map(|(k, v)| (k.clone(), v.clone())) @@ -645,7 +700,6 @@ impl<'a> JobTask<'a> { .package(self.jobdef.job.package()) .release_stores(&self.release_stores) .image_name(Some(self.jobdef.job.image())) - // We can simply pass the staging store here, because it doesn't hurt. 
There are // two scenarios: // @@ -665,11 +719,18 @@ impl<'a> JobTask<'a> { .build() .run()?; - debug!("[{}]: Found {} replacement artifacts", self.jobdef.job.uuid(), replacement_artifacts.len()); - trace!("[{}]: Found replacement artifacts: {:?}", self.jobdef.job.uuid(), replacement_artifacts); + debug!( + "[{}]: Found {} replacement artifacts", + self.jobdef.job.uuid(), + replacement_artifacts.len() + ); + trace!( + "[{}]: Found replacement artifacts: {:?}", + self.jobdef.job.uuid(), + replacement_artifacts + ); let mut artifacts = replacement_artifacts .into_iter() - // First of all, we sort by whether the artifact path is in the staging store, // because we prefer staging store artifacts at this point. .sorted_by(|(p1, _), (p2, _)| { @@ -677,12 +738,10 @@ impl<'a> JobTask<'a> { let r2 = p2.is_in_staging_store(&staging_store); r1.cmp(&r2) }) - // We don't need duplicates here, so remove them by making the iterator unique // If we have two artifacts that are the same, the one in the staging store will be // preffered in the next step .unique_by(|tpl| tpl.0.artifact_path().clone()) - // Fetch the artifact from the staging store, if there is one. // If there is none, try the release store. // If there is none, there won't be a replacement artifact @@ -702,23 +761,31 @@ impl<'a> JobTask<'a> { if !artifacts.is_empty() { received_dependencies.insert(*self.jobdef.job.uuid(), artifacts); - trace!("[{}]: Sending to parent: {:?}", self.jobdef.job.uuid(), received_dependencies); + trace!( + "[{}]: Sending to parent: {:?}", + self.jobdef.job.uuid(), + received_dependencies + ); for s in self.sender.iter() { s.send(Ok(received_dependencies.clone())) .await .context("Cannot send received dependencies to parent") .with_context(|| { - format!("Sending-Channel is closed in Task for {}: {} {}", + format!( + "Sending-Channel is closed in Task for {}: {} {}", self.jobdef.job.uuid(), self.jobdef.job.package().name(), - self.jobdef.job.package().version()) + self.jobdef.job.package().version() + ) })?; } - self.bar.finish_with_message(format!("[{} {} {}] Reusing artifact", + self.bar.finish_with_message(format!( + "[{} {} {}] Reusing artifact", self.jobdef.job.uuid(), self.jobdef.job.package().name(), - self.jobdef.job.package().version())); - return Ok(()) + self.jobdef.job.package().version() + )); + return Ok(()); } } @@ -732,8 +799,13 @@ impl<'a> JobTask<'a> { .map(ProducedArtifact::borrow) .cloned() .collect::>(); - trace!("[{}]: Dependency artifacts = {:?}", self.jobdef.job.uuid(), dependency_artifacts); - self.bar.set_message(format!("[{} {} {}]: Preparing...", + trace!( + "[{}]: Dependency artifacts = {:?}", + self.jobdef.job.uuid(), + dependency_artifacts + ); + self.bar.set_message(format!( + "[{} {} {}]: Preparing...", self.jobdef.job.uuid(), self.jobdef.job.package().name(), self.jobdef.job.package().version() @@ -746,9 +818,11 @@ impl<'a> JobTask<'a> { self.config, self.git_author_env, self.git_commit_env, - dependency_artifacts)?; + dependency_artifacts, + )?; - self.bar.set_message(format!("[{} {} {}]: Scheduling...", + self.bar.set_message(format!( + "[{} {} {}]: Scheduling...", self.jobdef.job.uuid(), self.jobdef.job.package().name(), self.jobdef.job.package().version() @@ -756,9 +830,19 @@ impl<'a> JobTask<'a> { let job_uuid = *self.jobdef.job.uuid(); // Schedule the job on the scheduler - match self.scheduler.schedule_job(runnable, self.bar.clone()).await?.run().await? { + match self + .scheduler + .schedule_job(runnable, self.bar.clone()) + .await? + .run() + .await? 
+ { Err(e) => { - trace!("[{}]: Scheduler returned error = {:?}", self.jobdef.job.uuid(), e); + trace!( + "[{}]: Scheduler returned error = {:?}", + self.jobdef.job.uuid(), + e + ); // ... and we send that to our parent // // We only send to one parent, because it doesn't matter anymore @@ -771,14 +855,20 @@ impl<'a> JobTask<'a> { .send(Err(errormap)) .await .context("Failed sending scheduler errors to parent") - .with_context(|| format!("Failed sending error from job {}", self.jobdef.job.uuid()))?; - return Ok(()) - }, + .with_context(|| { + format!("Failed sending error from job {}", self.jobdef.job.uuid()) + })?; + return Ok(()); + } // if the scheduler run reports success, // it returns the database artifact objects it created! Ok(artifacts) => { - trace!("[{}]: Scheduler returned artifacts = {:?}", self.jobdef.job.uuid(), artifacts); + trace!( + "[{}]: Scheduler returned artifacts = {:?}", + self.jobdef.job.uuid(), + artifacts + ); // mark the produced artifacts as "built" (rather than reused) let artifacts = artifacts.into_iter().map(ProducedArtifact::Built).collect(); @@ -787,7 +877,7 @@ impl<'a> JobTask<'a> { for s in self.sender.iter() { s.send(Ok(received_dependencies.clone())).await?; } - }, + } } trace!("[{}]: Finished successfully", self.jobdef.job.uuid()); @@ -802,7 +892,11 @@ impl<'a> JobTask<'a> { /// Return Ok(true) if we should continue operation /// Return Ok(false) if the channel is empty and we're done receiving or if the channel is /// empty and there were errors collected - async fn perform_receive(&mut self, received_dependencies: &mut HashMap>, received_errors: &mut HashMap) -> Result { + async fn perform_receive( + &mut self, + received_dependencies: &mut HashMap>, + received_errors: &mut HashMap, + ) -> Result { match self.receiver.recv().await { Some(Ok(mut v)) => { // The task we depend on succeeded and returned an @@ -810,46 +904,58 @@ impl<'a> JobTask<'a> { trace!("[{}]: Received: {:?}", self.jobdef.job.uuid(), v); received_dependencies.extend(v); Ok(true) - }, + } Some(Err(mut e)) => { // The task we depend on failed // we log that error for now trace!("[{}]: Received: {:?}", self.jobdef.job.uuid(), e); received_errors.extend(e); Ok(true) - }, + } None => { // The task we depend on finished... we must check what we have now... - trace!("[{}]: Received nothing, channel seems to be empty", self.jobdef.job.uuid()); + trace!( + "[{}]: Received nothing, channel seems to be empty", + self.jobdef.job.uuid() + ); // If the channel was closed and there are already errors in the `received_errors` // buffer, we return Ok(false) to notify the caller that we should not continue // receiving if !received_errors.is_empty() { - trace!("[{}]: There are errors, stop receiving", self.jobdef.job.uuid()); - return Ok(false) + trace!( + "[{}]: There are errors, stop receiving", + self.jobdef.job.uuid() + ); + return Ok(false); } // Find all dependencies that we need but which are not received let received = received_dependencies.keys().collect::>(); - let missing_deps: Vec<_> = self.jobdef + let missing_deps: Vec<_> = self + .jobdef .dependencies .iter() .filter(|d| !received.contains(d)) .collect(); - trace!("[{}]: Missing dependencies = {:?}", self.jobdef.job.uuid(), missing_deps); + trace!( + "[{}]: Missing dependencies = {:?}", + self.jobdef.job.uuid(), + missing_deps + ); // ... 
if there are any, error if !missing_deps.is_empty() { let missing: Vec = missing_deps.iter().map(|u| u.to_string()).collect(); - Err(anyhow!("Childs finished, but dependencies still missing: {:?}", missing)) + Err(anyhow!( + "Childs finished, but dependencies still missing: {:?}", + missing + )) } else { // all dependencies are received - Ok(false) + Ok(false) } - }, + } } } - } - diff --git a/src/orchestrator/util.rs b/src/orchestrator/util.rs index fd372584..7efc23c9 100644 --- a/src/orchestrator/util.rs +++ b/src/orchestrator/util.rs @@ -27,12 +27,12 @@ impl AsReceivedErrorDisplay for HashMap { } } - pub struct ReceivedErrorDisplay<'a>(&'a HashMap); impl<'a> std::fmt::Display for ReceivedErrorDisplay<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.iter().try_for_each(|(uuid, err)| writeln!(f, "{uuid}: {err}")) + self.0 + .iter() + .try_for_each(|(uuid, err)| writeln!(f, "{uuid}: {err}")) } } - diff --git a/src/package/dag.rs b/src/package/dag.rs index 21e45c7f..cfc97cd4 100644 --- a/src/package/dag.rs +++ b/src/package/dag.rs @@ -13,27 +13,26 @@ use std::collections::HashMap; use std::io::Result as IoResult; use std::io::Write; +use anyhow::anyhow; use anyhow::Error; use anyhow::Result; -use anyhow::anyhow; use daggy::Walker; use getset::Getters; use indicatif::ProgressBar; use itertools::Itertools; -use tracing::trace; use ptree::Style; use ptree::TreeItem; use resiter::AndThen; +use tracing::trace; -use crate::package::Package; -use crate::package::PackageName; -use crate::package::PackageVersionConstraint; use crate::package::condition::ConditionCheckable; use crate::package::condition::ConditionData; use crate::package::dependency::ParseDependency; +use crate::package::Package; +use crate::package::PackageName; +use crate::package::PackageVersionConstraint; use crate::repository::Repository; - #[derive(Debug, Getters)] pub struct Dag { #[getset(get = "pub")] @@ -50,12 +49,12 @@ impl Dag { progress: Option<&ProgressBar>, conditional_data: &ConditionData<'_>, // required for selecting packages with conditional dependencies ) -> Result { - /// helper fn with bad name to check the dependency condition of a dependency and parse the dependency into a tuple of /// name and version for further processing - fn process(d: &D, conditional_data: &ConditionData<'_>) - -> Result<(bool, PackageName, PackageVersionConstraint)> - { + fn process( + d: &D, + conditional_data: &ConditionData<'_>, + ) -> Result<(bool, PackageName, PackageVersionConstraint)> { // Check whether the condition of the dependency matches our data let take = d.check_condition(conditional_data)?; let (name, version) = d.parse_as_name_and_version()?; @@ -71,21 +70,22 @@ impl Dag { /// /// It also filters out dependencies that do not match the `conditional_data` passed and /// makes the dependencies unique over (name, version). - fn get_package_dependencies<'a>(package: &'a Package, conditional_data: &'a ConditionData<'_>) - -> impl Iterator> + 'a - { - - package.dependencies() + fn get_package_dependencies<'a>( + package: &'a Package, + conditional_data: &'a ConditionData<'_>, + ) -> impl Iterator> + 'a { + package + .dependencies() .build() .iter() .map(move |d| process(d, conditional_data)) .chain({ - package.dependencies() + package + .dependencies() .runtime() .iter() .map(move |d| process(d, conditional_data)) }) - // Now filter out all dependencies where their condition did not match our // `conditional_data`. 
.filter(|res| match res { @@ -93,10 +93,8 @@ impl Dag { Ok((false, _, _)) => false, Err(_) => true, }) - // Map out the boolean from the condition, because we don't need that later on .map(|res| res.map(|(_, name, vers)| (name, vers))) - // Make all dependencies unique, because we don't want to build one dependency // multiple times .unique_by(|res| res.as_ref().ok().cloned()) @@ -112,26 +110,40 @@ impl Dag { ) -> Result<()> { get_package_dependencies(p, conditional_data) .and_then_ok(|(name, constr)| { - trace!("Dependency for {} {} found: {:?}", p.name(), p.version(), name); + trace!( + "Dependency for {} {} found: {:?}", + p.name(), + p.version(), + name + ); let packs = repo.find_with_version(&name, &constr); if packs.is_empty() { - return Err(anyhow!("Dependency of {} {} not found: {} {}", p.name(), p.version(), name, constr)) + return Err(anyhow!( + "Dependency of {} {} not found: {} {}", + p.name(), + p.version(), + name, + constr + )); } trace!("Found in repo: {:?}", packs); // If we didn't check that dependency already - if !mappings.keys().any(|p| packs.iter().any(|pk| pk.name() == p.name() && pk.version() == p.version())) { + if !mappings.keys().any(|p| { + packs + .iter() + .any(|pk| pk.name() == p.name() && pk.version() == p.version()) + }) { // recurse - packs.into_iter() - .try_for_each(|p| { - let _ = progress.as_ref().map(|p| p.tick()); + packs.into_iter().try_for_each(|p| { + let _ = progress.as_ref().map(|p| p.tick()); - let idx = dag.add_node(p); - mappings.insert(p, idx); + let idx = dag.add_node(p); + mappings.insert(p, idx); - trace!("Recursing for: {:?}", p); - add_sub_packages(repo, mappings, dag, p, progress, conditional_data) - }) + trace!("Recursing for: {:?}", p); + add_sub_packages(repo, mappings, dag, p, progress, conditional_data) + }) } else { Ok(()) } @@ -139,17 +151,19 @@ impl Dag { .collect::>() } - fn add_edges(mappings: &HashMap<&Package, daggy::NodeIndex>, + fn add_edges( + mappings: &HashMap<&Package, daggy::NodeIndex>, dag: &mut daggy::Dag<&Package, i8>, conditional_data: &ConditionData<'_>, - ) -> Result<()> - { + ) -> Result<()> { for (package, idx) in mappings { get_package_dependencies(package, conditional_data) .and_then_ok(|(name, constr)| { mappings .iter() - .filter(|(package, _)| *package.name() == name && constr.matches(package.version())) + .filter(|(package, _)| { + *package.name() == name && constr.matches(package.version()) + }) .try_for_each(|(_, dep_idx)| { dag.add_edge(*idx, *dep_idx, 0) .map(|_| ()) @@ -168,13 +182,20 @@ impl Dag { trace!("Making package Tree for {:?}", p); let root_idx = dag.add_node(&p); mappings.insert(&p, root_idx); - add_sub_packages(repo, &mut mappings, &mut dag, &p, progress, conditional_data)?; + add_sub_packages( + repo, + &mut mappings, + &mut dag, + &p, + progress, + conditional_data, + )?; add_edges(&mappings, &mut dag, conditional_data)?; trace!("Finished makeing package Tree"); Ok(Dag { dag: dag.map(|_, p: &&Package| -> Package { (*p).clone() }, |_, e| *e), - root_idx + root_idx, }) } @@ -203,7 +224,11 @@ impl<'a> TreeItem for DagDisplay<'a> { type Child = Self; fn write_self(&self, f: &mut W, _: &Style) -> IoResult<()> { - let p = self.0.dag.graph().node_weight(self.1) + let p = self + .0 + .dag + .graph() + .node_weight(self.1) .ok_or_else(|| anyhow!("Error finding node: {:?}", self.1)) .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; write!(f, "{} {}", p.name(), p.version()) @@ -211,9 +236,10 @@ impl<'a> TreeItem for DagDisplay<'a> { fn children(&self) -> Cow<[Self::Child]> { let c 
= self.0.dag.children(self.1); - Cow::from(c.iter(&self.0.dag) - .map(|(_, idx)| DagDisplay(self.0, idx)) - .collect::>() + Cow::from( + c.iter(&self.0.dag) + .map(|(_, idx)| DagDisplay(self.0, idx)) + .collect::>(), ) } } @@ -224,13 +250,13 @@ mod tests { use std::collections::BTreeMap; - use crate::package::Dependencies; - use crate::package::Dependency; use crate::package::condition::Condition; use crate::package::condition::OneOrMore; use crate::package::tests::package; use crate::package::tests::pname; use crate::package::tests::pversion; + use crate::package::Dependencies; + use crate::package::Dependency; use crate::util::docker::ImageName; use indicatif::ProgressBar; @@ -391,7 +417,9 @@ mod tests { assert!(r.is_ok()); let r = r.unwrap(); let ps = r.all_packages(); - assert!(ps.iter().any(|p| *p.name() == pname("p1") && *p.version() == pversion("1"))); + assert!(ps + .iter() + .any(|p| *p.name() == pname("p1") && *p.version() == pversion("1"))); assert!(ps.iter().any(|p| *p.name() == pname("p2"))); assert!(ps.iter().any(|p| *p.name() == pname("p4"))); assert!(ps.iter().any(|p| *p.name() == pname("p3"))); @@ -539,7 +567,9 @@ mod tests { assert!(r.is_ok()); let r = r.unwrap(); let ps = r.all_packages(); - assert!(ps.iter().any(|p| *p.name() == pname("p1") && *p.version() == pversion("1"))); + assert!(ps + .iter() + .any(|p| *p.name() == pname("p1") && *p.version() == pversion("1"))); assert!(ps.iter().any(|p| *p.name() == pname("p2"))); assert!(ps.iter().any(|p| *p.name() == pname("p3"))); assert!(ps.iter().any(|p| *p.name() == pname("p4"))); @@ -649,13 +679,14 @@ mod tests { assert!(r.is_ok()); let r = r.unwrap(); let ps = r.all_packages(); - assert!(ps.iter().any(|p| *p.name() == pname("p1") && *p.version() == pversion("1"))); + assert!(ps + .iter() + .any(|p| *p.name() == pname("p1") && *p.version() == pversion("1"))); assert!(ps.iter().any(|p| *p.name() == pname("p2"))); assert!(ps.iter().any(|p| *p.name() == pname("p3"))); assert!(ps.iter().any(|p| *p.name() == pname("p4"))); } - /// Build a repository with two packages and a condition for their dependency fn repo_with_ab_packages_with_condition(cond: Condition) -> (Package, Repository) { let mut btree = BTreeMap::new(); @@ -712,8 +743,14 @@ mod tests { assert!(ps.iter().any(|p| *p.version() == pversion("1"))); // Not in the tree: - assert!(!ps.iter().any(|p| *p.name() == pname("b")), "'b' should not be in tree, but is: {ps:?}"); - assert!(!ps.iter().any(|p| *p.version() == pversion("2")), "'2' should not be in tree, but is: {ps:?}"); + assert!( + !ps.iter().any(|p| *p.name() == pname("b")), + "'b' should not be in tree, but is: {ps:?}" + ); + assert!( + !ps.iter().any(|p| *p.version() == pversion("2")), + "'2' should not be in tree, but is: {ps:?}" + ); } // Test whether the dependency DAG is correctly build if a image is used, but not the one @@ -779,6 +816,4 @@ mod tests { assert!(ps.iter().any(|p| *p.name() == pname("b"))); assert!(ps.iter().any(|p| *p.version() == pversion("2"))); } - } - diff --git a/src/package/dependency/build.rs b/src/package/dependency/build.rs index d303f1b5..678c5646 100644 --- a/src/package/dependency/build.rs +++ b/src/package/dependency/build.rs @@ -12,21 +12,18 @@ use anyhow::Result; use serde::Deserialize; use serde::Serialize; -use crate::package::PackageName; -use crate::package::PackageVersionConstraint; +use crate::package::dependency::condition::Condition; use crate::package::dependency::ParseDependency; use crate::package::dependency::StringEqual; -use 
crate::package::dependency::condition::Condition; +use crate::package::PackageName; +use crate::package::PackageVersionConstraint; /// A dependency that is packaged and is only required during build time #[derive(Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq, Ord, PartialOrd)] #[serde(untagged)] pub enum BuildDependency { Simple(String), - Conditional { - name: String, - condition: Condition, - }, + Conditional { name: String, condition: Condition }, } impl AsRef for BuildDependency { @@ -49,7 +46,9 @@ impl StringEqual for BuildDependency { impl ParseDependency for BuildDependency { fn parse_as_name_and_version(&self) -> Result<(PackageName, PackageVersionConstraint)> { - crate::package::dependency::parse_package_dependency_string_into_name_and_version(self.as_ref()) + crate::package::dependency::parse_package_dependency_string_into_name_and_version( + self.as_ref(), + ) } } @@ -66,7 +65,8 @@ mod tests { #[test] fn test_parse_dependency() { - let s: TestSetting = toml::from_str(r#"setting = "foo""#).expect("Parsing TestSetting failed"); + let s: TestSetting = + toml::from_str(r#"setting = "foo""#).expect("Parsing TestSetting failed"); match s.setting { BuildDependency::Simple(name) => assert_eq!(name, "foo", "Expected 'foo', got {name}"), other => panic!("Unexpected deserialization to other variant: {other:?}"), @@ -75,14 +75,19 @@ mod tests { #[test] fn test_parse_conditional_dependency() { - let s: TestSetting = toml::from_str(r#"setting = { name = "foo", condition = { in_image = "bar"} }"#).expect("Parsing TestSetting failed"); + let s: TestSetting = + toml::from_str(r#"setting = { name = "foo", condition = { in_image = "bar"} }"#) + .expect("Parsing TestSetting failed"); match s.setting { BuildDependency::Conditional { name, condition } => { assert_eq!(name, "foo", "Expected 'foo', got {name}"); assert_eq!(*condition.has_env(), None); assert_eq!(*condition.env_eq(), None); - assert_eq!(condition.in_image().as_ref(), Some(&OneOrMore::::One(String::from("bar")))); - }, + assert_eq!( + condition.in_image().as_ref(), + Some(&OneOrMore::::One(String::from("bar"))) + ); + } other => panic!("Unexpected deserialization to other variant: {other:?}"), } } @@ -103,13 +108,15 @@ mod tests { assert_eq!(name, "foo", "Expected 'foo', got {name}"); assert_eq!(*condition.has_env(), None); assert_eq!(*condition.env_eq(), None); - assert_eq!(condition.in_image().as_ref(), Some(&OneOrMore::::One(String::from("bar")))); - }, + assert_eq!( + condition.in_image().as_ref(), + Some(&OneOrMore::::One(String::from("bar"))) + ); + } other => panic!("Unexpected deserialization to other variant: {other:?}"), } } - #[derive(serde::Serialize, serde::Deserialize)] #[allow(unused)] pub struct TestSettings { @@ -118,14 +125,19 @@ mod tests { #[test] fn test_parse_conditional_dependencies() { - let s: TestSettings = toml::from_str(r#"settings = [{ name = "foo", condition = { in_image = "bar"} }]"#).expect("Parsing TestSetting failed"); + let s: TestSettings = + toml::from_str(r#"settings = [{ name = "foo", condition = { in_image = "bar"} }]"#) + .expect("Parsing TestSetting failed"); match s.settings.get(0).expect("Has not one dependency") { BuildDependency::Conditional { name, condition } => { assert_eq!(name, "foo", "Expected 'foo', got {name}"); assert_eq!(*condition.has_env(), None); assert_eq!(*condition.env_eq(), None); - assert_eq!(condition.in_image().as_ref(), Some(&OneOrMore::::One(String::from("bar")))); - }, + assert_eq!( + condition.in_image().as_ref(), + 
Some(&OneOrMore::::One(String::from("bar"))) + ); + } other => panic!("Unexpected deserialization to other variant: {other:?}"), } } @@ -145,8 +157,11 @@ mod tests { assert_eq!(name, "foo", "Expected 'foo', got {name}"); assert_eq!(*condition.has_env(), None); assert_eq!(*condition.env_eq(), None); - assert_eq!(condition.in_image().as_ref(), Some(&OneOrMore::::One(String::from("bar")))); - }, + assert_eq!( + condition.in_image().as_ref(), + Some(&OneOrMore::::One(String::from("bar"))) + ); + } other => panic!("Unexpected deserialization to other variant: {other:?}"), } } @@ -166,10 +181,12 @@ mod tests { assert_eq!(name, "foo", "Expected 'foo', got {name}"); assert_eq!(*condition.has_env(), None); assert_eq!(*condition.env_eq(), None); - assert_eq!(condition.in_image().as_ref(), Some(&OneOrMore::::One(String::from("bar")))); - }, + assert_eq!( + condition.in_image().as_ref(), + Some(&OneOrMore::::One(String::from("bar"))) + ); + } other => panic!("Unexpected deserialization to other variant: {other:?}"), } } } - diff --git a/src/package/dependency/condition.rs b/src/package/dependency/condition.rs index 596a25c0..5bc046f4 100644 --- a/src/package/dependency/condition.rs +++ b/src/package/dependency/condition.rs @@ -10,13 +10,13 @@ use std::collections::BTreeMap; +use anyhow::Result; +use getset::Getters; use serde::Deserialize; use serde::Serialize; -use getset::Getters; -use anyhow::Result; -use crate::util::EnvironmentVariableName; use crate::util::docker::ImageName; +use crate::util::EnvironmentVariableName; /// The Condition type /// @@ -44,12 +44,16 @@ pub struct Condition { impl Condition { #[cfg(test)] - pub fn new(has_env: Option>, - env_eq: Option>, - in_image: Option>) - -> Self - { - Condition { has_env, env_eq, in_image } + pub fn new( + has_env: Option>, + env_eq: Option>, + in_image: Option>, + ) -> Self { + Condition { + has_env, + env_eq, + in_image, + } } /// Check whether the condition matches a certain set of data @@ -59,15 +63,15 @@ impl Condition { /// Always returns Ok(_) in the current implementation pub fn matches(&self, data: &ConditionData<'_>) -> Result { if !self.matches_env_cond(data)? { - return Ok(false) + return Ok(false); } if !self.matches_env_eq_cond(data)? { - return Ok(false) + return Ok(false); } if !self.matches_in_image_cond(data)? 
{ - return Ok(false) + return Ok(false); } Ok(true) @@ -77,15 +81,13 @@ impl Condition { if let Some(has_env_cond) = self.has_env.as_ref() { let b = match has_env_cond { OneOrMore::One(env) => data.env.iter().any(|(name, _)| env == name), - OneOrMore::More(envs) => envs.iter().all(|required_env| { - data.env - .iter() - .any(|(name, _)| name == required_env) - }) + OneOrMore::More(envs) => envs + .iter() + .all(|required_env| data.env.iter().any(|(name, _)| name == required_env)), }; if !b { - return Ok(false) + return Ok(false); } } @@ -94,17 +96,16 @@ impl Condition { fn matches_env_eq_cond(&self, data: &ConditionData<'_>) -> Result { if let Some(env_eq_cond) = self.env_eq.as_ref() { - let b = env_eq_cond.iter() - .all(|(req_env_name, req_env_val)| { - data.env - .iter() - .find(|(env_name, _)| env_name == req_env_name) - .map(|(_, env_val)| env_val == req_env_val) - .unwrap_or(false) - }); + let b = env_eq_cond.iter().all(|(req_env_name, req_env_val)| { + data.env + .iter() + .find(|(env_name, _)| env_name == req_env_name) + .map(|(_, env_val)| env_val == req_env_val) + .unwrap_or(false) + }); if !b { - return Ok(false) + return Ok(false); } } @@ -127,16 +128,13 @@ impl Condition { .as_ref() .map(|i| i.as_ref() == req_image) .unwrap_or(false) - }, - OneOrMore::More(req_images) => { - req_images.iter() - .any(|ri| { - data.image_name - .as_ref() - .map(|inam| inam.as_ref() == ri) - .unwrap_or(false) - }) - }, + } + OneOrMore::More(req_images) => req_images.iter().any(|ri| { + data.image_name + .as_ref() + .map(|inam| inam.as_ref() == ri) + .unwrap_or(false) + }), }; Ok(b) @@ -146,7 +144,6 @@ impl Condition { } } - /// Helper type for supporting Vec and T in value /// position of Condition #[derive(Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq, Ord, PartialOrd)] @@ -180,7 +177,6 @@ impl From for OneOrMore { } } - #[derive(Debug)] pub struct ConditionData<'a> { pub(crate) image_name: Option<&'a ImageName>, @@ -207,7 +203,9 @@ impl ConditionCheckable for crate::package::BuildDependency { // If the dependency is a simple one, e.g. "foo =1.2.3", there is no condition, so the // dependency has always to be used crate::package::BuildDependency::Simple(_) => Ok(true), - crate::package::BuildDependency::Conditional { condition, .. } => condition.matches(data), + crate::package::BuildDependency::Conditional { condition, .. 
} => { + condition.matches(data) + } } } } @@ -232,7 +230,10 @@ mod tests { let s = r#"has_env = "foo""#; let c: Condition = toml::from_str(s).expect("Deserializing has_env"); - assert_eq!(c.has_env.unwrap(), OneOrMore::::One(EnvironmentVariableName::from("foo"))); + assert_eq!( + c.has_env.unwrap(), + OneOrMore::::One(EnvironmentVariableName::from("foo")) + ); assert!(c.env_eq.is_none()); assert!(c.in_image.is_none()); } @@ -244,7 +245,10 @@ mod tests { assert_eq!(c.has_env.unwrap(), { OneOrMore::::More({ - vec![EnvironmentVariableName::from("foo"), EnvironmentVariableName::from("bar")] + vec![ + EnvironmentVariableName::from("foo"), + EnvironmentVariableName::from("bar"), + ] }) }); assert!(c.env_eq.is_none()); @@ -272,7 +276,10 @@ mod tests { assert!(c.has_env.is_none()); assert!(c.env_eq.is_none()); - assert_eq!(c.in_image.unwrap(), OneOrMore::::One(String::from("foo"))); + assert_eq!( + c.in_image.unwrap(), + OneOrMore::::One(String::from("foo")) + ); } #[test] @@ -282,7 +289,10 @@ mod tests { assert!(c.has_env.is_none()); assert!(c.env_eq.is_none()); - assert_eq!(c.in_image.unwrap(), OneOrMore::::More(vec![String::from("foo")])); + assert_eq!( + c.in_image.unwrap(), + OneOrMore::::More(vec![String::from("foo")]) + ); } #[test] @@ -348,9 +358,15 @@ mod tests { env: &[], }; - let condition = Condition::new({ - Some(OneOrMore::::One(EnvironmentVariableName::from("A"))) - }, None, None); + let condition = Condition::new( + { + Some(OneOrMore::::One( + EnvironmentVariableName::from("A"), + )) + }, + None, + None, + ); assert!(!condition.matches(&data).unwrap()); } @@ -362,9 +378,15 @@ mod tests { env: &[(EnvironmentVariableName::from("A"), String::from("1"))], }; - let condition = Condition::new({ - Some(OneOrMore::::One(EnvironmentVariableName::from("A"))) - }, None, None); + let condition = Condition::new( + { + Some(OneOrMore::::One( + EnvironmentVariableName::from("A"), + )) + }, + None, + None, + ); assert!(condition.matches(&data).unwrap()); } @@ -376,11 +398,15 @@ mod tests { env: &[], }; - let condition = Condition::new(None, { - let mut hm = BTreeMap::new(); - hm.insert(EnvironmentVariableName::from("A"), String::from("1")); - Some(hm) - }, None); + let condition = Condition::new( + None, + { + let mut hm = BTreeMap::new(); + hm.insert(EnvironmentVariableName::from("A"), String::from("1")); + Some(hm) + }, + None, + ); assert!(!condition.matches(&data).unwrap()); } @@ -392,11 +418,15 @@ mod tests { env: &[(EnvironmentVariableName::from("A"), String::from("1"))], }; - let condition = Condition::new(None, { - let mut hm = BTreeMap::new(); - hm.insert(EnvironmentVariableName::from("A"), String::from("2")); - Some(hm) - }, None); + let condition = Condition::new( + None, + { + let mut hm = BTreeMap::new(); + hm.insert(EnvironmentVariableName::from("A"), String::from("2")); + Some(hm) + }, + None, + ); assert!(!condition.matches(&data).unwrap()); } @@ -408,13 +438,16 @@ mod tests { env: &[(EnvironmentVariableName::from("A"), String::from("1"))], }; - let condition = Condition::new(None, { - let mut hm = BTreeMap::new(); - hm.insert(EnvironmentVariableName::from("A"), String::from("1")); - Some(hm) - }, None); + let condition = Condition::new( + None, + { + let mut hm = BTreeMap::new(); + hm.insert(EnvironmentVariableName::from("A"), String::from("1")); + Some(hm) + }, + None, + ); assert!(condition.matches(&data).unwrap()); } - } diff --git a/src/package/dependency/runtime.rs b/src/package/dependency/runtime.rs index ff3dc418..f077e57f 100644 --- a/src/package/dependency/runtime.rs 
+++ b/src/package/dependency/runtime.rs @@ -12,21 +12,18 @@ use anyhow::Result; use serde::Deserialize; use serde::Serialize; -use crate::package::PackageName; -use crate::package::PackageVersionConstraint; +use crate::package::dependency::condition::Condition; use crate::package::dependency::ParseDependency; use crate::package::dependency::StringEqual; -use crate::package::dependency::condition::Condition; +use crate::package::PackageName; +use crate::package::PackageVersionConstraint; /// A dependency that is packaged and is required during runtime #[derive(Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq, Ord, PartialOrd)] #[serde(untagged)] pub enum Dependency { Simple(String), - Conditional { - name: String, - condition: Condition, - }, + Conditional { name: String, condition: Condition }, } #[cfg(test)] @@ -62,7 +59,9 @@ impl From for Dependency { impl ParseDependency for Dependency { fn parse_as_name_and_version(&self) -> Result<(PackageName, PackageVersionConstraint)> { - crate::package::dependency::parse_package_dependency_string_into_name_and_version(self.as_ref()) + crate::package::dependency::parse_package_dependency_string_into_name_and_version( + self.as_ref(), + ) } } @@ -79,7 +78,8 @@ mod tests { #[test] fn test_parse_dependency() { - let s: TestSetting = toml::from_str(r#"setting = "foo""#).expect("Parsing TestSetting failed"); + let s: TestSetting = + toml::from_str(r#"setting = "foo""#).expect("Parsing TestSetting failed"); match s.setting { Dependency::Simple(name) => assert_eq!(name, "foo", "Expected 'foo', got {name}"), @@ -89,14 +89,19 @@ mod tests { #[test] fn test_parse_conditional_dependency() { - let s: TestSetting = toml::from_str(r#"setting = { name = "foo", condition = { in_image = "bar"} }"#).expect("Parsing TestSetting failed"); + let s: TestSetting = + toml::from_str(r#"setting = { name = "foo", condition = { in_image = "bar"} }"#) + .expect("Parsing TestSetting failed"); match s.setting { Dependency::Conditional { name, condition } => { assert_eq!(name, "foo", "Expected 'foo', got {name}"); assert_eq!(*condition.has_env(), None); assert_eq!(*condition.env_eq(), None); - assert_eq!(condition.in_image().as_ref(), Some(&OneOrMore::::One(String::from("bar")))); - }, + assert_eq!( + condition.in_image().as_ref(), + Some(&OneOrMore::::One(String::from("bar"))) + ); + } other => panic!("Unexpected deserialization to other variant: {other:?}"), } } @@ -117,13 +122,15 @@ mod tests { assert_eq!(name, "foo", "Expected 'foo', got {name}"); assert_eq!(*condition.has_env(), None); assert_eq!(*condition.env_eq(), None); - assert_eq!(condition.in_image().as_ref(), Some(&OneOrMore::::One(String::from("bar")))); - }, + assert_eq!( + condition.in_image().as_ref(), + Some(&OneOrMore::::One(String::from("bar"))) + ); + } other => panic!("Unexpected deserialization to other variant: {other:?}"), } } - #[derive(serde::Serialize, serde::Deserialize)] #[allow(unused)] pub struct TestSettings { @@ -132,14 +139,19 @@ mod tests { #[test] fn test_parse_conditional_dependencies() { - let s: TestSettings = toml::from_str(r#"settings = [{ name = "foo", condition = { in_image = "bar"} }]"#).expect("Parsing TestSetting failed"); + let s: TestSettings = + toml::from_str(r#"settings = [{ name = "foo", condition = { in_image = "bar"} }]"#) + .expect("Parsing TestSetting failed"); match s.settings.get(0).expect("Has not one dependency") { Dependency::Conditional { name, condition } => { assert_eq!(name, "foo", "Expected 'foo', got {name}"); assert_eq!(*condition.has_env(), None); 
assert_eq!(*condition.env_eq(), None); - assert_eq!(condition.in_image().as_ref(), Some(&OneOrMore::::One(String::from("bar")))); - }, + assert_eq!( + condition.in_image().as_ref(), + Some(&OneOrMore::::One(String::from("bar"))) + ); + } other => panic!("Unexpected deserialization to other variant: {other:?}"), } } @@ -159,8 +171,11 @@ mod tests { assert_eq!(name, "foo", "Expected 'foo', got {name}"); assert_eq!(*condition.has_env(), None); assert_eq!(*condition.env_eq(), None); - assert_eq!(condition.in_image().as_ref(), Some(&OneOrMore::::One(String::from("bar")))); - }, + assert_eq!( + condition.in_image().as_ref(), + Some(&OneOrMore::::One(String::from("bar"))) + ); + } other => panic!("Unexpected deserialization to other variant: {other:?}"), } } @@ -180,8 +195,11 @@ mod tests { assert_eq!(name, "foo", "Expected 'foo', got {name}"); assert_eq!(*condition.has_env(), None); assert_eq!(*condition.env_eq(), None); - assert_eq!(condition.in_image().as_ref(), Some(&OneOrMore::::One(String::from("bar")))); - }, + assert_eq!( + condition.in_image().as_ref(), + Some(&OneOrMore::::One(String::from("bar"))) + ); + } other => panic!("Unexpected deserialization to other variant: {other:?}"), } } @@ -205,8 +223,11 @@ mod tests { assert_eq!(name, "foo", "Expected 'foo', got {name}"); assert_eq!(*condition.has_env(), None); assert_eq!(*condition.env_eq(), None); - assert_eq!(condition.in_image().as_ref(), Some(&OneOrMore::::One(String::from("bar")))); - }, + assert_eq!( + condition.in_image().as_ref(), + Some(&OneOrMore::::One(String::from("bar"))) + ); + } other => panic!("Unexpected deserialization to other variant: {other:?}"), } @@ -215,10 +236,12 @@ mod tests { assert_eq!(name, "baz", "Expected 'baz', got {name}"); assert_eq!(*condition.has_env(), None); assert_eq!(*condition.env_eq(), None); - assert_eq!(condition.in_image().as_ref(), Some(&OneOrMore::::One(String::from("boogie")))); - }, + assert_eq!( + condition.in_image().as_ref(), + Some(&OneOrMore::::One(String::from("boogie"))) + ); + } other => panic!("Unexpected deserialization to other variant: {other:?}"), } } } - diff --git a/src/package/package.rs b/src/package/package.rs index 40e0a768..a42f7ba7 100644 --- a/src/package/package.rs +++ b/src/package/package.rs @@ -135,50 +135,83 @@ pub struct DebugPackage<'a>(&'a Package); #[cfg(debug_assertions)] impl<'a> std::fmt::Debug for DebugPackage<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> { - writeln!(f, "Package({name} {version} ({semver}))", + writeln!( + f, + "Package({name} {version} ({semver}))", name = self.0.name, version = self.0.version, - semver = if self.0.version_is_semver { "is semver" } else { "not semver" })?; + semver = if self.0.version_is_semver { + "is semver" + } else { + "not semver" + } + )?; writeln!(f, "\tSources = ")?; - self.0.sources.iter().try_for_each(|(k, v)| writeln!(f, "\t\t{name} = (Url = {url}, Hash = {hash} ({hasht}), {dl})", - name = k, - url = v.url(), - hash = v.hash().value(), - hasht = v.hash().hashtype(), - dl = if *v.download_manually() { "manual download" } else { "automatic download" }, - ))?; + self.0.sources.iter().try_for_each(|(k, v)| { + writeln!( + f, + "\t\t{name} = (Url = {url}, Hash = {hash} ({hasht}), {dl})", + name = k, + url = v.url(), + hash = v.hash().value(), + hasht = v.hash().hashtype(), + dl = if *v.download_manually() { + "manual download" + } else { + "automatic download" + }, + ) + })?; writeln!(f, "\tBuild Dependencies = ")?; - 
self.0.dependencies.build.iter().try_for_each(|d| writeln!(f, "\t\t{d:?}"))?; + self.0 + .dependencies + .build + .iter() + .try_for_each(|d| writeln!(f, "\t\t{d:?}"))?; writeln!(f, "\tRuntime Dependencies = ")?; - self.0.dependencies.runtime.iter().try_for_each(|r| writeln!(f, "\t\t{r:?}"))?; + self.0 + .dependencies + .runtime + .iter() + .try_for_each(|r| writeln!(f, "\t\t{r:?}"))?; writeln!(f, "\tPatches = ")?; - self.0.patches.iter().try_for_each(|p| writeln!(f, "\t\t{}", p.display()))?; + self.0 + .patches + .iter() + .try_for_each(|p| writeln!(f, "\t\t{}", p.display()))?; writeln!(f, "\tEnvironment = ")?; - self.0.environment + self.0 + .environment .as_ref() - .map(|hm| hm.iter().try_for_each(|(k, v)| writeln!(f, "\t\t{k:?} = {v}"))) + .map(|hm| { + hm.iter() + .try_for_each(|(k, v)| writeln!(f, "\t\t{k:?} = {v}")) + }) .transpose()?; writeln!(f, "\tAllowed Images = ")?; - self.0.allowed_images + self.0 + .allowed_images .as_ref() .map(|v| v.iter().try_for_each(|i| writeln!(f, "\t\t{i:?}"))) .transpose()?; writeln!(f, "\tDenied Images = ")?; - self.0.denied_images + self.0 + .denied_images .as_ref() .map(|v| v.iter().try_for_each(|i| writeln!(f, "\t\t{i:?}"))) .transpose()?; writeln!(f, "\tPhases = ")?; - self.0.phases + self.0 + .phases .iter() .try_for_each(|(k, _)| writeln!(f, "\t\t{k:?} = ..."))?; diff --git a/src/package/script.rs b/src/package/script.rs index e85574e8..07fb5893 100644 --- a/src/package/script.rs +++ b/src/package/script.rs @@ -21,7 +21,6 @@ use handlebars::{ Context, Handlebars, Helper, HelperDef, HelperResult, JsonRender, Output, PathAndJson, RenderContext, RenderError, }; -use tracing::trace; use serde::Deserialize; use serde::Serialize; use syntect::easy::HighlightLines; @@ -29,6 +28,7 @@ use syntect::highlighting::ThemeSet; use syntect::parsing::SyntaxSet; use syntect::util::{as_24_bit_terminal_escaped, LinesWithEndings}; use tokio::process::Command; +use tracing::trace; use crate::package::Package; use crate::package::Phase; @@ -138,8 +138,7 @@ impl<'a> HighlightedScript<'a> { LinesWithEndings::from(&self.script.0) .map(move |line| -> Result { - h - .highlight_line(line, &self.ps) + h.highlight_line(line, &self.ps) .with_context(|| anyhow!("Could not highlight the following line: {}", line)) .map(|r| as_24_bit_terminal_escaped(&r[..], true) + reset_all_attributes) }) @@ -148,7 +147,8 @@ impl<'a> HighlightedScript<'a> { } pub fn lines_numbered(&'a self) -> Result + 'a> { - self.lines().map(|iter| iter.enumerate().map(|(n, l)| (n + 1, l))) + self.lines() + .map(|iter| iter.enumerate().map(|(n, l)| (n + 1, l))) } } @@ -248,7 +248,13 @@ impl<'a> ScriptBuilder<'a> { } hb.render("script", package) - .with_context(|| anyhow!("Rendering script for package {} {} failed", package.name(), package.version())) + .with_context(|| { + anyhow!( + "Rendering script for package {} {} failed", + package.name(), + package.version() + ) + }) .map_err(Error::from) } } diff --git a/src/package/source.rs b/src/package/source.rs index e65b7fe4..eb62eac4 100644 --- a/src/package/source.rs +++ b/src/package/source.rs @@ -12,9 +12,9 @@ use anyhow::anyhow; use anyhow::Context; use anyhow::Result; use getset::Getters; -use tracing::trace; use serde::Deserialize; use serde::Serialize; +use tracing::trace; use url::Url; #[derive(Clone, Debug, Serialize, Deserialize, Getters)] @@ -52,7 +52,8 @@ pub struct SourceHash { impl SourceHash { pub async fn matches_hash_of(&self, reader: R) -> Result<()> { trace!("Hashing buffer with: {:?}", self.hashtype); - let h = self.hashtype + let h = self 
+            .hashtype
             .hash_from_reader(reader)
             .await
             .context("Hashing failed")?;
@@ -93,7 +94,10 @@ pub enum HashType {
 }
 
 impl HashType {
-    async fn hash_from_reader<R: tokio::io::AsyncRead + Unpin>(&self, mut reader: R) -> Result<String> {
+    async fn hash_from_reader<R: tokio::io::AsyncRead + Unpin>(
+        &self,
+        mut reader: R,
+    ) -> Result<String> {
         use tokio::io::AsyncReadExt;
 
         let mut buffer = [0; 1024];
@@ -105,7 +109,8 @@ impl HashType {
                 trace!("SHA1 hashing buffer");
                 let mut m = sha1::Sha1::new();
                 loop {
-                    let count = reader.read(&mut buffer)
+                    let count = reader
+                        .read(&mut buffer)
                         .await
                         .context("Reading buffer failed")?;
 
@@ -124,7 +129,8 @@ impl HashType {
                 trace!("SHA256 hashing buffer");
                 let mut m = sha2::Sha256::new();
                 loop {
-                    let count = reader.read(&mut buffer)
+                    let count = reader
+                        .read(&mut buffer)
                         .await
                         .context("Reading buffer failed")?;
 
@@ -145,7 +151,8 @@ impl HashType {
                 trace!("SHA512 hashing buffer");
                 let mut m = sha2::Sha512::new();
                 loop {
-                    let count = reader.read(&mut buffer)
+                    let count = reader
+                        .read(&mut buffer)
                         .await
                         .context("Reading buffer failed")?;
diff --git a/src/package/version.rs b/src/package/version.rs
index ba0aa196..066fbcc3 100644
--- a/src/package/version.rs
+++ b/src/package/version.rs
@@ -67,7 +67,6 @@ impl std::convert::TryFrom<&str> for PackageVersionConstraint {
             .context("Failed to parse package version constraint")
             .context("A package version constraint must have a comparator and a version string, like so: =0.1.0")
             .map_err(Error::from)
-
     }
 }
 
@@ -137,39 +136,17 @@ mod tests {
         assert!(PackageVersion::parser().parse(b"=a1").is_err());
         assert!(PackageVersion::parser().parse(b"a").is_err());
 
-        assert!(PackageVersionConstraint::parser()
-            .parse(b"")
-            .is_err());
-        assert!(PackageVersionConstraint::parser()
-            .parse(b"=")
-            .is_err());
-        assert!(PackageVersionConstraint::parser()
-            .parse(b"*1")
-            .is_err());
-        assert!(PackageVersionConstraint::parser()
-            .parse(b">1")
-            .is_err());
-        assert!(PackageVersionConstraint::parser()
-            .parse(b"<1")
-            .is_err());
-        assert!(PackageVersionConstraint::parser()
-            .parse(b"=a")
-            .is_err());
-        assert!(PackageVersionConstraint::parser()
-            .parse(b"=.a")
-            .is_err());
-        assert!(PackageVersionConstraint::parser()
-            .parse(b"=.1")
-            .is_err());
-        assert!(PackageVersionConstraint::parser()
-            .parse(b"=a1")
-            .is_err());
-        assert!(PackageVersionConstraint::parser()
-            .parse(b"1")
-            .is_err());
-        assert!(PackageVersionConstraint::parser()
-            .parse(b"a")
-            .is_err());
+        assert!(PackageVersionConstraint::parser().parse(b"").is_err());
+        assert!(PackageVersionConstraint::parser().parse(b"=").is_err());
+        assert!(PackageVersionConstraint::parser().parse(b"*1").is_err());
+        assert!(PackageVersionConstraint::parser().parse(b">1").is_err());
+        assert!(PackageVersionConstraint::parser().parse(b"<1").is_err());
+        assert!(PackageVersionConstraint::parser().parse(b"=a").is_err());
+        assert!(PackageVersionConstraint::parser().parse(b"=.a").is_err());
+        assert!(PackageVersionConstraint::parser().parse(b"=.1").is_err());
+        assert!(PackageVersionConstraint::parser().parse(b"=a1").is_err());
+        assert!(PackageVersionConstraint::parser().parse(b"1").is_err());
+        assert!(PackageVersionConstraint::parser().parse(b"a").is_err());
     }
 
     #[test]
diff --git a/src/repository/fs/element.rs b/src/repository/fs/element.rs
index 75a417de..12c6fb9b 100644
--- a/src/repository/fs/element.rs
+++ b/src/repository/fs/element.rs
@@ -18,7 +18,7 @@ use crate::repository::fs::path::PathComponent;
 #[derive(Debug)]
 pub enum Element {
     File(String),
-    Dir(HashMap<PathComponent, Element>)
+    Dir(HashMap<PathComponent, Element>),
 }
 
 impl Element {
@@ -30,4 +30,3 @@ impl Element {
         }
     }
 }
-
diff --git a/src/repository/fs/mod.rs b/src/repository/fs/mod.rs index ad2c3e31..9b047723 100644 --- a/src/repository/fs/mod.rs +++ b/src/repository/fs/mod.rs @@ -13,4 +13,3 @@ pub use representation::FileSystemRepresentation; mod element; mod path; - diff --git a/src/repository/fs/path.rs b/src/repository/fs/path.rs index b8955763..95294f61 100644 --- a/src/repository/fs/path.rs +++ b/src/repository/fs/path.rs @@ -48,7 +48,7 @@ impl TryFrom<&std::path::Component<'_>> for PathComponent { } else { Ok(PathComponent::DirName(filename.to_string())) } - }, + } } } } @@ -64,8 +64,7 @@ impl PathComponent { pub fn dir_name(&self) -> Option<&str> { match self { PathComponent::PkgToml => None, - PathComponent::DirName(dn) => Some(dn) + PathComponent::DirName(dn) => Some(dn), } } } - diff --git a/src/repository/fs/representation.rs b/src/repository/fs/representation.rs index 02423cdd..5901fef9 100644 --- a/src/repository/fs/representation.rs +++ b/src/repository/fs/representation.rs @@ -14,16 +14,16 @@ use std::convert::TryInto; use std::path::Path; use std::path::PathBuf; +use anyhow::anyhow; use anyhow::Context; use anyhow::Error; use anyhow::Result; -use anyhow::anyhow; use resiter::AndThen; use resiter::Filter; use resiter::Map; +use tracing::trace; use walkdir::DirEntry; use walkdir::WalkDir; -use tracing::trace; use crate::repository::fs::element::Element; use crate::repository::fs::path::PathComponent; @@ -63,7 +63,10 @@ impl FileSystemRepresentation { .unwrap_or(usize::MAX) // if usize is smaller than u64, usize::MAX will do }; - trace!("Loading files from filesystem starting at: {}", root.display()); + trace!( + "Loading files from filesystem starting at: {}", + root.display() + ); trace!("Loading with a maximum of {} files open", max_files_open); WalkDir::new(root) .follow_links(false) @@ -83,18 +86,21 @@ impl FileSystemRepresentation { for cmp in de_path.components() { match PathComponent::try_from(&cmp)? 
{
                        PathComponent::PkgToml => {
-                            curr_hm.entry(PathComponent::PkgToml)
+                            curr_hm
+                                .entry(PathComponent::PkgToml)
                                 .or_insert(Element::File(load_file(de_path)?));
-                        },
+                        }
                         dir @ PathComponent::DirName(_) => {
-                            curr_hm.entry(dir.clone())
+                            curr_hm
+                                .entry(dir.clone())
                                 .or_insert_with(|| Element::Dir(HashMap::new()));
 
-                            curr_hm = curr_hm.get_mut(&dir)
+                            curr_hm = curr_hm
+                                .get_mut(&dir)
                                 .unwrap() // safe, because we just inserted it
                                 .get_map_mut()
                                 .unwrap(); // safe, because we inserted Element::Dir
-                        },
+                        }
                     }
                 }
 
@@ -124,15 +130,17 @@ impl FileSystemRepresentation {
         // Helper to check whether a tree contains pkg.toml files, recursively
         fn toml_files_in_tree(hm: &HashMap<PathComponent, Element>) -> bool {
             if let Some(Element::File(_)) = hm.get(&PathComponent::PkgToml) {
-                return true
+                return true;
             }
 
             for value in hm.values() {
                 match value {
                     Element::File(_) => return true,
-                    Element::Dir(hm) => if toml_files_in_tree(hm) {
-                        return true
-                    },
+                    Element::Dir(hm) => {
+                        if toml_files_in_tree(hm) {
+                            return true;
+                        }
+                    }
                 }
             }
             false
@@ -146,10 +154,13 @@ impl FileSystemRepresentation {
                     // if I have a file now, and the current hashmap only holds either
                     // * No directory
                     // * or a directory where all subdirs do not contain a pkg.toml
-                    return Ok(curr_hm.values().count() == 1 || !toml_files_in_tree(curr_hm))
-                },
+                    return Ok(curr_hm.values().count() == 1 || !toml_files_in_tree(curr_hm));
+                }
                 Some(Element::Dir(hm)) => curr_hm = hm,
-                None => anyhow::bail!("Path component '{:?}' was not loaded in map, this is most likely a bug", elem),
+                None => anyhow::bail!(
+                    "Path component '{:?}' was not loaded in map, this is most likely a bug",
+                    elem
+                ),
             }
         }
 
@@ -187,7 +198,10 @@ impl FileSystemRepresentation {
                     curr_path = curr_path.join(elem.dir_name().unwrap()); // unwrap safe by above match
                     curr_hm = hm;
                 }
-                None => anyhow::bail!("Path component '{:?}' was not loaded in map, this is most likely a bug", elem),
+                None => anyhow::bail!(
+                    "Path component '{:?}' was not loaded in map, this is most likely a bug",
+                    elem
+                ),
             }
         }
 
@@ -198,7 +212,11 @@
 /// Helper to check whether a DirEntry points to a hidden file
 fn is_hidden(entry: &DirEntry) -> bool {
     trace!("Check {:?} is hidden", entry);
-    entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
+    entry
+        .file_name()
+        .to_str()
+        .map(|s| s.starts_with('.'))
+        .unwrap_or(false)
 }
 
 /// Helper to check whether a DirEntry points to a directory
@@ -210,7 +228,11 @@ fn is_dir(entry: &DirEntry) -> bool {
 /// Helper to check whether a DirEntry points to a pkg.toml file
 fn is_pkgtoml(entry: &DirEntry) -> bool {
     trace!("Check {:?} == 'pkg.toml'", entry);
-    entry.file_name().to_str().map(|s| s == "pkg.toml").unwrap_or(false)
+    entry
+        .file_name()
+        .to_str()
+        .map(|s| s == "pkg.toml")
+        .unwrap_or(false)
 }
 
 /// Helper fn to load a Path into memory as String
@@ -226,7 +248,10 @@ mod tests {
     use super::*;
 
     fn dir(name: &str, hm: Vec<(PathComponent, Element)>) -> (PathComponent, Element) {
-        (PathComponent::DirName(name.to_string()), Element::Dir(hm.into_iter().collect()))
+        (
+            PathComponent::DirName(name.to_string()),
+            Element::Dir(hm.into_iter().collect()),
+        )
     }
 
     fn pkgtoml(content: &str) -> (PathComponent, Element) {
@@ -250,21 +275,20 @@ mod tests {
             // /
             // /foo
            // /foo/pkg.toml
-            elements: vec![
-                dir("foo", vec![
-                    pkgtoml("content")
-                ])
-            ].into_iter().collect(),
+            elements: vec![dir("foo", vec![pkgtoml("content")])]
+                .into_iter()
+                .collect(),
 
-            files: vec![
-                PathBuf::from("foo/pkg.toml")
-            ],
+            files: vec![PathBuf::from("foo/pkg.toml")],
}; let path = "foo/pkg.toml".as_ref(); assert!(fsr.is_leaf_file(path).unwrap()); - assert_eq!(fsr.get_files_for(path).unwrap(), vec![(pb("foo/pkg.toml"), &s("content"))]); + assert_eq!( + fsr.get_files_for(path).unwrap(), + vec![(pb("foo/pkg.toml"), &s("content"))] + ); } #[test] @@ -278,25 +302,23 @@ mod tests { // /foo/bar // /foo/baz // /foo/baz/pkg.toml - elements: vec![ - dir("foo", vec![ - dir("bar", vec![ - dir("baz", vec![ - pkgtoml("content"), - ]) - ]) - ]) - ].into_iter().collect(), + elements: vec![dir( + "foo", + vec![dir("bar", vec![dir("baz", vec![pkgtoml("content")])])], + )] + .into_iter() + .collect(), - files: vec![ - PathBuf::from("foo/bar/baz/pkg.toml") - ], + files: vec![PathBuf::from("foo/bar/baz/pkg.toml")], }; let path = "foo/bar/baz/pkg.toml".as_ref(); assert!(fsr.is_leaf_file(path).unwrap()); - assert_eq!(fsr.get_files_for(path).unwrap(), vec![(pb("foo/bar/baz/pkg.toml"), &s("content"))]); + assert_eq!( + fsr.get_files_for(path).unwrap(), + vec![(pb("foo/bar/baz/pkg.toml"), &s("content"))] + ); } #[test] @@ -310,22 +332,23 @@ mod tests { // /foo/bar // /foo/baz // /foo/baz/pkg.toml - elements: vec![ - dir("foo", vec![ + elements: vec![dir( + "foo", + vec![ pkgtoml("content1"), - dir("bar", vec![ - pkgtoml("content2"), - dir("baz", vec![ - pkgtoml("content3"), - ]) - ]) - ]) - ].into_iter().collect(), + dir( + "bar", + vec![pkgtoml("content2"), dir("baz", vec![pkgtoml("content3")])], + ), + ], + )] + .into_iter() + .collect(), files: vec![ PathBuf::from("foo/pkg.toml"), PathBuf::from("foo/bar/pkg.toml"), - PathBuf::from("foo/bar/baz/pkg.toml") + PathBuf::from("foo/bar/baz/pkg.toml"), ], }; @@ -343,11 +366,14 @@ mod tests { let path = "foo/bar/baz/pkg.toml".as_ref(); assert!(fsr.is_leaf_file(path).unwrap()); - assert_eq!(fsr.get_files_for(path).unwrap(), vec![ - (pb("foo/pkg.toml"), &s("content1")), - (pb("foo/bar/pkg.toml"), &s("content2")), - (pb("foo/bar/baz/pkg.toml"), &s("content3")), - ]); + assert_eq!( + fsr.get_files_for(path).unwrap(), + vec![ + (pb("foo/pkg.toml"), &s("content1")), + (pb("foo/bar/pkg.toml"), &s("content2")), + (pb("foo/bar/baz/pkg.toml"), &s("content3")), + ] + ); } } @@ -362,20 +388,19 @@ mod tests { // /foo/bar // /foo/baz // /foo/baz/pkg.toml - elements: vec![ - dir("foo", vec![ + elements: vec![dir( + "foo", + vec![ pkgtoml("content1"), - dir("bar", vec![ - dir("baz", vec![ - pkgtoml("content3"), - ]) - ]) - ]) - ].into_iter().collect(), + dir("bar", vec![dir("baz", vec![pkgtoml("content3")])]), + ], + )] + .into_iter() + .collect(), files: vec![ PathBuf::from("foo/pkg.toml"), - PathBuf::from("foo/bar/baz/pkg.toml") + PathBuf::from("foo/bar/baz/pkg.toml"), ], }; @@ -384,10 +409,13 @@ mod tests { let path = "foo/bar/baz/pkg.toml".as_ref(); assert!(fsr.is_leaf_file(path).unwrap()); - assert_eq!(fsr.get_files_for(path).unwrap(), vec![ - (pb("foo/pkg.toml"), &s("content1")), - (pb("foo/bar/baz/pkg.toml"), &s("content3")), - ]); + assert_eq!( + fsr.get_files_for(path).unwrap(), + vec![ + (pb("foo/pkg.toml"), &s("content1")), + (pb("foo/bar/baz/pkg.toml"), &s("content3")), + ] + ); } #[test] @@ -403,18 +431,17 @@ mod tests { // /foo/baz/pkg.toml elements: vec![ pkgtoml("content1"), - dir("foo", vec![ - dir("bar", vec![ - dir("baz", vec![ - pkgtoml("content3"), - ]) - ]) - ]) - ].into_iter().collect(), + dir( + "foo", + vec![dir("bar", vec![dir("baz", vec![pkgtoml("content3")])])], + ), + ] + .into_iter() + .collect(), files: vec![ PathBuf::from("pkg.toml"), - PathBuf::from("foo/bar/baz/pkg.toml") + PathBuf::from("foo/bar/baz/pkg.toml"), ], 
};
 
@@ -423,10 +450,12 @@ mod tests {
         let path = "foo/bar/baz/pkg.toml".as_ref();
 
         assert!(fsr.is_leaf_file(path).unwrap());
-        assert_eq!(fsr.get_files_for(path).unwrap(), vec![
-            (pb("pkg.toml"), &s("content1")),
-            (pb("foo/bar/baz/pkg.toml"), &s("content3")),
-        ]);
+        assert_eq!(
+            fsr.get_files_for(path).unwrap(),
+            vec![
+                (pb("pkg.toml"), &s("content1")),
+                (pb("foo/bar/baz/pkg.toml"), &s("content3")),
+            ]
+        );
     }
-
 }
diff --git a/src/repository/mod.rs b/src/repository/mod.rs
index 8a2e9e93..5167a781 100644
--- a/src/repository/mod.rs
+++ b/src/repository/mod.rs
@@ -13,4 +13,3 @@ mod repository;
 pub use repository::*;
 
 mod fs;
-
diff --git a/src/repository/repository.rs b/src/repository/repository.rs
index 70faa809..848b2d42 100644
--- a/src/repository/repository.rs
+++ b/src/repository/repository.rs
@@ -16,10 +16,10 @@ use anyhow::anyhow;
 use anyhow::Context;
 use anyhow::Error;
 use anyhow::Result;
-use tracing::trace;
 use resiter::AndThen;
 use resiter::FilterMap;
 use resiter::Map;
+use tracing::trace;
 
 use crate::package::Package;
 use crate::package::PackageName;
@@ -54,7 +54,8 @@ impl Repository {
 
     fn get_patches(config: &Config) -> Result<Vec<PathBuf>> {
         match config.get_array("patches") {
-            Ok(v) => v.into_iter()
+            Ok(v) => v
+                .into_iter()
                 .map(config::Value::into_str)
                 .map_err(Error::from)
                 .map_err(|e| e.context("patches must be strings"))
diff --git a/src/source/mod.rs b/src/source/mod.rs
index 092aca69..636f4243 100644
--- a/src/source/mod.rs
+++ b/src/source/mod.rs
@@ -101,10 +101,7 @@ impl SourceEntry {
             .context("Opening file failed")?;
 
         trace!("Reader constructed for path: {}", p.display());
-        self.package_source
-            .hash()
-            .matches_hash_of(reader)
-            .await
+        self.package_source.hash().matches_hash_of(reader).await
     }
 
     pub async fn create(&self) -> Result<tokio::fs::File> {
diff --git a/src/ui/mod.rs b/src/ui/mod.rs
index db962f88..64aff12d 100644
--- a/src/ui/mod.rs
+++ b/src/ui/mod.rs
@@ -13,8 +13,8 @@
 use std::path::Path;
 use std::path::PathBuf;
 
-use anyhow::Result;
 use anyhow::anyhow;
+use anyhow::Result;
 use itertools::Itertools;
 
 use crate::config::Configuration;
diff --git a/src/ui/package.rs b/src/ui/package.rs
index cbca44bb..5d0fef57 100644
--- a/src/ui/package.rs
+++ b/src/ui/package.rs
@@ -59,17 +59,30 @@ impl PackagePrintFlags {
     }
 }
 
-
 pub trait PreparePrintable<'a>
-    where Self: Borrow<Package> + Sized
+where
+    Self: Borrow<Package> + Sized,
 {
-    fn prepare_print(self, config: &'a Configuration, flags: &'a PackagePrintFlags, handlebars: &'a Handlebars<'a>, i: usize) -> PreparePrintPackage<'a, Self>;
+    fn prepare_print(
+        self,
+        config: &'a Configuration,
+        flags: &'a PackagePrintFlags,
+        handlebars: &'a Handlebars<'a>,
+        i: usize,
+    ) -> PreparePrintPackage<'a, Self>;
 }
 
 impl<'a, P> PreparePrintable<'a> for P
-    where P: Borrow<Package>
+where
+    P: Borrow<Package>,
 {
-    fn prepare_print(self, config: &'a Configuration, flags: &'a PackagePrintFlags, handlebars: &'a Handlebars<'a>, i: usize) -> PreparePrintPackage<'a, P> {
+    fn prepare_print(
+        self,
+        config: &'a Configuration,
+        flags: &'a PackagePrintFlags,
+        handlebars: &'a Handlebars<'a>,
+        i: usize,
+    ) -> PreparePrintPackage<'a, P> {
         PreparePrintPackage {
             package: self,
             config,
@@ -88,7 +101,6 @@ pub struct PreparePrintPackage<'a, P: Borrow<Package>> {
     i: usize,
 }
 
-
 pub fn handlebars_for_package_printing(format: &str) -> Result<Handlebars> {
     let mut hb = Handlebars::new();
     hb.register_escape_fn(handlebars::no_escape);
@@ -98,11 +110,13 @@ pub fn handlebars_for_package_printing(format: &str) -> Result<Handlebars> {
 
 impl<'a, P: Borrow<Package>> PreparePrintPackage<'a, P> {
     pub fn into_displayable(self) -> Result<PrintablePackage> {
-        let script
= ScriptBuilder::new(&Shebang::from(self.config.shebang().clone())).build(
-            self.package.borrow(),
-            self.config.available_phases(),
-            *self.config.strict_script_interpolation(),
-        ).context("Rendering script for printing it failed")?;
+        let script = ScriptBuilder::new(&Shebang::from(self.config.shebang().clone()))
+            .build(
+                self.package.borrow(),
+                self.config.available_phases(),
+                *self.config.strict_script_interpolation(),
+            )
+            .context("Rendering script for printing it failed")?;
 
         let script = crate::ui::script_to_printable(
             &script,
@@ -110,12 +124,17 @@ impl<'a, P: Borrow<Package>> PreparePrintPackage<'a, P> {
             self.config
                 .script_highlight_theme()
                 .as_ref()
-                .ok_or_else(|| anyhow!("Highlighting for script enabled, but no theme configured"))?,
+                .ok_or_else(|| {
+                    anyhow!("Highlighting for script enabled, but no theme configured")
+                })?,
             self.flags.script_line_numbers,
         )?;
 
         let mut data = BTreeMap::new();
-        data.insert("i", serde_json::Value::Number(serde_json::Number::from(self.i)));
+        data.insert(
+            "i",
+            serde_json::Value::Number(serde_json::Number::from(self.i)),
+        );
         data.insert("p", serde_json::to_value(self.package.borrow())?);
         data.insert("script", serde_json::Value::String(script));
         data.insert("print_any", serde_json::Value::Bool(self.flags.print_any()));
@@ -170,11 +189,12 @@ impl<'a, P: Borrow<Package>> PreparePrintPackage<'a, P> {
     }
 }
 
-pub struct PrintablePackage { string: String }
+pub struct PrintablePackage {
+    string: String,
+}
 
 impl std::fmt::Display for PrintablePackage {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f, "{}", self.string)
     }
 }
-
diff --git a/src/util/filters.rs b/src/util/filters.rs
index 1b0a0e34..7002b896 100644
--- a/src/util/filters.rs
+++ b/src/util/filters.rs
@@ -11,8 +11,8 @@ use anyhow::Error;
 use anyhow::Result;
 
 use filters::failable::filter::FailableFilter;
-use tracing::trace;
 use resiter::Map;
+use tracing::trace;
 
 use crate::package::Package;
 use crate::package::PackageName;
diff --git a/src/util/mod.rs b/src/util/mod.rs
index cd480499..85ce136d 100644
--- a/src/util/mod.rs
+++ b/src/util/mod.rs
@@ -39,7 +39,6 @@ impl AsRef<str> for EnvironmentVariableName {
     }
 }
 
-
 pub mod docker;
 pub mod env;
 pub mod filters;
diff --git a/src/util/progress.rs b/src/util/progress.rs
index 15615533..9f69c0da 100644
--- a/src/util/progress.rs
+++ b/src/util/progress.rs
@@ -8,8 +8,8 @@
 // SPDX-License-Identifier: EPL-2.0
 //
 
-use indicatif::*;
 use getset::CopyGetters;
+use indicatif::*;
 
 #[derive(Clone, Debug, CopyGetters)]
 pub struct ProgressBars {
@@ -21,10 +21,7 @@ pub struct ProgressBars {
 
 impl ProgressBars {
     pub fn setup(bar_template: String, hide: bool) -> Self {
-        ProgressBars {
-            bar_template,
-            hide,
-        }
+        ProgressBars { bar_template, hide }
     }
 
     pub fn bar(&self) -> anyhow::Result<ProgressBar> {