diff --git a/R/as_pkgrefs.R b/R/as_pkgrefs.R index f974a14..e4214af 100644 --- a/R/as_pkgrefs.R +++ b/R/as_pkgrefs.R @@ -35,10 +35,10 @@ as_pkgrefs.default <- function(x, ...) { #' @rdname as_pkgrefs #' @export as_pkgrefs.character <- function(x, bioc_version = NULL, ...) { - if(.is_renv_lockfile(x)){ + if(.is_renv_lockfile(x)) { return(.extract_pkgrefs_renv_lockfile(path = x)) } - if(.is_directory(x)){ + if(.is_directory(x)) { return(.extract_pkgrefs_dir(x,bioc_version)) } return(.normalize_pkgs(pkgs = x, bioc_version = bioc_version)) @@ -50,22 +50,25 @@ as_pkgrefs.sessionInfo <- function(x, ...) { vapply(X = x$otherPkgs, FUN = .extract_pkgref_packageDescription, FUN.VALUE = character(1), USE.NAMES = FALSE) } -.extract_pkgrefs_renv_lockfile <- function(path){ +.extract_pkgrefs_renv_lockfile <- function(path) { lockfile <- .parse_renv_lockfile(path) sources <- vapply(lockfile[["Packages"]],`[[`,character(1),"Source",USE.NAMES = FALSE) pkgs <- c() - if("Repository"%in%sources){ - pkgs <- c(pkgs, paste0("cran::",vapply(lockfile[["Packages"]][sources=="Repository"],`[[`,character(1),"Package",USE.NAMES = FALSE))) + if("Repository" %in% sources) { + pkgs <- c(pkgs, paste0("cran::",vapply(lockfile[["Packages"]][sources=="Repository"],`[[`,character(1),"Package",USE.NAMES = FALSE))) } - if("Bioconductor"%in%sources){ - pkgs <- c(pkgs,paste0("bioc::",vapply(lockfile[["Packages"]][sources=="Bioconductor"],`[[`,character(1),"Package",USE.NAMES = FALSE))) + if("Bioconductor" %in% sources) { + pkgs <- c(pkgs,paste0("bioc::",vapply(lockfile[["Packages"]][sources=="Bioconductor"],`[[`,character(1),"Package",USE.NAMES = FALSE))) } - if("GitHub"%in%sources){ - pkgs <- c(pkgs, - paste0("github::", - vapply(lockfile[["Packages"]][sources=="GitHub"],`[[`,character(1), "RemoteUsername", USE.NAMES = FALSE),"/", - vapply(lockfile[["Packages"]][sources=="GitHub"],`[[`,character(1), "Package", USE.NAMES = FALSE)) - ) + if("GitHub" %in% sources) { + pkgs <- c(pkgs, + paste0("github::", + vapply(lockfile[["Packages"]][sources=="GitHub"],`[[`,character(1), "RemoteUsername", USE.NAMES = FALSE),"/", + vapply(lockfile[["Packages"]][sources=="GitHub"],`[[`,character(1), "Package", USE.NAMES = FALSE)) + ) + } + if ("Local" %in% sources) { + pkgs <- c(pkgs, paste0("local::", vapply(lockfile[["Packages"]][sources=="Local"],`[[`,character(1),"RemoteUrl",USE.NAMES = FALSE))) } return(pkgs) } @@ -78,23 +81,19 @@ as_pkgrefs.sessionInfo <- function(x, ...) { if (grepl("bioconductor", packageDescription[["URL"]])) { return(paste0("bioc::",handle)) } - ## uncomment this when #57 is implemented - ##if (basename(attr(packageDescription, "file")) == "DESCRIPTION") { + if (basename(attr(packageDescription, "file")) == "DESCRIPTION") { ## probably load via devtools::load_all - ## return(paste0("local::", dirname(attr(packageDescription, "file")))) - ##} - ## TODO bioc - ## if (basename(attr(packageDescription, "file")) == "package.rds") { + return(paste0("local::", dirname(attr(packageDescription, "file")))) + } return(paste0("cran::", handle)) - ## } } -.is_renv_lockfile <- function(path){ +.is_renv_lockfile <- function(path) { # assuming all renv lockfiles are called renv.lock and path is only length 1 - if(length(path)!=1){ + if(length(path)!=1) { return(FALSE) } - if(isFALSE(file.exists(path))){ + if(isFALSE(file.exists(path))) { return(FALSE) } if (isFALSE(basename(path) == "renv.lock")) { @@ -103,24 +102,24 @@ as_pkgrefs.sessionInfo <- function(x, ...) 
{ TRUE } -.parse_renv_lockfile <- function(path){ +.parse_renv_lockfile <- function(path) { lockfile <- jsonlite::fromJSON(path, simplifyVector = FALSE) # class(lockfile) <- "renv_lockfile" lockfile } -.is_directory <- function(path){ - if(length(path)!=1){ +.is_directory <- function(path) { + if(length(path)!=1) { return(FALSE) } - if(isFALSE(dir.exists(path))){ + if(isFALSE(dir.exists(path))) { return(FALSE) } TRUE } -.extract_pkgrefs_dir <- function(path, bioc_version = NULL){ +.extract_pkgrefs_dir <- function(path, bioc_version = NULL) { pkgs <- suppressMessages(unique(renv::dependencies(path,progress = FALSE)$Package)) warning("scanning directories for R packages cannot detect github packages.",call. = FALSE) return(.normalize_pkgs(pkgs = pkgs, bioc_version = bioc_version)) -} \ No newline at end of file +} diff --git a/R/installation.R b/R/installation.R index a438c3a..2e627ad 100644 --- a/R/installation.R +++ b/R/installation.R @@ -40,23 +40,23 @@ } ## installation simulation installed_pkgrefs <- c() - github_pkgrefs <- c() + noncranlike_pkgrefs <- c() ## github and local are noncran-like needed_pkgrefs <- dep$keys() ## install all terminal nodes for (pkgref in needed_pkgrefs) { - if (.is_github(pkgref)) { - github_pkgrefs <- c(github_pkgrefs, pkgref) - next() + if (.parse_pkgref(pkgref, return_handle = FALSE) %in% c("github", "local")) { + noncranlike_pkgrefs <- c(noncranlike_pkgrefs, pkgref) + next() } if (is.null(dep$get(pkgref))) { installed_pkgrefs <- c(installed_pkgrefs, pkgref) } } loop_counter <- 0 - while(length(setdiff(needed_pkgrefs, c(installed_pkgrefs, github_pkgrefs))) != 0) { + while(length(setdiff(needed_pkgrefs, c(installed_pkgrefs, noncranlike_pkgrefs))) != 0) { unfulfilled_pkgrefs <- c() for (pkgref in needed_pkgrefs) { - if (!pkgref %in% installed_pkgrefs && !pkgref %in% github_pkgrefs) { + if (!pkgref %in% installed_pkgrefs && !pkgref %in% noncranlike_pkgrefs) { ## check requirement requirement_fulfilled <- length(setdiff(dep$get(pkgref), installed_pkgrefs)) == 0 if (requirement_fulfilled) { @@ -71,7 +71,7 @@ stop("Can't determine installation order. Please report the to the developers:\n", paste0(unfulfilled_pkgrefs, collapse = ","), call. = FALSE) } } - ordered_pkgrefs <- c(installed_pkgrefs, github_pkgrefs) + ordered_pkgrefs <- c(installed_pkgrefs, noncranlike_pkgrefs) ordered_x <- vapply(ordered_pkgrefs, function(x) pkgname$get(x), character(1), USE.NAMES = FALSE) ordered_version <- vapply(ordered_pkgrefs, function(x) version$get(x), character(1), USE.NAMES = FALSE) ordered_source <- vapply(ordered_pkgrefs, function(x) .parse_pkgref(x, return_handle = FALSE), character(1), USE.NAMES = FALSE) @@ -129,7 +129,11 @@ } .write_rang_as_comment <- function(rang, con, path, verbose, lib, - cran_mirror, check_cran_mirror, bioc_mirror) { + cran_mirror, check_cran_mirror, bioc_mirror) { + if (isTRUE(any(grepl("^local::", .extract_pkgrefs(rang))))) { + cat("## ## WARNING:", file = con) + cat("## ## Local packages found. The following instructions are not reproducible.", file = con) + } cat("## ## To reconstruct this file, please install version", as.character(utils::packageVersion("rang")), "of `rang` and run:\n", file = con) cat("## rang <- \n", file = con) @@ -181,6 +185,15 @@ } } +.check_tarball_path <- function(tarball_path, x, dir = FALSE) { + ## raise error when tarball_path doesn't exist + if ((isFALSE(dir) && isFALSE(file.exists(tarball_path))) || + (isTRUE(dir) && isFALSE(dir.exists(tarball_path)))) { + stop(x, " can't be cached.", call. 
= FALSE) + } + invisible() +} + .cache_pkg_cran <- function(x, version, cache_dir, cran_mirror, verbose) { url <- paste(cran_mirror, "src/contrib/Archive/", x, "/", x, "_", version, ".tar.gz", sep = "") tarball_path <- file.path(cache_dir, paste(x, "_", version, ".tar.gz", sep = "")) @@ -191,18 +204,14 @@ url <- paste(cran_mirror, "src/contrib/", x, "_", version, ".tar.gz", sep = "") utils::download.file(url, destfile = tarball_path, quiet = !verbose) }) - if (!file.exists(tarball_path)) { - warning(names(x), "(", x,") can't be cache.") - } + .check_tarball_path(tarball_path, x) } .cache_pkg_bioc <- function(x, version, cache_dir, bioc_mirror, bioc_version, verbose, uid) { url <- paste(bioc_mirror, bioc_version, "/", uid, "/src/contrib/", x, "_", version, ".tar.gz", sep = "") tarball_path <- file.path(cache_dir, paste(x, "_", version, ".tar.gz", sep = "")) suppressWarnings(utils::download.file(url, destfile = tarball_path, quiet = !verbose)) - if (!file.exists(tarball_path)) { - warning(names(x), "(", x,") can't be cache.") - } + .check_tarball_path(tarball_path, x) } .cache_pkg_github <- function(x, version, handle, source, uid, cache_dir, verbose) { @@ -210,8 +219,23 @@ tarball_path <- file.path(cache_dir, paste("raw_", x, "_", version, ".tar.gz", sep = "")) utils::download.file(paste("https://api.github.com/repos/", handle, "/tarball/", sha, sep = ""), destfile = tarball_path, quiet = !verbose) - if (!file.exists(tarball_path)) { - warning(names(x), "(", x,") can't be cache.") + .check_tarball_path(tarball_path, x) +} + +.cache_pkg_local <- function(x, version, cache_dir, uid) { + local_path <- uid + tarball_path <- file.path(cache_dir, paste("raw_", x, "_", version, ".tar.gz", sep = "")) + if (isTRUE(grepl("\\.tar.gz$|\\.tgz$", local_path))) { + ## it could be a valid source package, but don't trust it blindly, mark it as raw_ + ## similar to github packages + file.copy(local_path, tarball_path) + return(.check_tarball_path(tarball_path, x)) + } + if (.is_directory(local_path)) { + dir_pkg_path <- file.path(cache_dir, paste("dir_", x, "_", version, sep = "")) + res <- file.copy(from = local_path, to = cache_dir, recursive = TRUE, overwrite = TRUE) + res <- file.rename(from = file.path(cache_dir, x), to = dir_pkg_path) + return(.check_tarball_path(dir_pkg_path, x, dir = TRUE)) } } @@ -239,9 +263,14 @@ } if(source == "bioc") { .cache_pkg_bioc(x = x, version = version, cache_dir = cache_dir, - bioc_mirror = bioc_mirror,bioc_version = rang$bioc_version, verbose = verbose, + bioc_mirror = bioc_mirror, bioc_version = rang$bioc_version, verbose = verbose, uid = uid) } + if(source == "local") { + ## please note that these cached packages are not built + .cache_pkg_local(x = x, version = version, cache_dir = cache_dir, uid = uid) + } + } ## For #14, cache R source in the future here invisible(output_dir) @@ -390,8 +419,8 @@ export_rang <- function(rang, path, rang_as_comment = TRUE, verbose = TRUE, lib #' @param output_dir character, where to put the Docker file and associated content #' @param materials_dir character, path to the directory containing additional resources (e.g. analysis scripts) to be copied into `output_dir` and in turn into the Docker container #' @param image character, which versioned Rocker image to use. Can only be "r-ver", "rstudio", "tidyverse", "verse", "geospatial" -#' This applies only to R version <= 3.1 -#' @param cache logical, whether to cache the packages now. Please note that the system requirements are not cached. 
For query with non-CRAN packages, this option is strongly recommended. For R version < 3.1, this must be TRUE if there is any non-CRAN packages. +#' This applies only to R version >= 3.1 +#' @param cache logical, whether to cache the packages now. Please note that the system requirements are not cached. For queries with non-CRAN packages, this option is strongly recommended. For queries with local packages, this must be TRUE regardless of R version. For R version < 3.1, this must also be TRUE if there are any non-CRAN packages. +#' @param no_rocker logical, whether to skip using Rocker images even when an appropriate version is available. Please keep this as `TRUE` unless you know what you are doing +#' @param debian_version, when Rocker images are not used, which EOL version of Debian to use. Can only be "lenny", "etch", "squeeze", "wheezy", "jessie", "stretch". Please keep this as default "lenny" unless you know what you are doing +#' @param ... arguments to be passed to `dockerize` @@ -436,9 +465,10 @@ dockerize <- function(rang, output_dir, materials_dir = NULL, image = c("r-ver", need_cache <- (isTRUE(any(grepl("^github::", .extract_pkgrefs(rang)))) && utils::compareVersion(rang$r_version, "3.1") == -1) || (isTRUE(any(grepl("^bioc::", .extract_pkgrefs(rang)))) && - utils::compareVersion(rang$r_version, "3.3") == -1) + utils::compareVersion(rang$r_version, "3.3") == -1) || + (isTRUE(any(grepl("^local::", .extract_pkgrefs(rang))))) if (isTRUE(need_cache) && isFALSE(cache)) { - stop("Non-CRAN packages must be cached for this R version: ", rang$r_version, ". Please set `cache` = TRUE.", call. = FALSE) + stop("Packages must be cached. Please set `cache` = TRUE.", call. = FALSE) } image <- match.arg(image) debian_version <- match.arg(debian_version) diff --git a/R/pkgref.R b/R/pkgref.R index 497ed4d..9d92f76 100644 --- a/R/pkgref.R +++ b/R/pkgref.R @@ -52,23 +52,34 @@ return(source) } -.is_github <- function(pkg){ - ## For now, this is sufficient. - ## If "local" and "url" are supported, this is not - grepl("/", pkg) +.is_github <- function(pkg) { + if (grepl("github\\.com", pkg)) { + return(TRUE) + } + grepl("/", pkg) && isFALSE(grepl("^[\\.~]?/", pkg)) && + isFALSE(grepl("/$", pkg)) && + length(strsplit(pkg, split = "/")[[1]]) == 2 } -.is_bioc <- function(pkg,bioc_version){ +.is_bioc <- function(pkg, bioc_version) { + if (is.null(bioc_version)) { + return(FALSE) + } bioc_pkgs <- .memo_search_bioc(bioc_version) - pkg%in%bioc_pkgs$Package + pkg %in% bioc_pkgs$Package } -.is_pkgref <- function(pkg) { - grepl("::", pkg) + +.is_local <- function(pkg) { + ## according to the standard, it must start with ".", "~", or "/" + grepl("^[\\.~/]", pkg) } ## TBI: .is_valid_pkgref ## pkgref is only valid if: exactly one "::", source %in% c("cran", "github"), if "github", .is_github is TRUE +.is_pkgref <- function(pkg) { + grepl("^github::|^cran::|^local::|^bioc::", pkg) +} .extract_github_handle <- function(url) { url <- gsub("^github::", "", url) @@ -83,24 +94,6 @@ return(paste0(path_components[1], "/", path_components[2])) } -## to normalize a pkg to pkgref -# .normalize_pkg <- function(pkg) { -# if (pkg == "" || is.na(pkg)) { -# stop("Invalid `pkg`.", call.
= FALSE) -# } -# if (isTRUE(.is_github(pkg))) { -# if (isTRUE(grepl("github\\.com", pkg))) { -# pkg <- .extract_github_handle(pkg) -# } -# } -# if (isTRUE(.is_pkgref(pkg))) { -# return(.clean_suffixes(pkg)) -# } -# if (isTRUE(.is_github(pkg))) { -# return(paste0("github::", .clean_suffixes(pkg))) -# } -# return(paste0("cran::", .clean_suffixes(pkg))) -# } .normalize_pkg <- function(pkg,bioc_version=NULL) { if (pkg == "" || is.na(pkg)) { stop("Invalid `pkg`.", call. = FALSE) @@ -116,16 +109,15 @@ if (isTRUE(.is_github(pkg))) { return(paste0("github::", .clean_suffixes(pkg))) } - if(is.null(bioc_version)){ - return(paste0("cran::", .clean_suffixes(pkg))) - } else{ - if(isTRUE(.is_bioc(pkg,bioc_version))){ - return(paste0("bioc::", .clean_suffixes(pkg))) - } else{ - return(paste0("cran::", .clean_suffixes(pkg))) - } + if (isTRUE(.is_local(pkg))) { + return(paste0("local::", .clean_suffixes(pkg))) } + if (isTRUE(.is_bioc(pkg, bioc_version))) { + return(paste0("bioc::", .clean_suffixes(pkg))) + } + paste0("cran::", .clean_suffixes(pkg)) } + ## vectorize .normalize_pkgs <- function(pkgs,bioc_version = NULL) { vapply(X = pkgs, bioc_version = bioc_version ,FUN = .normalize_pkg, FUN.VALUE = character(1), USE.NAMES = FALSE) diff --git a/R/resolve.R b/R/resolve.R index 777e5bd..22fc46b 100644 --- a/R/resolve.R +++ b/R/resolve.R @@ -34,6 +34,10 @@ "bioc" = { ## no need to have bioc_version because it will get queried once again return(.query_snapshot_dependencies_bioc(handle = .parse_pkgref(pkgref), snapshot_date = snapshot_date)) + }, + "local" = { + return(.query_snapshot_dependencies_local(handle = .parse_pkgref(pkgref), snapshot_date = snapshot_date, + bioc_version = bioc_version)) }) } @@ -102,7 +106,37 @@ pkg_dep_df[,c("snapshot_date", "x", "x_version", "x_pubdate", "x_pkgref", "x_bioc_ver", "x_uid", "y", "type", "y_raw_version", "y_pkgref")] } -# get the commit sha for the commit closest to date +.read_tarball_description <- function(path) { + tempfile <- tempfile() + DESCRIPTION_in_tarball <- grep("DESCRIPTION$", utils::untar(path, list = TRUE), value = TRUE) + utils::untar(path, files = DESCRIPTION_in_tarball, exdir = tempfile) + list.files(tempfile, pattern = "DESCRIPTION", full.names = TRUE, recursive = TRUE)[1] +} + +.extract_local_description_path <- function(handle) { + ## .check_local_in_pkgrefs did the check already + if (.is_directory(handle)) { + return(file.path(handle, "DESCRIPTION")) + } + .read_tarball_description(handle) +} + +.query_snapshot_dependencies_local <- function(handle, snapshot_date, bioc_version = NULL) { + snapshot_date <- parsedate::parse_date(snapshot_date) + description_path <- .extract_local_description_path(handle) + descr_df <- as.data.frame(read.dcf(description_path)) + pkg_dep_df <- .parse_desc(descr_df, snapshot_date) + pkg_dep_df$x_pkgref <- .normalize_pkgs(pkgs = handle, bioc_version = bioc_version) + pkg_dep_df$x_uid <- normalizePath(handle) + pkg_dep_df$x_pubdate <- snapshot_date + if (isFALSE("y" %in% names(pkg_dep_df))) { + return(pkg_dep_df[,c("snapshot_date", "x", "x_version", "x_pubdate", "x_pkgref", "x_uid")]) + } + pkg_dep_df$y_pkgref <- .normalize_pkgs(pkg_dep_df$y, bioc_version = bioc_version) + pkg_dep_df[,c("snapshot_date", "x", "x_version", "x_pubdate", "x_pkgref", "x_uid", "y", "type", "y_raw_version", "y_pkgref")] +} + +## get the commit sha for the commit closest to date .query_sha <- function(handle, date) { commits <- .gh(paste0("/repos/", handle, "/commits"), per_page = 100) dates <- sapply(commits,function(x) x$commit$committer$date) 
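## A minimal sketch (not part of the diff itself) of the pkgref normalisation
## precedence implemented by .normalize_pkg() in R/pkgref.R above: an explicit
## "<source>::" prefix is kept as-is, then GitHub handles, then local paths
## (anything starting with ".", "~", or "/"), then Bioconductor when a
## `bioc_version` is supplied, and CRAN as the fallback. Expected values follow
## tests/testthat/test_pkgref.R below; `.normalize_pkgs()` is internal, so
## `rang:::` access is assumed, and the bioc example needs a network lookup.
rang:::.normalize_pkgs("rtoot")                             # "cran::rtoot"
rang:::.normalize_pkgs("r-lib/crayon")                      # "github::r-lib/crayon"
rang:::.normalize_pkgs("./relative/path")                   # "local::./relative/path"
rang:::.normalize_pkgs("~/path/from/home")                  # "local::~/path/from/home"
rang:::.normalize_pkgs("S4Vectors", bioc_version = "3.16")  # "bioc::S4Vectors"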
@@ -208,6 +242,20 @@ bioc_version } +.check_local_in_pkgrefs <- function(pkgrefs) { + res <- .group_pkgrefs_by_source(pkgrefs) + any_local <- isFALSE(is.null(res[["local"]])) + if (isTRUE(any_local)) { + warning("Using \"local\" package(s) to resolve dependencies is not reproducible on another machine.", call. = FALSE) + for (handle in res[["local"]]) { + if (isFALSE(.is_directory(handle)) && isFALSE(grepl("\\.tar.gz$|\\.tgz$", handle))) { + stop(handle, " doesn't appear to be a valid local package.", call. = FALSE) + } + } + } + invisible() +} + #' Resolve Dependencies Of R Packages #' #' This function recursively queries dependencies of R packages at a specific snapshot time. The dependency graph can then be used to recreate the computational environment. The data on dependencies are provided by R-hub. @@ -253,6 +301,7 @@ resolve <- function(pkgs, snapshot_date, no_enhances = TRUE, no_suggests = TRUE, snapshot_date <- .extract_date(pkgs = pkgs, date = snapshot_date, verbose = verbose) bioc_version <- .generate_bioc_version(snapshot_date = snapshot_date, pkgs = pkgs) pkgrefs <- as_pkgrefs(pkgs, bioc_version = bioc_version) + .check_local_in_pkgrefs(pkgrefs) output <- list() output$call <- match.call() output$ranglets <- list() @@ -446,6 +495,9 @@ query_sysreqs <- function(rang, os = "ubuntu-20.04") { if ("bioc" %in% names(grouped_handles)) { output[["bioc"]] <- .query_sysreqs_bioc(grouped_handles[["bioc"]], os = os) } + if ("local" %in% names(grouped_handles)) { + output[["local"]] <- .query_sysreqs_local(grouped_handles[["local"]], os = os) + } unique(unlist(output)) } @@ -497,6 +549,21 @@ query_sysreqs <- function(rang, os = "ubuntu-20.04") { .query_singleline_sysreqs(singleline_sysreqs = singleline_sysreqs, arch = arch) } +.query_sysreqs_local <- function(handles, os) { + if (grepl("^ubuntu|^debian", os)) { + arch <- "DEB" + } + if (grepl("^centos|^fedora|^redhat", os)) { + arch <- "RPM" + } + description_paths <- vapply(handles, .extract_local_description_path, FUN.VALUE = character(1)) + raw_sys_reqs <- vapply(description_paths, FUN = function(x) read.dcf(x, fields = "SystemRequirements")[,1], + FUN.VALUE = character(1)) + singleline_sysreqs <- paste0(raw_sys_reqs[!is.na(raw_sys_reqs)], collapse = ", ") + singleline_sysreqs <- gsub("\\n", " ", singleline_sysreqs) + .query_singleline_sysreqs(singleline_sysreqs = singleline_sysreqs, arch = arch) +} + .query_singleline_sysreqs <- function(singleline_sysreqs, arch = "DEB") { baseurl <- "https://sysreqs.r-hub.io/map/" url <- utils::URLencode(paste0(baseurl, singleline_sysreqs)) @@ -586,7 +653,7 @@ query_sysreqs <- function(rang, os = "ubuntu-20.04") { url <- httr::modify_url(url, path = path) token <- Sys.getenv("GITHUB_PAT", NA_character_) if(is.na(token)){ - token <- Sys.getenv("GITHUB_TOKEN", NA_character_) + token <- Sys.getenv("GITHUB_TOKEN", NA_character_) } if(is.na(token)){ token <- "" diff --git a/inst/header.R b/inst/header.R index 5d743bc..da08ba9 100644 --- a/inst/header.R +++ b/inst/header.R @@ -64,11 +64,26 @@ return(tarball_path) } +.build_dir_tarball <- function(dir_pkg_path, x, version, tarball_path, current_r_version) { + if (utils::compareVersion(current_r_version, "3.1") != -1) { + vignetteflag <- "--no-build-vignettes" + } else { + vignetteflag <- "--no-vignettes" + } + expected_tarball_path <- paste(x, "_", version, ".tar.gz", sep = "") + res <- system(command = paste("R", "CMD", "build", vignetteflag, dir_pkg_path)) + expected_tarball_path <- paste(x, "_", version, ".tar.gz", sep = "") + 
stopifnot(file.exists(expected_tarball_path)) + file.rename(expected_tarball_path, tarball_path) + return(tarball_path) +} + .install_from_source <- function(x, version, handle, source, uid, lib, path = tempdir(), verbose, cran_mirror, bioc_mirror, current_r_version) { tarball_path <- file.path(path, paste(x, "_", version, ".tar.gz", sep = "")) raw_tarball_path <- file.path(path, paste("raw_", x, "_", version, ".tar.gz", sep = "")) - if (!file.exists(tarball_path) && !file.exists(raw_tarball_path)) { + dir_pkg_path <- file.path(path, paste("dir_", x, "_", version, sep = "")) + if (!file.exists(tarball_path) && !file.exists(raw_tarball_path) && !file.exists(dir_pkg_path)) { .download_package(tarball_path = tarball_path, x = x, version = version, handle = handle, source = source, uid = uid, verbose = verbose, cran_mirror = cran_mirror, bioc_mirror = bioc_mirror) } @@ -79,6 +94,13 @@ stop("building failed.") } } + if (file.exists(dir_pkg_path)) { + tarball_path <- .build_dir_tarball(dir_pkg_path, x = x, version = version, tarball_path, + current_r_version = current_r_version) + if (!file.exists(tarball_path)) { + stop("building failed.") + } + } .install_packages(tarball_path, lib, verbose, current_r_version) ## check and error if (!is.na(lib)) { diff --git a/man/dockerize.Rd b/man/dockerize.Rd index 4fb3479..df6ebb7 100644 --- a/man/dockerize.Rd +++ b/man/dockerize.Rd @@ -37,12 +37,12 @@ dockerise_rang(...) \item{materials_dir}{character, path to the directory containing additional resources (e.g. analysis scripts) to be copied into \code{output_dir} and in turn into the Docker container} \item{image}{character, which versioned Rocker image to use. Can only be "r-ver", "rstudio", "tidyverse", "verse", "geospatial" -This applies only to R version <= 3.1} +This applies only to R version >= 3.1} \item{rang_as_comment}{logical, whether to write resolved result and the steps to reproduce the file to \code{path} as comment} -\item{cache}{logical, whether to cache the packages now. Please note that the system requirements are not cached. For query with non-CRAN packages, this option is strongly recommended. For R version < 3.1, this must be TRUE if there is any non-CRAN packages.} +\item{cache}{logical, whether to cache the packages now. Please note that the system requirements are not cached. For queries with non-CRAN packages, this option is strongly recommended. For queries with local packages, this must be TRUE regardless of R version. For R version < 3.1, this must also be TRUE if there are any non-CRAN packages.} \item{verbose}{logical, pass to \code{\link[=install.packages]{install.packages()}}, the negated value is also passed as \code{quiet} to both \code{\link[=install.packages]{install.packages()}} and \code{\link[=download.file]{download.file()}}.} diff --git a/tests/testdata/askpass/DESCRIPTION b/tests/testdata/askpass/DESCRIPTION new file mode 100644 index 0000000..00b1e63 --- /dev/null +++ b/tests/testdata/askpass/DESCRIPTION @@ -0,0 +1,29 @@ +Package: askpass +Type: Package +Title: Safe Password Entry for R, Git, and SSH +Version: 1.1 +Authors@R: person("Jeroen", "Ooms", role = c("aut", "cre"), + email = "jeroen@berkeley.edu", comment = c(ORCID = "0000-0002-4035-0289")) +Description: Cross-platform utilities for prompting the user for credentials or a + passphrase, for example to authenticate with a server or read a protected key. + Includes native programs for MacOS and Windows, hence no 'tcltk' is required.
+ Password entry can be invoked in two different ways: directly from R via the + askpass() function, or indirectly as password-entry back-end for 'ssh-agent' + or 'git-credential' via the SSH_ASKPASS and GIT_ASKPASS environment variables. + Thereby the user can be prompted for credentials or a passphrase if needed + when R calls out to git or ssh. +License: MIT + file LICENSE +URL: https://github.com/jeroen/askpass#readme +BugReports: https://github.com/jeroen/askpass/issues +Encoding: UTF-8 +LazyData: true +Imports: sys (>= 2.1) +RoxygenNote: 6.1.1 +Suggests: testthat +Language: en-US +NeedsCompilation: yes +Packaged: 2019-01-13 12:08:17 UTC; jeroen +Author: Jeroen Ooms [aut, cre] () +Maintainer: Jeroen Ooms +Repository: CRAN +Date/Publication: 2019-01-13 12:50:03 UTC diff --git a/tests/testdata/askpass/LICENSE b/tests/testdata/askpass/LICENSE new file mode 100644 index 0000000..8b5d4b0 --- /dev/null +++ b/tests/testdata/askpass/LICENSE @@ -0,0 +1,2 @@ +YEAR: 2018 +COPYRIGHT HOLDER: Jeroen Ooms diff --git a/tests/testdata/askpass/MD5 b/tests/testdata/askpass/MD5 new file mode 100644 index 0000000..789bcaf --- /dev/null +++ b/tests/testdata/askpass/MD5 @@ -0,0 +1,17 @@ +eb6f4d0b9cc61dfa08b3804779591d5f *DESCRIPTION +98c71b5eae0ac8dabb055bd8883b8398 *LICENSE +ad7ca63fdfe698661be582601697c491 *NAMESPACE +53a778139c44afbdf7a33ef6f5df8705 *NEWS +35412ba699bd625370487aa6a3e462c4 *R/askpass.R +97259f0bad2d259cc87ad38a01dbf02e *R/onload.R +a1624267f9c82ed814f980de8c5fbc66 *R/ssh.R +1f9f6a06b0543cf62931f42ad291db6b *inst/WORDLIST +a34602417af1b0c9ad06fc93df828c71 *inst/mac-askpass +e0651808479eb9c747ffbd785441912b *inst/mac-simplepass +0eb1bcf4a9936ace5ae19d962a2a56a7 *man/askpass.Rd +20d01a60d6cef576ee14420f207d90d1 *man/ssh_askpass.Rd +dce2a57c7c4f360319f3beaec3245444 *src/Makevars.win +ff322a40812325235977b54323b67c91 *src/askpass.c +b0e65f1a2fd9237f0cb01f9e2e6f64a4 *src/win32/win-askpass.c +203e4bb2a5fd4caccb9a07d14bf48a90 *tests/testthat.R +631ab61b3bc4600779d0eee8aaf6cb32 *tests/testthat/test-option.R diff --git a/tests/testdata/askpass/NAMESPACE b/tests/testdata/askpass/NAMESPACE new file mode 100644 index 0000000..e157cef --- /dev/null +++ b/tests/testdata/askpass/NAMESPACE @@ -0,0 +1,4 @@ +# Generated by roxygen2: do not edit by hand + +export(askpass) +export(ssh_askpass) diff --git a/tests/testdata/askpass/NEWS b/tests/testdata/askpass/NEWS new file mode 100644 index 0000000..b8c2da1 --- /dev/null +++ b/tests/testdata/askpass/NEWS @@ -0,0 +1,6 @@ +1.1 + - Fix build on R 3.3 and older + - Fix compiler warning on Solaris + +1.0 + - Initial release diff --git a/tests/testdata/askpass/R/askpass.R b/tests/testdata/askpass/R/askpass.R new file mode 100644 index 0000000..c8c7570 --- /dev/null +++ b/tests/testdata/askpass/R/askpass.R @@ -0,0 +1,86 @@ +#' Password Prompt Utility +#' +#' Prompt the user for a password to authenticate or read a protected key. +#' By default, this function automatically uses the most appropriate method +#' based on the user platform and front-end. Users or IDEs can override this +#' and set a custom password entry function via the `askpass` option. +#' +#' @export +#' @param prompt the string printed when prompting the user for input. 
+#' @examples \donttest{ +#' # Prompt user for passwd +#' pw <- askpass("Please enter your password") +#' } +askpass <- function(prompt = "Please enter your password: "){ + FUN <- getOption("askpass", ask_password_default) + FUN(prompt) +} + +ask_password_default <- function(prompt){ + if(!interactive()) + return(NULL) + if(is_windows()){ + askpass_windows(prompt) + } else if(is_macos() && !isatty(stdin())){ + askpass_mac(prompt) + } else { + readline_silent(prompt) + } +} + +askpass_path <- function(simple = TRUE){ + if(is_windows()){ + arch <- .Machine$sizeof.pointer * 8; + system.file(sprintf('win-askpass%d.exe', arch), + package = 'askpass', mustWork = TRUE) + } else if(is_macos()){ + prog <- ifelse(isTRUE(simple), 'mac-simplepass', 'mac-askpass') + system.file(prog, package = 'askpass', mustWork = TRUE) + } +} + +askpass_windows <- function(prompt, user = names(prompt)){ + tryCatch({ + res <- sys::exec_internal(askpass_path(), c(prompt, user), timeout = 120) + out_without_eol(res$stdout) + }, error = function(e){ + message(e$message) + }) +} + +askpass_mac <- function(prompt){ + tryCatch({ + res <- sys::exec_internal(askpass_path(), prompt, timeout = 120) + out_without_eol(res$stdout) + }, error = function(e){ + message(e$message) + }) +} + +readline_silent <- function(prompt, icon = "\U0001f511 "){ + if(is_unix() && isatty(stdin())){ + if(system('stty -echo') == 0){ + on.exit(system('stty echo')) + } + } + cat(prompt, "\n") + out <- base::readline(icon) + cat(" OK\n") + out +} + +is_windows <- function(){ + .Platform$OS.type == 'windows' +} + +is_unix <- function(){ + .Platform$OS.type == "unix" +} + +is_macos <- function(){ + identical(tolower(Sys.info()[['sysname']]), "darwin") +} + +out_without_eol <- function(x){ + sub("\r?\n$", "", rawToChar(x)) +} diff --git a/tests/testdata/askpass/R/onload.R b/tests/testdata/askpass/R/onload.R new file mode 100644 index 0000000..06f0178 --- /dev/null +++ b/tests/testdata/askpass/R/onload.R @@ -0,0 +1,43 @@ +.onLoad <- function(libname, pkgname){ + setup_askpass_vars() +} + +setup_askpass_vars <- function(){ + if(var_exists('RSTUDIO')){ + fix_rstudio_path() + } else { + # This is mostly for RGui and R.app (tty could mean MacOS server) + if(is_windows() || (is_macos() && !isatty(stdin()))){ + askpass_bin = ssh_askpass() + if(!var_exists('GIT_ASKPASS')){ + Sys.setenv("GIT_ASKPASS" = askpass_bin) + } + if(!var_exists('SSH_ASKPASS')){ + Sys.setenv("SSH_ASKPASS" = askpass_bin) + } + } + } +} + +# Try to put 'rpostback-askpass' on the path in RStudio if needed +# See: https://github.com/rstudio/rstudio/issues/3805 +fix_rstudio_path <- function(){ + rs_path <- Sys.getenv('RS_RPOSTBACK_PATH') + git_askpass <- Sys.getenv('GIT_ASKPASS') + if(nchar(rs_path) && !cmd_exists(git_askpass)){ + PATH <- Sys.getenv("PATH") + if(!grepl(normalizePath(rs_path, mustWork = FALSE), PATH, fixed = TRUE)){ + rs_path <- unique(c(rs_path, sub("rpostback", 'postback', rs_path))) + Sys.setenv(PATH = paste(c(PATH, normalizePath(rs_path, mustWork = FALSE)), + collapse = .Platform$path.sep)) + } + } +} + +var_exists <- function(var){ + nchar(Sys.getenv(var)) > 0 +} + +cmd_exists <- function(cmd){ + nchar(Sys.which(cmd)) > 0 +} diff --git a/tests/testdata/askpass/R/ssh.R b/tests/testdata/askpass/R/ssh.R new file mode 100644 index 0000000..3b4be4f --- /dev/null +++ b/tests/testdata/askpass/R/ssh.R @@ -0,0 +1,14 @@ +#' ASKPASS CMD TOOL +#' +#' This returns the path to the native askpass executable which can be used +#' by git-credential or ssh-agent. 
Most users don't have worry about this. +#' +#' On Windows and MacOS the package automatically sets the `SSH_ASKPASS` and +#' `GIT_ASKPASS` variables on load (if not already set). If these are set +#' you should be able to run e.g. `sys::exec_wait("ssh-add")` and you should +#' be prompted for a passphrase if your key is protected. +#' +#' @export +ssh_askpass <- function(){ + askpass_path(simple = FALSE) +} diff --git a/tests/testdata/askpass/inst/WORDLIST b/tests/testdata/askpass/inst/WORDLIST new file mode 100644 index 0000000..2671cec --- /dev/null +++ b/tests/testdata/askpass/inst/WORDLIST @@ -0,0 +1,4 @@ +CMD +IDEs +MacOS +tcltk diff --git a/tests/testdata/askpass/inst/mac-askpass b/tests/testdata/askpass/inst/mac-askpass new file mode 100755 index 0000000..d785f10 --- /dev/null +++ b/tests/testdata/askpass/inst/mac-askpass @@ -0,0 +1,21 @@ +#!/usr/bin/env osascript +# This only works on MacOS! +# Adapted from: https://github.com/theseal/ssh-askpass + +on run argv + set args to argv as text + set frontmost_application to name of (info for (path to frontmost application)) + tell application frontmost_application + if args ends with ": " or args ends with ":" then + if args contains "pass" or args contains "pin" then + display dialog args with icon note default button "OK" default answer "" with hidden answer + else + display dialog args with icon note default button "OK" default answer "" + end if + return result's text returned + else + display dialog args with icon note default button "Cancel" + return + end if + end tell +end run diff --git a/tests/testdata/askpass/inst/mac-simplepass b/tests/testdata/askpass/inst/mac-simplepass new file mode 100755 index 0000000..fcd2894 --- /dev/null +++ b/tests/testdata/askpass/inst/mac-simplepass @@ -0,0 +1,11 @@ +#!/usr/bin/env osascript +# This only works on MacOS! + +on run argv +set args to argv as text +set frontmost_application to name of (info for (path to frontmost application)) +tell application frontmost_application +display dialog args with icon note default button "OK" default answer "" with hidden answer +return result's text returned + end tell +end run diff --git a/tests/testdata/askpass/man/askpass.Rd b/tests/testdata/askpass/man/askpass.Rd new file mode 100644 index 0000000..ae55a90 --- /dev/null +++ b/tests/testdata/askpass/man/askpass.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/askpass.R +\name{askpass} +\alias{askpass} +\title{Password Prompt Utility} +\usage{ +askpass(prompt = "Please enter your password: ") +} +\arguments{ +\item{prompt}{the string printed when prompting the user for input.} +} +\description{ +Prompt the user for a password to authenticate or read a protected key. +By default, this function automatically uses the most appropriate method +based on the user platform and front-end. Users or IDEs can override this +and set a custom password entry function via the \code{askpass} option. 
+} +\examples{ +\donttest{ +# Prompt user for passwd +pw <- askpass("Please enter your password") +} +} diff --git a/tests/testdata/askpass/man/ssh_askpass.Rd b/tests/testdata/askpass/man/ssh_askpass.Rd new file mode 100644 index 0000000..873e053 --- /dev/null +++ b/tests/testdata/askpass/man/ssh_askpass.Rd @@ -0,0 +1,18 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ssh.R +\name{ssh_askpass} +\alias{ssh_askpass} +\title{ASKPASS CMD TOOL} +\usage{ +ssh_askpass() +} +\description{ +This returns the path to the native askpass executable which can be used +by git-credential or ssh-agent. Most users don't have worry about this. +} +\details{ +On Windows and MacOS the package automatically sets the \code{SSH_ASKPASS} and +\code{GIT_ASKPASS} variables on load (if not already set). If these are set +you should be able to run e.g. \code{sys::exec_wait("ssh-add")} and you should +be prompted for a passphrase if your key is protected. +} diff --git a/tests/testdata/askpass/src/Makevars.win b/tests/testdata/askpass/src/Makevars.win new file mode 100644 index 0000000..b87142e --- /dev/null +++ b/tests/testdata/askpass/src/Makevars.win @@ -0,0 +1,10 @@ +ASKPASS=../inst/win-askpass$(WIN).exe +ASKPASSOBJ=win32/win-askpass.o + +all: clean $(ASKPASS) + +clean: + rm -f $(ASKPASS) $(ASKPASSOBJ) + +$(ASKPASS): $(ASKPASSOBJ) + $(CC) $(CFLAGS) -o $(ASKPASS) $(ASKPASSOBJ) -lcredui diff --git a/tests/testdata/askpass/src/askpass.c b/tests/testdata/askpass/src/askpass.c new file mode 100644 index 0000000..a076dfa --- /dev/null +++ b/tests/testdata/askpass/src/askpass.c @@ -0,0 +1,27 @@ +#include +#include +#include +#include + +/* We don't use this anymore */ + +SEXP pw_entry_dialog(SEXP prompt){ +#ifndef _WIN32 + const char *text = CHAR(STRING_ELT(prompt, 0)); + const char *pass = getpass(text); + if(pass != NULL) + return Rf_mkString(pass); +#endif + return R_NilValue; +} + +static const R_CallMethodDef CallEntries[] = { + {"pw_entry_dialog", (DL_FUNC) &pw_entry_dialog, 1}, + {NULL, NULL, 0} +}; + +void R_init_askpass(DllInfo *dll){ + R_registerRoutines(dll, NULL, CallEntries, NULL, NULL); + R_useDynamicSymbols(dll, FALSE); + R_forceSymbols(dll, TRUE); +} diff --git a/tests/testdata/askpass/src/win32/win-askpass.c b/tests/testdata/askpass/src/win32/win-askpass.c new file mode 100644 index 0000000..8055f36 --- /dev/null +++ b/tests/testdata/askpass/src/win32/win-askpass.c @@ -0,0 +1,59 @@ +#include +#include +#include + +static const char *formatError(DWORD res){ + static char buf[1000], *p; + FormatMessage(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, + NULL, res, + MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), + buf, 1000, NULL); + p = buf+strlen(buf) -1; + if(*p == '\n') *p = '\0'; + p = buf+strlen(buf) -1; + if(*p == '\r') *p = '\0'; + p = buf+strlen(buf) -1; + if(*p == '.') *p = '\0'; + return buf; +} + +int main( int argc, const char* argv[] ){ + const char *prompt = argc > 1 ? argv[1] : "Please enter password"; + const char *user = argc > 2 ? 
argv[2] : "NA"; + CREDUI_INFO cui; + TCHAR pszPwd[CREDUI_MAX_PASSWORD_LENGTH+1]; + BOOL fSave; + DWORD dwErr; + + cui.cbSize = sizeof(CREDUI_INFO); + cui.hwndParent = GetActiveWindow(); + cui.pszMessageText = TEXT(prompt); + cui.pszCaptionText = TEXT("Password Entry"); + cui.hbmBanner = NULL; + fSave = FALSE; + SecureZeroMemory(pszPwd, sizeof(pszPwd)); + dwErr = CredUIPromptForCredentials( + &cui, // CREDUI_INFO structure + TEXT("TheServer"), // Target for credentials + NULL, // Reserved + 0, // Reason + (char*) user, // User name + 0, // Max number of char for user name + pszPwd, // Password + CREDUI_MAX_PASSWORD_LENGTH+1, // Max number of char for password + &fSave, // State of save check box + CREDUI_FLAGS_GENERIC_CREDENTIALS | // flags + CREDUI_FLAGS_KEEP_USERNAME | + CREDUI_FLAGS_PASSWORD_ONLY_OK | + CREDUI_FLAGS_ALWAYS_SHOW_UI | + CREDUI_FLAGS_DO_NOT_PERSIST); + + if(!dwErr) { + fprintf( stdout, "%s\n", pszPwd); + + return 0; + } else { + fprintf( stderr, "%s\n", formatError(GetLastError())); + return 1; + } +} diff --git a/tests/testdata/askpass/tests/testthat.R b/tests/testdata/askpass/tests/testthat.R new file mode 100644 index 0000000..24cb924 --- /dev/null +++ b/tests/testdata/askpass/tests/testthat.R @@ -0,0 +1,4 @@ +library(testthat) +library(askpass) + +test_check("askpass") diff --git a/tests/testdata/askpass/tests/testthat/test-option.R b/tests/testdata/askpass/tests/testthat/test-option.R new file mode 100644 index 0000000..2c13db6 --- /dev/null +++ b/tests/testdata/askpass/tests/testthat/test-option.R @@ -0,0 +1,14 @@ +context("test-option") + +test_that("program exists", { + if(is_windows() || is_macos()){ + expect_true(file.exists(ssh_askpass())) + } +}) + +test_that("option askpass is respected", { + options(askpass = function(...){ + 'supersecret' + }) + expect_equal(askpass(), 'supersecret') +}) diff --git a/tests/testdata/askpass_1.1.tar.gz b/tests/testdata/askpass_1.1.tar.gz new file mode 100644 index 0000000..d8cc743 Binary files /dev/null and b/tests/testdata/askpass_1.1.tar.gz differ diff --git a/tests/testdata/fakeRhtslib.tar.gz b/tests/testdata/fakeRhtslib.tar.gz new file mode 100644 index 0000000..b878309 Binary files /dev/null and b/tests/testdata/fakeRhtslib.tar.gz differ diff --git a/tests/testdata/fakeRhtslib/DESCRIPTION b/tests/testdata/fakeRhtslib/DESCRIPTION new file mode 100644 index 0000000..4a4b4e4 --- /dev/null +++ b/tests/testdata/fakeRhtslib/DESCRIPTION @@ -0,0 +1,42 @@ +Package: Rhtslib +Title: HTSlib high-throughput sequencing library as an R package +Description: This package provides version 1.15.1 of the 'HTSlib' C + library for high-throughput sequence analysis. The package is + primarily useful to developers of other R packages who wish to + make use of HTSlib. Motivation and instructions for use of this + package are in the vignette, vignette(package="Rhtslib", "Rhtslib"). +biocViews: DataImport, Sequencing +URL: https://bioconductor.org/packages/Rhtslib, http://www.htslib.org/ +BugReports: https://github.com/Bioconductor/Rhtslib/issues +Version: 2.0.0 +License: LGPL (>= 2) +Copyright: Unless otherwise noted in the file, all files outside + src/htslib-1.15.1 or inst/include copyright Bioconductor; for + files inside src/htslib-1.15.1 or inst/include, see file + src/htslib-1.15.1/LICENSE. 
+Encoding: UTF-8 +Authors@R: + c(person("Nathaniel", "Hayden", email="nhayden@fredhutch.org", + role=c("led", "aut")), + person("Martin", "Morgan", email="martin.morgan@roswellpark.org", + role="aut"), + person("Hervé", "Pagès", + email="hpages.on.github@gmail.com", role=c("aut", "cre"))) +Imports: zlibbioc +LinkingTo: zlibbioc +Suggests: knitr, rmarkdown, BiocStyle +SystemRequirements: libbz2 & liblzma & libcurl (with header files), GNU + make +StagedInstall: no +VignetteBuilder: knitr +git_url: https://git.bioconductor.org/packages/Rhtslib +git_branch: RELEASE_3_16 +git_last_commit: 1757333 +git_last_commit_date: 2022-11-01 +Date/Publication: 2022-11-01 +NeedsCompilation: yes +Packaged: 2022-11-01 22:45:17 UTC; biocbuild +Author: Nathaniel Hayden [led, aut], + Martin Morgan [aut], + Hervé Pagès [aut, cre] +Maintainer: Hervé Pagès diff --git a/tests/testdata/fakexml2/DESCRIPTION b/tests/testdata/fakexml2/DESCRIPTION new file mode 100644 index 0000000..e21d0de --- /dev/null +++ b/tests/testdata/fakexml2/DESCRIPTION @@ -0,0 +1,60 @@ +Package: xml2 +Title: Parse XML +Version: 1.3.3.9000 +Authors@R: c( + person("Hadley", "Wickham", , "hadley@rstudio.com", role = c("aut", "cre")), + person("Jim", "Hester", role = "aut"), + person("Jeroen", "Ooms", role = "aut"), + person("RStudio", role = c("cph", "fnd")), + person("R Foundation", role = "ctb", + comment = "Copy of R-project homepage cached as example") + ) +Description: Work with XML files using a simple, consistent interface. + Built on top of the 'libxml2' C library. +License: MIT + file LICENSE +URL: https://xml2.r-lib.org/, https://github.com/r-lib/xml2 +BugReports: https://github.com/r-lib/xml2/issues +Depends: + R (>= 3.1.0) +Imports: + methods +Suggests: + covr, + curl, + httr, + knitr, + magrittr, + mockery, + rmarkdown, + testthat (>= 2.1.0) +VignetteBuilder: + knitr +Config/Needs/website: tidyverse/tidytemplate +Encoding: UTF-8 +Roxygen: list(markdown = TRUE) +RoxygenNote: 7.1.2 +SystemRequirements: libxml2: libxml2-dev (deb), libxml2-devel (rpm) +Collate: + 'S4.R' + 'as_list.R' + 'xml_parse.R' + 'as_xml_document.R' + 'classes.R' + 'init.R' + 'paths.R' + 'utils.R' + 'xml_attr.R' + 'xml_children.R' + 'xml_find.R' + 'xml_modify.R' + 'xml_name.R' + 'xml_namespaces.R' + 'xml_path.R' + 'xml_schema.R' + 'xml_serialize.R' + 'xml_structure.R' + 'xml_text.R' + 'xml_type.R' + 'xml_url.R' + 'xml_write.R' + 'zzz.R' diff --git a/tests/testdata/fakezlibbioc/DESCRIPTION b/tests/testdata/fakezlibbioc/DESCRIPTION new file mode 100644 index 0000000..428bad2 --- /dev/null +++ b/tests/testdata/fakezlibbioc/DESCRIPTION @@ -0,0 +1,16 @@ +Package: zlibbioc +Type: Package +Title: An R packaged zlib-1.2.5 +Version: 1.41.0 +Author: Martin Morgan +Maintainer: Bioconductor Package Maintainer +Description: This package uses the source code of zlib-1.2.5 to create + libraries for systems that do not have these available via other + means (most Linux and Mac users should have system-level access to + zlib, and no direct need for this package). See the vignette for + instructions on use. 
+biocViews: Infrastructure +URL: https://bioconductor.org/packages/zlibbioc +BugReports: https://github.com/Bioconductor/zlibbioc/issues +License: Artistic-2.0 + file LICENSE +LazyLoad: yes diff --git a/tests/testdata/local_renv_lock/renv.lock b/tests/testdata/local_renv_lock/renv.lock new file mode 100644 index 0000000..300119d --- /dev/null +++ b/tests/testdata/local_renv_lock/renv.lock @@ -0,0 +1,501 @@ +{ + "R": { + "Version": "4.2.2", + "Repositories": [ + { + "Name": "CRAN", + "URL": "https://cran.rstudio.com" + } + ] + }, + "Packages": { + "KernSmooth": { + "Package": "KernSmooth", + "Version": "2.23-20", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "8dcfa99b14c296bc9f1fd64d52fd3ce7", + "Requirements": [] + }, + "MASS": { + "Package": "MASS", + "Version": "7.3-58.2", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "e02d1a0f6122fd3e634b25b433704344", + "Requirements": [] + }, + "Matrix": { + "Package": "Matrix", + "Version": "1.5-1", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "539dc0c0c05636812f1080f473d2c177", + "Requirements": [ + "lattice" + ] + }, + "R6": { + "Package": "R6", + "Version": "2.5.1", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "470851b6d5d0ac559e9d01bb352b4021", + "Requirements": [] + }, + "askpass": { + "Package": "askpass", + "Version": "1.1", + "Source": "Local", + "Repository": "CRAN", + "RemoteType": "local", + "RemoteUrl": "~/dev/rang/tests/testdata/askpass_1.1.tar.gz", + "Hash": "7f1f85d806abb68dc4e04ea2c568cafe", + "Requirements": [ + "sys" + ] + }, + "boot": { + "Package": "boot", + "Version": "1.3-28", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "0baa960e3b49c6176a4f42addcbacc59", + "Requirements": [] + }, + "cachem": { + "Package": "cachem", + "Version": "1.0.7", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "cda74447c42f529de601fe4d4050daef", + "Requirements": [ + "fastmap", + "rlang" + ] + }, + "class": { + "Package": "class", + "Version": "7.3-21", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "8ae0d4328e2eb3a582dfd5391a3663b7", + "Requirements": [ + "MASS" + ] + }, + "cli": { + "Package": "cli", + "Version": "3.6.0", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "3177a5a16c243adc199ba33117bd9657", + "Requirements": [] + }, + "clipr": { + "Package": "clipr", + "Version": "0.8.0", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "3f038e5ac7f41d4ac41ce658c85e3042", + "Requirements": [] + }, + "cluster": { + "Package": "cluster", + "Version": "2.1.4", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "5edbbabab6ce0bf7900a74fd4358628e", + "Requirements": [] + }, + "codetools": { + "Package": "codetools", + "Version": "0.2-19", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "c089a619a7fae175d149d89164f8c7d8", + "Requirements": [] + }, + "curl": { + "Package": "curl", + "Version": "5.0.0", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "e4f97056611e8e6b8b852d13b7400cf1", + "Requirements": [] + }, + "dplyr": { + "Package": "dplyr", + "Version": "1.1.0", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "d3c34618017e7ae252d46d79a1b9ec32", + "Requirements": [ + "R6", + "cli", + "generics", + "glue", + "lifecycle", + "magrittr", + "pillar", + "rlang", + "tibble", + "tidyselect", + "vctrs" + ] + }, + "fansi": { + "Package": "fansi", + "Version": "1.0.4", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "1d9e7ad3c8312a192dea7d3db0274fde", + "Requirements": [] + }, + "fastmap": { + 
"Package": "fastmap", + "Version": "1.1.1", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "f7736a18de97dea803bde0a2daaafb27", + "Requirements": [] + }, + "foreign": { + "Package": "foreign", + "Version": "0.8-82", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "32b25c97ce306a760c4d9f787991b5d9", + "Requirements": [] + }, + "generics": { + "Package": "generics", + "Version": "0.1.3", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "15e9634c0fcd294799e9b2e929ed1b86", + "Requirements": [] + }, + "glue": { + "Package": "glue", + "Version": "1.6.2", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "4f2596dfb05dac67b9dc558e5c6fba2e", + "Requirements": [] + }, + "httr": { + "Package": "httr", + "Version": "1.4.5", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "f6844033201269bec3ca0097bc6c97b3", + "Requirements": [ + "R6", + "curl", + "jsonlite", + "mime", + "openssl" + ] + }, + "jsonlite": { + "Package": "jsonlite", + "Version": "1.8.4", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "a4269a09a9b865579b2635c77e572374", + "Requirements": [] + }, + "lattice": { + "Package": "lattice", + "Version": "0.20-45", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "b64cdbb2b340437c4ee047a1f4c4377b", + "Requirements": [] + }, + "lifecycle": { + "Package": "lifecycle", + "Version": "1.0.3", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "001cecbeac1cff9301bdc3775ee46a86", + "Requirements": [ + "cli", + "glue", + "rlang" + ] + }, + "magrittr": { + "Package": "magrittr", + "Version": "2.0.3", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "7ce2733a9826b3aeb1775d56fd305472", + "Requirements": [] + }, + "memoise": { + "Package": "memoise", + "Version": "2.0.1", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "e2817ccf4a065c5d9d7f2cfbe7c1d78c", + "Requirements": [ + "cachem", + "rlang" + ] + }, + "mgcv": { + "Package": "mgcv", + "Version": "1.8-41", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "6b3904f13346742caa3e82dd0303d4ad", + "Requirements": [ + "Matrix", + "nlme" + ] + }, + "mime": { + "Package": "mime", + "Version": "0.12", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "18e9c28c1d3ca1560ce30658b22ce104", + "Requirements": [] + }, + "nlme": { + "Package": "nlme", + "Version": "3.1-162", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "0984ce8da8da9ead8643c5cbbb60f83e", + "Requirements": [ + "lattice" + ] + }, + "nnet": { + "Package": "nnet", + "Version": "7.3-18", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "170da2130d5332bea7d6ede01875ba1d", + "Requirements": [] + }, + "openssl": { + "Package": "openssl", + "Version": "2.0.5", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "b04c27110bf367b4daa93f34f3d58e75", + "Requirements": [ + "askpass" + ] + }, + "parsedate": { + "Package": "parsedate", + "Version": "1.3.1", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "7f5024cc7af45eeecef657fa62beb568", + "Requirements": [] + }, + "pillar": { + "Package": "pillar", + "Version": "1.8.1", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "f2316df30902c81729ae9de95ad5a608", + "Requirements": [ + "cli", + "fansi", + "glue", + "lifecycle", + "rlang", + "utf8", + "vctrs" + ] + }, + "pkgconfig": { + "Package": "pkgconfig", + "Version": "2.0.3", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "01f28d4278f15c76cddbea05899c5d6f", + "Requirements": [] + }, + "pkgsearch": { + "Package": "pkgsearch", + 
"Version": "3.1.2", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "02175979f7d8edc194e5b1fe5d9f4bf0", + "Requirements": [ + "curl", + "jsonlite", + "parsedate", + "prettyunits" + ] + }, + "prettyunits": { + "Package": "prettyunits", + "Version": "1.1.1", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "95ef9167b75dde9d2ccc3c7528393e7e", + "Requirements": [] + }, + "rang": { + "Package": "rang", + "Version": "0.1.2", + "Source": "Local", + "RemoteType": "local", + "RemoteUrl": "~/dev/rang", + "Hash": "e4689687fe206e887e5e8b4377a62367", + "Requirements": [ + "fastmap", + "httr", + "jsonlite", + "memoise", + "parsedate", + "pkgsearch", + "remotes", + "renv", + "vctrs" + ] + }, + "remotes": { + "Package": "remotes", + "Version": "2.4.2", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "227045be9aee47e6dda9bb38ac870d67", + "Requirements": [] + }, + "renv": { + "Package": "renv", + "Version": "0.16.0", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "c9e8442ab69bc21c9697ecf856c1e6c7", + "Requirements": [] + }, + "rlang": { + "Package": "rlang", + "Version": "1.0.6", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "4ed1f8336c8d52c3e750adcdc57228a7", + "Requirements": [] + }, + "rpart": { + "Package": "rpart", + "Version": "4.1.19", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "b3c892a81783376cc2204af0f5805a80", + "Requirements": [] + }, + "rtoot": { + "Package": "rtoot", + "Version": "0.3.0", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "06eb72de42a3f8fcb252badc58f92b2b", + "Requirements": [ + "clipr", + "curl", + "dplyr", + "httr", + "jsonlite", + "tibble" + ] + }, + "spatial": { + "Package": "spatial", + "Version": "7.3-15", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "c23666fdb7789c8a45e65340bb334607", + "Requirements": [] + }, + "survival": { + "Package": "survival", + "Version": "3.5-3", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "aea2b8787db7088ba50ba389848569ee", + "Requirements": [ + "Matrix" + ] + }, + "sys": { + "Package": "sys", + "Version": "3.4.1", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "34c16f1ef796057bfa06d3f4ff818a5d", + "Requirements": [] + }, + "tibble": { + "Package": "tibble", + "Version": "3.1.8", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "56b6934ef0f8c68225949a8672fe1a8f", + "Requirements": [ + "fansi", + "lifecycle", + "magrittr", + "pillar", + "pkgconfig", + "rlang", + "vctrs" + ] + }, + "tidyselect": { + "Package": "tidyselect", + "Version": "1.2.0", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "79540e5fcd9e0435af547d885f184fd5", + "Requirements": [ + "cli", + "glue", + "lifecycle", + "rlang", + "vctrs", + "withr" + ] + }, + "utf8": { + "Package": "utf8", + "Version": "1.2.3", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "1fe17157424bb09c48a8b3b550c753bc", + "Requirements": [] + }, + "vctrs": { + "Package": "vctrs", + "Version": "0.5.2", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "e4ffa94ceed5f124d429a5a5f0f5b378", + "Requirements": [ + "cli", + "glue", + "lifecycle", + "rlang" + ] + }, + "withr": { + "Package": "withr", + "Version": "2.5.0", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "c0e49a9760983e81e55cdd9be92e7182", + "Requirements": [] + } + } +} diff --git a/tests/testthat/test_dockerize.R b/tests/testthat/test_dockerize.R index 0e5e1de..fd427af 100644 --- a/tests/testthat/test_dockerize.R +++ b/tests/testthat/test_dockerize.R @@ -20,6 +20,7 
@@ test_that("integration of #13 in dockerize()", { dockerize(rang = rang_ok, output_dir = temp_dir) ## rang_as_comment = TRUE x <- readLines(file.path(temp_dir, "rang.R")) expect_true(any(grepl("^## ## To reconstruct this file", x))) + expect_false(any(grepl("^## ## WARNING", x))) dockerize(rang = rang_ok, output_dir = temp_dir, rang_as_comment = FALSE) x <- readLines(file.path(temp_dir, "rang.R")) expect_false(any(grepl("^## ## To reconstruct this file", x))) @@ -237,3 +238,10 @@ test_that("no_rocker #67", { expect_error(dockerize(rang = rang_ok, output_dir = temp_dir, no_rocker = TRUE, debian_version = "3.11")) }) + +test_that(".check_tarball_path", { + expect_error(.check_tarball_path("../testdata/gesis_2.0.tar.gz", "gesis")) ##dir = FALSE + expect_error(.check_tarball_path("../testdata/askpass_1.1.tar.gz", "askpass"), NA) + expect_error(.check_tarball_path("../testdata/gesis", "gesis", dir = TRUE)) + expect_error(.check_tarball_path("../testdata/askpass", "askpass", dir = TRUE), NA) +}) diff --git a/tests/testthat/test_pkgref.R b/tests/testthat/test_pkgref.R index d78a57f..88efa70 100644 --- a/tests/testthat/test_pkgref.R +++ b/tests/testthat/test_pkgref.R @@ -1,5 +1,7 @@ ## using all the cases in https://r-lib.github.io/pkgdepends/reference/pkg_refs.html +## .normalize_pkgs aka as_pkgrefs + test_that(".normalize_pkgs, defensive programming", { expect_error(.normalize_pkgs(c("forecast", "r-lib/crayon")), NA) expect_error(.normalize_pkgs(NULL), NA) @@ -52,6 +54,27 @@ test_that(".normalize_pkgs: github remote string", { expect_equal(.normalize_pkgs("github::git@github.com:r-lib/pak.git"), "github::r-lib/pak") }) +test_that(".normalize_pkgs: local", { + expect_equal(.normalize_pkgs("local::/foo/bar/package_1.0.0.tar.gz"), "local::/foo/bar/package_1.0.0.tar.gz") + expect_equal(.normalize_pkgs("local::/foo/bar/pkg"), "local::/foo/bar/pkg") + expect_equal(.normalize_pkgs("local::."), "local::.") + expect_equal(.normalize_pkgs("/absolute/path/package_1.0.0.tar.gz"), "local::/absolute/path/package_1.0.0.tar.gz") + expect_equal(.normalize_pkgs("~/path/from/home"), "local::~/path/from/home") + expect_equal(.normalize_pkgs("./relative/path"), "local::./relative/path") + expect_equal(.normalize_pkgs("."), "local::.") +}) + +test_that("as_pkgrefs dispatch", { + expect_error(as_pkgrefs(TRUE)) + expect_error(as_pkgrefs(7.21)) + expect_error(as_pkgrefs(1L)) + expect_equal(as_pkgrefs("rtoot"), "cran::rtoot") + expect_equal(as_pkgrefs(c("rtoot", "sna")), c("cran::rtoot", "cran::sna")) + expect_equal(as_pkgrefs(c("rtoot", "S4Vectors")), c("cran::rtoot", "cran::S4Vectors")) ## the bioc version is in test_resolve +}) + +## .parse_pkgref + test_that(".parse_pkgref", { expect_error(.parse_pkgref("withr")) expect_error(.parse_pkgref("r-lib/withr")) @@ -63,33 +86,24 @@ test_that(".parse_pkgref", { expect_equal(.parse_pkgref("cran::testthat?source&nocache", FALSE), "cran") expect_equal(.parse_pkgref("cran::testthat", TRUE), "testthat") expect_equal(.parse_pkgref("cran::testthat", FALSE), "cran") + expect_equal(.parse_pkgref("local::./relative/path", TRUE), "./relative/path") + expect_equal(.parse_pkgref("local::./relative/path", FALSE), "local") }) +## as_pkgrefs.sessionInfo() + test_that(".extract_pkgref_packageDescription", { si <- readRDS("../testdata/sessionInfo1.RDS") expect_equal(.extract_pkgref_packageDescription(si$otherPkgs[[1]]), "github::chainsawriot/grafzahl") expect_equal(.extract_pkgref_packageDescription(si$otherPkgs[[2]]), "cran::rtoot") - ## change this with #57 - ## 
-    ## expect_equal(.extract_pkgref_packageDescription(si$otherPkgs[[3]]), "local::/home/chainsawriot/dev/rang")
-    expect_equal(.extract_pkgref_packageDescription(si$otherPkgs[[3]]), "cran::rang")
+    expect_equal(.extract_pkgref_packageDescription(si$otherPkgs[[3]]), "local::/home/chainsawriot/dev/rang")
     expect_equal(.extract_pkgref_packageDescription(si$otherPkgs[[4]]), "cran::testthat")
 })
 
-test_that("as_pkgrefs dispatch", {
-    expect_error(as_pkgrefs(TRUE))
-    expect_error(as_pkgrefs(7.21))
-    expect_error(as_pkgrefs(1L))
-    expect_equal(as_pkgrefs("rtoot"), "cran::rtoot")
-    expect_equal(as_pkgrefs(c("rtoot", "sna")), c("cran::rtoot", "cran::sna"))
-    expect_equal(as_pkgrefs(c("rtoot", "S4Vectors")), c("cran::rtoot", "cran::S4Vectors")) ## the bioc version is in test_resolve
-})
-
 test_that("as_pkgrefs_packageDescription", {
     si <- readRDS("../testdata/sessionInfo1.RDS")
     res <- as_pkgrefs(si)
-    ## change this with #57
-    ## expect_equal(res, c("github::chainsawriot/grafzahl", "cran::rtoot", "local::/home/chainsawriot/dev/rang", "cran::testthat")
-    expect_equal(res, c("github::chainsawriot/grafzahl", "cran::rtoot", "cran::rang", "cran::testthat"))
+    expect_equal(res, c("github::chainsawriot/grafzahl", "cran::rtoot", "local::/home/chainsawriot/dev/rang", "cran::testthat"))
     ## bioc
     si <- readRDS("../testdata/sessionInfo3.RDS")
     res <- as_pkgrefs(si)
@@ -97,6 +111,8 @@ test_that("as_pkgrefs_packageDescription", {
     expect_true("bioc::S4Vectors" %in% res)
 })
 
+## as_pkgrefs.character (renv)
+
 test_that("as_pkgrefs renv_lockfile", {
     res <- as_pkgrefs("../testdata/large_renv_lock/renv.lock")
     expect_equal(res, readRDS("../testdata/bioc_renv.RDS"))
@@ -108,14 +124,48 @@ test_that(".is_renv_lockfile false",{
     expect_false(.is_renv_lockfile("../testdata/fake_renv.lock"))
 })
 
-test_that(".is_directory false",{
-    expect_false(.is_directory(c("a/","b/")))
-    expect_false(.is_directory("a/"))
-})
+test_that("as_pkgrefs renv_lockfile with local", {
+    res <- as_pkgrefs("../testdata/local_renv_lock/renv.lock")
+    expect_true("local::~/dev/rang/tests/testdata/askpass_1.1.tar.gz" %in% res)
+    expect_true("local::~/dev/rang" %in% res)
+})
 
+## as_pkgrefs.character (directory -> scanning)
+
 test_that("as_pkgrefs directory", {
     skip_if_offline()
     skip_on_cran()
     res <- suppressWarnings(as_pkgrefs("../testdata/test_dir",bioc_version = "3.16"))
     expect_equal(res, c("bioc::BiocGenerics", "cran::rtoot"))
 })
+
+## .is_*
+
+test_that(".is_github", {
+    expect_true(.is_github("cran/rtoot"))
+    expect_false(.is_github("cran//rtoot"))
+    expect_false(.is_github("~/hello"))
+    expect_false(.is_github("./hello"))
+    expect_false(.is_github("/hello"))
+    expect_false(.is_github("/hello/world"))
+    expect_false(.is_github("/hello/world/"))
+    expect_false(.is_github("world/"))
+})
+
+test_that(".is_directory false",{
+    expect_false(.is_directory(c("a/","b/")))
+    expect_false(.is_directory("a/"))
+})
+
+test_that(".is_local", {
+    expect_false(.is_local("cran/rtoot"))
+    expect_false(.is_local("cran//rtoot"))
+    expect_false(.is_local("world/"))
+    expect_true(.is_local("~/hello"))
+    expect_true(.is_local("./hello"))
+    expect_true(.is_local("/hello"))
+    expect_true(.is_local("/hello/world"))
+    expect_true(.is_local("/hello/world/"))
+    expect_true(.is_local("/hello/world/"))
+    expect_true(.is_local("../testdata/fakexml2"))
+})
diff --git a/tests/testthat/test_resolve.R b/tests/testthat/test_resolve.R
index 52b32df..96a7ed1 100644
--- a/tests/testthat/test_resolve.R
+++ b/tests/testthat/test_resolve.R
@@ -6,6 +6,19 @@ test_that("defensive programming", {
expect_error(resolve("LDAvis", os = "windows")) }) +test_that(".extract_date", { + expect_error(.extract_date("."), NA) + expect_error(.extract_date("../testdata/renv.lock"), NA) + expect_error(.extract_date("rtoot"), NA) +}) + +test_that(".check_local_in_pkgrefs", { + expect_silent(.check_local_in_pkgrefs(c("cran::rtoot", "bioc::S4Vectors", "github::cran/rtoot"))) + expect_warning(.check_local_in_pkgrefs(c("local::../testdata/fakexml2"))) + expect_warning(.check_local_in_pkgrefs(c("local::../testdata/askpass_1.1.tar.gz"))) + expect_error(suppressWarnings(.check_local_in_pkgrefs(c("local::../testdata/issue39.RDS", "cran::rtoot")))) +}) + ## The following are real tests. Even with memoisation, please keep at minimum test_that("normal", { @@ -263,8 +276,69 @@ test_that(".gh error handling", { expect_error(.gh("path/is/wrong")) }) -test_that(".extract_date", { - expect_error(.extract_date("."),NA) - expect_error(.extract_date("../testdata/renv.lock"),NA) - expect_error(.extract_date("rtoot"),NA) -}) \ No newline at end of file +test_that(".query_sysreqs_local", { + skip_if_offline() + skip_on_cran() + expect_error(sysreqs <- .query_sysreqs_local(c("../testdata/fakexml2", "../testdata/askpass_1.1.tar.gz", "../testdata/fakeRhtslib.tar.gz"), "ubuntu-20.04"), NA) + expect_true("apt-get install -y libxml2-dev" %in% sysreqs) + expect_true("apt-get install -y libbz2-dev" %in% sysreqs) + ## dispatch in .query_sysreqs_smart + expect_error(sysreqs2 <- .query_sysreqs_smart(c("local::../testdata/fakexml2", "local::../testdata/askpass_1.1.tar.gz", "local::../testdata/fakeRhtslib.tar.gz"), "ubuntu-20.04"), NA) + expect_equal(sysreqs, sysreqs2) +}) + +test_that(".query_snapshot_dependencies for local packages", { + skip_if_offline() + skip_on_cran() + skip_on_os("windows") ## don't want to be slabbed in the back by Windows' paths + expect_error(dep_df <- .query_snapshot_dependencies("local::../testdata/fakeRhtslib", + snapshot_date = "2023-01-01", bioc_version = "3.3"), NA) + expect_true("y" %in% colnames(dep_df)) + expect_true("bioc::zlibbioc" %in% dep_df$y_pkgref) + expect_true("cran::knitr" %in% dep_df$y_pkgref) + expect_equal("Rhtslib", unique(dep_df$x)) + expect_true(grepl("^/", unique(dep_df$x_uid))) ## path expanded to abs. path + ## do the same thing but with tar.gz + expect_error(dep_df <- .query_snapshot_dependencies("local::../testdata/fakeRhtslib.tar.gz", + snapshot_date = "2023-01-01", bioc_version = "3.3"), NA) + expect_true("y" %in% colnames(dep_df)) + expect_equal(unique(dep_df$x_pubdate), parsedate::parse_date("2023-01-01")) + expect_true("bioc::zlibbioc" %in% dep_df$y_pkgref) + expect_true("cran::knitr" %in% dep_df$y_pkgref) + expect_equal("Rhtslib", unique(dep_df$x)) + expect_true(grepl("^/", unique(dep_df$x_uid))) ## path expanded to abs. 
+    ## No y
+    expect_error(dep_df <- .query_snapshot_dependencies("local::../testdata/fakezlibbioc",
+                                                        snapshot_date = "2023-01-01", bioc_version = "3.3"), NA)
+    expect_false("y" %in% colnames(dep_df))
+    ## real data
+    expect_error(dep_df <- .query_snapshot_dependencies("local::../testdata/askpass_1.1.tar.gz",
+                                                        snapshot_date = "2023-01-01", bioc_version = "3.3"), NA)
+    expect_true("cran::sys" %in% dep_df$y_pkgref)
+})
+
+test_that("dockerize local package as tarball", {
+    skip_if_offline()
+    skip_on_cran()
+    temp_dir <- .generate_temp_dir()
+    expect_error(suppressWarnings(graph <- resolve("local::../testdata/askpass_1.1.tar.gz", snapshot_date = "2023-01-01")), NA)
+    expect_error(dockerize(graph, output_dir = temp_dir)) ## cache = FALSE
+    temp_dir <- .generate_temp_dir()
+    expect_error(dockerize(graph, output_dir = temp_dir, cache = TRUE, verbose = FALSE), NA) ## cache = TRUE
+    expect_true(file.exists(file.path(temp_dir, "cache", "sys_3.4.1.tar.gz")))
+    expect_true(file.exists(file.path(temp_dir, "cache", "raw_askpass_1.1.tar.gz")))
+})
+
+test_that("dockerize local package as directory", {
+    skip_if_offline()
+    skip_on_cran()
+    temp_dir <- .generate_temp_dir()
+    expect_error(suppressWarnings(graph <- resolve("local::../testdata/askpass", snapshot_date = "2023-01-01")), NA)
+    expect_error(dockerize(graph, output_dir = temp_dir)) ## cache = FALSE
+    temp_dir <- .generate_temp_dir()
+    expect_error(dockerize(graph, output_dir = temp_dir, cache = TRUE, verbose = FALSE), NA) ## cache = TRUE
+    expect_true(file.exists(file.path(temp_dir, "cache", "sys_3.4.1.tar.gz")))
+    expect_true(dir.exists(file.path(temp_dir, "cache", "dir_askpass_1.1")))
+    x <- readLines(file.path(temp_dir, "rang.R"))
+    expect_true(any(grepl("^## ## WARNING", x)))
+})
diff --git a/vignettes/faq.Rmd b/vignettes/faq.Rmd
index 040c3cb..f2e95ba 100644
--- a/vignettes/faq.Rmd
+++ b/vignettes/faq.Rmd
@@ -47,38 +47,18 @@ And the tenth line is not needed.
 
 **GQ2: For running `resolve()`, how do I know which packages are used in a project?**
 
-**GA2:** We recommend `renv::dependencies()`.
-
-Suppose in a directory called "project" there are two R files:
-
-```r
-here::here()
-```
+**GA2:** `rang` >= 0.2 supports scanning a directory for R packages. `snapshot_date` is inferred from the latest modification date of the files in the directory.
 
 ```r
-library(rio)
-x <- import("hello.csv")
+resolve(".")
 ```
 
-Running this reveals
+A better strategy, however, is to do the scanning first and then manually review which packages are from non-CRAN sources.
 
-```{r include = FALSE}
-## x <- tempfile()
-## dir.create(x)
-## writeLines(c("library(rio)", "here::here()"), file.path(x, "fake.R"))
-## y <- renv::dependencies(x)
-## y$Source <- c("myproject/1.R", "myproject/2.R")
-## y
+```r
+pkgs <- as_pkgrefs(".")
 ```
-
-renv::dependencies("project")
-#> Finding R package dependencies ... Done!
-#>          Source Package Require Version   Dev
-#> 1 myproject/1.R    here                 FALSE
-#> 2 myproject/2.R     rio                 FALSE
-
-You may still need to manually review which packages are from Github.
-
 **GQ3: Why is the R script generated by `dockerize()` and `export_rang()` so strange/unidiomatic/inefficient/did you guys read `fortunes::fortune("answer is parse")`?**
 
 **GA3:** It is because we optimize the R code in `rang.R` for backward compatibility. We need to make sure that the code runs well in vanilla R environments since 2.1.0.
@@ -101,7 +81,10 @@ You may still need to manually review which packages are from Github.
 * `query_sysreqs()` (as well as `resolve(query_sysreqs = TRUE)`) queries for System Requirements based on the latest version of the packages on CRAN / Github. Therefore:
     * Removed CRAN packages are assumed to have no System Requirements
     * R Packages with changed System Requirements between `snapshot_date` and the date of running `resolve()` might produce incorrect System Requirements
-* A result from `resolve()` with R version < 3.1 and has at least one Github package must be dockerized with caching (i.e. `dockerize(cache = TRUE)`). It is because the outdated version of Debian cannot communicate with the Github API
+* A result from `resolve()` must be dockerized with caching (i.e. `dockerize(cache = TRUE)`) in the following cases:
+    * R version < 3.1 and at least one Github package, because the outdated version of Debian cannot communicate with the Github API
+    * R version < 3.3 and at least one Bioconductor package, for the same reason
+    * at least one local package
 * R packages on Github or CRAN might not be available in the near future (Github: likely; CRAN: very unlikely). But one can cache the packages (`dockerize(cache = TRUE)`).
 * The Rocker project and its host Docker Hub might not be available in the near future (unlikely)
 * Ubuntu / Debian archives (for System Requirements) might not be available in the future (super unlikely)
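For the GA2 answer changed above, a minimal, hypothetical sketch of the "scan first, review, then resolve" workflow (the snapshot date is invented; `as_pkgrefs()` and `resolve()` are used as in the examples elsewhere in this changeset):

```r
## scan the project directory; scanning cannot detect GitHub packages,
## so review the result and swap in "github::user/pkg" refs by hand where needed
pkgs <- as_pkgrefs(".")
pkgs
## resolve the reviewed pkgrefs at a chosen snapshot date
graph <- resolve(pkgs, snapshot_date = "2023-01-01")
```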
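And to make the new caching rule for local packages concrete, a hypothetical sketch mirroring the new tests in test_resolve.R (the path and output directory are invented): `dockerize()` without `cache = TRUE` errors for a graph that contains a local package, while caching copies the local source into `output_dir/cache/`.

```r
graph <- resolve("local::~/dev/mypkg", snapshot_date = "2023-01-01")
## dockerize(graph, output_dir = "docker_mypkg")  # errors: local package without caching
dockerize(graph, output_dir = "docker_mypkg", cache = TRUE)
```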