diff --git a/.Rbuildignore b/.Rbuildignore index 146763f85..50d2170a4 100644 --- a/.Rbuildignore +++ b/.Rbuildignore @@ -54,3 +54,11 @@ ^src/library/ps/src/px$ ^src/library/zip/src/tools/cmdunzip$ ^src/library/zip/src/tools/cmdzip$ +^[.]vscode$ +^[.]covrignore$ +[.]gcda$ +[.]gcno$ +[.]gcov$ +^tests/startup.Rs$ +^tests/.*[.]Rout$ +^tests/.*[.]Rout[.]fail$ diff --git a/.covrignore b/.covrignore new file mode 100644 index 000000000..4aae05254 --- /dev/null +++ b/.covrignore @@ -0,0 +1,25 @@ +R/aaa-rstudio-detect.R +R/build-pak-binary.R +R/dev-mode.R +R/embed.R +R/embed-ca-certs.R +R/errors.R +R/git-app.R +R/pak-sitrep-data.R +R/push-packages.R +R/system-requirements.R +src/library/R6/ +src/library/callr/ +src/library/cli/ +src/library/curl/ +src/library/desc/ +src/library/filelock/ +src/library/jsonlite/ +src/library/lpSolve/ +src/library/pkgbuild/ +src/library/processx/ +src/library/ps/ +src/library/zip/ +src/library/pkgcache/R/aaa-rstudio-detect.R +src/library/pkgdepends/R/aaa-rstudio-detect.R +src/library/pkgsearch/R/aaa-rstudio-detect.R diff --git a/.github/workflows/nosuggests.yaml b/.github/workflows/nosuggests.yaml new file mode 100644 index 000000000..1cccbe30e --- /dev/null +++ b/.github/workflows/nosuggests.yaml @@ -0,0 +1,63 @@ +on: + push: + branches: [main, master] + pull_request: + branches: [main, master] + +name: No suggested packages + +jobs: + R-CMD-check: + runs-on: ${{ matrix.config.os }} + + name: ${{ matrix.config.os }} (${{ matrix.config.r }}) + + strategy: + fail-fast: false + matrix: + config: + - {os: macos-latest, r: 'devel' } + - {os: macos-latest, r: 'release' } + - {os: macos-latest, r: 'oldrel-1'} + - {os: macos-latest, r: 'oldrel-2'} + - {os: macos-latest, r: 'oldrel-3'} + - {os: macos-latest, r: 'oldrel-4'} + + - {os: windows-latest, r: 'devel' } + - {os: windows-latest, r: 'release '} + - {os: windows-latest, r: 'oldrel-1'} + - {os: windows-latest, r: 'oldrel-2'} + - {os: windows-latest, r: 'oldrel-3'} + - {os: windows-latest, r: 
'oldrel-4'} + + - {os: ubuntu-latest, r: 'devel' } + - {os: ubuntu-latest, r: 'release' } + - {os: ubuntu-latest, r: 'oldrel-1'} + - {os: ubuntu-latest, r: 'oldrel-2'} + - {os: ubuntu-latest, r: 'oldrel-3'} + - {os: ubuntu-latest, r: 'oldrel-4'} + + env: + GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} + R_KEEP_PKG_SOURCE: yes + + steps: + - uses: actions/checkout@v3 + + - uses: r-lib/actions/setup-r@v2 + with: + r-version: ${{ matrix.config.r }} + http-user-agent: ${{ matrix.config.http-user-agent }} + use-public-rspm: true + + - name: Install package + run: R CMD INSTALL --install-tests . + + - name: Run test + run: | + setwd("tests") + tools:::.runPackageTests() + shell: Rscript {0} + env: + PAK_EXTRA_TESTS: true + PAK_TESTS: false diff --git a/.github/workflows/test-coverage.yaml b/.github/workflows/test-coverage.yaml index 960234cd7..fe16b5330 100644 --- a/.github/workflows/test-coverage.yaml +++ b/.github/workflows/test-coverage.yaml @@ -23,28 +23,13 @@ jobs: - uses: r-lib/actions/setup-r-dependencies@v2 with: - extra-packages: any::covr + extra-packages: gaborcsardi/covrlabs needs: coverage - name: Test coverage run: | - covr::codecov( - quiet = FALSE, - clean = FALSE, - install_path = file.path(normalizePath(Sys.getenv("RUNNER_TEMP"), winslash = "/"), "package") - ) + pkgload::load_all() + ac <- covr() shell: Rscript {0} - - - name: Show testthat output - if: always() - run: | - ## -------------------------------------------------------------------- - find ${{ runner.temp }}/package -name 'testthat.Rout*' -exec cat '{}' \; || true - shell: bash - - - name: Upload test results - if: failure() - uses: actions/upload-artifact@v4 - with: - name: coverage-test-failures - path: ${{ runner.temp }}/package + env: + TEST_COVERAGE_PAK: true diff --git a/.gitignore b/.gitignore index 7a79fd033..d56e73a67 100644 --- a/.gitignore +++ b/.gitignore @@ -26,6 +26,10 @@ *.o *.so *.DSYM +*.gcda +*.gcno +*.gcov +/covr_trace_* /src/library/curl/configure.log 
/src/library/curl/src/Makevars /src/library/curl/tools/option_table.txt @@ -45,3 +49,6 @@ /tools/build/linux/*.done /vignettes/internals.html /vignettes/internals.md +/tests/startup.Rs +/tests/*.Rout +/tests/*.Rout.fail diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 000000000..23fd35f0e --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "editor.formatOnSave": true +} \ No newline at end of file diff --git a/DESCRIPTION b/DESCRIPTION index 21db37f14..95103d66c 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -38,13 +38,14 @@ Imports: Suggests: callr (>= 3.7.0), cli (>= 3.2.0), - covr, + codetools, curl (>= 4.3.2), desc (>= 1.4.1), filelock (>= 1.0.2), gitcreds, glue (>= 1.6.2), jsonlite (>= 1.8.0), + knitr, mockery, pingr, pkgbuild (>= 1.4.2), @@ -55,7 +56,9 @@ Suggests: ps (>= 1.6.0), rstudioapi, testthat (>= 3.2.0), - withr + webfakes, + withr, + zip ByteCompile: true Config/build/extra-sources: configure* Config/needs/dependencies: diff --git a/NAMESPACE b/NAMESPACE index 157f1d403..f8182bd68 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -1,16 +1,22 @@ # Generated by roxygen2: do not edit by hand -S3method("[",pak_search_result) -S3method("[",pak_sysreqs) -S3method(format,pak_deps_explain) -S3method(format,pak_preformatted) -S3method(format,pak_sysreqs) -S3method(print,pak_deps_explain) -S3method(print,pak_error) -S3method(print,pak_preformatted) -S3method(print,pak_search_result) -S3method(print,pak_sysreqs) -S3method(print,pkg_install_result) +S3method("[", pak_pkg_deps_tree) +S3method("[", pak_pkg_search_result) +S3method("[", pak_pkgcache_repo_status_summary) +S3method("[", pak_sysreqs) +S3method(format, pak_deps_explain) +S3method(format, pak_pkg_deps_tree) +S3method(format, pak_pkg_name_check) +S3method(format, pak_sysreqs) +S3method(print, pak_deps_explain) +S3method(print, pak_pkg_deps_tree) +S3method(print, pak_pkg_name_check) +S3method(print, pak_pkg_search_result) +S3method(print, 
pak_pkgcache_repo_status_summary) +S3method(print, pak_sysreqs) +S3method(print, pkg_install_result) +S3method(summary, pak_pkg_search_result) +S3method(summary, pak_pkgcache_repo_status) export(cache_clean) export(cache_delete) export(cache_list) @@ -72,3 +78,4 @@ export(sysreqs_list_system_packages) export(sysreqs_platforms) export(system_r_platform) export(system_r_platform_data) +if (getRversion() >= "4.0.0") importFrom(tools, R_user_dir) diff --git a/R/aaa-rstudio-detect.R b/R/aaa-rstudio-detect.R index cc0b4c1a1..06428d306 100644 --- a/R/aaa-rstudio-detect.R +++ b/R/aaa-rstudio-detect.R @@ -1,6 +1,4 @@ - rstudio <- local({ - standalone_env <- environment() parent.env(standalone_env) <- baseenv() @@ -18,7 +16,8 @@ rstudio <- local({ "RSTUDIO_CONSOLE_COLOR", "RSTUDIOAPI_IPC_REQUESTS_FILE", "XPC_SERVICE_NAME", - "ASCIICAST") + "ASCIICAST" + ) d <- list( pid = Sys.getpid(), @@ -53,10 +52,12 @@ rstudio <- local({ # Cached? if (clear_cache) data <<- NULL - if (!is.null(data)) return(get_caps(data)) + if (!is.null(data)) { + return(get_caps(data)) + } if ((rspid <- Sys.getenv("RSTUDIO_SESSION_PID")) != "" && - any(c("ps", "cli") %in% loadedNamespaces())) { + any(c("ps", "cli") %in% loadedNamespaces())) { detect_new(rspid, clear_cache) } else { detect_old(clear_cache) @@ -85,35 +86,30 @@ rstudio <- local({ pane <- Sys.getenv("RSTUDIO_CHILD_PROCESS_PANE") # this should not happen, but be defensive and fall back - if (pane == "") return(detect_old(clear_cache)) + if (pane == "") { + return(detect_old(clear_cache)) + } # direct subprocess new$type <- if (rspid == parentpid) { - if (pane == "job") { "rstudio_job" - } else if (pane == "build") { "rstudio_build_pane" - } else if (pane == "render") { "rstudio_render_pane" - } else if (pane == "terminal" && new$tty && - new$envs["ASCIICAST"] != "true") { + new$envs["ASCIICAST"] != "true") { # not possible, because there is a shell in between, just in case "rstudio_terminal" - } else { # don't know what kind of direct 
subprocess "rstudio_subprocess" } - } else if (pane == "terminal" && new$tty && - new$envs[["ASCIICAST"]] != "true") { + new$envs[["ASCIICAST"]] != "true") { # not a direct subproces, so check other criteria as well "rstudio_terminal" - } else { # don't know what kind of subprocess "rstudio_subprocess" @@ -123,7 +119,6 @@ rstudio <- local({ } detect_old <- function(clear_cache = FALSE) { - # Cache unless told otherwise cache <- TRUE new <- get_data() @@ -131,20 +126,16 @@ rstudio <- local({ new$type <- if (new$envs[["RSTUDIO"]] != "1") { # 1. Not RStudio at all "not_rstudio" - } else if (new$gui == "RStudio" && new$api) { # 2. RStudio console, properly initialized "rstudio_console" - - } else if (! new$api && basename(new$args[1]) == "RStudio") { + } else if (!new$api && basename(new$args[1]) == "RStudio") { # 3. RStudio console, initializing cache <- FALSE "rstudio_console_starting" - } else if (new$gui == "Rgui") { # Still not RStudio, but Rgui that was started from RStudio "not_rstudio" - } else if (new$tty && new$envs[["ASCIICAST"]] != "true") { # 4. R in the RStudio terminal # This could also be a subprocess of the console or build pane @@ -152,29 +143,25 @@ rstudio <- local({ # out, without inspecting some process data with ps::ps_*(). # At least we rule out asciicast "rstudio_terminal" - - } else if (! new$tty && - new$envs[["RSTUDIO_TERM"]] == "" && - new$envs[["R_BROWSER"]] == "false" && - new$envs[["R_PDFVIEWER"]] == "false" && - is_build_pane_command(new$args)) { + } else if (!new$tty && + new$envs[["RSTUDIO_TERM"]] == "" && + new$envs[["R_BROWSER"]] == "false" && + new$envs[["R_PDFVIEWER"]] == "false" && + is_build_pane_command(new$args)) { # 5. 
R in the RStudio build pane # https://github.com/rstudio/rstudio/blob/master/src/cpp/session/ # modules/build/SessionBuild.cpp#L231-L240 "rstudio_build_pane" - } else if (new$envs[["RSTUDIOAPI_IPC_REQUESTS_FILE"]] != "" && - grepl("rstudio", new$envs[["XPC_SERVICE_NAME"]])) { + grepl("rstudio", new$envs[["XPC_SERVICE_NAME"]])) { # RStudio job, XPC_SERVICE_NAME=0 in the subprocess of a job # process. Hopefully this is reliable. "rstudio_job" - } else if (new$envs[["RSTUDIOAPI_IPC_REQUESTS_FILE"]] != "" && - any(grepl("SourceWithProgress.R", new$args))) { + any(grepl("SourceWithProgress.R", new$args))) { # Or we can check SourceWithProgress.R in the command line, see # https://github.com/r-lib/cli/issues/367 "rstudio_job" - } else { # Otherwise it is a subprocess of the console, terminal or # build pane, and it is hard to say which, so we do not try. @@ -296,3 +283,5 @@ rstudio <- local({ class = c("standalone_rstudio_detect", "standalone") ) }) + +rstudio_detect <- rstudio$detect diff --git a/R/cache.R b/R/cache.R index 8a9a9a3c4..f2c1d72a4 100644 --- a/R/cache.R +++ b/R/cache.R @@ -1,4 +1,3 @@ - #' Package cache utilities #' #' @description @@ -20,16 +19,8 @@ #' ``` cache_summary <- function() { - remote( - function(...) { - get("cache_summary_internal", asNamespace("pak"))(...) - }, - list() - ) -} - -cache_summary_internal <- function() { - pkgcache::pkg_cache_summary() + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["pkg_cache_summary"]]() } #' @details `cache_list()` lists all (by default), or a subset of @@ -60,16 +51,8 @@ cache_summary_internal <- function() { cache_list <- function(...) { load_extra("pillar") - remote( - function(...) { - get("cache_list_internal", asNamespace("pak"))(...) - }, - list(...) - ) -} - -cache_list_internal <- function(...) { - pkgcache::pkg_cache_find(...) + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["pkg_cache_find"]](...) } #' @details `cache_delete()` deletes files from the cache. 
@@ -84,17 +67,8 @@ cache_list_internal <- function(...) { #' ``` cache_delete <- function(...) { - remote( - function(...) { - get("cache_delete_internal", asNamespace("pak"))(...) - }, - list(...) - ) - invisible() -} - -cache_delete_internal <- function(...) { - pkgcache::pkg_cache_delete_files(...) + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["pkg_cache_delete_files"]](...) } #' @details `cache_clean()` deletes all files from the cache. @@ -109,17 +83,8 @@ cache_delete_internal <- function(...) { #' ``` cache_clean <- function() { - remote( - function(...) { - get("cache_clean_internal", asNamespace("pak"))(...) - }, - list() - ) - invisible() -} - -cache_clean_internal <- function() { - pkgcache::pkg_cache_delete_files() + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["pkg_cache_delete_files"]]() } #' Metadata cache utilities @@ -148,19 +113,13 @@ cache_clean_internal <- function() { #' ``` meta_summary <- function() { - remote( - function(...) { - get("meta_summary_internal", asNamespace("pak"))(...) - }, - list() - ) -} + load_all_private() -meta_summary_internal <- function() { - cmc <- pkgcache::cranlike_metadata_cache$new( - platforms = pkgdepends::current_config()$get("platforms"), - cran_mirror = pkgdepends::current_config()$get("cran_mirror"), - r_version = pkgdepends::current_config()$get("r_versions") + config <- pkg_data[["ns"]][["pkgdepends"]][["current_config"]]() + cmc <- pkg_data[["ns"]][["pkgcache"]][["cranlike_metadata_cache"]]$new( + platforms = config$get("platforms"), + cran_mirror = config$get("cran_mirror"), + r_version = config$get("r_versions") ) ret <- cmc$summary() list( @@ -195,19 +154,12 @@ meta_summary_internal <- function() { meta_list <- function(pkg = NULL) { load_extra("pillar") - remote( - function(...) { - get("meta_list_internal", asNamespace("pak"))(...) 
- }, - list(pkg = pkg) - ) -} - -meta_list_internal <- function(pkg) { - cmc <- pkgcache::cranlike_metadata_cache$new( - platforms = pkgdepends::current_config()$get("platforms"), - cran_mirror = pkgdepends::current_config()$get("cran_mirror"), - r_version = pkgdepends::current_config()$get("r_versions") + load_all_private() + config <- pkg_data[["ns"]][["pkgdepends"]][["current_config"]]() + cmc <- pkg_data[["ns"]][["pkgcache"]][["cranlike_metadata_cache"]]$new( + platforms = config$get("platforms"), + cran_mirror = config$get("cran_mirror"), + r_version = config$get("r_versions") ) cmc$list(packages = pkg) } @@ -228,20 +180,12 @@ meta_list_internal <- function(pkg) { #' ``` meta_update <- function() { - remote( - function(...) { - get("meta_update_internal", asNamespace("pak"))(...) - }, - list() - ) - invisible() -} - -meta_update_internal <- function() { - cmc <- pkgcache::cranlike_metadata_cache$new( - platforms = pkgdepends::current_config()$get("platforms"), - cran_mirror = pkgdepends::current_config()$get("cran_mirror"), - r_version = pkgdepends::current_config()$get("r_versions") + load_all_private() + config <- pkg_data[["ns"]][["pkgdepends"]][["current_config"]]() + cmc <- pkg_data[["ns"]][["pkgcache"]][["cranlike_metadata_cache"]]$new( + platforms = config$get("platforms"), + cran_mirror = config$get("cran_mirror"), + r_version = config$get("r_versions") ) cmc$update() invisible() @@ -267,24 +211,15 @@ meta_clean <- function(force = FALSE) { ) } if (!force) { - msg("x Metadata cleanup aborted") - return(invisible()) + stop("Metadata cleanup aborted.") } - remote( - function(...) { - get("meta_clean_internal", asNamespace("pak"))(...) 
- }, - list() - ) - invisible() -} - -meta_clean_internal <- function() { - cmc <- pkgcache::cranlike_metadata_cache$new( - platforms = pkgdepends::current_config()$get("platforms"), - cran_mirror = pkgdepends::current_config()$get("cran_mirror"), - r_version = pkgdepends::current_config()$get("r_versions") + load_all_private() + config <- pkg_data[["ns"]][["pkgdepends"]][["current_config"]]() + cmc <- pkg_data[["ns"]][["pkgcache"]][["cranlike_metadata_cache"]]$new( + platforms = config$get("platforms"), + cran_mirror = config$get("cran_mirror"), + r_version = config$get("r_versions") ) cmc$cleanup(force = TRUE) } diff --git a/R/compat-vctrs.R b/R/compat-vctrs.R index 34860cf4a..516817664 100644 --- a/R/compat-vctrs.R +++ b/R/compat-vctrs.R @@ -1,4 +1,4 @@ - +# nocov start compat_vctrs <- local({ # Modified from https://github.com/r-lib/rlang/blob/master/R/compat-vctrs.R diff --git a/R/confirmation.R b/R/confirmation.R index 0e389750f..97ccbb170 100644 --- a/R/confirmation.R +++ b/R/confirmation.R @@ -1,11 +1,12 @@ - should_ask_confirmation <- function(sol) { # We should ask if at least one package is updated any(sol$lib_status == "update") } print_install_details <- function(prop, lib, loaded) { - cli::cli_div( + load_all_private() + cli <- pkg_data[["ns"]][["cli"]] + cli$cli_div( theme = list( "div.alert-warning" = list("margin-top" = 1, "margin-bottom" = 1) ) @@ -13,20 +14,20 @@ print_install_details <- function(prop, lib, loaded) { sol <- prop$get_solution()$data direct <- sum(sol$direct) - deps <- sum(! 
sol$direct) + deps <- sum(!sol$direct) n_newly <- sum(newly <- sol$lib_status == "new") - n_upd <- sum(upd <- sol$lib_status == "update") - n_curr <- sum(curr <- sol$lib_status == "current") + n_upd <- sum(upd <- sol$lib_status == "update") + n_curr <- sum(curr <- sol$lib_status == "current") n_noupd <- sum(noupd <- sol$lib_status == "no-update") - cli::cli_verbatim(" ") + cli$cli_verbatim(" ") if (n_newly) { - cli::cli_alert("Will {.emph install} {n_newly} package{?s}.") + cli$cli_alert("Will {.emph install} {n_newly} package{?s}.") } if (n_upd) { - cli::cli_alert("Will {.emph update} {n_upd} package{?s}.") + cli$cli_alert("Will {.emph update} {n_upd} package{?s}.") } w_dl <- sol$cache_status == "miss" & !is.na(sol$cache_status) @@ -37,36 +38,33 @@ print_install_details <- function(prop, lib, loaded) { b_dl <- format_bytes$pretty_bytes(sum(sol$filesize[w_dl], na.rm = TRUE)) b_ch <- format_bytes$pretty_bytes(sum(sol$filesize[w_ch], na.rm = TRUE)) - any_unk <- length(u_dl) > 0 - if (n_dl == 0) { if (n_ch > 0) { if (n_ch == 1) { - cli::cli_alert("The package ({b_ch}) is cached.") + cli$cli_alert("The package ({b_ch}) is cached.") } else { - cli::cli_alert("All {n_ch} packages ({b_ch}) are cached.") + cli$cli_alert("All {n_ch} packages ({b_ch}) are cached.") } } - } else if (n_ch == 0) { - if (n_dl - u_dl > 0) { - cli::cli_alert("Will {.emph download} {n_dl - u_dl} CRAN package{?s} ({b_dl}).") + if (n_dl - u_dl > 0) { + cli$cli_alert("Will {.emph download} {n_dl - u_dl} CRAN package{?s} ({b_dl}).") } if (u_dl > 0) { - cli::cli_alert("Will {.emph download} {u_dl} package{?s} with unknown size.") + cli$cli_alert("Will {.emph download} {u_dl} package{?s} with unknown size.") } - - } else if (!any_unk) { - cli::cli_alert( - "Will {.emph download} {n_dl} package{?s} ({b_dl}), cached: {n_ch} ({b_ch}).") - + } else if (u_dl == 0) { + cli$cli_alert( + "Will {.emph download} {n_dl} package{?s} ({b_dl}), cached: {n_ch} ({b_ch})." 
+ ) } else { if (n_dl - u_dl > 0) { - cli::cli_alert( - "Will {.emph download} {n_dl - u_dl} CRAN package{?s} ({b_dl}), cached: {n_ch} ({b_ch}).") + cli$cli_alert( + "Will {.emph download} {n_dl - u_dl} CRAN package{?s} ({b_dl}), cached: {n_ch} ({b_ch})." + ) } if (u_dl > 0) { - cli::cli_alert("Will {.emph download} {u_dl} package{?s} with unknown size.") + cli$cli_alert("Will {.emph download} {u_dl} package{?s} with unknown size.") } } @@ -74,25 +72,7 @@ print_install_details <- function(prop, lib, loaded) { prop$show_solution(key = FALSE) } - sysreqs <- prop$get_sysreqs() - if (!is.null(sysreqs)) { - num <- length(sysreqs$miss) + length(sysreqs$upd) - if (length(sysreqs$inst) > 0 && num == 0) { - cli::cli_alert_success("All system requirements are already installed.") - } else if (num > 0) { - install_sysreqs <- prop$get_config()$get("sysreqs") - if (install_sysreqs) { - cli::cli_alert("Will {.emph install} {num} system package{?s}:") - } else { - cli::cli_alert_danger( - "Missing {num} system package{?s}. 
You'll probably need to - install {?it/them} manually:", - wrap = TRUE - ) - } - } - prop$show_sysreqs() - } + print_sysreqs_details(prop) if (length(loaded) > 0 || get_os() == "win") { ls <- warn_for_loaded_packages(sol$package[newly | upd], lib, loaded) @@ -104,9 +84,38 @@ print_install_details <- function(prop, lib, loaded) { invisible(list(should_ask = should_ask, loaded_status = ls)) } -get_confirmation <- function(q, msg = "Aborted.") { +print_sysreqs_details <- function(prop) { + sysreqs <- prop$get_sysreqs() + if (is.null(sysreqs)) { + return() + } + load_all_private() + cli <- pkg_data[["ns"]][["cli"]] + + do <- length(sysreqs$miss) + length(sysreqs$upd) + if (length(sysreqs$inst) > 0 && do == 0) { + cli$cli_alert_success("All system requirements are already installed.") + } else if (do > 0) { + install_sysreqs <- prop$get_config()$get("sysreqs") + if (install_sysreqs) { + cli$cli_alert("Will {.emph install} {do} system package{?s}:") + } else { + cli$cli_alert_danger( + "Missing {do} system package{?s}. You'll probably need to + install {?it/them} manually:", + wrap = TRUE + ) + } + } + # workaround for printing an empty line if no sysreqs + if (do + length(sysreqs$inst) > 0) { + prop$show_sysreqs() + } +} + +get_confirmation <- function(q, msg = "Aborted.") { ans <- readline(q) - if (! tolower(ans) %in% c("", "y", "yes", "yeah", "yep")) { + if (!tolower(ans) %in% c("", "y", "yes", "yeah", "yep")) { stop(msg, call. = FALSE) } } @@ -122,17 +131,20 @@ get_answer <- function(answers, prompt = NULL) { ans <- readline(prompt) ans <- str_trim(ans) if (ans == "") ans <- answers[1] - if (ans %in% answers) return(ans) + if (ans %in% answers) { + return(ans) + } } } offer_restart <- function(unloaded) { - message("\n", - "! pak had to unload some packages before installation, and the\n", - " current R session may be unstable. It is best to restart R now.\n" + message( + "\n", + "! 
pak had to unload some packages before installation, and the\n", + " current R session may be unstable. It is best to restart R now.\n" ) - rs <- rstudio$detect()$type + rs <- rstudio_detect()$type if (rs == "rstudio_console") { message( @@ -143,15 +155,12 @@ offer_restart <- function(unloaded) { ans <- get_answer(1:3) if (ans == "1") { rstudioapi::restartSession() - } else if (ans == "2") { message("Saving workspace to .RData...") save.image() rstudioapi::restartSession() - } else if (ans == "3") { invisible("OK") - } } } diff --git a/R/covr.R b/R/covr.R new file mode 100644 index 000000000..0a5b9685e --- /dev/null +++ b/R/covr.R @@ -0,0 +1,3 @@ +run_gcov <- function() { + +} diff --git a/R/deps-explain.R b/R/deps-explain.R index bbad5095b..b55f77e88 100644 --- a/R/deps-explain.R +++ b/R/deps-explain.R @@ -1,4 +1,3 @@ - #' Explain how a package depends on other packages #' #' Extract dependency chains from `pkg` to `deps`. @@ -29,29 +28,22 @@ pkg_deps_explain <- function(pkg, deps, upgrade = TRUE, dependencies = NA) { stopifnot(length(pkg == 1) && is.character(pkg)) - remote( - function(...) { - get("pkg_deps_explain_internal", asNamespace("pak"))(...) 
- }, - list(pkg = pkg, deps = deps, upgrade = upgrade, - dependencies = dependencies) - ) -} - -pkg_deps_explain_internal <- function(pkg, deps, upgrade, dependencies = NA) { - data <- pkg_deps_internal2(pkg, upgrade, dependencies)$get_solution()$data + load_all_private() + data <- pkg_deps(pkg, upgrade = upgrade, dependencies = dependencies) wpkg <- match(pkg, data$ref) paths <- structure(vector("list", length(deps)), names = deps) - types <- pkgdepends::as_pkg_dependencies(dependencies) + types <- pkg_data[["ns"]][["pkgdepends"]][["as_pkg_dependencies"]]( + dependencies + ) deps1 <- local({ d1 <- data$deps[[wpkg]] - pk <- d1$package[ tolower(d1$type) %in% tolower(types[[1]]) ] + pk <- d1$package[tolower(d1$type) %in% tolower(types[[1]])] na_omit(match(pk, data$package)) }) adjlist <- lapply(data$deps, function(di) { - p <- di$package[ tolower(di$type) %in% tolower(types[[2]]) ] + p <- di$package[tolower(di$type) %in% tolower(types[[2]])] p <- setdiff(p, "R") na_omit(match(p, data$package)) }) @@ -81,7 +73,6 @@ pkg_deps_explain_internal <- function(pkg, deps, upgrade, dependencies = NA) { if (dpkg %in% deps) { paths[[dpkg]] <- c(paths[[dpkg]], list(data$package[stack[1:ssize]])) } - } else { ssize <- ssize - 1L nptr[act] <- 1L @@ -100,7 +91,6 @@ pkg_deps_explain_internal <- function(pkg, deps, upgrade, dependencies = NA) { #' @export format.pak_deps_explain <- function(x, ...) { - format_path1 <- function(p1) { strwrap(paste0(p1, collapse = " -> "), exdent = 2L) } @@ -124,6 +114,6 @@ format.pak_deps_explain <- function(x, ...) { #' @export print.pak_deps_explain <- function(x, ...) 
{ - cat(format(x, ...), sep = "\n") + writeLines(format(x, ...)) invisible(x) } diff --git a/R/dev-mode.R b/R/dev-mode.R index 92c438aef..a504f9471 100644 --- a/R/dev-mode.R +++ b/R/dev-mode.R @@ -16,5 +16,12 @@ create_dev_lib <- function() { on.exit(setwd(wd), add = TRUE) setwd(dirname(inst_script)) + if (Sys.getenv("TEST_COVERAGE_PAK") == "true") { + message("Instrumenting pak for test coverage") + asNamespace("covrlabs")$trace_package("pak") + } + system2(rscript, c("--vanilla", "install-embedded.R", "--load-all", lib)) + + invisible() } diff --git a/R/embed.R b/R/embed.R index 9373dd861..61beb4fa4 100644 --- a/R/embed.R +++ b/R/embed.R @@ -199,10 +199,9 @@ embed <- local({ rimraf(file.path(lib_dir(), pkg)) } - addupdate_package <- function( - pkg, - ver = NULL, - mode = c("add", "update")) { + addupdate_package <- function(pkg, + ver = NULL, + mode = c("add", "update")) { mode <- match.arg(mode) stopifnot( is_string(pkg), @@ -235,7 +234,6 @@ embed <- local({ system2("R", c("CMD", "build ", dir())) path <- normalizePath(dir(pattern = "[.]tar[.]gz$")) setwd(wd) - } else { path <- utils::download.packages(pkg, tmp, repos = get_repos())[, 2] } @@ -288,6 +286,7 @@ embed <- local({ rimraf(file.path(lib, pkg, "inst", "CITATION")) rimraf(file.path(lib, pkg, "MD5")) rimraf(file.path(lib, pkg, "README.md")) + rimraf(file.path(lib, pkg, "vignettes")) } } diff --git a/R/formatted.R b/R/formatted.R deleted file mode 100644 index 8b4046c47..000000000 --- a/R/formatted.R +++ /dev/null @@ -1,18 +0,0 @@ - -#' @export - -format.pak_preformatted <- function(x, ...) { - attr(x, "pak_preformatted") -} - -#' @export - -print.pak_preformatted <- function(x, ...) { - cat(format(x, ...), sep = "\n") -} - -pak_preformat <- function(x, ...) { - attr(x, "pak_preformatted") <- format(x, ...) 
- class(x) <- c("pak_preformatted", class(x)) - x -} diff --git a/R/gh-app.R b/R/gh-app.R new file mode 100644 index 000000000..31aee3c8d --- /dev/null +++ b/R/gh-app.R @@ -0,0 +1,379 @@ + +# nocov start + +str_starts_with <- function(x, pre) { + substring(x, 1, nchar(pre)) == pre +} + +gr_response_headers_graphql <- function(upd = NULL) { + list( + server = "GitHub.com", + `content-type` = "application/json; charset=utf-8", + `x-oauth-scopes` = "delete:packages, delete_repo, read:org, repo, workflow, write:packages", + `x-accepted-oauth-scopes` = "repo", + `x-github-media-type` = "github.v3; format=json", + `x-ratelimit-limit` = "5000", + `x-ratelimit-remaining` = "4998", + `x-ratelimit-reset` = as.integer(Sys.time() + as.difftime(1, units = "hours")), + `x-ratelimit-used` = "2", + `x-ratelimit-resource` = "graphql", + `access-control-expose-headers` = "ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, X-GitHub-Request-Id, Deprecation, Sunset", + `access-control-allow-origin` = "*", + `strict-transport-security` = "max-age=31536000; includeSubdomains; preload", + `x-frame-options` = "deny", + `x-content-type-options` = "nosniff", + `x-xss-protection` = "0", + `referrer-policy` = "origin-when-cross-origin, strict-origin-when-cross-origin", + `content-security-policy` = "default-src 'none'", + vary = "Accept-Encoding, Accept, X-Requested-With", + `x-github-request-id` = basename(tempfile()) + ) +} + +make_dummy_zip <- function(commit) { + mkdirp(tmp <- tempfile()) + old <- getwd() + on.exit(setwd(old), add = TRUE) + setwd(tmp) + root <- paste0(commit$repo, "-", commit$branch) + mkdirp(root) + setwd(root) + for (i in seq_along(commit$files)) { + nm <- names(commit$files)[[i]] + ct <- commit$files[[i]] + mkdirp(dirname(nm)) + if (is.raw(ct)) { + writeBin(ct, nm) + } else { + 
writeLines(ct, nm) + } + } + setwd(tmp) + zip::zip(paste0(root, ".zip"), root) + file.path(tmp, paste0(root, ".zip")) +} + +re_gh_auth <- function() { + paste0( + "^token (gh[pousr]_[A-Za-z0-9_]{36,251}|", + "[[:xdigit:]]{40})$" + ) +} + +process_repos <- function(repos) { + for (i in seq_along(repos$users)) { + u <- names(repos$users)[i] + repos$users[[i]]$user <- u + for (j in seq_along(repos$users[[i]]$repos)) { + r <- names(repos$users[[i]]$repos)[j] + repos$users[[i]]$repos[[j]]$user <- u + for (k in seq_along(repos$users[[i]]$repos[[j]]$commits)) { + repos$users[[i]]$repos[[j]]$commits[[k]]$user <- u + repos$users[[i]]$repos[[j]]$commits[[k]]$repo <- r + } + } + } + repos +} + +gh_fmt_desc <- function(dsc) { + if (is.null(dsc)) { + return(NA) + + } else if (is.raw(dsc)) { + list( + isBinary = TRUE, + text = NA + ) + + } else { + list( + isBinary = FALSE, + text = dsc + ) + } +} + +gh_app <- function(repos = NULL, log = interactive(), options = list()) { + + app <- webfakes::new_app() + + # Log requests by default + if (log) app$use("logger" = webfakes::mw_log()) + + # Parse JSON body, even if no content-type header is sent + app$use("json body parser" = webfakes::mw_json( + type = c( + "", + "application/json", + "application/json; charset=utf-8" + ) + )) + + # app$use("text body parser" = webfakes::mw_text(type = c("text/plain", "application/json"))) + # app$use("multipart body parser" = webfakes::mw_multipart()) + # app$use("URL encoded body parser" = webfakes::mw_urlencoded()) + + # Add etags by default + app$use("add etag" = webfakes::mw_etag()) + + # Add date by default + app$use("add date" = function(req, res) { + res$set_header("Date", as.character(Sys.time())) + "next" + }) + + app$locals$repos <- process_repos(repos) + app$locals$data <- list() + + app$use(function(req, res) { + auth <- req$get_header("Authorization") + if (is.null(auth)) return("next") + if (!grepl(re_gh_auth(), auth)) { + res$set_status(401) + res$send_json( + auto_unbox = TRUE, + 
list( + message = "Bad credentials", + documentation_url = "https://docs.github.com/graphql" + ) + ) + } else { + req$.token <- sub("^token ", "", auth) + "next" + } + }) + + app$post("/404/graphql", function(req, res) { + res$send_status(404) + }) + + app$post("/graphql", function(req, res) { + re_ref <- paste0( + "owner:[ ]*\"(?[^\"]+)\"", "(?s:.)*", + "name:[ ]*\"(?[^\"]+)\"", "(?s:.)*", + "description:[ ]*object[(]expression:[ ]*\"[^:]+:(?[^\"]+)\"", "(?s:.)*", + "sha:[ ]*object[(]expression:[ ]*\"(?[^\"]+)\"" + ) + + psd <- re_match(req$json$query, re_ref) + if (is.na(psd$.match)) return("next") + + if (!psd$user %in% names(app$locals$repos$users)) { + send_user_not_found(res, psd) + return() + } + if (!psd$repo %in% names(app$locals$repos$users[[psd$user]]$repos)) { + send_repo_not_found(res, psd) + return() + } + + commits <- app$locals$repos$users[[psd$user]]$repos[[psd$repo]]$commits + for (cmt in commits) { + if ((!is.null(cmt$tag) && cmt$tag == psd$ref) || + (!is.null(cmt$branch) && cmt$branch == psd$ref) || + str_starts_with(cmt$sha, psd$ref)) { + add_gh_headers(res) + dsc <- cmt$files[[psd$path]] + if (!is.null(cmt$token) && + (is.null(req$.token) || req$.token != cmt$token)) { + send_repo_not_found(res, psd) + return() + } + res$send_json( + auto_unbox = TRUE, + list(data = list(repository = list( + description = gh_fmt_desc(dsc), + sha = list(oid = cmt$sha) + ))) + ) + return() + } + } + + res$set_status(200) + res$send_json(auto_unbox = TRUE, + list(data = list(repository = list( + description = NA, + sha = NA + ))) + ) + }) + + app$post("/graphql", function(req, res) { + re_pull <- paste0( + "owner:[ ]*\"(?[^\"]+)\"", "(?s:.)*", + "name:[ ]*\"(?[^\"]+)\"", "(?s:.)*", + "pullRequest[(]number:[ ]*(?[0-9]+)[)]", "(?s:.)*", + "file[(]path:[ ]*\"(?.*)\"" + ) + + psd <- re_match(req$json$query, re_pull) + if (is.na(psd$.match)) return("next") + + if (!psd$user %in% names(app$locals$repos$users)) { + send_user_not_found(res, psd) + return() + } + if 
(!psd$repo %in% names(app$locals$repos$users[[psd$user]]$repos)) { + send_repo_not_found(res, psd) + return() + } + + commits <- app$locals$repos$users[[psd$user]]$repos[[psd$repo]]$commits + for (cmt in commits) { + if (!is.null(cmt$pull) && cmt$pull == psd$pull) { + add_gh_headers(res) + dsc <- cmt$files[[psd$path]] + res$send_json( + auto_unbox = TRUE, + list(data = list(repository = list(pullRequest = list( + headRefOid = cmt$sha, + headRef = list(target = list(file = list(object = gh_fmt_desc(dsc)))) + )))) + ) + return() + } + } + + send_pull_not_found(res, psd) + }) + + # @*release + app$post("/graphql", function(req, res) { + re_release <- paste0( + "owner:[ ]*\"(?[^\"]+)\"", "(?s:.)*", + "name:[ ]*\"(?[^\"]+)\"", "(?s:.)*", + "file[(]path:[ ]*\"(?.*)\"" + ) + + psd <- re_match(req$json$query, re_release) + if (is.na(psd$.match)) return("next") + + commits <- app$locals$repos$users[[psd$user]]$repos[[psd$repo]]$commits + for (cmt in commits) { + if (isTRUE(cmt$latestRelease)) { + add_gh_headers(res) + dsc <- cmt$files[[psd$path]] + res$send_json( + auto_unbox = TRUE, + list(data = list(repository = list(latestRelease = list( + tagName = cmt$tagName, + tagCommit = list( + oid = cmt$sha, + file = list(object = gh_fmt_desc(dsc)) + ) + )))) + ) + return() + } + } + + send_no_releases(res, psd) + }) + + app$get("/repos/:user/:repo/zipball/:sha", function(req, res) { + if (!req$params$user %in% names(app$locals$repos$users)) { + send_user_not_found(res, req$params) + return() + } + if (!req$params$repo %in% names(app$locals$repos$users[[req$params$user]]$repos)) { + send_repo_not_found(res, req$params) + return() + } + + commits <- app$locals$repos$users[[req$params$user]]$repos[[req$params$repo]]$commits + shas <- vapply(commits, "[[", "", "sha") + if (!req$params$sha %in% shas) { + send_sha_not_found(res, req$params) + return() + } + + cmt <- commits[[which(shas == req$params$sha)]] + z <- make_dummy_zip(cmt) + res$send_file(z, root = "/") + }) + + app +} + 
+add_gh_headers <- function(res) { + headers <- gr_response_headers_graphql() + for (i in seq_along(headers)) { + res$set_header(names(headers)[i], headers[i]) + } +} + +send_user_not_found <- function(res, psd) { + res$set_status(200) + res$send_json(auto_unbox = TRUE, + list( + data = list(repository = NA), + errors = list( + list( + type = "NOT_FOUND", + path = list("repository"), + locations = list( + list( + line = 2, + column = 3 + ) + ), + message = sprintf( + "Could not resolve to a Repository with the name '%s'.", + paste0(psd$user, "/", psd$repo) + ) + ) + ) + ) + ) +} + +send_repo_not_found <- function(res, psd) { + send_user_not_found(res, psd) +} + +send_ref_not_found <- function(res, psd) { + res$send_status(404) +} + +send_pull_not_found <- function(res, psd) { + res$set_status(200) + res$send_json(auto_unbox = TRUE, + list( + data = list(repository = list(pullRequest = NA)), + errors = list( + list( + type = "NOT_FOUND", + path = list("repository", "pullRequest"), + locations = list( + list( + line = 3L, + column = 5L + ) + ), + message = sprintf( + "Could not resolve to a PullRequest with the number of %s.", + psd$pull + ) + ) + ) + ) + ) +} + +send_sha_not_found <- function(res, psd) { + # TODO + res$send_status(404) +} + +send_no_releases <- function(res, psd) { + res$set_status(200) + res$send_json(auto_unbox = TRUE, + list( + data = list(repository = list(latestRelease = NA)) + ) + ) +} + +# nocov end diff --git a/R/git-app.R b/R/git-app.R new file mode 100644 index 000000000..7db82943c --- /dev/null +++ b/R/git-app.R @@ -0,0 +1,199 @@ + +# TODO: allow restriting to dumb, v1, v2 protocol + +git_app <- function(git_root, + git_timeout = as.difftime(1, units = "mins"), + filter = TRUE, + cleanup = TRUE) { + + app <- webfakes::new_app() + app$locals$git_root <- git_root + app$locals$git_timeout <- as.double(git_timeout, units = "secs") * 1000 + app$locals$git_config <- tempfile() + + reg.finalizer(app, function(app0) unlink(app$locals$git_config), 
TRUE) + writeLines( + c( + "[uploadpack]", + paste0("\tallowFilter = ", if (isTRUE(filter)) "true" else "false") + ), + app$locals$git_config + ) + + if (cleanup) { + reg.finalizer( + app, + function(app) unlink(app$locals$git_root, recursive = TRUE), + TRUE + ) + } + + app$get(webfakes::new_regexp("^(?.*)$"), function(req, res) { + out <- tempfile() + err <- tempfile() + on.exit(unlink(c(out, err)), add = TRUE) + px <- processx::process$new( + "git", + "http-backend", + env = git_env_vars(req), + stdout = out, + stderr = err + ) + px$wait(req$app$locals$git_timeout) + parse_cgi_output(px, out, err, res) + }) + + app$post(webfakes::new_regexp("^(?.*)$"), function(req, res) { + tmp <- tempfile() + out <- tempfile() + err <- tempfile() + on.exit(unlink(c(out, err, tmp)), add = TRUE) + writeBin(req$.body, tmp) + px <- processx::process$new( + "git", + "http-backend", + env = git_env_vars(req), + stdin = tmp, + stdout = out, + stderr = err + ) + px$wait(req$app$locals$git_timeout) + parse_cgi_output(px, out, err, res) + }) + + app +} + +git_env_vars <- function(req) { + url <- parse_url(req$url) + c( + "current", + + # For git + GIT_CONFIG_GLOBAL = req$app$locals$git_config, + GIT_HTTP_EXPORT_ALL = "true", + GIT_PROJECT_ROOT = req$app$locals$git_root, + GIT_PROTOCOL = req$get_header("Git-Protocol") %||% "", + HTTP_GIT_PROTOCOL = req$get_header("Git-Protocol") %||% "", + + # general CGI + CONTENT_LENGTH = if (length(req$.body)) length(req$.body), + CONTENT_TYPE = req$get_header("content-type") %||% "", + GATEWAY_INTERFACE = "CGI/1.1", + PATH_INFO = req$path, + QUERY_STRING = req$query_string, + REMOTE_ADDR = req$remote_addr, + REMOTE_HOST = req$remote_addr, + REMOTE_USER = "anonymous", + REQUEST_METHOD = toupper(req$method), + SERVER_NAME = url$host, + SERVER_PORT = url$port, + SERVER_PROTOCOL = paste0("http/", req$http_version), + SERVER_SOFTWARE = "https://github.com/r-lib/webfakes" + ) +} + +parse_cgi_output <- function(px, out, err, res) { + if (px$is_alive() || 
px$get_exit_status() != 0) { + px$kill() + res$ + set_status(500)$ + send(paste0("Internal git error: ", err)) + } + + out <- read_bin(out) + err <- read_char(err) + + cgi_res <- split_cgi_output(out) + headers <- cgi_res$headers + + for (idx in seq_along(headers)) { + if (tolower(names(headers)[idx]) == "status") { + res$set_status(parse_status(headers[[idx]])) + } else { + res$set_header(names(headers)[idx], headers[[idx]]) + } + } + + if (! "status" %in% names(headers)) { + res$set_status(200L) + } + + res$send(cgi_res$body) +} + +split_cgi_output <- function(x) { + nlnl <- grepRaw("\r?\n\r?\n", x)[1] + if (is.na(nlnl)) { + stop("Invalid response from git cgi, no headers?") + } + + headers <- parse_headers(rawToChar(x[1:(nlnl - 1L)])) + + body <- x[nlnl:length(x)] + ndrop <- 1L + while (body[ndrop] != 0x0a) ndrop <- ndrop + 1L + ndrop <- ndrop + 1L + while (body[ndrop] != 0x0a) ndrop <- ndrop + 1L + body <- utils::tail(body, -ndrop) + + list(headers = headers, body = body) +} + +parse_status <- function(x) { + status <- as.integer(strsplit(x, " ", fixed = TRUE)[[1]][1]) + if (is.na(status)) { + stop("Invalid status from git cgi: ", x) + } +} + +read_bin <- function(path) { + readBin(path, "raw", file.info(path)$size) +} + +parse_headers <- function (txt) { + headers <- grep(":", parse_headers0(txt), fixed = TRUE, value = TRUE) + out <- lapply(headers, split_header) + names <- tolower(vapply(out, `[[`, character(1), 1)) + values <- lapply(lapply(out, `[[`, 2), trimws) + names(values) <- names + values +} + +parse_headers0 <- function (txt, multiple = FALSE) { + if (!length(txt)) + return(NULL) + if (is.raw(txt)) { + txt <- rawToChar(txt) + } + stopifnot(is.character(txt)) + if (length(txt) > 1) { + txt <- paste(txt, collapse = "\n") + } + sets <- strsplit(txt, "\\r\\n\\r\\n|\\n\\n|\\r\\r")[[1]] + headers <- strsplit(sets, "\\r\\n|\\n|\\r") + if (multiple) { + headers + } + else { + headers[[length(headers)]] + } +} + +split_header <- function(x) { + pos <- 
grepRaw(":", x, fixed = TRUE)[1] + if (is.na(pos)) { + stop("Invalid response header from git cgi: ", x) + } + c(substr(x, 1, pos - 1L), substr(x, pos + 1L, nchar(x))) +} + +parse_url <- function(url) { + re_url <- paste0( + "^(?[a-zA-Z0-9]+)://", + "(?:(?[^@/:]+)(?::(?[^@/]+))?@)?", + "(?[^/]+)", + "(?.*)$" # don't worry about query params here... + ) + re_match(url, re_url)$groups +} diff --git a/R/global-handler.R b/R/global-handler.R index 6d0209d93..06943a51d 100644 --- a/R/global-handler.R +++ b/R/global-handler.R @@ -1,4 +1,3 @@ - #' Install missing packages on the fly #' #' Use this function to set up a global error handler, that is called @@ -37,23 +36,30 @@ handle_package_not_found <- function(err) { # TODO: is this what we want? Or refine this? E.g. testthat, knitr? - if (!is_interactive()) return() + if (!is_interactive()) { + return() + } # TODO: what if message output is redirected? we ignore for now - if (sink.number() != 0) return() + if (sink.number() != 0) { + return() + } pkg <- err$package lib <- err$lib.loc %||% .libPaths()[1] can_cont <- !is.null(findRestart("retry_loadNamespace")) - cli <- load_private_cli() + load_all_private() + cli <- pkg_data[["ns"]][["cli"]] cli$cli_text() cli$cli_alert_danger( - c("Failed to load package {.pkg {pkg}}. Do you want to install it ", - "into the default library at {.path {lib}}?"), + c( + "Failed to load package {.pkg {pkg}}. Do you want to install it ", + "into the default library at {.path {lib}}?" + ), wrap = TRUE - ) + ) cli$cli_text() dv <- cli$cli_div(theme = list(ol = list("margin-left" = 2))) @@ -71,7 +77,9 @@ handle_package_not_found <- function(err) { cat("\n") - if (ans == "2") return() + if (ans == "2") { + return() + } cli$cli_rule("start installation") pkg_install(pkg, lib = lib[1]) diff --git a/R/library.R b/R/library.R index 8565dcb97..2b11b84a2 100644 --- a/R/library.R +++ b/R/library.R @@ -1,5 +1,3 @@ - - #' Status of packages in a library #' #' @param lib Path to library. 
@@ -16,9 +14,8 @@ lib_status <- function(lib = .libPaths()[1]) { load_extra("pillar") - remote( - function(...) asNamespace("pak")$lib_status_internal(...), - list(lib = lib)) + load_all_private() + pkg_data[["ns"]][["pkgdepends"]][["lib_status"]](lib) } ## TODO: lib_check() @@ -33,7 +30,3 @@ lib_status <- function(lib = .libPaths()[1]) { ## TODO: lib_update() ## ---------------------------------------------------------------------- - -lib_status_internal <- function(lib) { - pkgdepends::lib_status(lib) -} diff --git a/R/load-all-private.R b/R/load-all-private.R index 9909e64d7..4e54957e3 100644 --- a/R/load-all-private.R +++ b/R/load-all-private.R @@ -1,17 +1,58 @@ load_all_private <- function() { - load_private_cli() - load_private_package("R6") - load_private_package("curl") - load_private_package("distro") - load_private_package("filelock") - load_private_package("jsonlite") - load_private_package("lpSolve") - load_private_package("ps") - load_private_package("zip") - load_private_package("processx", "c_") - load_private_package("callr") - load_private_package("desc") - load_private_package("pkgbuild") - load_private_package("pkgsearch") - load_private_package("pkgdepends") + if (length(pkg_data[["ns"]]) > 0) { + return() + } + lib <- private_lib_dir() + if (Sys.getenv("TEST_COVERAGE_PAK") == "true") { + deps_path <- file.path(lib, "deps-covr.rds") + cnt_path <- file.path(lib, "deps-cnt.rds") + asNamespace("covrlabs")$add_counters(readRDS(cnt_path)) + } else { + deps_path <- file.path(lib, "deps.rds") # nocov + } + pkg_data[["ns"]] <- readRDS(deps_path) + parent.env(pkg_data[["ns"]]) <- getNamespace(.packageName) + # These register C functions with a c_ prefix + prefix_pkgs <- c("filelock", "processx", "zip") + for (pkg in names(pkg_data[["ns"]])) { + pkg_env <- pkg_data[["ns"]][[pkg]] + reg_prefix <- if (pkg %in% prefix_pkgs) "c_" else "" + parent.env(pkg_env) <- getNamespace(.packageName) + + pkg_env[["__pkg-dir__"]] <- normalizePath(file.path(lib, pkg)) + + 
dll_file <- paste0(pkg, .Platform$dynlib.ext) + dll_path <- file.path(lib, pkg, "libs", .Platform$r_arch, dll_file) + if (file.exists(dll_path)) { + # TODO: copy it on windows (or always?) + dll <- dyn.load(dll_path) + dll[["name"]] <- paste0("pak-", dll[["name"]]) + .dynLibs(c(.dynLibs(), list(dll))) + natfuns <- getDLLRegisteredRoutines(dll)$.Call + for (natfun in natfuns) { + pkg_env[[paste0(reg_prefix, natfun$name)]] <- natfun + } + } + } + + # This is use in .onLoad() in pkgcache + pcnp <- Sys.getenv("PKGCACHE_NO_PILLAR") + if (pcnp == "") { + on.exit(Sys.unsetenv("PKGCACHE_NO_PILLAR"), add = TRUE) + } else { + on.exit(Sys.setenv(PKGCACHE_NO_PILLAR = pcnp), add = TRUE) # nocov + } + Sys.setenv("PKGCACHE_NO_PILLAR" = "true") + + for (pkg in names(pkg_data[["ns"]])) { + pkg_env <- pkg_data[["ns"]][[pkg]] + if (".onLoad" %in% names(pkg_env)) { + pkg_env[[".onLoad"]](lib, pkg) + } + } +} + +embedded_call <- function(pkg, fun) { + load_all_private() + pkg_data[["ns"]][[pkg]][[fun]] } diff --git a/R/local.R b/R/local.R index 2ee585a9b..6722cdfe7 100644 --- a/R/local.R +++ b/R/local.R @@ -1,4 +1,3 @@ - #' About local package trees #' #' pak can install packages from local package trees. This is convenient @@ -16,7 +15,7 @@ #' #' @name local_package_trees #' @family local package trees - NULL +NULL #' Install a package tree #' @@ -34,31 +33,14 @@ local_install <- function(root = ".", lib = .libPaths()[1], upgrade = TRUE, ask = interactive(), dependencies = NA) { - - start <- Sys.time() - - status <- remote( - function(...) get("local_install_make_plan", asNamespace("pak"))(...), - list("local", root = root, lib = lib, upgrade = upgrade, ask = ask, - start = start, dependencies = dependencies, - loaded = loaded_packages(lib))) - - unloaded <- handle_status(status, lib, ask)$unloaded - - inst <- remote( - function(...) 
get("pkg_install_do_plan", asNamespace("pak"))(...), - list(proposal = NULL)) - - if (length(unloaded) > 0) offer_restart(unloaded) - - invisible(inst) -} - -local_install_make_plan <- function(type, root, lib, upgrade, ask, start, - dependencies, loaded) { root <- find_package_root(path = root) - pkg <- paste0(type, "::", root) - pkg_install_make_plan(pkg, lib, upgrade, ask, start, dependencies, loaded) + pkg_install( + paste0("local::", root), + lib = lib, + upgrade = upgrade, + ask = ask, + dependencies = dependencies + ) } #' Install the dependencies of a package tree @@ -79,23 +61,14 @@ local_install_make_plan <- function(type, root, lib, upgrade, ask, start, local_install_deps <- function(root = ".", lib = .libPaths()[1], upgrade = TRUE, ask = interactive(), dependencies = NA) { - start <- Sys.time() - - status <- remote( - function(...) get("local_install_make_plan", asNamespace("pak"))(...), - list("deps", root = root, lib = lib, upgrade = upgrade, ask = ask, - start = start, dependencies = dependencies, - loaded = loaded_packages(lib))) - - unloaded <- handle_status(status, lib, ask)$unloaded - - inst <- remote( - function(...) get("pkg_install_do_plan", asNamespace("pak"))(...), - list(proposal = NULL)) - - if (length(unloaded) > 0) offer_restart(unloaded) - - invisible(inst) + root <- find_package_root(path = root) + pkg_install( + paste0("deps::", root), + lib = lib, + upgrade = upgrade, + ask = ask, + dependencies = dependencies + ) } #' Install all (development) dependencies of a package tree @@ -114,27 +87,14 @@ local_install_deps <- function(root = ".", lib = .libPaths()[1], local_install_dev_deps <- function(root = ".", lib = .libPaths()[1], upgrade = TRUE, ask = interactive(), dependencies = TRUE) { - start <- Sys.time() - - status <- remote( - function(...) { - get("local_install_dev_deps_make_plan", asNamespace("pak"))(...) 
- }, - list(root = root, lib = lib, upgrade = upgrade, start = start, - dependencies = dependencies, loaded = loaded_packages(lib))) - - unloaded <- handle_status(status, lib, ask)$unloaded - - inst <- remote( - function(...) { - get("local_install_dev_deps_do_plan", asNamespace("pak"))(...) - }, - list() + root <- find_package_root(path = root) + pkg_install( + paste0("deps::", root), + lib = lib, + upgrade = upgrade, + ask = ask, + dependencies = dependencies ) - - if (length(unloaded) > 0) offer_restart(unloaded) - - invisible(inst) } #' Dependencies of a package tree @@ -151,6 +111,7 @@ local_install_dev_deps <- function(root = ".", lib = .libPaths()[1], #' @export local_deps <- function(root = ".", upgrade = TRUE, dependencies = NA) { + root <- find_package_root(path = root) ref <- paste0("local::", root) pkg_deps(ref, upgrade = upgrade, dependencies = dependencies) } @@ -159,6 +120,7 @@ local_deps <- function(root = ".", upgrade = TRUE, dependencies = NA) { #' @rdname local_deps local_deps_tree <- function(root = ".", upgrade = TRUE, dependencies = NA) { + root <- find_package_root(path = root) ref <- paste0("local::", root) pkg_deps_tree(ref, upgrade = upgrade, dependencies = dependencies) } @@ -194,6 +156,7 @@ local_dev_deps_tree <- function(root = ".", upgrade = TRUE, dependencies = TRUE) local_deps_explain <- function(deps, root = ".", upgrade = TRUE, dependencies = NA) { + root <- find_package_root(path = root) ref <- paste0("local::", root) pkg_deps_explain(ref, deps, upgrade, dependencies) } @@ -203,31 +166,7 @@ local_deps_explain <- function(deps, root = ".", upgrade = TRUE, local_dev_deps_explain <- function(deps, root = ".", upgrade = TRUE, dependencies = TRUE) { + root <- find_package_root(path = root) ref <- paste0("local::", root) pkg_deps_explain(ref, deps, upgrade, dependencies) } - -## ---------------------------------------------------------------------- - -## Almost the same as a "regular" install, but need to set dependencies - 
-local_install_dev_deps_make_plan <- function(root, lib, upgrade, start, - dependencies, loaded) { - root <- find_package_root(path = root) - prop <- pkgdepends::new_pkg_installation_proposal( - paste0("deps::", root), - config = list(library = lib, dependencies = dependencies) - ) - - prop$set_solve_policy(if (upgrade) "upgrade" else "lazy") - prop$solve() - prop$stop_for_solution_error() - pkg_data$tmp <- list(proposal = prop, start = start) - print_install_details(prop, lib, loaded) -} - -## This is the same as a regular install - -local_install_dev_deps_do_plan <- function() { - pkg_install_do_plan(proposal = NULL) -} diff --git a/R/lockfile.R b/R/lockfile.R index 8e7d159db..ab8286f48 100644 --- a/R/lockfile.R +++ b/R/lockfile.R @@ -1,4 +1,3 @@ - #' Create a lock file #' #' The lock file can be used later, possibly in a new R session, to carry @@ -23,31 +22,14 @@ lockfile_create <- function(pkg = "deps::.", lockfile = "pkg.lock", lib = NULL, upgrade = FALSE, dependencies = NA) { - ret <- remote( - function(...) { - get("lockfile_create_internal", asNamespace("pak"))(...) 
- }, - list(pkg = pkg, lockfile = lockfile, lib = lib, upgrade = upgrade, - dependencies = dependencies) - ) - - invisible(ret) -} - -lockfile_create_internal <- function(pkg, lockfile, lib, upgrade, - dependencies) { + load_all_private() if (is.null(lib)) { lib <- tempfile() mkdirp(lib) on.exit(unlink(lib, recursive = TRUE), add = TRUE) } - cli::cli_progress_step( - "Creating lockfile {.path {lockfile}}", - msg_done = "Created lockfile {.path {lockfile}}" - ) - - prop <- pkgdepends::new_pkg_installation_proposal( + prop <- pkg_data[["ns"]][["pkgdepends"]][["new_pkg_installation_proposal"]]( pkg, config = list(library = lib, dependencies = dependencies) ) @@ -73,27 +55,19 @@ lockfile_create_internal <- function(pkg, lockfile, lib, upgrade, lockfile_install <- function(lockfile = "pkg.lock", lib = .libPaths()[1], update = TRUE) { - + load_extra("pillar") + load_all_private() start <- Sys.time() mkdirp(lib) - ret <- remote( - function(...) { - get("lockfile_install_internal", asNamespace("pak"))(...) 
- }, - list(lockfile = lockfile, lib = lib, update = update, start = start, - loaded = loaded_packages(lib)) - ) - - invisible(ret) -} - -lockfile_install_internal <- function(lockfile, lib, update, loaded, start) { - cli::cli_alert_info("Installing lockfile {.path {lockfile}}") config <- list(library = lib) - plan <- pkgdepends::new_pkg_installation_plan(lockfile, config = config) + plan <- pkg_data[["ns"]][["pkgdepends"]][["new_pkg_installation_plan"]]( + lockfile, + config = config + ) if (update) plan$update() + loaded <- loaded_packages(lib) print_install_details(plan, lib, loaded) plan$download() @@ -108,7 +82,5 @@ lockfile_install_internal <- function(lockfile, lib, update, loaded, start) { ## One line summary of the install print_install_summary(inst) - cli::cli_alert_success("Installed lockfile {.path {lockfile}}") - - inst + invisible(inst) } diff --git a/R/name-check.R b/R/name-check.R index 3cf6e5a44..b77d7501e 100644 --- a/R/name-check.R +++ b/R/name-check.R @@ -1,4 +1,3 @@ - #' Check if an R package name is available #' #' @inherit pkgdepends::pkg_name_check description details return @@ -12,11 +11,29 @@ #' ``` pkg_name_check <- function(name, dictionaries = NULL) { - remote( - function(...) { - ret <- pkgdepends::pkg_name_check(...) - asNamespace("pak")$pak_preformat(ret) - }, - list(name, dictionaries) - ) + ret <- embedded_call("pkgdepends", "pkg_name_check")(name, dictionaries) + class(ret) <- c("pak_pkg_name_check", class(ret)) + ret +} + +#' @export + +format.pak_pkg_name_check <- function(x, limit = 6, ...) { + load_all_private() + # lots of S3 in pkgdepends for this, we need to do that manually here + pd <- pkg_data[["ns"]][["pkgdepends"]] + for (n in c("basics", "wikipedia", "wiktionary", "sentiment", "urban")) { + if (!is.null(x[[n]])) { + fn <- paste0("format.pkg_name_check_", n) + x[[n]] <- pd[[fn]](x[[n]], limit = limit) + } + } + unlist(x) +} + +#' @export + +print.pak_pkg_name_check <- function(x, ...) 
{ + writeLines(format(x, ...)) + invisible(x) } diff --git a/R/onload.R b/R/onload.R index 9b82bae0b..39db65518 100644 --- a/R/onload.R +++ b/R/onload.R @@ -1,5 +1,6 @@ pkg_data <- new.env(parent = emptyenv()) +# nocov start .onLoad <- function(libname, pkgname) { if (Sys.getenv("DEVTOOLS_LOAD") == "pak") { create_dev_lib() @@ -7,33 +8,12 @@ pkg_data <- new.env(parent = emptyenv()) if (Sys.getenv("_R_CHECK_PACKAGE_NAME_", "") == "") { check_platform(libname, pkgname) } - pkg_data$ns <- list() - - worker <- Sys.getenv("R_PKG_PKG_WORKER", "") - if (worker == "") { - ## In the main process - fix_macos_path_in_rstudio() - } else if (worker == "true") { - ## In the worker process - Sys.setenv("R_PKG_PKG_WORKER" = "false") - # We don't use the env vars that cli supports, on purpose, because - # they are inherited in the subprocess of the subprocess - options( - cli.num_colors = as.numeric(Sys.getenv("R_PKG_NUM_COLORS", "1")), - rlib_interactive = (Sys.getenv("R_PKG_INTERACTIVE") == "TRUE"), - cli.dynamic = (Sys.getenv("R_PKG_DYNAMIC_TTY") == "TRUE") - ) - ca_path <- system.file(package = "pak", "curl-ca-bundle.crt") - cainfo <- getOption("async_http_cainfo") - if (is.null(cainfo) && ca_path != "") options(async_http_cainfo = ca_path) - use_private_lib() - } else { - ## In a subprocess of a worker - use_private_lib() - } + + fix_macos_path_in_rstudio() invisible() } +# nocov end check_platform <- function(libname = dirname(find.package("pak")), pkgname = "pak", data = pak_sitrep_data) { diff --git a/R/package.R b/R/package.R index a13a3bcf5..ad054b602 100644 --- a/R/package.R +++ b/R/package.R @@ -1,4 +1,3 @@ - #' Install packages #' #' Install one or more packages and their dependencies into a single @@ -55,7 +54,7 @@ #' Upgrade dplyr and all its dependencies: #' ```{asciicast pkg-install-upgrade} #' pkg_install("dplyr", upgrade = TRUE) -#' ```` +#' ``` #' #' Install the development version of dplyr: #' ```{asciicast pkg-install-gh} @@ -70,47 +69,34 @@ pkg_install <- 
function(pkg, lib = .libPaths()[[1L]], upgrade = FALSE, ask = interactive(), dependencies = NA) { + load_extra("pillar") + load_all_private() start <- Sys.time() + loaded <- loaded_packages(lib) - status <- remote( - function(...) get("pkg_install_make_plan", asNamespace("pak"))(...), - list(pkg = pkg, lib = lib, upgrade = upgrade, ask = ask, - start = start, dependencies = dependencies, - loaded = loaded_packages(lib))) - - unloaded <- handle_status(status, lib, ask)$unloaded - - inst <- remote( - function(...) get("pkg_install_do_plan", asNamespace("pak"))(...), - list(proposal = NULL)) - - if (length(unloaded) > 0) offer_restart(unloaded) - - invisible(inst) -} - -pkg_install_make_plan <- function(pkg, lib, upgrade, ask, start, - dependencies, loaded) { - prop <- pkgdepends::new_pkg_installation_proposal( + prop <- pkg_data[["ns"]][["pkgdepends"]][["new_pkg_installation_proposal"]]( pkg, config = list(library = lib, dependencies = dependencies) ) - - ## Solve the dependency graph prop$set_solve_policy(if (upgrade) "upgrade" else "lazy") prop$solve() prop$stop_for_solution_error() - pkg_data$tmp <- list(proposal = prop, start = start) - print_install_details(prop, lib, loaded) -} -pkg_install_do_plan <- function(proposal) { + status <- print_install_details(prop, lib, loaded) + unloaded <- handle_status(status, lib, ask)$unloaded + + inst <- pkg_install_do_plan(prop, start) - proposal <- proposal %||% pkg_data$tmp$proposal - start <- pkg_data$tmp$start - pkg_data$tmp <- NULL + # This refers to the desc namespace, and we don't really need it + inst$extra <- NULL + if (length(unloaded) > 0) offer_restart(unloaded) + + invisible(inst) +} + +pkg_install_do_plan <- function(proposal, start) { # Actually download packages as needed proposal$download() proposal$stop_for_download_error() @@ -149,17 +135,9 @@ pkg_install_do_plan <- function(proposal) { #' ``` pkg_status <- function(pkg, lib = .libPaths()) { - stopifnot(length(pkg == 1) && is.character(pkg)) - 
load_extra("pillar") - remote( - function(...) asNamespace("pak")$pkg_status_internal(...), - list(pkg = pkg, lib = lib)) -} - -pkg_status_internal <- function(pkg, lib = .libPaths()) { - st <- lapply(lib, pkgdepends::lib_status, packages = pkg) - do.call("rbind_expand", st) + load_all_private() + pkg_data[["ns"]][["pkgdepends"]][["lib_status"]](lib, pkg) } #' Remove installed packages @@ -171,17 +149,8 @@ pkg_status_internal <- function(pkg, lib = .libPaths()) { #' @family package functions pkg_remove <- function(pkg, lib = .libPaths()[[1L]]) { - remote( - function(...) { - get("pkg_remove_internal", asNamespace("pak"))(...) - }, - list(pkg = pkg, lib = lib) - ) - invisible() -} - -pkg_remove_internal <- function(pkg, lib) { - pr <- pkgdepends::parse_pkg_ref(pkg) + load_all_private() + pr <- pkg_data[["ns"]][["pkgdepends"]][["parse_pkg_ref"]](pkg) suppressMessages(utils::remove.packages(pr$package, lib)) invisible(pr) } @@ -208,34 +177,33 @@ pkg_remove_internal <- function(pkg, lib) { #' ``` pkg_deps <- function(pkg, upgrade = TRUE, dependencies = NA) { - stopifnot(length(pkg) == 1 && is.character(pkg)) - load_extra("pillar") - remote( - function(...) { - get("pkg_deps_internal", asNamespace("pak"))(...) 
- }, - list(pkg = pkg, upgrade = upgrade, dependencies = dependencies) - ) + pkg_deps_internal(pkg, upgrade, dependencies) } -pkg_deps_internal <- function(pkg, upgrade, dependencies = NA) { - deps <- pkg_deps_internal2(pkg, upgrade, dependencies) - data <- deps$get_solution()$data - # This refers to the 'desc' package namespace, and we don't really need it - data$extra <- NULL - data -} +pkg_deps_internal <- function(pkg, upgrade, dependencies) { + load_extra("pillar") + load_all_private() -pkg_deps_internal2 <- function(pkg, upgrade, dependencies) { dir.create(lib <- tempfile()) on.exit(rimraf(lib), add = TRUE) + config <- list(library = lib) if (!is.null(dependencies)) config$dependencies <- dependencies - deps <- pkgdepends::new_pkg_deps(pkg, config = config) + deps <- pkg_data[["ns"]][["pkgdepends"]][["new_pkg_deps"]]( + pkg, + config = config + ) if (upgrade) deps$set_solve_policy("upgrade") deps$solve() deps$stop_for_solution_error() - deps + data <- deps$get_solution()$data + # This refers to the 'desc' package namespace, and we don't need it + data$extra <- NULL + + tree <- deps$draw() + attr(data, "pkg_deps_tree") <- tree + + data } #' Draw the dependency tree of a package @@ -257,24 +225,29 @@ pkg_deps_internal2 <- function(pkg, upgrade, dependencies) { #' ``` pkg_deps_tree <- function(pkg, upgrade = TRUE, dependencies = NA) { - stopifnot(length(pkg == 1) && is.character(pkg)) - ret <- remote( - function(...) { - get("pkg_deps_tree_internal", asNamespace("pak"))(...) 
- }, - list(pkg = pkg, upgrade = upgrade, dependencies = dependencies) - ) - cat(ret$tree, sep = "\n") - invisible(ret$data) + data <- pkg_deps_internal(pkg, upgrade, dependencies) + class(data) <- c("pak_pkg_deps_tree", class(data)) + data } -pkg_deps_tree_internal <- function(pkg, upgrade, dependencies = NA) { - deps <- pkg_deps_internal2(pkg, upgrade, dependencies) - tree <- deps$draw() - data <- deps$get_solution()$data - # This refers to the 'desc' package namespace, and we don't really need it - data$extra <- NULL - list(tree = tree, data = data) +#' @export + +format.pak_pkg_deps_tree <- function(x, ...) { + attr(x, "pkg_deps_tree") +} + +#' @export + +print.pak_pkg_deps_tree <- function(x, ...) { + writeLines(format(x, ...)) + invisible(x) +} + +#' @export + +`[.pak_pkg_deps_tree` <- function(x, i, j, ..., drop = TRUE) { + class(x) <- setdiff(class(x), "pak_pkg_deps_tree") + NextMethod(x) } #' @rdname lib_status @@ -323,34 +296,19 @@ pkg_list <- function(lib = .libPaths()[1]) { pkg_download <- function(pkg, dest_dir = ".", dependencies = FALSE, platforms = NULL, r_versions = NULL) { - args <- list( - pkg = pkg, - dest_dir = dest_dir, - dependencies = dependencies, - platforms = platforms, - r_versions = r_versions - ) - - dl <- remote( - function(...) { - get("pkg_download_internal", asNamespace("pak"))(...) 
- }, - args - ) - load_extra("pillar") - invisible(dl) -} + load_all_private() -pkg_download_internal <- function(pkg, dest_dir = ".", dependencies = FALSE, - platforms = NULL, r_versions = NULL) { mkdirp(dest_dir) config <- list(cache_dir = dest_dir, dependencies = dependencies) if (!is.null(platforms)) config$platforms <- platforms if (!is.null(r_versions)) config$`r-versions` <- r_versions - dl <- pkgdepends::new_pkg_download_proposal(pkg, config = config) + dl <- pkg_data[["ns"]][["pkgdepends"]][["new_pkg_download_proposal"]]( + pkg, + config = config + ) dl$resolve() dl$download() dl$stop_for_download_error() - dl$get_downloads() + invisible(dl$get_downloads()) } diff --git a/R/pak-cleanup.R b/R/pak-cleanup.R index 589eb3b82..cac493755 100644 --- a/R/pak-cleanup.R +++ b/R/pak-cleanup.R @@ -1,4 +1,3 @@ - #' Clean up pak caches #' #' @param package_cache Whether to clean up the cache of package files. @@ -13,83 +12,57 @@ pak_cleanup <- function(package_cache = TRUE, metadata_cache = TRUE, pak_lib = TRUE, force = FALSE) { - if (!force && !interactive()) { stop("Refused to clean up, please specify `force = TRUE`") } - if (package_cache) package_cache <- pak_cleanup_package_cache(force) - if (metadata_cache) metadata_cache <- pak_cleanup_metadata_cache(force) - all <- package_cache && metadata_cache + if (package_cache) pak_cleanup_package_cache(force) + if (metadata_cache) pak_cleanup_metadata_cache(force) invisible() } pak_cleanup_package_cache <- function(force) { + load_all_private() + sum <- pkg_data[["ns"]][["pkgcache"]][["pkg_cache_summary"]]() if (!force) { - remote( - function(...) { - asNamespace("pak")$pak_cleanup_package_cache_print(...) - }) + size <- format_bytes$pretty_bytes(sum$size) + pkg_data[["ns"]][["cli"]][["cli_alert"]]( + "{.emph Package cache} is in {.path {sum$cachepath}} ({size})" + ) force <- get_confirmation2("? Do you want to remove it? (Y/n) ") } if (force) { - remote( - function(...) 
{ - asNamespace("pak")$pak_cleanup_package_cache2() - }) + unlink(sum$cachepath, recursive = TRUE) + root <- sum$cachepath + if (length(dir(root)) == 0) unlink(root, recursive = TRUE) + pkg_data[["ns"]][["cli"]][["cli_alert_success"]]( + "Cleaned up package cache" + ) } - force -} - -pak_cleanup_package_cache_print <- function() { - sum <- pkgcache::pkg_cache_summary() - size <- format_bytes$pretty_bytes(sum$size) - cli::cli_alert( - "{.emph Package cache} is in {.path {sum$cachepath}} ({size})") -} - -pak_cleanup_package_cache2 <- function() { - sum <- pkgcache::pkg_cache_summary() - unlink(sum$cachepath, recursive = TRUE) - root <- dirname(sum$cachepath) - if (length(dir(root)) == 0) unlink(root, recursive = TRUE) - cli::cli_alert_success("Cleaned up package cache") - invisible() + invisible(force) } pak_cleanup_metadata_cache <- function(force) { + load_all_private() + sum <- pkg_data[["ns"]][["pkgcache"]][["meta_cache_summary"]]() if (!force) { - remote( - function(...) { - asNamespace("pak")$pak_cleanup_metadata_cache_print(...) - }) + size <- format_bytes$pretty_bytes(sum$size) + pkg_data[["ns"]][["cli"]][["cli_alert"]]( + "{.emph Metadata cache} is in {.path {sum$cachepath}} ({size})" + ) force <- get_confirmation2("? Do you want to remove it? (Y/n) ") } if (force) { - remote( - function(...) 
{ - asNamespace("pak")$pak_cleanup_metadata_cache2() - }) + unlink(sum$cachepath, recursive = TRUE) + unlink(sum$lockfile, recursive = TRUE) + root <- dirname(sum$cachepath) + if (length(dir(root)) == 0) unlink(root, recursive = TRUE) + pkg_data[["ns"]][["cli"]][["cli_alert_success"]]( + "Cleaned up metadata cache" + ) } - force -} - -pak_cleanup_metadata_cache_print <- function() { - sum <- pkgcache::meta_cache_summary() - size <- format_bytes$pretty_bytes(sum$size) - cli::cli_alert( - "{.emph Metadata cache} is in {.path {sum$cachepath}} ({size})") -} - -pak_cleanup_metadata_cache2 <- function() { - sum <- pkgcache::meta_cache_summary() - unlink(sum$cachepath, recursive = TRUE) - unlink(sum$lockfile, recursive = TRUE) - root <- dirname(sum$cachepath) - if (length(dir(root)) == 0) unlink(root, recursive = TRUE) - cli::cli_alert_success("Cleaned up metadata cache") - invisible() + invisible(force) } diff --git a/R/pak-sitrep.R b/R/pak-sitrep.R index 841d268cd..0c1a4cc49 100644 --- a/R/pak-sitrep.R +++ b/R/pak-sitrep.R @@ -4,8 +4,7 @@ #' * pak version, #' * platform the package was built on, and the current platform, #' * the current library path, -#' * versions of dependencies, -#' * whether dependencies can be loaded. +#' * versions of dependencies. 
#' #' @export #' @family pak housekeeping @@ -59,73 +58,16 @@ pak_sitrep <- function() { cat(paste0("- ", .libPaths()), sep = "\n") lib <- private_lib_dir() - if (!is.null(asNamespace("pak")[[".__DEVTOOLS__"]])) { + if (is_load_all()) { cat0("* Using `load_all()` from ", find.package("pak"), ".\n") cat0("* Private library location:\n- ", lib, "\n") } else { cat0("* pak is installed at ", find.package("pak"), ".\n") } - dver <- pak_library_versions(lib) - cat0("* Dependency versions:\n") - cat0(paste0( - "- ", - format(dver$package), - " ", - format(dver$version), - ifelse(is.na(dver$sha), "", paste0(" (", substr(dver$sha, 1, 7), ")")), - "\n" - )) - - ret <- tryCatch( - { - new_remote_session() - # TODO: check that all packages can be loaded in subprocess - TRUE - }, - error = function(e) e - ) - - if (isTRUE(ret)) { - cat0("* Dependencies can be loaded\n") - } else { - cat("! Could not load dependencies, pak installation is broken. :(\n") - cat0("Error: ", conditionMessage(ret)) - } - invisible() } -pak_library_versions <- function(lib) { - pkgs <- dir(lib) - vers <- lapply(pkgs, function(pkg) get_ver(file.path(lib, pkg))) - data.frame( - stringsAsFactors = FALSE, - package = pkgs, - version = vcapply(vers, "[", 1), - sha = vcapply(vers, "[", 2) - ) -} - -# this is slightly different than the one in install-embedded.R - -get_ver <- function(path) { - if (!file.exists(path)) { - return(NA_character_) - } - desc <- file.path(path, "DESCRIPTION") - if (!file.exists(desc)) { - return(NA_character_) - } - dsc <- read.dcf(desc) - ver <- package_version(dsc[, "Version"]) - devver <- ver[1, 4] - if (!is.na(devver) && devver >= 90000) { - if ("RemoteSha" %in% colnames(dsc)) { - sha <- dsc[, "RemoteSha"] - return(c(ver, sha)) - } - } - - as.character(ver) +is_load_all <- function() { + !is.null(asNamespace("pak")[[".__DEVTOOLS__"]]) } diff --git a/R/pak-update.R b/R/pak-update.R index 36c043afc..f0cec466b 100644 --- a/R/pak-update.R +++ b/R/pak-update.R @@ -1,4 +1,3 @@ - 
detect_platform <- function() { me <- list( os = R.Version()$os, @@ -6,14 +5,16 @@ detect_platform <- function() { rver = get_minor_r_version(getRversion()) ) - if (me$os %in% c("linux-dietlibc", "linux-gnu", "linux-musl", - "linux-uclibc", "linux-unknown")) { + if (me$os %in% c( + "linux-dietlibc", "linux-gnu", "linux-musl", + "linux-uclibc", "linux-unknown" + )) { me$os <- "linux" } me } -pak_stream <- function(stream) { +pak_stream <- function(stream = "auto") { if (stream == "auto") { version <- unclass(package_version(utils::packageVersion("pak")))[[1]] stream <- if (length(version) >= 4 && version[4] == 9999) { @@ -65,16 +66,13 @@ pak_repo_metadata <- function(repo = NULL, stream = "auto") { #' @export pak_update <- function( - force = FALSE, - stream = c("auto", "stable", "rc", "devel")) { - + force = FALSE, + stream = c("auto", "stable", "rc", "devel")) { stopifnot(is_flag(force)) stream <- match.arg(stream) stream <- pak_stream(stream) - repo <- pak_repo() - - if (!is.null(.getNamespace("pak")$.__DEVTOOLS__)) { + if (is_load_all()) { lib <- .libPaths()[1] warning( "`load_all()`-d pak package, updating in default library at\n ", @@ -91,16 +89,20 @@ pak_update <- function( me <- detect_platform() cand <- which( meta$OS == me$os & - meta$Arch == me$arch & - meta$RVersion == me$rver + meta$Arch == me$arch & + meta$RVersion == me$rver ) if (length(cand) == 0) { pak_update_unsupported_platform(stream, me, meta) } else if (length(cand) > 1) { - warning("Multiple pak candidates are available for this platform, ", - "this should not happen. Using the first one.") + # nocov start + warning( + "Multiple pak candidates are available for this platform, ", + "this should not happen. Using the first one." 
+ ) cand <- cand[1] + # nocov end } check_mac_cran_r(me, meta) @@ -117,9 +119,6 @@ pak_update <- function( date <- get_built_date(meta$Built[cand]) message("\nUpdating to version ", meta$Version[cand], " (", date, ")\n") - # Otherwise the subprocess might be locking some DLLs - try(pkg_data$remote$kill(), silent = TRUE) - # Windows cannot install binaries with arbitrary names, apparently. ext <- tools::file_ext(tgt) if (.Platform$OS.type == "windows" && ext == "zip") { @@ -137,22 +136,25 @@ pak_update <- function( message("\nReloading pak.") # Try to use it to see if it was successful - suppressWarnings(tryCatch({ - eapply(asNamespace("pak"), base::force, all.names = TRUE) - unloadNamespace("pak") - # This works around the help lazy load DB errors - intern <- base::.Internal - lazyLoadDBflush <- function(...) NULL - tryCatch( - intern(lazyLoadDBflush(file.path(lib, "pak", "help", "pak.rdb"))), - error = function(e) NULL - ) - loadNamespace("pak") - if (attached) library(pak) - suppressWarnings(tools::Rd_db(package = "pak")) - }, error = function(err) { - message("\nFailed to reload pak. Please restart your R session.") - })) + suppressWarnings(tryCatch( + { + eapply(asNamespace("pak"), base::force, all.names = TRUE) + unloadNamespace("pak") + # This works around the help lazy load DB errors + intern <- base::.Internal + lazyLoadDBflush <- function(...) NULL + tryCatch( + intern(lazyLoadDBflush(file.path(lib, "pak", "help", "pak.rdb"))), + error = function(e) NULL + ) + loadNamespace("pak") + if (attached) library(pak) + suppressWarnings(tools::Rd_db(package = "pak")) + }, + error = function(err) { + message("\nFailed to reload pak. Please restart your R session.") + } + )) invisible() } @@ -178,8 +180,10 @@ pak_update_unsupported_platform <- function(stream, me, meta) { } check_mac_cran_r <- function(me, meta) { - if (! 
grepl("^darwin", me$os)) return() - if (.Platform$pkgType == "source") { + if (!grepl("^darwin", me$os)) { + return() + } + if (platform_pkgtype() == "source") { stop( "pak only has binaries for the CRAN build of R, and this ", "seems to be a brew or another non-CRAN build." @@ -188,6 +192,10 @@ check_mac_cran_r <- function(me, meta) { } } +platform_pkgtype <- function() { + .Platform$pkgType +} + should_update_to <- function(new) { # check if the right platform was installed current <- R.Version()$platform @@ -198,17 +206,23 @@ should_update_to <- function(new) { # otherwise use version number first dsc <- utils::packageDescription("pak") - if (package_version(dsc$Version) < new$Version) return(TRUE) + if (package_version(dsc$Version) < new$Version) { + return(TRUE) + } # or the build date blt_cur <- get_built_date(dsc$Built) blt_new <- get_built_date(new$Built) - if (is.na(blt_cur) || blt_cur < blt_new) return(TRUE) + if (is.na(blt_cur) || blt_cur < blt_new) { + return(TRUE) + } FALSE } get_built_date <- function(x) { - if (!is_string(x)) return(NA_character_) + if (!is_string(x)) { + return(NA_character_) + } # We can compare these dates as strings, so no need to parse strsplit(x, "[ ]*;[ ]*")[[1]][3] } diff --git a/R/ppm.R b/R/ppm.R index 3bc6679c1..377dc2d19 100644 --- a/R/ppm.R +++ b/R/ppm.R @@ -1,4 +1,3 @@ - #' Does PPM build binary packages for the current platform? #' #' @return `TRUE` or `FALSE`. @@ -10,16 +9,9 @@ #' @examplesIf FALSE #' system_r_platform() #' ppm_has_binaries() - ppm_has_binaries <- function() { - remote( - function(...) 
asNamespace("pak")$ppm_has_binaries_internal(...), - list() - ) -} - -ppm_has_binaries_internal <- function() { - pkgcache::ppm_has_binaries() + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["ppm_has_binaries"]]() } #' List all platforms supported by Posit Package Manager (PPM) @@ -39,17 +31,10 @@ ppm_has_binaries_internal <- function() { #' @export #' @examplesIf FALSE #' ppm_platforms() - ppm_platforms <- function() { load_extra("pillar") - remote( - function(...) asNamespace("pak")$ppm_platforms_internal(...), - list() - ) -} - -ppm_platforms_internal <- function() { - pkgcache::ppm_platforms() + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["ppm_platforms"]]() } #' List all R versions supported by Posit Package Manager (PPM) @@ -64,17 +49,10 @@ ppm_platforms_internal <- function() { #' @export #' @examplesIf FALSE #' ppm_r_versions() - ppm_r_versions <- function() { load_extra("pillar") - remote( - function(...) asNamespace("pak")$ppm_r_versions_internal(...), - list() - ) -} - -ppm_r_versions_internal <- function() { - pkgcache::ppm_r_versions() + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["ppm_r_versions"]]() } #' Returns the current Posit Package Manager (PPM) repository URL @@ -113,16 +91,9 @@ ppm_r_versions_internal <- function() { #' @family PPM functions #' @examplesIf FALSE #' ppm_repo_url() - ppm_repo_url <- function() { - remote( - function(...) asNamespace("pak")$ppm_repo_url_internal(...), - list() - ) -} - -ppm_repo_url_internal <- function() { - pkgcache::ppm_repo_url() + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["ppm_repo_url"]]() } #' List all available Posit Package Manager (PPM) snapshots @@ -160,14 +131,8 @@ ppm_repo_url_internal <- function() { #' @export #' @examplesIf FALSE #' ppm_snapshots() - ppm_snapshots <- function() { - remote( - function(...) 
asNamespace("pak")$ppm_snapshots_internal(...), - list() - ) -} - -ppm_snapshots_internal <- function() { - pkgcache::ppm_snapshots() + load_extra("pillar") + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["ppm_snapshots"]]() } diff --git a/R/print.R b/R/print.R index c69f69745..f6addbc72 100644 --- a/R/print.R +++ b/R/print.R @@ -1,4 +1,3 @@ - ## Cases: ## - was already installed, current ## - was already installed, not current @@ -42,17 +41,17 @@ nice_df_print <- function(x, ...) { print_install_summary <- function(x) { direct <- sum(x$direct & x$type != "deps") - deps <- sum(! x$direct) + deps <- sum(!x$direct) newly <- sum(x$lib_status == "new" & x$type != "deps") - upd <- sum(x$lib_status == "update" & ! x$type %in% c("installed", "deps")) - curr <- sum(x$lib_status == "current" & x$type != "deps") + upd <- sum(x$lib_status == "update" & !x$type %in% c("installed", "deps")) + curr <- sum(x$lib_status == "current" & x$type != "deps") # Not used currently. The packages we could have updated but did not noupd <- sum(x$lib_status == "no-update") downloaded <- sum(x$download_status == "Got") cached <- sum(x$download_status == "Had" & - ! 
x$type %in% c("installed", "deps")) + !x$type %in% c("installed", "deps")) dlbytes <- sum(x$file_size[x$download_status == "Got"]) total_time <- format_time$pretty_dt(attr(x, "total_time")) %||% "" @@ -78,7 +77,8 @@ print_install_summary <- function(x) { } ts <- if (nzchar(total_time)) " {.timestamp {total_time}}" else "" - cli::cli_alert_success(c( + load_all_private() + pkg_data[["ns"]][["cli"]]$cli_alert_success(c( pkgsum, ": ", updsum, diff --git a/R/private-lib.R b/R/private-lib.R index 657cf0d48..a7e681779 100644 --- a/R/private-lib.R +++ b/R/private-lib.R @@ -1,4 +1,3 @@ - #' The directory of the private library #' #' This is usually within the package, but in dev mode in can be @@ -7,13 +6,17 @@ #' #' @noRd -private_lib_dir <- function() { +private_lib_dir <- function() { mydir <- getNamespaceInfo(asNamespace(.packageName), "path") embedded <- file.path(mydir, "library") - if (file.exists(embedded)) return(c(embedded = embedded)) + if (file.exists(embedded)) { + return(c(embedded = embedded)) + } ppl <- Sys.getenv("PAK_PRIVATE_LIBRARY", NA_character_) - if (!is.na(ppl)) return(ppl) + if (!is.na(ppl)) { + return(ppl) + } file.path( user_cache_dir("pak"), @@ -22,16 +25,3 @@ private_lib_dir <- function() { R.Version()$arch ) } - -#' Attach pak's internal library to the search path -#' -#' This should be only called in a pak subprocess, from `.onLoad()`. -#' -#' @noRd - -use_private_lib <- function() { - lib <- private_lib_dir() - old <- .libPaths() - new <- c(lib, old[old != lib]) - .libPaths(new) -} diff --git a/R/rematch2.R b/R/rematch2.R new file mode 100644 index 000000000..c4bc9cf2b --- /dev/null +++ b/R/rematch2.R @@ -0,0 +1,33 @@ + +re_match <- function(text, pattern, perl = TRUE, ...) { + + stopifnot(is_string(pattern)) + text <- as.character(text) + + match <- regexpr(pattern, text, perl = perl, ...) 
+ + start <- as.vector(match) + length <- attr(match, "match.length") + end <- start + length - 1L + + matchstr <- substring(text, start, end) + matchstr[ start == -1 ] <- NA_character_ + + res <- data_frame(.text = text, .match = matchstr) + + if (!is.null(attr(match, "capture.start"))) { + + gstart <- attr(match, "capture.start") + glength <- attr(match, "capture.length") + gend <- gstart + glength - 1L + + groupstr <- substring(text, gstart, gend) + groupstr[ gstart == -1 ] <- NA_character_ + dim(groupstr) <- dim(gstart) + + res <- cbind(groupstr, res, stringsAsFactors = FALSE) + } + names(res) <- c(attr(match, "capture.names"), ".text", ".match") + class(res) <- c("tbl", class(res)) + res +} diff --git a/R/repo.R b/R/repo.R index 59334a844..476619277 100644 --- a/R/repo.R +++ b/R/repo.R @@ -1,4 +1,3 @@ - #' Show the status of CRAN-like repositories #' #' It checks the status of the configured or supplied repositories. @@ -57,59 +56,78 @@ repo_status <- function(platforms = NULL, r_version = getRversion(), bioc = TRUE, cran_mirror = NULL) { load_extra("pillar") - remote( - function(...) asNamespace("pak")$repo_status_internal(...), - list(platforms, r_version, bioc, cran_mirror) - ) -} - -repo_status_internal <- function(platforms = NULL, r_version = getRversion(), - bioc = TRUE, cran_mirror = NULL) { + load_all_private() - platforms <- platforms %||% pkgcache::default_platforms() - cran_mirror <- cran_mirror %||% pkgcache::default_cran_mirror() + # FIXME: why do we need this explicitly? 
+ platforms <- platforms %||% + pkg_data[["ns"]][["pkgcache"]][["default_platforms"]]() + cran_mirror <- cran_mirror %||% + pkg_data[["ns"]][["pkgcache"]][["default_cran_mirror"]]() - tab <- pkgcache::repo_status( - platforms = platforms, + tab <- pkg_data[["ns"]][["pkgcache"]][["repo_status"]]( + platforms = platforms, r_version = r_version, bioc = bioc, cran_mirror = cran_mirror ) - class(tab) <- setdiff(class(tab), "pkgcache_repo_status") + # For the summary() method + class(tab) <- c("pak_pkgcache_repo_status", class(tab)) tab } #' @export -#' @rdname repo_status -repo_ping <- function(platforms = NULL, r_version = getRversion(), - bioc = TRUE, cran_mirror = NULL) { - ret <- remote( - function(...) asNamespace("pak")$repo_ping_internal(...), - list(platforms, r_version, bioc, cran_mirror) +summary.pak_pkgcache_repo_status <- function(object, ...) { + load_all_private() + ret <- withVisible( + pkg_data[["ns"]][["pkgcache"]][["summary.pkgcache_repo_status"]]( + object, + ... + ) ) - cat(ret$fmt, sep = "\n") - invisible(ret$data) + # For the print and `[` methods + class(ret$value) <- c("pak_pkgcache_repo_status_summary", class(ret$value)) + if (ret$visible) ret$value else invisible(ret$value) } -repo_ping_internal <- function(platforms = NULL, r_version = getRversion(), - bioc = TRUE, cran_mirror = NULL) { +#' @export - platforms <- platforms %||% pkgcache::default_platforms() - cran_mirror <- cran_mirror %||% pkgcache::default_cran_mirror() +print.pak_pkgcache_repo_status_summary <- function(x, ...) { + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["print.pkgcache_repo_status_summary"]]( + x, + ... + ) +} - tab <- pkgcache::repo_status( +#' @export + +`[.pak_pkgcache_repo_status_summary` <- function(x, i, j, drop = FALSE) { + load_all_private() + # FIXME: We can't call NextMethod from the hand-loaded code? + # So we do this manually here. 
+ class(x) <- setdiff( + class(x), + c("pak_pkgcache_repo_status_summary", "pkgcache_repo_status_summary") + ) + NextMethod("[") +} + +#' @export +#' @rdname repo_status + +repo_ping <- function(platforms = NULL, r_version = getRversion(), + bioc = TRUE, cran_mirror = NULL) { + rst <- repo_status( platforms = platforms, r_version = r_version, bioc = bioc, cran_mirror = cran_mirror ) - fmt <- utils::capture.output(summary(tab)) - class(tab) <- setdiff(class(tab), "pkgcache_repo_status") - list(fmt = fmt, data = tab) + summary(rst) } #' Query the currently configured CRAN-like repositories @@ -137,16 +155,15 @@ repo_ping_internal <- function(platforms = NULL, r_version = getRversion(), repo_get <- function(r_version = getRversion(), bioc = TRUE, cran_mirror = NULL) { load_extra("pillar") - remote( - function(...) asNamespace("pak")$repo_get_internal(...), - list(r_version, bioc, cran_mirror) - ) -} + load_all_private() + + # FIXME: why do we need this explicitly? + cran_mirror <- cran_mirror %||% + pkg_data[["ns"]][["pkgcache"]][["default_cran_mirror"]]() -repo_get_internal <- function(r_version = getRversion(), bioc = TRUE, - cran_mirror = NULL) { - cran_mirror = cran_mirror %||% pkgcache::default_cran_mirror() - pkgcache::repo_get(r_version, bioc, cran_mirror) + pkg_data[["ns"]][["pkgcache"]][["repo_get"]]( + r_version, bioc, cran_mirror + ) } #' Add a new CRAN-like repository @@ -236,18 +253,9 @@ repo_get_internal <- function(r_version = getRversion(), bioc = TRUE, #' ``` repo_add <- function(..., .list = NULL) { - new <- c(list(...), .list) - ret <- remote( - function(...) 
asNamespace("pak")$repo_add_internal(...), - list(.list = new) - ) - options(repos = ret$option) - invisible(ret$tab) -} - -repo_add_internal <- function(.list) { - tab <- pkgcache::repo_add(.list = .list) - list(option = getOption("repos"), tab = tab) + load_extra("pillar") + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["repo_add"]](..., .list = .list) } #' @param spec Repository specification, a possibly named character @@ -258,5 +266,6 @@ repo_add_internal <- function(.list) { #' @export repo_resolve <- function(spec) { - remote(function(spec) pkgcache::repo_resolve(spec), list(spec)) + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["repo_resolve"]](spec) } diff --git a/R/rstudio-detect.R b/R/rstudio-detect.R deleted file mode 100644 index 435614511..000000000 --- a/R/rstudio-detect.R +++ /dev/null @@ -1,173 +0,0 @@ - -rstudio <- local({ - - standalone_env <- environment() - parent.env(standalone_env) <- baseenv() - - # -- Collect data ------------------------------------------------------ - - data <- NULL - - get_data <- function() { - envs <- c( - "R_BROWSER", - "R_PDFVIEWER", - "RSTUDIO", - "RSTUDIO_TERM", - "RSTUDIO_CONSOLE_COLOR", - "ASCIICAST") - - d <- list( - pid = Sys.getpid(), - envs = Sys.getenv(envs), - api = tryCatch( - asNamespace("rstudioapi")$isAvailable(), - error = function(err) FALSE - ), - tty = isatty(stdin()), - gui = .Platform$GUI, - args = commandArgs(), - search = search() - ) - d$ver <- if (d$api) asNamespace("rstudioapi")$getVersion() - d$desktop <- if (d$api) asNamespace("rstudioapi")$versionInfo()$mode - - d - } - - # -- Auto-detect environment ------------------------------------------- - - is_rstudio <- function() { - Sys.getenv("RSTUDIO") == "1" - } - - detect <- function(clear_cache = FALSE) { - # Cached? 
- if (clear_cache) data <<- list() - if (!is.null(data)) return(get_caps(data)) - - # Otherwise get data - new <- get_data() - - # Cache unless told otherwise - cache <- TRUE - - new$type <- if (new$envs[["RSTUDIO"]] != "1") { - # 1. Not RStudio at all - "not_rstudio" - - } else if (new$gui == "RStudio" && new$api) { - # 2. RStudio console, properly initialized - "rstudio_console" - - } else if (new$gui == "RStudio" && ! new$api) { - # 3. RStudio console, initilizing - cache <- FALSE - "rstudio_console_starting" - - } else if (new$tty && new$envs[["ASCIICAST"]] != "true") { - # 4. R in the RStudio terminal - # This could also be a subprocess of the console or build pane - # with a pseudo-terminal. There isn't really a way to rule that - # out, without inspecting some process data with ps::ps_*(). - # At least we rule out asciicast - "rstudio_terminal" - - } else if (! new$tty && - new$envs[["RSTUDIO_TERM"]] == "" && - new$envs[["R_BROWSER"]] == "false" && - new$envs[["R_PDFVIEWER"]] == "false" && - is_build_pane_command(new$args)) { - # 5. R in the RStudio build pane - # https://github.com/rstudio/rstudio/blob/main/src/cpp/session/ - # modules/build/SessionBuild.cpp#L231-L240 - "rstudio_build_pane" - - } else { - # Otherwise it is a subprocess of the console, terminal or - # build pane, and it is hard to say which, so we do not try. 
- "rstudio_subprocess" - } - - if (cache) data <<- new - - get_caps(new) - } - - is_build_pane_command <- function(args) { - cmd <- gsub("[\"']", "", args[[length(args)]]) - rcmd <- sub("[(].*$", "", cmd) - rcmd %in% c("devtools::build", "devtools::test", "devtools::check") - } - - # -- Capabilities ------------------------------------------------------ - - caps <- list() - - caps$not_rstudio <- function(data) { - list( - type = "not_rstudio", - dynamic_tty = FALSE, - ansi_tty = FALSE, - ansi_color = FALSE, - num_colors = 1L - ) - } - - caps$rstudio_console <- function(data) { - list( - type = "rstudio_console", - dynamic_tty = TRUE, - ansi_tty = FALSE, - ansi_color = data$envs[["RSTUDIO_CONSOLE_COLOR"]] != "", - num_colors = as.integer(data$envs[["RSTUDIO_CONSOLE_COLOR"]]) - ) - } - - caps$rstudio_console_starting <- function(data) { - res <- caps$rstudio_console(data) - res$type <- "rstudio_console_starting" - res - } - - caps$rstudio_terminal <- function(data) { - list( - type = "rstudio_terminal", - dynamic_tty = TRUE, - ansi_tty = TRUE, - ansi_color = data$envs[["RSTUDIO_CONSOLE_COLOR"]] != "", - num_colors = as.integer(data$envs[["RSTUDIO_CONSOLE_COLOR"]]) - ) - } - - caps$rstudio_build_pane <- function(data) { - list( - type = "rstudio_build_pane", - dynamic_tty = TRUE, - ansi_tty = FALSE, - ansi_color = data$envs[["RSTUDIO_CONSOLE_COLOR"]] != "", - num_colors = as.integer(data$envs[["RSTUDIO_CONSOLE_COLOR"]]) - ) - } - - caps$rstudio_subprocess <- function(data) { - list( - type = "rstudio_subprocess", - dynamic_tty = FALSE, - ansi_tty = FALSE, - ansi_color = FALSE, - num_colors = 1L - ) - } - - get_caps <- function(data, type = data$type) caps[[type]](data) - - structure( - list( - .internal = standalone_env, - is_rstudio = is_rstudio, - detect = detect - ), - class = c("standalone_rstudio_detect", "standalone") - ) -}) diff --git a/R/search.R b/R/search.R index 776082ca0..e42c40306 100644 --- a/R/search.R +++ b/R/search.R @@ -1,4 +1,3 @@ - #' Search CRAN 
packages #' #' Search the indexed database of current CRAN packages. It uses the @@ -17,7 +16,7 @@ #' Simple search #' ```{asciicast pkg-search, R.options = list(width = 72)} #' pkg_search("survival") -#'``` +#' ``` #' #' See the underlying data frame #' ```{asciicast pkg-search-2, R.options = list(width = 72)} @@ -25,55 +24,38 @@ #' psro[] #' ``` -pkg_search <- function(query, ...) { +pkg_search <- function(query = NULL, ...) { load_extra("pillar") - remote( - function(...) { - get("pkg_search_internal", asNamespace("pak"))(...) - }, - list(query = query, ...) + load_all_private() + ret <- withVisible( + pkg_data[["ns"]][["pkgsearch"]][["pkg_search"]](query, ...) ) -} -pkg_search_internal <- function(query, ...) { - res <- pkgsearch::pkg_search(query, ...) - res$ago <- format_time_ago$time_ago(res$date) - class(res) <- c("pak_search_result", class(res)) - res + class(ret$value) <- c("pak_pkg_search_result", class(ret$value)) + if (ret$visible) ret$value else invisible(ret$value) } #' @export -print.pak_search_result <- function(x, ...) { - catln("") - if (nrow(x) == 0) { - catln("x No result. :(") - return(invisible(x)) - } - - md <- attr(x, "metadata") - md$size <- min(md$size, md$total - md$from + 1) - catln( - "# '", md$query, "' -- hits ", md$from, "-", - md$from + md$size - 1, " of ", md$total - ) +print.pak_pkg_search_result <- function(x, ...) { + load_all_private() + pkg_data[["ns"]][["pkgsearch"]][["print.pkg_search_result"]](x, ...) +} - num <- as.character(seq(md$from, md$from + md$size - 1)) - for (i in seq_len(nrow(x))) { - r <- x[i,] - catln("") - catln(num[i], " ", r$package, " ", as.character(r$version), " -- by ", - r$maintainer_name, ", ", r$ago) - catln(paste(strwrap(r$title, indent = 2), collapse = " ")) - } +#' @export - invisible(x) +summary.pak_pkg_search_result <- function(object, ...) { + load_all_private() + pkg_data[["ns"]][["pkgsearch"]][["summary.pkg_search_result"]]( + object, + ... 
+ ) } #' @export -`[.pak_search_result` <- function(x, i, j, drop = FALSE) { - class(x) <- setdiff(class(x), c("pak_search_result", "pkg_search_result")) +`[.pak_pkg_search_result` <- function(x, i, j, drop = FALSE) { + class(x) <- setdiff(class(x), c("pak_pkg_search_result", "pkg_search_result")) NextMethod("[") } @@ -92,14 +74,7 @@ print.pak_search_result <- function(x, ...) { pkg_history <- function(pkg) { load_extra("pillar") - remote( - function(...) { - get("pkg_history_internal", asNamespace("pak"))(...) - }, - list(pkg = pkg) - ) -} + load_all_private() -pkg_history_internal <- function(pkg) { - pkgsearch::cran_package_history(pkg) + pkg_data[["ns"]][["pkgsearch"]][["cran_package_history"]](pkg) } diff --git a/R/subprocess.R b/R/subprocess.R deleted file mode 100644 index 9dff2fb0d..000000000 --- a/R/subprocess.R +++ /dev/null @@ -1,392 +0,0 @@ -## ---------------------------------------------------------------------- -## Helper functions -## ---------------------------------------------------------------------- - -remote_is_alive <- function() { - inherits(rs <- pkg_data$remote, "process") && rs$is_alive() -} - -remote <- function(func, args = list()) { - restart_remote_if_needed() - load_private_packages() - on.exit(restart_remote_if_needed(), add = TRUE) - - rs <- pkg_data$remote - state <- rs$get_state() - if (state %in% c("busy", "starting")) { - timeout <- suppressWarnings(as.numeric( - Sys.getenv("PKG_SUBPROCESS_TIMEOUT", "") - )) - if (is.na(timeout)) timeout <- 5000 - pr <- pkg_data$ns$processx$poll(list(rs$get_poll_connection()), timeout)[[1]] - state <- rs$get_state() - if (state == "starting") { - rs$read() - state <- rs$get_state() - } - } - if (state != "idle") stop("Subprocess is busy or cannot start") - - func2 <- func - subst_args <- list( - "__body__" = body(func), - "__verbosity__" = is_verbose(), - "__repos__" = getOption("repos"), - "__width__" = pkg_data$ns$cli$console_width() - ) - body(func2) <- substitute( - { - withCallingHandlers( 
- cli_message = function(msg) { - withCallingHandlers( - asNamespace("cli")$cli_server_default(msg), - message = function(mmsg) { - class(mmsg) <- c("callr_message", "message", "condition") - signalCondition(mmsg) - invokeRestart("cli_message_handled") - } - ) - invokeRestart("cli_message_handled") - }, - { - options( - pkg.show_progress = `__verbosity__`, - repos = `__repos__`, - cli.width = `__width__`, - width = `__width__` - ) - asNamespace("pak") - `__body__` - } - ) - }, - subst_args - ) - - opts <- options() - extraopts <- c("Ncpus", "BioC_mirror") - pkg_options <- opts[grepl("^pkg[.]", names(opts)) | names(opts) %in% extraopts] - envs <- Sys.getenv() - extraenvs <- "R_BIOC_VERSION" - pkg_envs <- envs[grepl("^PKG_", names(envs)) | names(envs) %in% extraenvs] - rs$run(function(new_opts, new_envs) { - opts <- options() - old_opts <- opts[grepl("^pkg[.]", names(opts))] - # remove all pkg.* options - options(structure( - vector(length(old_opts), mode = "list"), - names = names(old_opts) - )) - # set new ones - options(new_opts) - - envs <- Sys.getenv() - old_envs <- envs[grepl("^PKG_", names(envs))] - Sys.unsetenv(old_envs) - if (length(new_envs)) do.call("Sys.setenv", as.list(new_envs)) - }, list(new_opts = pkg_options, new_envs = pkg_envs)) - - res <- withCallingHandlers( - callr_message = function(msg) { - withRestarts( - { - signalCondition(msg) - out <- if (is_interactive() || sink.number() > 0) stdout() else stderr() - cat(conditionMessage(msg), file = out, sep = "") - }, - muffleMessage = function() NULL - ) - if (!is.null(findRestart("cli_message_handled"))) { - invokeRestart("cli_message_handled") - } - # This is to prevent the original condition from bubbling up, - # because we re-emitted it already. 
- if (!is.null(findRestart("muffleMessage"))) { - invokeRestart("muffleMessage") - } - }, - rs$run_with_output(func2, args) - ) - - if (!is.null(res$error)) { - if (inherits(res$error, "callr_status_error")) { - res$error$message <- "error in pak subprocess" - } - # This is a temporary workaround until we have a principled way of - # printing the various error types in the main process. - if (inherits(res$error$parent, "package_build_error") && - !is.null(res$error$parent$data$stdout)) { - res$error$parent$message <- paste0( - res$error$parent$message, - "\nFull installation output:\n", - paste(res$error$parent$data$stdout, collapse = "\n") - ) - } - err$throw(res$error) - } - - res$result -} - -#' @export -print.pak_error <- function(x, ...) { - cat(x$formatted_message, sep = "\n") -} - -new_remote_session <- function() { - load_private_packages() - callr <- pkg_data$ns$callr - cli <- pkg_data$ns$cli - opts <- callr$r_session_options( - stderr = NULL, - stdout = NULL, - user_profile = if (Sys.getenv("RENV_PROJECT") != "") FALSE else "project" - ) - opts$env <- c( - opts$env, - R_PKG_SHOW_PROGRESS = is_verbose(), - R_PKG_PKG_WORKER = "true", - R_PKG_NUM_COLORS = as.character(cli$num_ansi_colors()), - R_PKG_DYNAMIC_TTY = cli$is_dynamic_tty(), - PKGCACHE_NO_PILLAR = "true" - ) - pkg_data$remote <- callr$r_session$new(opts, wait = FALSE) -} - -try_new_remote_session <- function() { - tryCatch( - { - load_private_packages() - new_remote_session() - }, - error = function(e) e - ) -} - -restart_remote_if_needed <- function() { - "!DEBUG Restarting background process" - rs <- pkg_data$remote - if (inherits(rs, "r_session") && - rs$is_alive() && - rs$get_state() != "busy") { - return() - } - - ## Try to interrupt nicely (SIGINT/CTRL+C), if that fails within 100ms, - ## kill it. 
- if (inherits(rs, "r_session")) { - rs$interrupt() - rs$wait(100) - rs$kill() - } - - new_remote_session() -} - -load_private_cli <- function() { - if (!is.null(pkg_data$ns$cli)) { - return(pkg_data$ns$cli) - } - old <- Sys.getenv("CLI_NO_THREAD", NA_character_) - Sys.setenv(CLI_NO_THREAD = "1") - on.exit( - if (is.na(old)) { - Sys.unsetenv("CLI_NO_THREAD") - } else { - Sys.setenv(CLI_NO_THREAD = old) - }, - add = TRUE - ) - load_private_package("cli") - pkg_data$ns$cli -} - -load_private_packages <- function() { - load_private_cli() - load_private_package("ps") - load_private_package("processx", "c_") - load_private_package("callr") -} - -load_private_package <- function(package, reg_prefix = "", - lib = private_lib_dir()) { - if (!is.null(pkg_data$ns[[package]])) { - return() - } - - ## Load the R code - pkg_env <- new.env(parent = asNamespace(.packageName)) - if (!file.exists(file.path(lib, package))) { - stop("Cannot load ", package, " from the private library") - } - pkg_dir0 <- normalizePath(file.path(lib, package)) - mkdirp(pkg_dir <- file.path(tempfile(), package)) - pkg_dir <- normalizePath(pkg_dir) - file.copy(pkg_dir0, dirname(pkg_dir), recursive = TRUE) - pkg_env[[".packageName"]] <- package - pkg_env[["__pkg-dir__"]] <- pkg_dir - - reg.finalizer(pkg_env, onexit = TRUE, function(x) { - tryCatch( - { - pkg_dir <- pkg_env[["__pkg-dir__"]] - if (!is.null(pkg_dir)) pkg_dir <- suppressWarnings(normalizePath(pkg_dir)) - if (!is.null(pkg_env[[".onUnload"]])) { - tryCatch(pkg_env[[".onUnload"]](pkg_dir), error = function(e) e) - } - libs <- .dynLibs() - paths <- suppressWarnings(normalizePath(vcapply(libs, "[[", "path"))) - matchidx <- grepl(pkg_dir, paths, fixed = TRUE) - if (any(matchidx)) { - pkglibs <- libs[matchidx] - for (lib in pkglibs) dyn.unload(lib[["path"]]) - .dynLibs(libs[!matchidx]) - } - unlink(dirname(pkg_dir), recursive = TRUE, force = TRUE) - }, - error = function(e) e - ) - }) - - tryCatch( - suppressWarnings(lazyLoad(file.path(pkg_dir, "R", 
package), envir = pkg_env)), - error = function(err) { - err$message <- paste0( - "Cannot load ", package, " from the private library: ", - err$message - ) - stop(err) - } - ) - - sysdata <- file.path(pkg_dir, "R", "sysdata.rdb") - if (file.exists(sysdata)) { - lazyLoad(file.path(pkg_dir, "R", "sysdata"), envir = pkg_env) - } - - ## Reset environments - set_function_envs(pkg_env, pkg_env) - ## Sometimes a package refers to its env, this is one known instance. - ## We could also walk the whole tree, but probably not worth it. - if (!is.null(pkg_env$err$.internal$package_env)) { - pkg_env$err$.internal$package_env <- pkg_env - } - - # patch processx and callr - if (package %in% c("callr", "processx")) { - assign("has_cli", function() TRUE, envir = pkg_env$err$.internal) - } - if (package == "callr") { - registerS3method( - "format", - "callr_status_error", - pkg_env$format.callr_status_error, - baseenv() - ) - registerS3method( - "print", - "callr_status_error", - pkg_env$print.callr_status_error, - baseenv() - ) - } - - ## Load shared library - dll_file <- file.path( - pkg_dir, "libs", .Platform$r_arch, - paste0(package, .Platform$dynlib.ext) - ) - if (file.exists(dll_file)) { - dll <- dyn.load(dll_file) - dll[["name"]] <- paste0("pkg-", dll[["name"]]) - .dynLibs(c(.dynLibs(), list(dll))) - natfuns <- getDLLRegisteredRoutines(dll)$.Call - for (natfun in natfuns) { - pkg_env[[paste0(reg_prefix, natfun$name)]] <- natfun - } - } - - pkg_env[["::"]] <- function(pkg, name) { - pkg <- as.character(substitute(pkg)) - name <- as.character(substitute(name)) - if (pkg %in% names(pkg_data$ns)) { - pkg_data$ns[[pkg]][[name]] - } else { - getExportedValue(pkg, name) - } - } - environment(pkg_env[["::"]]) <- pkg_env - - pkg_env[["asNamespace"]] <- function(ns, ...) { - if (ns %in% names(pkg_data$ns)) { - pkg_data$ns[[ns]] - } else { - base::asNamespace(ns, ...) 
- } - } - environment(pkg_env[["asNamespace"]]) <- pkg_env - - pkg_env[["UseMethod"]] <- function(generic, object) { - base::UseMethod(generic, object) - } - environment(pkg_env[["UseMethod"]]) <- pkg_env - - ## We add the env before calling .onLoad, because .onLoad might refer - ## to the package env via asNamespace(), e.g. the ps package does that. - ## In theory we should handle errors in .onLoad... - pkg_data$ns[[package]] <- pkg_env - if (".onLoad" %in% names(pkg_env)) { - if (package == "callr") { - px <- pkg_data$ns$processx[["__pkg-dir__"]] - Sys.setenv(CALLR_PROCESSX_CLIENT_LIB = px) - } - withCallingHandlers( - pkg_env$.onLoad(dirname(pkg_dir), package), - error = function(e) pkg_data$ns[[package]] <<- NULL - ) - } - - invisible() -} - -set_function_envs <- function(within, new) { - old <- .libPaths() - .libPaths(character()) - on.exit(.libPaths(old), add = TRUE) - nms <- names(within) - - is_target_env <- function(x) { - identical(x, base::.GlobalEnv) || environmentName(x) != "" - } - - suppressWarnings({ - for (nm in nms) { - if (is.function(within[[nm]])) { - if (is_target_env(environment(within[[nm]]))) { - environment(within[[nm]]) <- new - } else if (is_target_env(parent.env(environment(within[[nm]])))) { - parent.env(environment(within[[nm]])) <- new - } - } else if ("R6ClassGenerator" %in% class(within[[nm]])) { - within[[nm]]$parent_env <- new - for (mth in names(within[[nm]]$public_methods)) { - environment(within[[nm]]$public_methods[[mth]]) <- new - } - for (mth in names(within[[nm]]$private_methods)) { - environment(within[[nm]]$private_methods[[mth]]) <- new - } - } - } - }) - - invisible() -} - -## This is a workaround for R CMD check - -r_cmd_check_fix <- function() { - callr::r - cli::rule - filelock::lock - invisible() -} diff --git a/R/sysreqs.R b/R/sysreqs.R index dcd56b5ee..e8e33672a 100644 --- a/R/sysreqs.R +++ b/R/sysreqs.R @@ -1,67 +1,58 @@ - sysreqs_is_supported <- function(sysreqs_platform = NULL) { - remote( - function(...) 
pkgdepends::sysreqs_is_supported(...),
-    list(sysreqs_platform = sysreqs_platform)
+  load_all_private()
+  pkg_data[["ns"]][["pkgdepends"]][["sysreqs_is_supported"]](
+    sysreqs_platform
   )
 }
 
 sysreqs_platforms <- function() {
-  remote(
-    function() pkgdepends::sysreqs_platforms()
-  )
+  load_extra("pillar")
+  load_all_private()
+  pkg_data[["ns"]][["pkgdepends"]][["sysreqs_platforms"]]()
 }
 
 sysreqs_list_system_packages <- function() {
   load_extra("pillar")
-  remote(
-    function() pkgdepends::sysreqs_list_system_packages()
-  )
+  load_all_private()
+  pkg_data[["ns"]][["pkgdepends"]][["sysreqs_list_system_packages"]]()
 }
 
 sysreqs_db_match <- function(specs, sysreqs_platform = NULL) {
   load_extra("pillar")
-  remote(
-    function(...) pkgdepends::sysreqs_db_match(...),
-    list(specs = specs, sysreqs_platform = sysreqs_platform)
+  load_all_private()
+  pkg_data[["ns"]][["pkgdepends"]][["sysreqs_db_match"]](
+    specs,
+    sysreqs_platform
   )
 }
 
 sysreqs_db_update <- function() {
-  invisible(remote(
-    function() pkgdepends::sysreqs_db_update()
-  ))
+  load_all_private()
+  invisible(pkg_data[["ns"]][["pkgdepends"]][["sysreqs_db_update"]]())
 }
 
 sysreqs_db_list <- function(sysreqs_platform = NULL) {
   load_extra("pillar")
-  remote(
-    function(...) pkgdepends::sysreqs_db_list(...),
-    list(sysreqs_platform = sysreqs_platform)
-  )
+  load_all_private(); pkg_data[["ns"]][["pkgdepends"]][["sysreqs_db_list"]](sysreqs_platform)
 }
 
 sysreqs_check_installed <- function(packages = NULL, library = .libPaths()[1]) {
   load_extra("pillar")
-  remote(
-    function(...) {
-      ret <- pkgdepends::sysreqs_check_installed(...)
-      asNamespace("pak")$pak_preformat(ret)
-    },
-    list(packages = packages, library = library)
+  load_all_private()
+  pkg_data[["ns"]][["pkgdepends"]][["sysreqs_check_installed"]](
+    packages,
+    library
   )
 }
 
 sysreqs_fix_installed <- function(packages = NULL,
-                                  library = .libPaths()[1]) {
+    library = .libPaths()[1]) {
   load_extra("pillar")
-  invisible(remote(
-    function(...) {
-      ret <- pkgdepends::sysreqs_fix_installed(...)
- asNamespace("pak")$pak_preformat(ret) - }, - list(packages = packages, library = library) + load_all_private() + invisible(pkg_data[["ns"]][["pkgdepends"]][["sysreqs_fix_installed"]]( + packages, + library )) } @@ -102,24 +93,12 @@ sysreqs_fix_installed <- function(packages = NULL, pkg_sysreqs <- function(pkg, upgrade = TRUE, dependencies = NA, sysreqs_platform = NULL) { - load_extra("pillar") - remote( - function(...) { - get("pkg_sysreqs_internal", asNamespace("pak"))(...) - }, - list( - pkg = pkg, - upgrade = upgrade, - dependencies = dependencies, - sysreqs_platform = sysreqs_platform - ) - ) -} - -pkg_sysreqs_internal <- function(pkg, upgrade = TRUE, dependencies = NA, - sysreqs_platform = NULL) { dir.create(lib <- tempfile()) on.exit(rimraf(lib), add = TRUE) + + load_extra("pillar") + load_all_private() + config <- list(library = lib) if (!is.null(sysreqs_platform)) { config[["sysreqs_platform"]] <- sysreqs_platform @@ -127,7 +106,7 @@ pkg_sysreqs_internal <- function(pkg, upgrade = TRUE, dependencies = NA, if (!is.null(dependencies)) { config[["dependencies"]] <- dependencies } - srq <- pkgdepends::sysreqs_install_plan( + srq <- pkg_data[["ns"]][["pkgdepends"]][["sysreqs_install_plan"]]( pkg, upgrade = upgrade, config = config @@ -151,7 +130,8 @@ os_label <- function(x) { #' @export format.pak_sysreqs <- function(x, ...) { - cli <- load_private_cli() + load_all_private() + cli <- pkg_data[["ns"]][["cli"]] label <- os_label(x) pkgs <- cisort(unique(unlist(x$packages$packages))) pkgs <- structure(vector("list", length(pkgs)), names = pkgs) @@ -193,7 +173,7 @@ print.pak_sysreqs <- function(x, ...) 
{ #' @export -`[.pak_sysreqs` <- function (x, i, j, drop = FALSE) { +`[.pak_sysreqs` <- function(x, i, j, drop = FALSE) { class(x) <- setdiff(class(x), "pak_sysreqs") NextMethod("[") } diff --git a/R/sysreqsdocs.R b/R/sysreqsdocs.R index a01680c7a..2b46d6530 100644 --- a/R/sysreqsdocs.R +++ b/R/sysreqsdocs.R @@ -1,4 +1,3 @@ - #' List platforms with system requirements support #' #' @usage @@ -21,7 +20,6 @@ #' @family system requirements functions #' @examplesIf Sys.getenv("IN_PKGDOWN") == "true" #' sysreqs_platforms() - sysreqs_platforms #' Check if a platform has system requirements support @@ -42,7 +40,6 @@ sysreqs_platforms #' \eval{man_config_link("configuration option")}. #' @examplesIf Sys.getenv("IN_PKGDOWN") == "true" #' sysreqs_is_supported() - sysreqs_is_supported #' List contents of the system requirements DB, for a platform @@ -64,14 +61,13 @@ sysreqs_is_supported #' `SystemRequirements` fields. #' * `packages`: one or more system package names to install. #' * `pre_install`: command(s) to run before installing the packages. -#' * `post_install`:: command(s) to run after installing the packages. +#' * `post_install`: command(s) to run after installing the packages. 
#' #' @name sysreqs_db_list #' @export #' @family system requirements functions #' @examplesIf Sys.getenv("IN_PKGDOWN") == "true" #' sysreqs_db_list(sysreqs_platform = "ubuntu-22.04") - sysreqs_db_list #' Match system requirement descriptions to the database @@ -109,7 +105,6 @@ sysreqs_db_list #' c("Needs libcurl", "Java, libssl"), #' sysreqs_platform = "ubuntu-22.04" #' ) - sysreqs_db_match #' Update the cached copy of the system requirements database @@ -173,7 +168,6 @@ sysreqs_db_update #' @examplesIf Sys.getenv("IN_PKGDOWN") == "true" && Sys.info()[["sysname"]] == "Linux" #' # This only works on supported platforms #' sysreqs_check_installed() - sysreqs_check_installed #' @export @@ -203,6 +197,5 @@ sysreqs_fix_installed <- sysreqs_fix_installed #' @export #' @family system requirements functions #' @examplesIf Sys.getenv("IN_PKGDOWN") == "true" && Sys.info()[["sysname"]] == "Linux" -#' sysreqs_list_system_packages()[1:10,] - +#' sysreqs_list_system_packages()[1:10, ] sysreqs_list_system_packages diff --git a/R/system-requirements.R b/R/system-requirements.R index e5cb28fce..2c15ddd6c 100644 --- a/R/system-requirements.R +++ b/R/system-requirements.R @@ -1,5 +1,5 @@ -DEFAULT_RSPM_REPO_ID <- "1" # cran -DEFAULT_RSPM <- "https://packagemanager.rstudio.com" +DEFAULT_RSPM_REPO_ID <- "1" # cran +DEFAULT_RSPM <- "https://packagemanager.rstudio.com" #' Query system requirements #' @@ -52,16 +52,19 @@ DEFAULT_RSPM <- "https://packagemanager.rstudio.com" #' @examplesIf FALSE #' local_system_requirements("ubuntu", "20.04") local_system_requirements <- function(os = NULL, os_release = NULL, root = ".", execute = FALSE, sudo = execute, echo = FALSE) { - once_per_session(message( "`pak::local_system_requirements()` is deprecated since pak 0.6.0.\n", "Please use `pak::pkg_sysreqs()` instead." )) - - res <- remote( - function(...) 
asNamespace("pak")$system_requirements_internal(...), - list(os = os, os_release = os_release, root = root, package = NULL, execute = execute, sudo = sudo, echo = echo)) - if (execute) invisible(res) else res + system_requirements_internal( + os = os, + os_release = os_release, + root = root, + package = NULL, + execute = execute, + sudo = sudo, + echo = echo + ) } #' @details @@ -81,21 +84,25 @@ local_system_requirements <- function(os = NULL, os_release = NULL, root = ".", #' pkg_system_requirements("iDontExist", "ubuntu", "20.04") #' pkg_system_requirements(c("curl", "iDontExist"), "ubuntu", "20.04") pkg_system_requirements <- function(package, os = NULL, os_release = NULL, execute = FALSE, sudo = execute, echo = FALSE) { - once_per_session(message( "`pak::pkg_system_requirements()` is deprecated since pak 0.6.0.\n", "Please use `pak::pkg_sysreqs()` instead." )) - - res <- remote( - function(...) asNamespace("pak")$system_requirements_internal(...), - list(os = os, os_release = os_release, root = NULL, package = package, execute = execute, sudo = sudo, echo = echo)) - if (execute) invisible(res) else res + system_requirements_internal( + os = os, + os_release = os_release, + root = NULL, + package = package, + execute = execute, + sudo = sudo, + echo = echo + ) } system_requirements_internal <- function(os, os_release, root, package, execute, sudo, echo) { + load_all_private() if (is.null(os) || is.null(os_release)) { - d <- pkgcache::current_r_platform_data() + d <- pkg_data[["ns"]][["pkgcache"]][["current_r_platform_data"]]() os <- os %||% d$distribution os_release <- os_release %||% d$release } @@ -119,17 +126,18 @@ system_requirements_internal <- function(os, os_release, root, package, execute, os, os_release ) - res <- curl::curl_fetch_memory(req_url) - data <- jsonlite::fromJSON(rawToChar(res$content), simplifyVector = FALSE) + res <- pkg_data[["ns"]][["curl"]][["curl_fetch_memory"]](req_url) + data <- pkg_data[["ns"]][["jsonlite"]][["fromJSON"]]( + 
rawToChar(res$content), + simplifyVector = FALSE + ) if (!is.null(data$error)) { stop(data$error) } pre_install <- unique(unlist(c(data[["pre_install"]], lapply(data[["requirements"]], `[[`, c("requirements", "pre_install"))))) install_scripts <- unique(unlist(c(data[["install_scripts"]], lapply(data[["requirements"]], `[[`, c("requirements", "install_scripts"))))) - } - - else { + } else { desc_file <- normalizePath(file.path(root, "DESCRIPTION"), mustWork = FALSE) if (!file.exists(desc_file)) { stop("`", root, "` must contain a package.", call. = FALSE) @@ -142,24 +150,29 @@ system_requirements_internal <- function(os, os_release, root, package, execute, os_release ) - h <- curl::new_handle() + h <- pkg_data[["ns"]][["curl"]][["new_handle"]]() desc_size <- file.size(desc_file) desc_data <- readBin(desc_file, "raw", desc_size) - curl::handle_setheaders(h, + pkg_data[["ns"]][["curl"]][["handle_setheaders"]]( + h, customrequest = "POST", "content-type" = "text/plain" ) - curl::handle_setopt(h, + pkg_data[["ns"]][["curl"]][["handle_setopt"]]( + h, postfieldsize = desc_size, postfields = desc_data ) - res <- curl::curl_fetch_memory(req_url, h) + res <- pkg_data[["ns"]][["curl"]][["curl_fetch_memory"]](req_url, h) - data <- jsonlite::fromJSON(rawToChar(res$content), simplifyVector = FALSE) + data <- pkg_data[["ns"]][["jsonlite"]][["fromJSON"]]( + rawToChar(res$content), + simplifyVector = FALSE + ) if (!is.null(data$error)) { stop(data$error) } @@ -170,7 +183,9 @@ system_requirements_internal <- function(os, os_release, root, package, execute, commands <- as.character(c(pre_install, simplify_install(install_scripts))) if (echo) { - callback <- function(x, ...) cli::cli_verbatim(sub("[\r\n]+$", "", x)) + callback <- function(x, ...) { + pkg_data[["ns"]][["cli"]][["cli_verbatim"]](sub("[\r\n]+$", "", x)) + } } else { callback <- function(x, ...) 
invisible() } @@ -180,9 +195,16 @@ system_requirements_internal <- function(os, os_release, root, package, execute, if (sudo) { cmd <- paste("sudo", cmd) } - cli::cli_alert_info("Executing {.code {cmd}}") - - processx::run("sh", c("-c", cmd), stdout_callback = callback, stderr_to_stdout = TRUE) + pkg_data[["ns"]][["cli"]][["cli_alert_info"]]( + "Executing {.code {cmd}}" + ) + + pkg_data[["ns"]][["processx"]][["run"]]( + "sh", + c("-c", cmd), + stdout_callback = callback, + stderr_to_stdout = TRUE + ) } } @@ -193,13 +215,13 @@ system_requirements_internal <- function(os, os_release, root, package, execute, # OSs commented out are not currently supported by the API supported_os_versions <- function() { list( - #"debian" = c("8", "9"), + # "debian" = c("8", "9"), "ubuntu" = c("14.04", "16.04", "18.04", "20.04", "22.04"), "centos" = c("6", "7", "8"), "redhat" = c("6", "7", "8"), "opensuse" = c("42.3", "15.0"), "sle" = c("12.3", "15.0") - #"windows" = c("") + # "windows" = c("") ) } diff --git a/R/system.R b/R/system.R index a8a0f9c3f..bd1ffc06e 100644 --- a/R/system.R +++ b/R/system.R @@ -1,4 +1,3 @@ - #' R platforms #' #' @details @@ -59,28 +58,15 @@ #' @examplesIf FALSE #' system_r_platform() #' system_r_platform_data() - system_r_platform <- function() { - remote( - function(...) asNamespace("pak")$system_r_platform_internal(...), - list() - ) -} - -system_r_platform_internal <- function() { - pkgcache::current_r_platform() + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["current_r_platform"]]() } #' @export #' @rdname system_r_platform system_r_platform_data <- function() { - remote( - function(...) 
asNamespace("pak")$system_r_platform_data_internal(...), - list() - ) -} - -system_r_platform_data_internal <- function() { - pkgcache::current_r_platform_data() + load_all_private() + pkg_data[["ns"]][["pkgcache"]][["current_r_platform_data"]]() } diff --git a/R/terminate.R b/R/terminate.R index ca48de1b7..ad48bfc37 100644 --- a/R/terminate.R +++ b/R/terminate.R @@ -1,7 +1,6 @@ - terminate <- function(others, msg = " Terminating %s (%i) ...") { - load_private_packages() - ps <- pkg_data$ns$ps + load_all_private() + ps <- pkg_data[["ns"]][["ps"]] pcs <- others$users pcs <- pcs[!duplicated(pcs$pid), , drop = FALSE] diff --git a/R/utils.R b/R/utils.R index 3f38a991f..f1823a1b5 100644 --- a/R/utils.R +++ b/R/utils.R @@ -1,17 +1,9 @@ +# Needed for pkgcache +#' @rawNamespace if (getRversion() >= "4.0.0") importFrom(tools, R_user_dir) +NULL `%||%` <- function(l, r) if (is.null(l)) r else l -# Adapted from withr:::merge_new -merge_new <- function(old, new, action = c("replace", "prepend", "append")) { - action <- match.arg(action, c("replace", "prepend", "append")) - - switch(action, - prepend = c(new, old), - append = c(old, new), - replace = new - ) -} - vcapply <- function(X, FUN, ...) { vapply(X, FUN, FUN.VALUE = character(1), ...) } @@ -28,44 +20,10 @@ vdapply <- function(X, FUN, ...) { vapply(X, FUN, FUN.VALUE = double(1), ...) } -is_verbose <- function() { - env <- Sys.getenv("R_PKG_SHOW_PROGRESS", "") - if (env != "") { - tolower(env) == "true" - } else { - opt <- getOption("pkg.show_progress") - if (!is.null(opt)) { - return(isTRUE(opt)) - } else { - interactive() - } - } -} - -backtick <- function (x) { - encodeString(x, quote = "`", na.encode = FALSE) -} - -format_items <- function (x) { - paste0(cli::ansi_collapse(backtick(x), sep = ", ", last = " and ")) -} - -str_trim <- function (x) { - sub("^\\s+", "", sub("\\s+$", "", x)) -} - -list_files <- function(path) { - if (!file.exists(path)) return(character()) - fs <- dir(path, full.names = TRUE) - basename(fs[! 
is_dir(fs)]) -} - -file_mtime <- function(...) { - file.info(..., extra_cols = FALSE)$mtime -} - -is_dir <- function(...) { - file.info(..., extra_cols = FALSE)$isdir +str_trim <- function(x) { + x <- sub("(*UCP)\\s+$", "", x, perl = TRUE) + x <- sub("(*UCP)^\\s+", "", x, perl = TRUE) + x } get_minor_r_version <- function(x = getRversion()) { @@ -73,6 +31,7 @@ get_minor_r_version <- function(x = getRversion()) { vapply(unclass(x), function(x) paste(x[1:2], collapse = "."), character(1)) } +# nocov start get_os <- function() { if (.Platform$OS.type == "windows") { "win" @@ -84,16 +43,16 @@ get_os <- function() { "unknown" } } +# nocov end -user_cache_dir <- function(appname) { +user_cache_dir <- function(appname = utils::packageName()) { if (nzchar(cache <- Sys.getenv("R_PKG_CACHE_DIR", ""))) { return(cache) } if (nzchar(cache <- Sys.getenv("R_USER_CACHE_DIR", ""))) { return(file.path(cache, "R", appname)) } - switch( - get_os(), + switch(get_os(), win = file_path(win_path_local(), "R", "Cache", appname), mac = file_path("~/Library/Caches", "org.R-project.R", "R", appname), unix = file_path(Sys.getenv("XDG_CACHE_HOME", "~/.cache"), "R", appname), @@ -108,10 +67,8 @@ file_path <- function(...) 
{ win_path_local <- function() { if (nzchar(lapp <- Sys.getenv("LOCALAPPDATA", ""))) { lapp - } else if (nzchar(usrprof <- Sys.getenv("USERPROFILE", ""))) { file.path(usrprof, "AppData", "Local") - } else { file.path(tempdir(), "r-pkg-cache") } @@ -121,123 +78,45 @@ cat0 <- function(..., sep = "") { cat(..., sep = sep) } -catln <- function(..., sep = "") { - cat(..., "\n", sep = "") -} - -get_num_workers <- function() { - n <- tryCatch( - suppressWarnings(as.integer(getOption("Ncpus", NA_integer_))), - error = function(e) NA_integer_) - - if (length(n) != 1 || is.na(n)) { - n <- tryCatch( - ps::ps_cpu_count(logical = TRUE), - error = function(e) NA_integer_) - } - - if (is.na(n)) n <- 1L - - n -} - -to_package_name <- function(x) { - x <- gsub("[^a-zA-Z0-9\\.]", "", x) - if (nchar(x) < 2) { - "unknown.package" - } else if (!grepl("^[a-zA-Z]", x)) { - paste0("X", x) - } else { - x - } -} - -strrep <- function(x, times) { - x = as.character(x) - if (length(x) == 0L) - return(x) - unlist(.mapply(function(x, times) { - if (is.na(x) || is.na(times)) - return(NA_character_) - if (times <= 0L) - return("") - paste0(replicate(times, x), collapse = "") - }, list(x = x, times = times), MoreArgs = list()), use.names = FALSE) -} - -testthat_testing <- function() { - identical(Sys.getenv("TESTTHAT"), "true") -} - -norm_path <- function(x) { - normalizePath(x, winslash = "/") -} - -drop_nulls <- function(x) { - is_null <- vlapply(x, is.null) - x[!is_null] -} - mkdirp <- function(dir, msg = NULL) { s <- vlapply(dir, dir.create, recursive = TRUE, showWarnings = FALSE) - if (any(s) && !is.null(msg) && is_verbose()) { - cli::cli_alert_info("{msg}: {.path {format_items(dir[s])}}") + if (any(s) && !is.null(msg)) { + load_all_private() + cli <- pkg_data[["ns"]][["cli"]] + cli$cli_alert_info("{msg}: {.path {dir[s]}}.") } invisible(s) } fix_macos_path_in_rstudio <- function() { ## Only in RStudio - if (Sys.getenv("RSTUDIO") != "1") return() + if (Sys.getenv("RSTUDIO") != "1") { + 
return() + } ## Only on macOS - if (Sys.info()["sysname"] != "Darwin") return() + if (Sys.info()["sysname"] != "Darwin") { + return() + } - if (!file.exists("/etc/paths")) return() + if (!file.exists("/etc/paths")) { + return() + } path <- Sys.getenv("PATH") new_path <- readLines("/etc/paths", n = 1000) - Sys.setenv(PATH = paste0(path, ":", paste(new_path, collapse = ":"))) - - invisible() -} + # it is always : on macOS, but we test this on Windows as well + sep <- .Platform$path.sep + Sys.setenv(PATH = paste0(path, sep, paste(new_path, collapse = sep))) -append_union <- function(path, cnt, msg_new = NULL, msg_done = NULL) { - lines <- readLines(path) - new_cnt <- setdiff(cnt, lines) - if (length(new_cnt)) { - new_lines <- c(lines, new_cnt) - if (!is.null(msg_new)) cli::cli_alert_info(msg_new) - writeLines(new_lines, path) - } else { - if (!is.null(msg_done)) cli::cli_alert_info(msg_done) - } invisible() } -try_add_to_git <- function(path) { - tryCatch({ - processx::run("git", c("add", path), timeout = 10) - cli::cli_alert_info("Add {.path {path}} to git.") - }, error = function(x) x) -} - rimraf <- function(...) { x <- file.path(...) - if ("~" %in% x) stop("Cowardly refusing to delete `~`") + if ("~" %in% x) stop("Cowardly refusing to delete `~`") # nocov coward... unlink(x, recursive = TRUE, force = TRUE) } -msg <- function(..., domain = NULL, appendLF = TRUE) { - msg <- .makeMessage(..., domain = domain, appendLF = appendLF) - - output <- if (is_interactive()) stdout() else stderr() - - withRestarts(muffleMessage = function() NULL, { - signalCondition(simpleMessage(msg)) - cat(msg, file = output, sep = "") - }) -} - is_interactive <- function() { opt <- getOption("rlib_interactive") if (isTRUE(opt)) { @@ -277,13 +156,13 @@ na_omit <- function(x) { ## Not an issue currently, might be in the future. 
base_packages <- function() { - if (is.null(pkg_data$base_packages)) { pkg_data$base_packages <- - c("base", "compiler", "datasets", "graphics", "grDevices", "grid", + c( + "base", "compiler", "datasets", "graphics", "grDevices", "grid", "methods", "parallel", "splines", "stats", "stats4", "tcltk", "tools", "utils" - ) + ) } pkg_data$base_packages } @@ -296,29 +175,22 @@ is_string <- function(x) { is.character(x) && length(x) == 1 && !is.na(x) } -map_named <- function(x, fun) { - mapply(names(x), x, SIMPLIFY = FALSE, FUN = fun) +is_count <- function(x) { + is.numeric(x) && length(x) == 1 && !is.na(x) && x >= 0 && + as.integer(x) == x } -rbind_expand <- function(..., .list = list()) { - data <- c(list(...), .list) - cols <- unique(unlist(lapply(data, function(x) colnames(x)))) - for (i in seq_along(data)) { - miss_cols <- setdiff(cols, colnames(data[[i]])) - if (length(miss_cols)) { - na_df <- as_data_frame(structure( - replicate( - length(miss_cols), - if (nrow(data[[i]])) NA else logical(), - simplify = FALSE), - names = miss_cols)) - data[[i]] <- as_data_frame(cbind(data[[i]], na_df)) - } - } - - do.call(rbind, data) +map_named <- function(x, fun) { + mapply(names(x), x, SIMPLIFY = FALSE, FUN = fun) } cisort <- function(x) { x[order(tolower(x))] } + +read_char <- function(path) { + bin <- readBin(path, "raw", file.info(path)$size) + txt <- rawToChar(bin) + Encoding(txt) <- "UTF-8" + txt +} diff --git a/R/warn-loaded.R b/R/warn-loaded.R index 4467970eb..56303be5d 100644 --- a/R/warn-loaded.R +++ b/R/warn-loaded.R @@ -1,11 +1,12 @@ - warn_for_loaded_packages <- function(pkgs, lib, loaded, pid = NULL) { - if (length(pkgs) == 0) return(list(status = "none-none")) - if (get_os() == "win") { - warn_for_loaded_packages_win(pkgs, lib, loaded, pid) - } else { - warn_for_loaded_packages_unix(pkgs, lib, loaded, pid) - } + if (length(pkgs) == 0) { + return(list(status = "none-none")) + } + if (get_os() == "win") { + warn_for_loaded_packages_win(pkgs, lib, loaded, pid) + } 
else {
+    warn_for_loaded_packages_unix(pkgs, lib, loaded, pid)
+  }
 }
 
 handle_status <- function(status, lib, ask) {
@@ -16,9 +17,10 @@ handle_status <- function(status, lib, ask) {
     ans <- get_answer(loaded_status$answers)
     sts <- loaded_packages_response(loaded_status, ans)
     if (sts$status != "try-again") break
-    loaded_status <- remote(
-      function(...) get("warn_for_loaded_packages", asNamespace("pak"))(...),
-      list(loaded_status$pkgs, lib, loaded_packages(lib))
+    loaded_status <- warn_for_loaded_packages(
+      loaded_status$pkgs,
+      lib,
+      loaded_packages(lib)
     )
   }
 
@@ -33,7 +35,9 @@ handle_status <- function(status, lib, ask) {
 # -- Unix ------------------------------------------------------------
 
 warn_for_loaded_packages_unix <- function(pkgs, lib, loaded, pid) {
-  if (is.null(loaded)) return()
+  if (is.null(loaded)) {
+    return()
+  }
   bad <- intersect(pkgs, loaded)
   bad <- setdiff(bad, "pak")
   if (length(bad)) warn_for_loaded_packages_emit(bad)
@@ -41,7 +45,8 @@ warn_for_loaded_packages_unix <- function(pkgs, lib, loaded, pid) {
 }
 
 warn_for_loaded_packages_emit <- function(pkgs) {
-  cli::cli_alert_warning(
+  load_all_private()
+  pkg_data[["ns"]][["cli"]]$cli_alert_warning(
     "{.pkg {pkgs}} {?is/are} loaded in the current R session, \\
     you probably need to restart R after the installation.",
     wrap = TRUE
@@ -65,19 +70,14 @@ warn_for_loaded_packages_win <- function(pkgs, lib, loaded, pid = NULL) {
   status$answers <- if (current$status == "none" && others$status == "none") {
     # Nothing to do
     NULL
-
   } else if (current$status == "loaded" && others$status == "none") {
     warn_for_loaded_packages_loaded_none(current)
-
   } else if (current$status == "locked" && others$status == "none") {
     warn_for_loaded_packages_locked_none(current)
-
   } else if (current$status == "none" && others$status == "locked") {
     warn_for_loaded_packages_none_locked(others)
-
   } else if (current$status == "loaded" && others$status == "locked") {
     warn_for_loaded_packages_loaded_locked(current, others)
-
   } else if (current$status == 
"locked" && others$status == "locked") { warn_for_loaded_packages_locked_locked(current, others) } @@ -91,7 +91,9 @@ warn_for_loaded_packages_loaded_none <- function(current) { } warn_for_loaded_packages_locked_none <- function(current) { - cli::cli_alert_warning( + load_all_private() + cli <- pkg_data[["ns"]][["cli"]] + cli$cli_alert_warning( "{.pkg {current$locked}} {?is/are} loaded in the current \\ R session and {?has/have} locked files in your library. The \\ installation will probably fail, unless pak unloads {?it/them}, \\ @@ -99,14 +101,14 @@ warn_for_loaded_packages_locked_none <- function(current) { wrap = TRUE ) - cli::cli_div(theme = list( + cli$cli_div(theme = list( body = list("margin-left" = 2L), par = list("margin-top" = 1L), ol = list("margin-top" = 1L) )) - cli::cli_par() - cli::cli_text("What do you want to do?") - cli::cli_ol(c( + cli$cli_par() + cli$cli_text("What do you want to do?") + cli$cli_ol(c( "Have pak unload them before the installation. (Safest option.)", "Try the installation without unloading them", "Abort the installation." @@ -115,6 +117,8 @@ warn_for_loaded_packages_locked_none <- function(current) { } warn_for_loaded_packages_none_locked <- function(others) { + load_all_private() + cli <- pkg_data[["ns"]][["cli"]] pcs <- others$users[!duplicated(others$users$pid), , drop = FALSE] sess <- sprintf("%s %s", pcs$name, pcs$pid) apps <- ifelse( @@ -123,24 +127,24 @@ warn_for_loaded_packages_none_locked <- function(others) { sprintf(" in %s (%s)", pcs$app_name, pcs$app_pid) ) sess <- paste0(sess, apps) - cli::cli_alert_warning( + cli$cli_alert_warning( "{.pkg {others$locked}} {?is/are} loaded in other R sessions and \\ {?has/have} locked files in your library. 
The installation will \\ - probably fail, unless you quit from {cli::qty(length(sess))} \\ + probably fail, unless you quit from {cli$qty(length(sess))} \\ {?this/these} R session{?s}:", wrap = TRUE ) - cli::cli_div(theme = list( + cli$cli_div(theme = list( body = list("margin-left" = 2L), par = list("margin-top" = 1L), ol = list("margin-top" = 1L), ul = list("margin-top" = 1L, "margin-bottom" = 1L) )) - cli::cli_par() - cli::cli_ul(sess) - cli::cli_text("What do you want to do?") - cli::cli_ol(c( + cli$cli_par() + cli$cli_ul(sess) + cli$cli_text("What do you want to do?") + cli$cli_ol(c( "Quit these R sessions and try the installation again \\ (Safest option.)", "Terminate the listed R sessions. They may lose data!", @@ -151,12 +155,15 @@ warn_for_loaded_packages_none_locked <- function(others) { } warn_for_loaded_packages_loaded_locked <- function(current, others) { + load_all_private() warn_for_loaded_packages_emit(current$loaded) - cli::cli_verbatim("") + pkg_data[["ns"]][["cli"]]$cli_verbatim("") warn_for_loaded_packages_none_locked(others) } warn_for_loaded_packages_locked_locked <- function(current, others) { + load_all_private() + cli <- pkg_data[["ns"]][["cli"]] pcs <- others$users[!duplicated(others$users$pid), , drop = FALSE] sess <- sprintf("%s (%s)", pcs$name, pcs$pid) apps <- ifelse( @@ -166,41 +173,41 @@ warn_for_loaded_packages_locked_locked <- function(current, others) { ) sess <- paste0(sess, apps) if (identical(sort(current$locked), sort(others$locked))) { - cli::cli_alert_warning( + cli$cli_alert_warning( "{.pkg {current$locked}} {?is/are} loaded in the current R \\ session and in other R sessions and {?has/have} locked files \\ in your library. 
The installation will probably fail unless \\ pak unloads {?it/them} from the current R session and you - quit from {cli::qty(length(sess))} {?this/these} R session{?s}:", + quit from {cli$qty(length(sess))} {?this/these} R session{?s}:", wrap = TRUE ) } else { - cli::cli_alert_warning( + cli$cli_alert_warning( "{.pkg {current$locked}} {?is/are} loaded in the current R \\ session and {?has/have} locked files in your library. The \\ installation will fail unless pak unloads it.", wrap = TRUE ) - cli::cli_verbatim("") - cli::cli_alert_warning( + cli$cli_verbatim("") + cli$cli_alert_warning( "{.pkg {others$locked}} {?is/are} loaded in other R sessions \\ and {?has/have} locked files in your library. The installation \\ - will fail unless you quit from {cli::qty(length(sess))} \\ + will fail unless you quit from {cli$qty(length(sess))} \\ {?this/these} R session{?s}:", wrap = TRUE ) } - cli::cli_div(theme = list( + cli$cli_div(theme = list( body = list("margin-left" = 2L), par = list("margin-top" = 1L), ol = list("margin-top" = 1L), ul = list("margin-top" = 1L, "margin-bottom" = 1L) )) - cli::cli_par() - cli::cli_ul(sess) - cli::cli_text("What do you want to do?") - cli::cli_ol(c( + cli$cli_par() + cli$cli_ul(sess) + cli$cli_text("What do you want to do?") + cli$cli_ol(c( "Quit the listed R sessions and then have pak unload the \\ packages from the current R session (Safest option.)", "Terminate the listed R sessions (they may lose data!) 
and have \\
@@ -218,7 +225,8 @@ r_process_names <- function() {
 
 #' @noRd
 r_app_names <- function() {
-  c("Rgui" = "Rgui.exe",
+  c(
+    "Rgui" = "Rgui.exe",
     "RStudio" = "rstudio.exe",
     "VScode" = "Code.exe",
     "Windows Terminal" = "WindowsTerminal.exe",
@@ -230,7 +238,11 @@ guess_r_app <- function(ancestry) {
   names <- vcapply(ancestry, function(p) {
-    tryCatch(ps::ps_name(p), error = function(e) NA_character_)
+    load_all_private()
+    tryCatch(
+      pkg_data[["ns"]][["ps"]]$ps_name(p),
+      error = function(e) NA_character_
+    )
   })
 
   good <- which(!is.na(names) & names %in% r_app_names())[1]
@@ -268,7 +280,8 @@ get_locked_libs <- function(lib, pkgs) {
   libs <- file.path(lib, pkgs, "libs", .Platform$r_arch)
   dlls <- dir(libs, pattern = "\\.dll$", full.names = TRUE)
-  users <- ps::ps_shared_lib_users(
+  load_all_private()
+  users <- pkg_data[["ns"]][["ps"]]$ps_shared_lib_users(
     dlls,
     user = NULL,
     filter = r_process_names()
@@ -300,7 +313,8 @@ get_locked_libs <- function(lib, pkgs) {
 loaded_status_current <- function(pkgs, loaded, locked, pid = NULL) {
   bad <- intersect(pkgs, loaded)
   bad <- setdiff(bad, "pak")
-  pid <- pid %||% ps::ps_ppid()
+  load_all_private()
+  pid <- pid %||% pkg_data[["ns"]][["ps"]]$ps_ppid()
   cur_locked <- locked$package[
     locked$package %in% bad & locked$pid == pid
   ]
@@ -343,20 +357,22 @@ loaded_status_current <- function(pkgs, loaded, locked, pid = NULL) {
 
 #' @noRd
 loaded_status_others <- function(locked, pid = NULL) {
-  pid <- pid %||% ps::ps_ppid()
+  load_all_private()
+  ps <- pkg_data[["ns"]][["ps"]]
+  pid <- pid %||% ps$ps_ppid()
  oth_locked <- locked[locked$pid != pid, , drop = FALSE]
  oth_pids <- unique(oth_locked$pid)
 
  descent <- lapply(oth_pids, function(pid) {
    ps <- oth_locked$ps_handle[[match(pid, oth_locked$pid)]]
-    tryCatch(ps::ps_descent(ps), error = function(e) list(ps))
+    tryCatch(pkg_data[["ns"]][["ps"]]$ps_descent(ps), error = function(e) list(ps))
  })
 
  app_handle <- lapply(descent, guess_r_app)
  app_pid <- 
viapply(app_handle, function(p) { - tryCatch(ps::ps_pid(p), error = function(e) NA_integer_) + tryCatch(ps$ps_pid(p), error = function(e) NA_integer_) }) app_name <- vcapply(app_handle, function(p) { - tryCatch(ps::ps_name(p), error = function(e) NA_character_) + tryCatch(ps$ps_name(p), error = function(e) NA_character_) }) map <- match(oth_locked$pid, oth_pids) @@ -369,8 +385,8 @@ loaded_status_others <- function(locked, pid = NULL) { status <- if (nrow(oth_locked) > 0) "locked" else "none" - oth_locked$create_time <- vdapply(oth_locked$ps_handle, ps::ps_create_time) - oth_locked$app_create_time <- vdapply(oth_locked$app_handle, ps::ps_create_time) + oth_locked$create_time <- vdapply(oth_locked$ps_handle, ps$ps_create_time) + oth_locked$app_create_time <- vdapply(oth_locked$app_handle, ps$ps_create_time) oth_locked$ps_handle <- NULL oth_locked$app_handle <- NULL @@ -382,11 +398,9 @@ loaded_status_others <- function(locked, pid = NULL) { } loaded_packages_response <- function(status, response) { - switch( - status$status, + switch(status$status, "locked-none" = { - switch( - response, + switch(response, "1" = { # Unload unload(status$current$locked) @@ -404,10 +418,9 @@ loaded_packages_response <- function(status, response) { ) }, "none-locked" = , - "loaded-locked" =, + "loaded-locked" = , "locked-locked" = { - switch( - response, + switch(response, "1" = { # Unload (if needed), try again if (length(status$current$locked) > 0) { diff --git a/man/pak_sitrep.Rd b/man/pak_sitrep.Rd index c3e119713..9a61d8456 100644 --- a/man/pak_sitrep.Rd +++ b/man/pak_sitrep.Rd @@ -12,8 +12,7 @@ It prints \item pak version, \item platform the package was built on, and the current platform, \item the current library path, -\item versions of dependencies, -\item whether dependencies can be loaded. +\item versions of dependencies. 
} } \section{Examples}{ diff --git a/man/pkg_search.Rd b/man/pkg_search.Rd index 2037077fd..323c1f4af 100644 --- a/man/pkg_search.Rd +++ b/man/pkg_search.Rd @@ -4,7 +4,7 @@ \alias{pkg_search} \title{Search CRAN packages} \usage{ -pkg_search(query, ...) +pkg_search(query = NULL, ...) } \arguments{ \item{query}{Search query string.} diff --git a/src/Makevars-covr b/src/Makevars-covr new file mode 100644 index 000000000..75f0ae4e1 --- /dev/null +++ b/src/Makevars-covr @@ -0,0 +1,15 @@ +CFLAGS = -O0 --coverage -DGCOV_COMPILE +CXXFLAGS += -O0 --coverage -DGCOV_COMPILE +CXX1XFLAGS += -O0 --coverage -DGCOV_COMPILE +CXX11FLAGS += -O0 --coverage -DGCOV_COMPILE +CXX14FLAGS += -O0 --coverage -DGCOV_COMPILE +CXX17FLAGS += -O0 --coverage -DGCOV_COMPILE +CXX20FLAGS += -O0 --coverage -DGCOV_COMPILE + +FFLAGS += -O0 --coverage +FCFLAGS += -O0 --coverage +FLIBS += -lgcov + +# LDFLAGS is ignored on windows and visa versa +LDFLAGS += --coverage +SHLIB_LIBADD += --coverage diff --git a/src/install-embedded.R b/src/install-embedded.R index 9d689b3d7..5100aa97e 100644 --- a/src/install-embedded.R +++ b/src/install-embedded.R @@ -1,6 +1,8 @@ -opts <- function() { +install_opts <- function() { + cov <- Sys.getenv("TEST_COVERAGE_PAK") == "true" paste( - "--without-keep.source", + if (cov) "--with-keep.source" else "--without-keep.source", + if (cov) "--preclean", "--no-html", "--no-help", "--no-data", @@ -17,7 +19,9 @@ rimraf <- function(path) { unlink(path, force = TRUE, recursive = TRUE) } -get_lib <- function(lib) { +# For `load_all()` the `lib` argument is passed in. 
+
+get_lib <- function(lib = NULL) {
   lib <- lib %||% file.path(Sys.getenv("R_PACKAGE_DIR"), "library")
   dir.create(lib, recursive = TRUE, showWarnings = FALSE)
   lib
@@ -55,15 +59,24 @@ update_description_build <- function(path, platform) {
 
 install_one <- function(pkg, lib = NULL) {
   lib <- get_lib(lib)
+  if (file.exists(file.path(lib, pkg))) {
+    warning("Package already installed, this should not happen.")
+  }
+
   cat("\nCompiling", pkg, "\n")
   suppressWarnings(suppressMessages(utils::capture.output(install.packages(
     paste0("library/", pkg),
     lib = lib,
     repos = NULL,
     type = "source",
-    INSTALL_opts = opts()
+    INSTALL_opts = install_opts()
   ))))
+  # Need to stop explicitly, because `install.packages()` does not error
+  # on failed installation. Before calling `install_one()` we always
+  # remove the previously installed package, so this check still works if
+  # we are updating.
+
   if (!file.exists(file.path(lib, pkg))) {
     stop("FAILED")
   }
@@ -96,6 +109,32 @@ install_order <- function() {
   pkgs
 }
 
+# we do not need the R/ directory once the R code is bundled into a
+# single RDS file. This is not called for `load_all()`, so that we
+# can update single packages.
+
+# except that we need it for a couple of packages that are loaded in a
+# subprocess
+
+clean_up_r <- function(lib = NULL) {
+  lib <- get_lib(lib)
+  for (pkg in setdiff(dir(lib), c("cli", "desc", "filelock", "processx", "R6"))) {
+    r_dir <- file.path(lib, pkg, "R")
+    if (file.exists(r_dir)) {
+      rimraf(r_dir)
+    }
+  }
+}
+
+# This is used for `R CMD INSTALL`, but not for `load_all()`.
+# This is pretty simple, except when we are cross-compiling.
+#
+# Cross-compiling means that we must be able to load the dependencies
+# on the building platform, and the packages for the target platform
+# must be installed into a separate library. Actually, we put each
+# one into its own library, to be on the safe side. After all
+# dependencies are installed, we move them over to the proper place.
+
 install_dummies <- function(lib) {
   pkgs <- dir("dummy")
   dir.create(lib, showWarnings = FALSE, recursive = TRUE)
@@ -194,10 +233,14 @@ install_all <- function(lib = NULL) {
   } else {
     for (pkg in pkgs) install_one(pkg, lib = lib)
   }
-  file.create("DONE")
   invisible()
 }
+# This is used to decide if we need to update a dependency during
+# `load_all()`. At this point it is only a performance optimization, and
+# it could be removed, because if the versions match, then we use
+# hashing to make sure that they are the same.
+
 get_ver <- function(path) {
   if (!file.exists(path)) {
     return(NA_character_)
   }
@@ -219,24 +262,80 @@
   as.character(ver)
 }
 
+# MD5 of a string
+
+md5 <- function(str) {
+  tmp <- tempfile()
+  on.exit(unlink(tmp), add = TRUE)
+  cat(str, file = tmp)
+  tools::md5sum(tmp)
+}
+
+# Hash of a directory. MD5 of each file, and then the MD5 of these.
+# FIXME: hash the path as well, not just the contents, to account
+# for renaming and moving files.
+
+dir_hash <- function(path) {
+  files <- sort(dir(path, full.names = TRUE, recursive = TRUE))
+  files <- grep(
+    "[.](s?o|gcda|gcov)$",
+    files,
+    value = TRUE,
+    invert = TRUE
+  )
+  # Reinstall if turning hashing on/off
+  hashes <- c(
+    unname(tools::md5sum(files)),
+    Sys.getenv("TEST_COVERAGE_PAK", "false")
+  )
+  md5(paste(hashes, collapse = " "))
+}
+
+# This is what we use in `load_all()`. It is not used during
+# `R CMD INSTALL`, that case is covered by `install_all()`.
+#
+# It uses hashing to decide if a dependency needs to be updated in
+# the private library.
+ update_all <- function(lib = NULL) { + if (Sys.getenv("TEST_COVERAGE_PAK") == "true") { + old <- Sys.getenv("R_MAKEVARS_USER", NA_character_) + if (is.na(old)) { + on.exit(Sys.unsetenv("R_MAKEVARS_USER"), add = TRUE) + } else { + on.exit(Sys.setenv("R_MAKEVARS_USER" = old), add = TRUE) + } + Sys.setenv(R_MAKEVARS_USER = normalizePath("Makevars-covr")) + message("Test coverage build!") + } lib <- get_lib(lib) - cat("Updating dev lib at", lib, "\n") + hash_tmpl <- file.path(lib, "_%s.hash") pkgs <- install_order() + upd <- structure(logical(length(pkgs)), names = pkgs) for (pkg in pkgs) { - oldver <- get_ver(file.path(lib, pkg)) - newver <- get_ver(file.path("library", pkg)) - if (is.na(newver)) stop("Cannot find embedded ", pkg) - if (is.na(oldver)) { + hash_path <- sprintf(hash_tmpl, pkg) + new_hash <- dir_hash(file.path("library", pkg)) + if (!file.exists(hash_path)) { message("Adding ", pkg) rimraf(file.path(lib, pkg)) # in case it is a broken install install_one(pkg, lib) - } else if (oldver != newver) { - message("Updating ", pkg, " ", oldver, " -> ", newver) - rimraf(file.path(lib, pkg)) - install_one(pkg, lib) + writeLines(new_hash, hash_path) + upd[pkg] <- TRUE + } else { + old_hash <- readLines(hash_path) + new_hash <- dir_hash(file.path("library", pkg)) + oldver <- get_ver(file.path(lib, pkg)) + newver <- get_ver(file.path("library", pkg)) + if (old_hash != new_hash) { + message("Updating ", pkg, " ", oldver, " -> ", newver) + rimraf(file.path(lib, pkg)) + install_one(pkg, lib) + writeLines(new_hash, hash_path) + upd[pkg] <- TRUE + } } } + upd } load_all <- function() { @@ -244,9 +343,18 @@ load_all <- function() { if (length(args) < 2) { stop("Usage: install-embedded.R [ --load-all library-dir ]") } - update_all(args[2]) + upd <- update_all(args[2]) + if (any(upd)) { + bundle_rds(args[2]) + } + if (Sys.getenv("TEST_COVERAGE_PAK") == "true") { + bundle_covr_rds(args[2]) + } } +# Parse the `--build` and `--target` arguments passed to +# `./configure`. 
Also check if we are called from `load_all()`. + parse_platforms <- function(args) { build <- if (grepl("^--build=", args[1])) { substr(args[1], 9, 1000) @@ -262,6 +370,182 @@ parse_platforms <- function(args) { ) } +# Need to set the environments of functions within packages. + +set_function_envs <- function(within, new) { + old <- .libPaths() + .libPaths(character()) + on.exit(.libPaths(old), add = TRUE) + nms <- names(within) + + # We don't reset closures. `R6::R6Class()` must be handled specially, + # because it is a closure, but its environment has a name. + is_target_env <- function(x) { + identical(x, base::.GlobalEnv) || + (!environmentName(x) %in% c("", "R6_capsule")) + } + + suppressWarnings({ + for (nm in nms) { + if (is.function(within[[nm]])) { + if (is_target_env(environment(within[[nm]]))) { + environment(within[[nm]]) <- new + } else if (is_target_env(parent.env(environment(within[[nm]])))) { + parent.env(environment(within[[nm]])) <- new + } + } else if ("R6ClassGenerator" %in% class(within[[nm]])) { + within[[nm]]$parent_env <- new + for (mth in names(within[[nm]]$public_methods)) { + environment(within[[nm]]$public_methods[[mth]]) <- new + } + for (mth in names(within[[nm]]$private_methods)) { + environment(within[[nm]]$private_methods[[mth]]) <- new + } + } + } + }) + + invisible() +} + +patch_env_refs <- function(pkg_env) { + pkg_env[["::"]] <- function(pkg, name) { + pkg <- as.character(substitute(pkg)) + name <- as.character(substitute(name)) + if (pkg %in% names(pkg_data$ns)) { + pkg_data$ns[[pkg]][[name]] + } else { + # Fall back to a regular package, so we can call base packages + getExportedValue(pkg, name) + } + } + environment(pkg_env[["::"]]) <- pkg_env + + pkg_env[["system.file"]] <- function(..., package = "base", + lib.loc = NULL, mustWork = FALSE) { + if (package %in% names(pkg_data$ns)) { + base::system.file( + ..., + package = package, + lib.loc = file.path(getNamespaceInfo("pak", "path"), "library"), + mustWork = mustWork + ) + 
} else { + base::system.file( + ..., + package = package, + lib.loc = lib.loc, + mustWork = mustWork + ) + } + } + environment(pkg_env[["system.file"]]) <- pkg_env + + pkg_env[["loadNamespace"]] <- function(package, ...) { + if (package %in% names(pkg_data$ns)) { + TRUE + } else { + base::loadNamespace(package, ...) + } + } + environment(pkg_env[["loadNamespace"]]) <- pkg_env + + pkg_env[["requireNamespace"]] <- function(package, ..., quietly = FALSE) { + if (package %in% names(pkg_data$ns)) { + TRUE + } else { + base::requireNamespace(package, ..., quietly = quietly) + } + } + environment(pkg_env[["requireNamespace"]]) <- pkg_env + + pkg_env[["asNamespace"]] <- function(ns, ...) { + if (ns %in% names(pkg_data$ns)) { + pkg_data$ns[[ns]] + } else { + base::asNamespace(ns, ...) + } + } + environment(pkg_env[["asNamespace"]]) <- pkg_env + + pkg_env[["UseMethod"]] <- function(generic, object) { + base::UseMethod(generic, object) + } + environment(pkg_env[["UseMethod"]]) <- pkg_env +} + +# Put R code of all dependencies from their R/ directories, into a +# single RDS file. + +bundle_rds <- function(lib = NULL) { + message("Updating bundled dependencies") + lib <- lib %||% get_lib(lib) + ns <- new.env(parent = emptyenv()) + pkgs <- setdiff( + dir(lib, pattern = "^[^_]"), + c("deps.rds", "deps-covr.rds", "deps-cnt.rds") + ) + for (pkg in pkgs) { + pkg_env <- new.env(parent = emptyenv()) + pkg_env[[".packageName"]] <- pkg + ns[[pkg]] <- pkg_env + lazyLoad(file.path(lib, pkg, "R", pkg), envir = pkg_env) + sysdata <- file.path(lib, pkg, "R", "sysdata.rdb") + if (file.exists(sysdata)) { + lazyLoad(file.path(lib, pkg, "R", "sysdata"), envir = pkg_env) + } + set_function_envs(pkg_env, pkg_env) + ## Sometimes a package refers to its env, this is one known instance. + ## We could also walk the whole tree, but probably not worth it. 
+ if (!is.null(pkg_env$err$.internal$package_env)) { + pkg_env$err$.internal$package_env <- pkg_env + } + + patch_env_refs(pkg_env) + + invisible() + } + + # pkgdepends has functions in a list, update those as well + pds <- ns[["pkgdepends"]] + pkgdepends_conf <- names(pds[["pkgdepends_config"]]) + for (nm in pkgdepends_conf) { + if (is.function(pds[["pkgdepends_config"]][[nm]][["default"]])) { + environment(pds[["pkgdepends_config"]][[nm]][["default"]]) <- pds + } + } + parent.env(pds[["config"]][[".internal"]]) <- pds + + # make sure functions are byte-compiled + compiler::compilePKGS(TRUE) + saveRDS(ns, file.path(lib, "deps.rds")) + compiler::compilePKGS(FALSE) +} + +bundle_covr_rds <- function(lib = NULL) { + lib <- lib %||% get_lib(lib) + rds <- file.path(lib, "deps.rds") + covrds <- file.path(lib, "deps-covr.rds") + cntrds <- file.path(lib, "deps-cnt.rds") + if (!file.exists(covrds) || !file.exists(cntrds) || + file.mtime(covrds) < file.mtime(rds) || + file.mtime(cntrds) < file.mtime(rds)) { + message("Instrumenting dependency code for covr") + ns <- readRDS(rds) + ns <- covrlabs::serialize_to_file( + ns, + covrds, + closxp_callback = covrlabs::trace_calls + ) + covrlabs::serialize_to_file(covrlabs:::.counters, cntrds) + } else { + message("Instruments code bundle is current") + } +} + +# ------------------------------------------------------------------------- +# Main function, called if we run as a script + install_embedded_main <- function() { unlink("DONE") # Parse platforms @@ -305,6 +589,10 @@ install_embedded_main <- function() { } install_all() + bundle_rds() + clean_up_r() + + file.create("DONE") } if (is.null(sys.calls())) { diff --git a/src/library/curl/vignettes/intro.Rmd b/src/library/curl/vignettes/intro.Rmd deleted file mode 100644 index b3da55ab2..000000000 --- a/src/library/curl/vignettes/intro.Rmd +++ /dev/null @@ -1,403 +0,0 @@ ---- -title: "The curl package: a modern R interface to libcurl" -date: "`r Sys.Date()`" -output: - html_document: 
- fig_caption: false - toc: true - toc_float: - collapsed: false - smooth_scroll: false - toc_depth: 3 -vignette: > - %\VignetteIndexEntry{The curl package: a modern R interface to libcurl} - %\VignetteEngine{knitr::rmarkdown} - %\VignetteEncoding{UTF-8} ---- - - -```{r, echo = FALSE, message = FALSE} -knitr::opts_chunk$set(comment = "") -options(width = 120, max.print = 100) -wrap.simpleError <- function(x, options) { - paste0("```\n## Error: ", x$message, "\n```") -} -library(curl) -library(jsonlite) -``` - -The curl package provides bindings to the [libcurl](https://curl.se/libcurl/) C library for R. The package supports retrieving data in-memory, downloading to disk, or streaming using the [R "connection" interface](https://stat.ethz.ch/R-manual/R-devel/library/base/html/connections.html). Some knowledge of curl is recommended to use this package. For a more user-friendly HTTP client, have a look at the [httr](https://cran.r-project.org/package=httr/vignettes/quickstart.html) package which builds on curl with HTTP specific tools and logic. - -## Request interfaces - -The curl package implements several interfaces to retrieve data from a URL: - - - `curl_fetch_memory()` saves response in memory - - `curl_download()` or `curl_fetch_disk()` writes response to disk - - `curl()` or `curl_fetch_stream()` streams response data - - `curl_fetch_multi()` (Advanced) process responses via callback functions - -Each interface performs the same HTTP request, they only differ in how response data is processed. - -### Getting in memory - -The `curl_fetch_memory` function is a blocking interface which waits for the request to complete and returns a list with all content (data, headers, status, timings) of the server response. 
- - -```{r} -req <- curl_fetch_memory("https://hb.cran.dev/get?foo=123") -str(req) -parse_headers(req$headers) -jsonlite::prettify(rawToChar(req$content)) -``` - -The `curl_fetch_memory` interface is the easiest interface and most powerful for building API clients. However it is not suitable for downloading really large files because it is fully in-memory. If you are expecting 100G of data, you probably need one of the other interfaces. - -### Downloading to disk - -The second method is `curl_download`, which has been designed as a drop-in replacement for `download.file` in r-base. It writes the response straight to disk, which is useful for downloading (large) files. - -```{r} -tmp <- tempfile() -curl_download("https://hb.cran.dev/get?bar=456", tmp) -jsonlite::prettify(readLines(tmp)) -``` - -### Streaming data - -The most flexible interface is the `curl` function, which has been designed as a drop-in replacement for base `url`. It will create a so-called connection object, which allows for incremental (asynchronous) reading of the response. - -```{r} -con <- curl("https://hb.cran.dev/get") -open(con) - -# Get 3 lines -out <- readLines(con, n = 3) -cat(out, sep = "\n") - -# Get 3 more lines -out <- readLines(con, n = 3) -cat(out, sep = "\n") - -# Get remaining lines -out <- readLines(con) -close(con) -cat(out, sep = "\n") -``` - -The example shows how to use `readLines` on an opened connection to read `n` lines at a time. Similarly `readBin` is used to read `n` bytes at a time for stream parsing binary data. - -#### Non blocking connections - -As of version 2.3 it is also possible to open connections in non-blocking mode. In this case `readBin` and `readLines` will return immediately with data that is available without waiting. For non-blocking connections we use `isIncomplete` to check if the download has completed yet. 
- -```{r, eval=FALSE} -# This httpbin mirror doesn't cache -con <- curl("https://nghttp2.org/httpbin/drip?duration=1&numbytes=50") -open(con, "rb", blocking = FALSE) -while(isIncomplete(con)){ - buf <- readBin(con, raw(), 1024) - if(length(buf)) - cat("received: ", rawToChar(buf), "\n") -} -close(con) -``` - -The `curl_fetch_stream` function provides a very simple wrapper around a non-blocking connection. - - -### Async requests - -As of `curl 2.0` the package provides an async interface which can perform multiple simultaneous requests concurrently. The `curl_fetch_multi` adds a request to a pool and returns immediately; it does not actually perform the request. - -```{r} -pool <- new_pool() -cb <- function(req){cat("done:", req$url, ": HTTP:", req$status, "\n")} -curl_fetch_multi('https://www.google.com', done = cb, pool = pool) -curl_fetch_multi('https://cloud.r-project.org', done = cb, pool = pool) -curl_fetch_multi('https://hb.cran.dev/blabla', done = cb, pool = pool) -``` - -When we call `multi_run()`, all scheduled requests are performed concurrently. The callback functions get triggered when each request completes. - -```{r} -# This actually performs requests: -out <- multi_run(pool = pool) -print(out) -``` - -The system allows for running many concurrent non-blocking requests. However it is quite complex and requires careful specification of handler functions. - -## Exception handling - -A HTTP requests can encounter two types of errors: - - 1. Connection failure: network down, host not found, invalid SSL certificate, etc - 2. HTTP non-success status: 401 (DENIED), 404 (NOT FOUND), 503 (SERVER PROBLEM), etc - -The first type of errors (connection failures) will always raise an error in R for each interface. However if the requests succeeds and the server returns a non-success HTTP status code, only `curl()` and `curl_download()` will raise an error. Let's dive a little deeper into this. 
- -### Error automatically - -The `curl` and `curl_download` functions are safest to use because they automatically raise an error if the request was completed but the server returned a non-success (400 or higher) HTTP status. This mimics behavior of base functions `url` and `download.file`. Therefore we can safely write code like this: - -```{r} -# This is OK -curl_download('https://cloud.r-project.org/CRAN_mirrors.csv', 'mirrors.csv') -mirros <- read.csv('mirrors.csv') -unlink('mirrors.csv') -``` - -If the HTTP request was unsuccessful, R will not continue: - -```{r, error=TRUE, purl = FALSE} -# Oops! A typo in the URL! -curl_download('https://cloud.r-project.org/CRAN_mirrorZ.csv', 'mirrors.csv') -con <- curl('https://cloud.r-project.org/CRAN_mirrorZ.csv') -open(con) -``` - -```{r, echo = FALSE, message = FALSE, warning=FALSE} -close(con) -rm(con) -``` - - -### Check manually - -When using any of the `curl_fetch_*` functions it is important to realize that these do **not** raise an error if the request was completed but returned a non-200 status code. When using `curl_fetch_memory` or `curl_fetch_disk` you need to implement such application logic yourself and check if the response was successful. - -```{r} -req <- curl_fetch_memory('https://cloud.r-project.org/CRAN_mirrors.csv') -print(req$status_code) -``` - -Same for downloading to disk. If you do not check your status, you might have downloaded an error page! - -```{r} -# Oops a typo! -req <- curl_fetch_disk('https://cloud.r-project.org/CRAN_mirrorZ.csv', 'mirrors.csv') -print(req$status_code) - -# This is not the CSV file we were expecting! -head(readLines('mirrors.csv')) -unlink('mirrors.csv') -``` - -If you *do* want the `curl_fetch_*` functions to automatically raise an error, you should set the [`FAILONERROR`](https://curl.se/libcurl/c/CURLOPT_FAILONERROR.html) option to `TRUE` in the handle of the request. 
- -```{r, error=TRUE, purl = FALSE} -h <- new_handle(failonerror = TRUE) -curl_fetch_memory('https://cloud.r-project.org/CRAN_mirrorZ.csv', handle = h) -``` - -## Customizing requests - -By default libcurl uses HTTP GET to issue a request to an HTTP url. To send a customized request, we first need to create and configure a curl handle object that is passed to the specific download interface. - -### Setting handle options - -Creating a new handle is done using `new_handle`. After creating a handle object, we can set the libcurl options and http request headers. - -```{r} -h <- new_handle() -handle_setopt(h, copypostfields = "moo=moomooo"); -handle_setheaders(h, - "Content-Type" = "text/moo", - "Cache-Control" = "no-cache", - "User-Agent" = "A cow" -) -``` - -Use the `curl_options()` function to get a list of the options supported by your version of libcurl. The [libcurl documentation](https://curl.se/libcurl/c/curl_easy_setopt.html) explains what each option does. Option names are not case sensitive. - -It is important you check the [libcurl documentation](https://curl.se/libcurl/c/curl_easy_setopt.html) to set options of the correct type. Options in libcurl take several types: - - - number - - string - - slist (vector of strings) - - enum (long) option - -The R bindings will automatically do some type checking and coercion to convert R values to appropriate libcurl option values. Logical (boolean) values in R automatically get converted to `0` or `1` for example [CURLOPT_VERBOSE](https://curl.se/libcurl/c/CURLOPT_VERBOSE.html): - - -```{r} -handle <- new_handle(verbose = TRUE) -``` - -However R does not know if an option is actually boolean. So passing `TRUE`/ `FALSE` to any numeric option will simply set it to `0` or `1` without a warning or error. 
If an option value cannot be coerced, you get an error: - -```{r, error = TRUE} -# URLOPT_MASFILESIZE must be a number -handle_setopt(handle, maxfilesize = "foo") - -# CURLOPT_USERAGENT must be a string -handle_setopt(handle, useragent = 12345) -``` - - -### ENUM (long) options - -Some curl options take an long in C that actually corresponds to an ENUM value. - -For example the [CURLOPT_USE_SSL](https://curl.se/libcurl/c/CURLOPT_USE_SSL.html) docs explains that there are 4 possible values for this option: `CURLUSESSL_NONE`, `CURLUSESSL_TRY`, `CURLUSESSL_CONTROL`, and `CURLUSESSL_ALL`. To use this option you have to lookup the integer values for these enums in the symbol table. These symbol values never change, so you only need to lookup the value you need once and then hardcode the integer value in your R code. - -```{r} -curl::curl_symbols("CURLUSESSL") -``` - -So suppose we want to set `CURLOPT_USE_SSL` to `CURLUSESSL_ALL` we would use this R code: - -```{r} -handle_setopt(handle, use_ssl = 3) -``` - -### Disabling HTTP/2 - -Another example is the [CURLOPT_HTTP_VERSION](https://curl.se/libcurl/c/CURLOPT_HTTP_VERSION.html) option. This option is needed to disable or enable HTTP/2. However some users are not aware this is actually an ENUM and not a regular numeric value! - -The [docs](https://curl.se/libcurl/c/CURLOPT_HTTP_VERSION.html) explain HTTP_VERSION can be set to one of several strategies for negotiating the HTTP version between client and server. Valid values are: - -```{r} -curl_symbols('CURL_HTTP_VERSION_') -``` - -As seen, the value `2` corresponds to `CURL_HTTP_VERSION_1_1` and `3` corresponds to `CURL_HTTP_VERSION_2_0`. - -As of libcurl 7.62.0, the default `http_version` is `CURL_HTTP_VERSION_2TLS` which uses HTTP/2 when possible, but only for HTTPS connections. Package authors should usually leave the default to let curl select the best appropriate http protocol. 
- -One exception is when writing a client for a server that seems to be running a buggy HTTP/2 server. Unfortunately this is not uncommon, and curl is a bit more picky than browsers. If you are frequently seeing `Error in the HTTP2 framing layer` error messages, then there is likely a problem with the HTTP/2 layer on the server. - -The easiest remedy is to __disable http/2__ for this server by forcing http 1.1 until the service has upgraded their webservers. To do so, set the `http_version` to `CURL_HTTP_VERSION_1_1` (value: `2`): - -```{r} -# Force using HTTP 1.1 (the number 2 is an enum value, see above) -handle_setopt(handle, http_version = 2) -``` - -Note that the value `1` corresponds to HTTP 1.0 which is a legacy version of HTTP that you should not use! -Code that sets `http_version` to `1` (or even `1.1` which R simply rounds to 1) is almost always a bug. - -## Performing the request - -After the handle has been configured, it can be used with any of the download interfaces to perform the request. For example `curl_fetch_memory` will load store the output of the request in memory: - -```{r} -req <- curl_fetch_memory("https://hb.cran.dev/post", handle = h) -jsonlite::prettify(rawToChar(req$content)) -``` - -Alternatively we can use `curl()` to read the data of via a connection interface: - -```{r} -con <- curl("https://hb.cran.dev/post", handle = h) -jsonlite::prettify(readLines(con)) -``` - -```{r, echo = FALSE, message = FALSE, warning=FALSE} -close(con) -``` - -Or we can use `curl_download` to write the response to disk: - -```{r} -tmp <- tempfile() -curl_download("https://hb.cran.dev/post", destfile = tmp, handle = h) -jsonlite::prettify(readLines(tmp)) -``` - -Or perform the same request with a multi pool: - -```{r} -curl_fetch_multi("https://hb.cran.dev/post", handle = h, done = function(res){ - cat("Request complete! 
Response content:\n") - cat(rawToChar(res$content)) -}) - -# Perform the request -out <- multi_run() -``` - -### Reading cookies - -Curl handles automatically keep track of cookies set by the server. At any given point we can use `handle_cookies` to see a list of current cookies in the handle. - -```{r} -# Start with a fresh handle -h <- new_handle() - -# Ask server to set some cookies -req <- curl_fetch_memory("https://hb.cran.dev/cookies/set?foo=123&bar=ftw", handle = h) -req <- curl_fetch_memory("https://hb.cran.dev/cookies/set?baz=moooo", handle = h) -handle_cookies(h) - -# Unset a cookie -req <- curl_fetch_memory("https://hb.cran.dev/cookies/delete?foo", handle = h) -handle_cookies(h) -``` - -The `handle_cookies` function returns a data frame with 7 columns as specified in the [netscape cookie file format](http://www.cookiecentral.com/faq/#3.5). - -### On reusing handles - -In most cases you should not re-use a single handle object for more than one request. The only benefit of reusing a handle for multiple requests is to keep track of cookies set by the server (seen above). This could be needed if your server uses session cookies, but this is rare these days. Most APIs set state explicitly via http headers or parameters, rather than implicitly via cookies. - -In recent versions of the curl package there are no performance benefits of reusing handles. The overhead of creating and configuring a new handle object is negligible. The safest way to issue multiple requests, either to a single server or multiple servers is by using a separate handle for each request (which is the default) - -```{r} -req1 <- curl_fetch_memory("https://hb.cran.dev/get") -req2 <- curl_fetch_memory("https://www.r-project.org") -``` - -In past versions of this package you needed to manually use a handle to take advantage of http Keep-Alive. However as of version 2.3 this is no longer the case: curl automatically maintains global a pool of open http connections shared by all handles. 
When performing many requests to the same server, curl automatically uses existing connections when possible, eliminating TCP/SSL handshaking overhead: - -```{r} -req <- curl_fetch_memory("https://api.github.com/users/ropensci") -req$times - -req2 <- curl_fetch_memory("https://api.github.com/users/rstudio") -req2$times -``` - -If you really need to re-use a handle, do note that that curl does not cleanup the handle after each request. All of the options and internal fields will linger around for all future request until explicitly reset or overwritten. This can sometimes leads to unexpected behavior. - -```{r} -handle_reset(h) -``` - -The `handle_reset` function will reset all curl options and request headers to the default values. It will **not** erase cookies and it will still keep alive the connections. Therefore it is good practice to call `handle_reset` after performing a request if you want to reuse the handle for a subsequent request. Still it is always safer to create a new fresh handle when possible, rather than recycling old ones. - -### Posting forms - -The `handle_setform` function is used to perform a `multipart/form-data` HTTP POST request (a.k.a. posting a form). Values can be either strings, raw vectors (for binary data) or files. - -```{r} -# Posting multipart -h <- new_handle() -handle_setform(h, - foo = "blabla", - bar = charToRaw("boeboe"), - iris = form_data(serialize(iris, NULL), "application/rda"), - description = form_file(system.file("DESCRIPTION")), - logo = form_file(file.path(R.home('doc'), "html/logo.jpg"), "image/jpeg") -) -req <- curl_fetch_memory("https://hb.cran.dev/post", handle = h) -``` - -The `form_file` function is used to upload files with the form post. It has two arguments: a file path, and optionally a content-type value. If no content-type is set, curl will guess the content type of the file based on the file extension. 
- -The `form_data` function is similar but simply posts a string or raw value with a custom content-type. - -### Using pipes - -All of the `handle_xxx` functions return the handle object so that function calls can be chained using the popular pipe operators: - -```{r} -library(magrittr) - -new_handle() %>% - handle_setopt(copypostfields = "moo=moomooo") %>% - handle_setheaders("Content-Type"="text/moo", "Cache-Control"="no-cache", "User-Agent"="A cow") %>% - curl_fetch_memory(url = "https://hb.cran.dev/post") %$% content %>% - rawToChar %>% jsonlite::prettify() -``` diff --git a/src/library/curl/vignettes/windows.Rmd b/src/library/curl/vignettes/windows.Rmd deleted file mode 100644 index 6e5a9fb27..000000000 --- a/src/library/curl/vignettes/windows.Rmd +++ /dev/null @@ -1,151 +0,0 @@ ---- -title: "Proxies and Certificates on Windows Networks" -output: - html_document: - fig_caption: false - toc: true - toc_float: - collapsed: false - smooth_scroll: false - toc_depth: 3 -vignette: > - %\VignetteIndexEntry{Proxies and Certificates on Windows Networks} - %\VignetteEngine{knitr::rmarkdown} - %\VignetteEncoding{UTF-8} ---- - -This document describes a few notes specifically for Windows users on networks with custom certificates or proxy settings. For regular Windows users, things should work out of the box. - - -```{r setup} -library(curl) -``` - - -## Multiple SSL Backends - -In order to make SSL (https) connections, libcurl uses an SSL backend. Currently the Windows version of the `curl` package supports two SSL backends: __OpenSSL__ and __Windows Secure Channel__. Only one can be enabled, which is determined when the curl package is first loaded in your R session. 
- - - -| | Secure Channel | OpenSSL | -|------------------------------------|---------------------------------|------------------------| -| __trust certificates__ | Windows Cert Store | Windows Cert Store or `curl-ca-bundle.crt` file [^1] | -| __supports HTTP/2__ | No | Yes | -| __works on corporate networks__ | Usually Yes | Maybe not | -| __support http proxy server__ | Yes | Yes | -| __support https proxy server__ | No | Yes | -| __support client certificate authentication__ | No | Yes | - -[^1]: As of version 5 we now default to [`CURLSSLOPT_NATIVE_CA`](https://curl.se/libcurl/c/CURLOPT_SSL_OPTIONS.html). To use a traditional PEM bundle set the CURL_CA_BUNDLE environment variable. - -The default backend on Windows 7 and up is Secure Channel. This uses the native Windows SSL API and certificates, which is the safest choice for most users. - -Have a look at `curl::curl_version()` to see which ssl backends are available and which one is in use. - -```r -curl::curl_version() -#> $version -#> [1] "7.64.1" -#> -#> $ssl_version -#> [1] "(OpenSSL/1.1.1a) Schannel" -#> -#> $libz_version -#> [1] "1.2.8" -#> ... -``` - -The part in parentheses means this backend is available but currently not in use. Hence the output above means that the current active backend is Secure Channel, but OpenSSL is also supported, but currently not in use. - -To switch to OpenSSL, you need to set an environment variable [`CURL_SSL_BACKEND`](https://curl.se/libcurl/c/libcurl-env.html) to `"openssl"` when starting R. A good place to set this is in your `.Renviron` file in your user home (Documents) directory; the `?Startup` manual has more details. The following will write to your `~/.Renviron` file. 
- -```r -write('CURL_SSL_BACKEND=openssl', file = "~/.Renviron", append = TRUE) -``` - -Now if you restart R, the default back-end should have changed: - -```r -> curl::curl_version()$ssl_version -[1] "OpenSSL/1.1.1m (Schannel)" -``` - -Optionally, you can also set `CURL_CA_BUNDLE` in your `~/.Renviron` to use a custom trust bundle. If `CURL_CA_BUNDLE` is not set, we use the Windows cert store. When using Schannel, no trust bundle can be specified because we always use the certificates from the native Windows cert store. - -It is not possible to change the SSL backend once the `curl` package has been loaded. - -## Using a Proxy Server - -Windows proxy servers are a complicated topic because depending on your corporate network configuration, different settings may be needed. If your company uses proxies with custom certificates, this might also interact with the previous topic. - -Proxy settings can either be configured in the handle for a single request, or globally via environment variables. This is explained in detail on the curl website detail in the manual pages for [CURLOPT_PROXY](https://curl.se/libcurl/c/CURLOPT_PROXY.html) and [libcurl-env](https://curl.se/libcurl/c/libcurl-env.html). - -If you know the address of your proxy server you can set it via the `curlopt_proxy` option: - -```r -h <- new_handle(proxy = "http://proxyserver:8080", verbose = TRUE) -req <- curl_fetch_memory("https://httpbin.org/get", handle = h) -#> Verbose output here... -``` - -The example above should yield some verbose output indicating if the proxy connection was successful. - -If this did not work, study the verbose output from above to see what seems to be the problem. Note that curl supports many options related to proxies (types, auth, etc), the details of which you can find on the libcurl homepage. - -```{r} -curl_options('proxy') -``` - -To use a global proxy server for all your requests, you can set the environment variable `http_proxy` (lowercase!) 
or `https_proxy` or `ALL_PROXY`. See [this page](https://curl.se/libcurl/c/libcurl-env.html) for details. This variable may be set or changed in R at runtime, for example: - -```r -Sys.setenv(ALL_PROXY = "http://proxy.mycorp.com:8080") -req <- curl_fetch_memory("https://httpbin.org/get") -#> verbose output here... -``` - -To use a default proxy server for all your R sessions, a good place to set this environment variable is in your `.Renviron` as explained above: - -``` -ALL_PROXY="http://proxy.mycorp.com:8080" -``` - -An additional benefit of setting these environment variables is that they are also supported by base R `download.file` and `install.packages`. The manual page for `?download.file` has a special section on "Setting Proxies" which explains this. - - -## Discovering Your Proxy Server - -If you don't know what your proxy server is, the `curl` package has a few utilities that interact with Internet Explorer to help you find out. First have a look at `ie_proxy_info()` to see IE settings: - -```r -curl::ie_proxy_info() -#> $AutoDetect -#> [1] FALSE -#> -#> $AutoConfigUrl -#> [1] "http://173.45.10.27:8543/proxypac.pac" -#> -#> $Proxy -#> [1] "173.45.10.27:3228" -#> -#> $ProxyBypass -#> [1] "10.*;173.*;mail.mycorp.org;autodiscover.mycorp.org;ev.mycorp.org;ecms.mycorp.org" -``` - -There are a few settings here, such as default proxy server and a list of hosts which do _not_ need proxying, usually hosts within the corporate intranet (these can probably be used in [`CURLOPT_NOPROXY`](https://curl.se/libcurl/c/CURLOPT_NOPROXY.html)). - -The most complicated case is when your network uses different proxy servers for different target urls. The `AutoConfigUrl` field above refers to a [proxy auto config](https://en.wikipedia.org/wiki/Proxy_auto-config) (PAC) script that Internet Explorer has to run to find out which proxy server it has to use for a given host. 
The `curl` package exposes another function which calls out to Internet Explorer do it's thing and tell us the appropriate proxy server for a given host: - -```r -curl::ie_get_proxy_for_url("https://www.google.com") -#> [1] "http://173.45.10.27:3228" - -curl::ie_get_proxy_for_url("http://mail.mycorp.org") -#> NULL -``` - -The exact logic that Windows uses to derive the appropriate proxy server for a given host from the settings above is very complicated and may involve some trial and error until something works. - -Currently `curl` does not automatically set IE proxies, so you need to manually set these in the handles or environment variables. One day we could try to make the `curl` package automatically discover and apply Windows proxy settings. However to make sure we cover all edge cases we need more examples from users in real world corporate networks. - diff --git a/src/library/desc/NEWS.md b/src/library/desc/NEWS.md index 23bdd6e35..917f2b4a8 100644 --- a/src/library/desc/NEWS.md +++ b/src/library/desc/NEWS.md @@ -1,5 +1,8 @@ # desc 1.4.3 +* `$set()` and `desc_set()` now can omit checks if `check = FALSE` + is set. + * `$set()` and `desc_set()` now can omit checks if `check = FALSE` is set. diff --git a/src/library/jsonlite/vignettes/json-aaquickstart.Rmd b/src/library/jsonlite/vignettes/json-aaquickstart.Rmd deleted file mode 100644 index c5d4fdec1..000000000 --- a/src/library/jsonlite/vignettes/json-aaquickstart.Rmd +++ /dev/null @@ -1,126 +0,0 @@ ---- -Title: "Getting started with JSON and jsonlite" -date: "`r Sys.Date()`" -output: - html_document -vignette: > - %\VignetteIndexEntry{Getting started with JSON and jsonlite} - %\VignetteEngine{knitr::rmarkdown} - \usepackage[utf8]{inputenc} ---- - - -```{r echo=FALSE} -library(knitr) -opts_chunk$set(comment="") - -#this replaces tabs by spaces because latex-verbatim doesn't like tabs -#no longer needed because yajl does not use tabs. 
-#toJSON <- function(...){ -# gsub("\t", " ", jsonlite::toJSON(...), fixed=TRUE); -#} -``` - -# Getting started with JSON and jsonlite - -The jsonlite package is a JSON parser/generator optimized for the web. Its main strength is that it implements a bidirectional mapping between JSON data and the most important R data types. Thereby we can convert between R objects and JSON without loss of type or information, and without the need for any manual data munging. This is ideal for interacting with web APIs, or to build pipelines where data structures seamlessly flow in and out of R using JSON. - -```{r message=FALSE} -library(jsonlite) -all.equal(mtcars, fromJSON(toJSON(mtcars))) -``` - -This vignette introduces basic concepts to get started with jsonlite. For a more detailed outline and motivation of the mapping, see: [arXiv:1403.2805](https://arxiv.org/abs/1403.2805). - -## Simplification - -Simplification is the process where JSON arrays automatically get converted from a list into a more specific R class. The `fromJSON` function has 3 arguments which control the simplification process: `simplifyVector`, `simplifyDataFrame` and `simplifyMatrix`. Each one is enabled by default. 
- -| JSON structure | Example JSON data | Simplifies to R class | Argument in fromJSON | -| ----------------------|----------------------------------------------------------|-----------------------|----------------------| -| Array of primitives | `["Amsterdam", "Rotterdam", "Utrecht", "Den Haag"]` | Atomic Vector | simplifyVector | -| Array of objects | `[{"name":"Erik", "age":43}, {"name":"Anna", "age":32}]` | Data Frame | simplifyDataFrame | -| Array of arrays | `[ [1, 2, 3], [4, 5, 6] ]` | Matrix | simplifyMatrix | - -### Atomic Vectors - -When `simplifyVector` is enabled, JSON arrays containing **primitives** (strings, numbers, booleans or null) simplify into an atomic vector: - -```{r} -# A JSON array of primitives -json <- '["Mario", "Peach", null, "Bowser"]' - -# Simplifies into an atomic vector -fromJSON(json) -``` - -Without simplification, any JSON array turns into a list: - -```{r} -# No simplification: -fromJSON(json, simplifyVector = FALSE) -``` - - -### Data Frames - -When `simplifyDataFrame` is enabled, JSON arrays containing **objects** (key-value pairs) simplify into a data frame: - -```{r} -json <- -'[ - {"Name" : "Mario", "Age" : 32, "Occupation" : "Plumber"}, - {"Name" : "Peach", "Age" : 21, "Occupation" : "Princess"}, - {}, - {"Name" : "Bowser", "Occupation" : "Koopa"} -]' -mydf <- fromJSON(json) -mydf -``` - -The data frame gets converted back into the original JSON structure by `toJSON` (whitespace and line breaks are ignorable in JSON). - -```{r} -mydf$Ranking <- c(3, 1, 2, 4) -toJSON(mydf, pretty=TRUE) -``` - -Hence you can go back and forth between dataframes and JSON, without any manual data restructuring. 
- -### Matrices and Arrays - -When `simplifyMatrix` is enabled, JSON arrays containing **equal-length sub-arrays** simplify into a matrix (or higher order R array): - -```{r} -json <- '[ - [1, 2, 3, 4], - [5, 6, 7, 8], - [9, 10, 11, 12] -]' -mymatrix <- fromJSON(json) -mymatrix -``` - -Again, we can use `toJSON` to convert the matrix or array back into the original JSON structure: - -```{r} -toJSON(mymatrix, pretty = TRUE) -``` - -The simplification works for arrays of arbitrary dimensionality, as long as the dimensions match (R does not support ragged arrays). - -```{r} -json <- '[ - [[1, 2], - [3, 4]], - [[5, 6], - [7, 8]], - [[9, 10], - [11, 12]] -]' -myarray <- fromJSON(json) -myarray[1, , ] -myarray[ , ,1] -``` - -This is all there is to it! For a more detailed outline and motivation of the mapping, see: [arXiv:1403.2805](https://arxiv.org/abs/1403.2805). diff --git a/src/library/jsonlite/vignettes/json-apis.Rmd b/src/library/jsonlite/vignettes/json-apis.Rmd deleted file mode 100644 index 9b427b293..000000000 --- a/src/library/jsonlite/vignettes/json-apis.Rmd +++ /dev/null @@ -1,258 +0,0 @@ ---- -title: "Fetching JSON data from REST APIs" -date: "2022-01-16" -output: - html_document -vignette: > - %\VignetteIndexEntry{Fetching JSON data from REST APIs} - %\VignetteEngine{knitr::rmarkdown} - \usepackage[utf8]{inputenc} ---- - - - -This section lists some examples of public HTTP APIs that publish data in JSON format. These are great to get a sense of the complex structures that are encountered in real world JSON data. All services are free, but some require registration/authentication. Each example returns lots of data, therefore not all output is printed in this document. - - -```r -library(jsonlite) -``` - -## Github - -Github is an online code repository and has APIs to get live data on almost all activity. 
Below some examples from a well known R package and author: - - -```r -hadley_orgs <- fromJSON("https://api.github.com/users/hadley/orgs") -hadley_repos <- fromJSON("https://api.github.com/users/hadley/repos") -gg_commits <- fromJSON("https://api.github.com/repos/hadley/ggplot2/commits") -gg_issues <- fromJSON("https://api.github.com/repos/hadley/ggplot2/issues") - -#latest issues -paste(format(gg_issues$user$login), ":", gg_issues$title) -``` - -``` - [1] "petres : Inconsistency in `scale_` functions for args `values` and `labels` for `NA` values" - [2] "PursuitOfDataScience : Update data.R" - [3] "cmaimone : scale_x_datetime limits with histogram/stat_bin: warning message about missing values" - [4] "tomsaunders98 : Add options to guide_colourstep" - [5] "teunbrand : Duplicated aes warning with multiple modified aesthetics" - [6] "krlmlr : Support scale functions in packages not attached via scale_type()" - [7] "elong0527 : Shall `NextMethod` be used in `$<-.uneval`" - [8] "bkmgit : Update presidential terms dataset" - [9] "bkmgit : Presidents terms" -[10] "coolbutuseless : Export `datetime_scale`" -[11] "davidhodge931 : Create one single legend for a numeric colour variable coloured and filled along a gradient of colours with different alpha values" -[12] "yutannihilation : Use pak in R-CMD-check.yaml" -[13] "hadley : Option to make default colour schemes accessible?" -[14] "davidhodge931 : scale_fill_gradientn and scale_colour_gradientn should support na.translate" -[15] "davidhodge931 : should legend styling elements within guide_* instead be within theme?" 
-[16] "markjrieke : `geom_ribbon`: aesthetics can not vary with a ribbon" -[17] "bkmgit : Dataset licenses" -[18] "jxu : geom_segment example confusing" -[19] "twest820 : alpha legend keys darken when geom_ribbons share an alpha value" -[20] "WillForan : discarded breaks in scale_x_continuous(trans=\"log10\") w/ min(x)==0" -[21] "billdenney : na.rm is ignored with geom_area" -[22] "hugesingleton : Is it possible to make the width of geom_boxplot and binwidth in geom_density, and position \"mapable\" in aes()?" -[23] "bersbersbers : Fix warning in geom_violin with draw_quantiles" -[24] "eliocamp : `geom_contour()` documentation states false precedence of bin and binwidth parameters" -[25] "jtlandis : Unexpected results using `guides(x = guide_axis(position = ...))`" -[26] "albert-ying : Smarter axis label -- allow string manipulate function in `labs` or `theme`" -[27] "Cumol : Width argument to geom_errorbar not passed on when using stat_summary_bin" -[28] "twest820 : apparently spurious is.na() warning on use of language in label" -[29] "benjaminrich : Add `trans` option in `annotation_logticks()`" -[30] "benjaminrich : `annotation_logticks()` with secondary axis" -``` - -## CitiBike NYC - -A single public API that shows location, status and current availability for all stations in the New York City bike sharing imitative. - - -```r -citibike <- fromJSON("https://gbfs.citibikenyc.com/gbfs/en/station_information.json") -stations <- citibike$data$stations -colnames(stations) -``` - -``` - [1] "electric_bike_surcharge_waiver" "eightd_station_services" "lat" "external_id" - [5] "station_type" "name" "short_name" "station_id" - [9] "rental_methods" "rental_uris" "has_kiosk" "region_id" -[13] "capacity" "legacy_id" "lon" "eightd_has_key_dispenser" -``` - -```r -nrow(stations) -``` - -``` -[1] 1598 -``` - -## Ergast - -The Ergast Developer API is an experimental web service which provides a historical record of motor racing data for non-commercial purposes. 
- - -```r -res <- fromJSON('http://ergast.com/api/f1/2004/1/results.json') -drivers <- res$MRData$RaceTable$Races$Results[[1]]$Driver -colnames(drivers) -``` - -``` -[1] "driverId" "code" "url" "givenName" "familyName" "dateOfBirth" "nationality" -[8] "permanentNumber" -``` - -```r -drivers[1:10, c("givenName", "familyName", "code", "nationality")] -``` - -``` - givenName familyName code nationality -1 Michael Schumacher MSC German -2 Rubens Barrichello BAR Brazilian -3 Fernando Alonso ALO Spanish -4 Ralf Schumacher SCH German -5 Juan Pablo Montoya MON Colombian -6 Jenson Button BUT British -7 Jarno Trulli TRU Italian -8 David Coulthard COU British -9 Takuma Sato SAT Japanese -10 Giancarlo Fisichella FIS Italian -``` - - -## ProPublica - -Below an example from the [ProPublica Nonprofit Explorer API](https://projects.propublica.org/nonprofits/api) where we retrieve the first 10 pages of tax-exempt organizations in the USA, ordered by revenue. The `rbind_pages` function is used to combine the pages into a single data frame. 
- - - -```r -#store all pages in a list first -baseurl <- "https://projects.propublica.org/nonprofits/api/v2/search.json?order=revenue&sort_order=desc" -pages <- list() -for(i in 0:10){ - mydata <- fromJSON(paste0(baseurl, "&page=", i), flatten=TRUE) - message("Retrieving page ", i) - pages[[i+1]] <- mydata$organizations -} - -#combine all into one -organizations <- rbind_pages(pages) - -#check output -nrow(organizations) -``` - -``` -[1] 1100 -``` - -```r -organizations[1:10, c("name", "city", "strein")] -``` - -``` - name city strein -1 0 DEBT EDUCATION INC SANTA ROSA 46-4744976 -2 0 TOLERANCE INC SUWANEE 27-2620044 -3 00 MOVEMENT INC PENSACOLA 82-4704419 -4 00006 LOCAL MEDIA 22-6062777 -5 0003 POSTAL FAMILY CINCINNATI 31-0240910 -6 0005 GA HEPHZIBAH 58-1514574 -7 0005 WRIGHT-PATT CREDIT UNION BEAVERCREEK 31-0278870 -8 0009 DE GREENWOOD 26-4507405 -9 0011 CALIFORNIA REDWAY 36-4654777 -10 00141 LOCAL MEDIA 94-0507697 -``` - - -## New York Times - -The New York Times has several APIs as part of the NYT developer network. These interface to data from various departments, such as news articles, book reviews, real estate, etc. Registration is required (but free) and a key can be obtained at [here](http://developer.nytimes.com/signup). The code below includes some example keys for illustration purposes. 
- - -```r -#search for articles -article_key <- "&api-key=b75da00e12d54774a2d362adddcc9bef" -url <- "http://api.nytimes.com/svc/search/v2/articlesearch.json?q=obamacare+socialism" -req <- fromJSON(paste0(url, article_key)) -articles <- req$response$docs -colnames(articles) -``` - -``` - [1] "abstract" "web_url" "snippet" "lead_paragraph" "print_section" "print_page" "source" - [8] "multimedia" "headline" "keywords" "pub_date" "document_type" "news_desk" "section_name" -[15] "byline" "type_of_material" "_id" "word_count" "uri" "subsection_name" -``` - -```r -#search for best sellers -books_key <- "&api-key=76363c9e70bc401bac1e6ad88b13bd1d" -url <- "http://api.nytimes.com/svc/books/v2/lists/overview.json?published_date=2013-01-01" -req <- fromJSON(paste0(url, books_key)) -bestsellers <- req$results$list -category1 <- bestsellers[[1, "books"]] -subset(category1, select = c("author", "title", "publisher")) -``` - -``` - author title publisher -1 Gillian Flynn GONE GIRL Crown Publishing -2 John Grisham THE RACKETEER Knopf Doubleday Publishing -3 E L James FIFTY SHADES OF GREY Knopf Doubleday Publishing -4 Nicholas Sparks SAFE HAVEN Grand Central Publishing -5 David Baldacci THE FORGOTTEN Grand Central Publishing -``` - -## Twitter - -The twitter API requires OAuth2 authentication. 
Some example code: - - -```r -#Create your own appication key at https://dev.twitter.com/apps -consumer_key = "EZRy5JzOH2QQmVAe9B4j2w"; -consumer_secret = "OIDC4MdfZJ82nbwpZfoUO4WOLTYjoRhpHRAWj6JMec"; - -#Use basic auth -secret <- jsonlite::base64_enc(paste(consumer_key, consumer_secret, sep = ":")) -req <- httr::POST("https://api.twitter.com/oauth2/token", - httr::add_headers( - "Authorization" = paste("Basic", gsub("\n", "", secret)), - "Content-Type" = "application/x-www-form-urlencoded;charset=UTF-8" - ), - body = "grant_type=client_credentials" -); - -#Extract the access token -httr::stop_for_status(req, "authenticate with twitter") -token <- paste("Bearer", httr::content(req)$access_token) - -#Actual API call -url <- "https://api.twitter.com/1.1/statuses/user_timeline.json?count=10&screen_name=Rbloggers" -req <- httr::GET(url, httr::add_headers(Authorization = token)) -json <- httr::content(req, as = "text") -tweets <- fromJSON(json) -substring(tweets$text, 1, 100) -``` - -``` - [1] "Surface reconstruction with R(CGAL) {https://t.co/Kou9gFUmod} #rstats #DataScience" - [2] "A dashboard illustrating bivariate time series forecasting with `ahead` {https://t.co/HYS6UIKMgl} #" - [3] "Handling Categorical Data in R – Part 4 {https://t.co/aZa7O7Ppxd} #rstats #DataScience" - [4] "Solving the ‘preserving the sum after rounding’ problem for a soccer waffle viz {https://t.co/uNtOL" - [5] "Community Management Transition for rOpenSci. A Message from Stefanie Butland {https://t.co/r7YuZjV" - [6] "New R job: Data Services Specialist https://t.co/sy2goVMxbq #rstats #DataScience #jobs" - [7] "RTutor: Gasoline Taxes and Consumer Behavior {https://t.co/nIxUNfihoK} #rstats #DataScience" - [8] "Shinywordle: A shiny app to solve the game Worldle and the power of regular expressions {https://t." 
- [9] "10 New books added to Big Book of R {https://t.co/jD0IYutHTN} #rstats #DataScience" -[10] "Clipping an isosurface to a ball, and more {https://t.co/Yz0qbSB3IB} #rstats #DataScience" -``` diff --git a/src/library/jsonlite/vignettes/json-apis.Rmd.orig b/src/library/jsonlite/vignettes/json-apis.Rmd.orig deleted file mode 100644 index 47598cb36..000000000 --- a/src/library/jsonlite/vignettes/json-apis.Rmd.orig +++ /dev/null @@ -1,140 +0,0 @@ ---- -title: "Fetching JSON data from REST APIs" -date: "`r Sys.Date()`" -output: - html_document -vignette: > - %\VignetteIndexEntry{Fetching JSON data from REST APIs} - %\VignetteEngine{knitr::rmarkdown} - \usepackage[utf8]{inputenc} ---- - -```{r echo=FALSE} -library(knitr) -opts_chunk$set(comment="") - -#this replaces tabs by spaces because latex-verbatim doesn't like tabs -#no longer needed with yajl -#toJSON <- function(...){ -# gsub("\t", " ", jsonlite::toJSON(...), fixed=TRUE); -#} -``` - -This section lists some examples of public HTTP APIs that publish data in JSON format. These are great to get a sense of the complex structures that are encountered in real world JSON data. All services are free, but some require registration/authentication. Each example returns lots of data, therefore not all output is printed in this document. - -```{r message=FALSE} -library(jsonlite) -``` - -## Github - -Github is an online code repository and has APIs to get live data on almost all activity. 
Below some examples from a well known R package and author: - -```{r} -hadley_orgs <- fromJSON("https://api.github.com/users/hadley/orgs") -hadley_repos <- fromJSON("https://api.github.com/users/hadley/repos") -gg_commits <- fromJSON("https://api.github.com/repos/hadley/ggplot2/commits") -gg_issues <- fromJSON("https://api.github.com/repos/hadley/ggplot2/issues") - -#latest issues -paste(format(gg_issues$user$login), ":", gg_issues$title) -``` - -## CitiBike NYC - -A single public API that shows location, status and current availability for all stations in the New York City bike sharing imitative. - -```{r} -citibike <- fromJSON("https://gbfs.citibikenyc.com/gbfs/en/station_information.json") -stations <- citibike$data$stations -colnames(stations) -nrow(stations) -``` - -## Ergast - -The Ergast Developer API is an experimental web service which provides a historical record of motor racing data for non-commercial purposes. - -```{r} -res <- fromJSON('http://ergast.com/api/f1/2004/1/results.json') -drivers <- res$MRData$RaceTable$Races$Results[[1]]$Driver -colnames(drivers) -drivers[1:10, c("givenName", "familyName", "code", "nationality")] -``` - - -## ProPublica - -Below an example from the [ProPublica Nonprofit Explorer API](https://projects.propublica.org/nonprofits/api) where we retrieve the first 10 pages of tax-exempt organizations in the USA, ordered by revenue. The `rbind_pages` function is used to combine the pages into a single data frame. 
- - -```{r, message=FALSE} -#store all pages in a list first -baseurl <- "https://projects.propublica.org/nonprofits/api/v2/search.json?order=revenue&sort_order=desc" -pages <- list() -for(i in 0:10){ - mydata <- fromJSON(paste0(baseurl, "&page=", i), flatten=TRUE) - message("Retrieving page ", i) - pages[[i+1]] <- mydata$organizations -} - -#combine all into one -organizations <- rbind_pages(pages) - -#check output -nrow(organizations) -organizations[1:10, c("name", "city", "strein")] -``` - - -## New York Times - -The New York Times has several APIs as part of the NYT developer network. These interface to data from various departments, such as news articles, book reviews, real estate, etc. Registration is required (but free) and a key can be obtained at [here](http://developer.nytimes.com/signup). The code below includes some example keys for illustration purposes. - -```{r} -#search for articles -article_key <- "&api-key=b75da00e12d54774a2d362adddcc9bef" -url <- "http://api.nytimes.com/svc/search/v2/articlesearch.json?q=obamacare+socialism" -req <- fromJSON(paste0(url, article_key)) -articles <- req$response$docs -colnames(articles) - -#search for best sellers -books_key <- "&api-key=76363c9e70bc401bac1e6ad88b13bd1d" -url <- "http://api.nytimes.com/svc/books/v2/lists/overview.json?published_date=2013-01-01" -req <- fromJSON(paste0(url, books_key)) -bestsellers <- req$results$list -category1 <- bestsellers[[1, "books"]] -subset(category1, select = c("author", "title", "publisher")) -``` - -## Twitter - -The twitter API requires OAuth2 authentication. 
Some example code: - -```{r} -#Create your own appication key at https://dev.twitter.com/apps -consumer_key = "EZRy5JzOH2QQmVAe9B4j2w"; -consumer_secret = "OIDC4MdfZJ82nbwpZfoUO4WOLTYjoRhpHRAWj6JMec"; - -#Use basic auth -secret <- jsonlite::base64_enc(paste(consumer_key, consumer_secret, sep = ":")) -req <- httr::POST("https://api.twitter.com/oauth2/token", - httr::add_headers( - "Authorization" = paste("Basic", gsub("\n", "", secret)), - "Content-Type" = "application/x-www-form-urlencoded;charset=UTF-8" - ), - body = "grant_type=client_credentials" -); - -#Extract the access token -httr::stop_for_status(req, "authenticate with twitter") -token <- paste("Bearer", httr::content(req)$access_token) - -#Actual API call -url <- "https://api.twitter.com/1.1/statuses/user_timeline.json?count=10&screen_name=Rbloggers" -req <- httr::GET(url, httr::add_headers(Authorization = token)) -json <- httr::content(req, as = "text") -tweets <- fromJSON(json) -substring(tweets$text, 1, 100) -``` diff --git a/src/library/jsonlite/vignettes/json-mapping.Rnw.orig b/src/library/jsonlite/vignettes/json-mapping.Rnw.orig deleted file mode 100644 index 3d9fd44ec..000000000 --- a/src/library/jsonlite/vignettes/json-mapping.Rnw.orig +++ /dev/null @@ -1,583 +0,0 @@ -%\VignetteEngine{knitr::knitr} -%\VignetteIndexEntry{A mapping between JSON data and R objects} - -<>= -#For JSS -#opts_chunk$set(prompt=TRUE, highlight=FALSE, background="white") -#options(prompt = "R> ", continue = "+ ", width = 70, useFancyQuotes = FALSE) -@ - -%This is a template. 
-%Actual text goes in sources/content.Rnw -\documentclass{article} -\author{Jeroen Ooms} - -%useful packages -\usepackage{url} -\usepackage{fullpage} -\usepackage{xspace} -\usepackage{booktabs} -\usepackage{enumitem} -\usepackage[hidelinks]{hyperref} -\usepackage[round]{natbib} -\usepackage{fancyvrb} -\usepackage[toc,page]{appendix} -\usepackage{breakurl} - -%for table positioning -\usepackage{float} -\restylefloat{table} - -%support for accents -\usepackage[utf8]{inputenc} - -%support for ascii art -\usepackage{pmboxdraw} - -%use vspace instead of indentation for paragraphs -\usepackage{parskip} - -%extra line spacing -\usepackage{setspace} -\setstretch{1.25} - -%knitr style verbatim blocks -\newenvironment{codeblock}{ - \VerbatimEnvironment - \definecolor{shadecolor}{rgb}{0.95, 0.95, 0.95}\color{fgcolor} - \color{black} - \begin{kframe} - \begin{BVerbatim} -}{ - \end{BVerbatim} - \end{kframe} -} - -%placeholders for JSS/RJournal -\newcommand{\pkg}[1]{\texttt{#1}} -\newcommand{\code}[1]{\texttt{#1}} -\newcommand{\proglang}[1]{\texttt{#1}} - -%shorthands -\newcommand{\JSON}{\texttt{JSON}\xspace} -\newcommand{\R}{\proglang{R}\xspace} -\newcommand{\C}{\proglang{C}\xspace} -\newcommand{\toJSON}{\code{toJSON}\xspace} -\newcommand{\fromJSON}{\code{fromJSON}\xspace} -\newcommand{\XML}{\pkg{XML}\xspace} -\newcommand{\jsonlite}{\pkg{jsonlite}\xspace} -\newcommand{\RJSONIO}{\pkg{RJSONIO}\xspace} -\newcommand{\API}{\texttt{API}\xspace} -\newcommand{\JavaScript}{\proglang{JavaScript}\xspace} - - -%trick for using same content file as chatper and article -\newcommand{\maintitle}[1]{ - \title{#1} - \maketitle -} - -%actual document -\begin{document} - - -\maintitle{The \jsonlite Package: A Practical and Consistent Mapping Between \JSON Data and \R Objects} - -<>= -library(jsonlite) -library(knitr) -opts_chunk$set(comment="") - -#this replaces tabs by spaces because latex-verbatim doesn't like tabs -toJSON <- function(...){ - gsub("\t", " ", jsonlite::toJSON(...), fixed=TRUE); 
-} -@ - -\begin{abstract} -A naive realization of \JSON data in \R maps \JSON \emph{arrays} to an unnamed list, and \JSON \emph{objects} to a named list. However, in practice a list is an awkward, inefficient type to store and manipulate data. Most statistical applications work with (homogeneous) vectors, matrices or data frames. Therefore \JSON packages in \R typically define certain special cases of \JSON structures which map to simpler \R types. Currently no formal guidelines or consensus exists on how \R data should be represented in \JSON. Furthermore, upon closer inspection, even the most basic data structures in \R actually do not perfectly map to their \JSON counterparts and leave some ambiguity for edge cases. These problems have resulted in different behavior between implementations and can lead to unexpected output for edge cases. This paper explicitly describes a mapping between \R classes and \JSON data, highlights potential problems, and outlines conventions that generalize the mapping to cover all common structures. We emphasize the importance of type consistency when using \JSON to exchange dynamic data, and illustrate using examples and anecdotes. The \jsonlite package is used throughout the paper as a reference implementation. -\end{abstract} - - -\section{Introduction} - -\emph{JavaScript Object Notation} (\JSON) is a text format for the serialization of structured data \citep{crockford2006application}. It is derived from the object literals of \proglang{JavaScript}, as defined in the \proglang{ECMAScript} programming language standard \citep{ecma1999262}. Design of \JSON is simple and concise in comparison with other text based formats, and it was originally proposed by Douglas Crockford as a ``fat-free alternative to \XML'' \citep{crockford2006json}. The syntax is easy for humans to read and write, easy for machines to parse and generate and completely described in a single page at \url{http://www.json.org}. 
The character encoding of \JSON text is always Unicode, using \texttt{UTF-8} by default \citep{crockford2006application}, making it naturally compatible with non-latin alphabets. Over the past years, \JSON has become hugely popular on the internet as a general purpose data interchange format. High quality parsing libraries are available for almost any programming language, making it easy to implement systems and applications that exchange data over the network using \JSON. For \R \citep{R}, several packages that assist the user in generating, parsing and validating \JSON are available through CRAN, including \pkg{rjson} \citep{rjson}, \pkg{RJSONIO} \citep{RJSONIO}, and \pkg{jsonlite} \citep{jsonlite}. - -The emphasis of this paper is not on discussing the \JSON format or any particular implementation for using \JSON with \R. We refer to \cite{nolan2014xml} for a comprehensive introduction, or one of the many tutorials available on the web. Instead we take a high level view and discuss how \R data structures are most naturally represented in \JSON. This is not a trivial problem, particularly for complex or relational data as they frequently appear in statistical applications. Several \R packages implement \toJSON and \fromJSON functions which directly convert \R objects into \JSON and vice versa. However, the exact mapping between the various \R data classes \JSON structures is not self evident. Currently, there are no formal guidelines, or even consensus between implementations on how \R data should be represented in \JSON. Furthermore, upon closer inspection, even the most basic data structures in \R actually do not perfectly map to their \JSON counterparts, and leave some ambiguity for edge cases. These problems have resulted in different behavior between implementations, and can lead to unexpected output for certain special cases. Furthermore, best practices of representing data in \JSON have been established outside the \R community. 
Incorporating these conventions where possible is important to maximize interoperability. - -%When relying on \JSON as the data interchange format, the mapping between \R objects and \JSON data must be consistent and unambiguous. Clients relying on \JSON to get data in and out of \R must know exactly what to expect in order to facilitate reliable communication, even if the data themselves are dynamic. Similarly, \R code using dynamic \JSON data from an external source is only reliable when the conversion from \JSON to \R is consistent. This document attempts to take away some of the ambiguity by explicitly describing the mapping between \R classes and \JSON data, highlighting problems and propose conventions that can generalize the mapping to cover all common classes and cases in R. - -\subsection{Parsing and type safety} - -The \JSON format specifies 4 primitive types (\texttt{string}, \texttt{number}, \texttt{boolean}, \texttt{null}) and two \emph{universal structures}: - -\begin{itemize} %[itemsep=3pt, topsep=5pt] - \item A \JSON \emph{object}: an unordered collection of zero or more name-value - pairs, where a name is a string and a value is a string, number, - boolean, null, object, or array. - \item A \JSON \emph{array}: an ordered sequence of zero or more values. -\end{itemize} - -\noindent Both these structures are heterogeneous; i.e. they are allowed to contain elements of different types. Therefore, the native \R realization of these structures is a \texttt{named list} for \JSON objects, and \texttt{unnamed list} for \JSON arrays. However, in practice a list is an awkward, inefficient type to store and manipulate data in \R. Most statistical applications work with (homogeneous) vectors, matrices or data frames. In order to give these data structures a \JSON representation, we can define certain special cases of \JSON structures which get parsed into other, more specific \R types. 
For example, one convention which all current implementations have in common is that a homogeneous array of primitives gets parsed into an \texttt{atomic vector} instead of a \texttt{list}. The \pkg{RJSONIO} documentation uses the term ``simplify'' for this behavior, and we adopt this jargon. - -<<>>= -txt <- '[12, 3, 7]' -x <- fromJSON(txt) -is(x) -print(x) -@ - -This seems very reasonable and it is the only practical solution to represent vectors in \JSON. However the price we pay is that automatic simplification can compromise type-safety in the context of dynamic data. For example, suppose an \R package uses \fromJSON to pull data from a \JSON \API on the web and that for some particular combination of parameters the result includes a \texttt{null} value, e.g: \texttt{[12, null, 7]}. This is actually quite common, many \API's use \texttt{null} for missing values or unset fields. This case makes the behavior of the parser ambiguous, because the \JSON array is technically no longer homogeneous. And indeed, some implementations will now return a \texttt{list} instead of a \texttt{vector}. If the user had not anticipated this scenario and the script assumes a \texttt{vector}, the code is likely to run into type errors. - -The lesson here is that we need to be very specific and explicit about the mapping that is implemented to convert between \JSON data and \R objects. When relying on \JSON as a data interchange format, the behavior of the parser must be consistent and unambiguous. Clients relying on \JSON to get data in and out of \R must know exactly what to expect in order to facilitate reliable communication, even if the content of the data is dynamic. Similarly, \R code using dynamic \JSON data from an external source is only reliable when the conversion from \JSON to \R is consistent. Moreover a practical mapping must incorporate existing conventions and use the most natural representation of certain structures in \R. 
In the example above, we could argue that instead of falling back on a \texttt{list}, the array is more naturally interpreted as a numeric vector where the \texttt{null} becomes a missing value (\texttt{NA}). These principles will extrapolate as we start discussing more complex \JSON structures representing matrices and data frames. - -% \subsection{A Bidirectional Mapping} -% -% - bidirectional: one-to-one correspondence between JSON and \R classes with minimal coersing. -% - relation is functional in each direction: json interface to \R objects, and \R objects can be used to manipulate a JSON structure. -% - Results in unique coupling between json and objects that makes it natural to manipulate JSON in \R, and access \R objects from their JSON representation. -% - Mild assumption of consistency. -% - Supported classes: vectors of type numeric, character, logical, data frame and matrix. -% - Natural class is implicit in the structure, rather than explicitly encode using metadata. -% - Will show examples of why this is powerful. - -\subsection[Reference implementation: the jsonlite package]{Reference implementation: the \jsonlite package} - -The \jsonlite package provides a reference implementation of the conventions proposed in this document. It is a fork of the \RJSONIO package by Duncan Temple Lang, which builds on \texttt{libjson} \texttt{C++} library from Jonathan Wallace. \jsonlite uses the parser from \RJSONIO, but the \R code has been rewritten from scratch. Both packages implement \toJSON and \fromJSON functions, but their output is quite different. Finally, the \jsonlite package contains a large set of unit tests to validate that \R objects are correctly converted to \JSON and vice versa. These unit tests cover all classes and edge cases mentioned in this document, and could be used to validate if other implementations follow the same conventions. 
- -<>= -library(testthat) -test_package("jsonlite") -@ - -Note that even though \JSON allows for inserting arbitrary white space and indentation, the unit tests assume that white space is trimmed. - -\subsection{Class-based versus type-based encoding} -\label{serializejson} - -The \jsonlite package actually implements two systems for translating between \R objects and \JSON. This document focuses on the \toJSON and \fromJSON functions which use \R's class-based method dispatch. For all of the common classes in \R, the \jsonlite package implements \toJSON methods as described in this document. Users in \R can extend this system by implementing additional methods for other classes. This also means that classes that do not have the \toJSON method defined are not supported. Furthermore, the implementation of a specific \toJSON method determines which data and metadata in the objects of this class gets encoded in its \JSON representation, and how. In this respect, \toJSON is similar to e.g. the \texttt{print} function, which also provides a certain \emph{representation} of an object based on its class and optionally some print parameters. This representation does not necessarily reflect all information stored in the object, and there is no guaranteed one-to-one correspondence between \R objects and \JSON. I.e. calling \code{fromJSON(toJSON(object))} will return an object which only contains the data that was encoded by the \toJSON method for this particular class, and which might even have a different class than the original. - -The alternative to class-based method dispatch is to use type-based encoding, which \jsonlite implements in the functions \texttt{serializeJSON} and \code{unserializeJSON}. All data structures in \R get stored in memory using one of the internal \texttt{SEXP} storage types, and \code{serializeJSON} defines an encoding schema which captures the type, value, and attributes for each storage type. 
The resulting \JSON closely resembles the internal structure of the underlying \C data types, and can be perfectly restored to the original \R object using \code{unserializeJSON}. This system is relatively straightforward to implement, but the resulting \JSON is very verbose, hard to interpret, and cumbersome to generate in the context of another language or system. For most applications this is actually impractical because it requires the client/consumer to understand and manipulate \R data types, which is difficult and reduces interoperability. Instead we can make data in \R more accessible to third parties by defining sensible \JSON representations that are natural for the class of an object, rather than its internal storage type. This document does not discuss the \code{serializeJSON} system in any further detail, and solely treats the class based system implemented in \toJSON and \fromJSON. However the reader that is interested in full serialization of \R objects into \JSON is encouraged to have a look at the respective manual pages. - -\subsection{Scope and limitations} - -Before continuing, we want to stress some limitations of encoding \R data structures in \JSON. Most importantly, there are limitations to the types of objects that can be represented. In general, temporary in-memory properties such as connections, file descriptors and (recursive) memory references are always difficult if not impossible to store in a sensible way, regardless of the language or serialization method. This document focuses on the common \R classes that hold \emph{data}, such as vectors, factors, lists, matrices and data frames. We do not treat language level constructs such as expressions, functions, promises, which hold little meaning outside the context of \R. We also don't treat special compound classes such as linear models or custom classes defined in contributed packages. 
When designing systems or protocols that interact with \R, it is highly recommended to stick with the standard data structures for the interface input/output. - -Then there are limitations introduced by the format. Because \JSON is a human readable, text-based format, it does not support binary data, and numbers are stored in their decimal notation. The latter leads to loss of precision for real numbers, depending on how many digits the user decides to print. Several dialects of \JSON exist such as \texttt{BSON} \citep{chodorow2013mongodb} or \texttt{MSGPACK} \citep{msgpack}, which extend the format with various binary types. However, these formats are much less popular, less interoperable, and often impractical, precisely because they require binary parsing and abandon human readability. The simplicity of \JSON is what makes it an accessible and widely applicable data interchange format. In cases where it is really needed to include some binary data in \JSON, we can encode a blob as a string using \texttt{base64}. - -Finally, as mentioned earlier, \fromJSON is not a perfect inverse function of \toJSON, as is the case for \code{serializeJSON} and \code{unserializeJSON}. The class based mappings are designed for concise and practical encoding of the various common data structures. Our implementation of \toJSON and \fromJSON approximates a reversible mapping between \R objects and \JSON for the standard data classes, but there are always limitations and edge cases. For example, the \JSON representation of an empty vector, empty list or empty data frame are all the same: \texttt{"[ ]"}. Also some special vector types such as factors, dates or timestamps get coerced to strings, as they would in for example \texttt{CSV}. This is a quite typical and expected behavior among text based formats, but it does require some additional interpretation on the consumer side. 
- -% \subsection{Goals: Consistent and Practical} -% -% It can be helpful to see the problem from both sides. The \R user needs to interface external \JSON data from within \R. This includes reading data from a public source/API, or posting a specific \JSON structure to an online service. From perspective of the \R user, \JSON data should be realized in \R using classes which are most natural in \R for a particular structure. A proper mapping is one which allows the \R user to read any incoming data or generate a specific \JSON structures using the familiar methods and classes in \R. Ideally, the \R user would like to forget about the interchange format at all, and think about the external data interface in terms of its corresponding \R structures rather than a \JSON schema. The other perspective is that of an third party client or language, which needs to interface data in \R using \JSON. This actor wants to access and manipulate \R objects via their \JSON representation. A good mapping is one that allows a 3rd party client to get data in and out of \R, without necessarily understanding the specifics of the underlying \R classes. Ideally, the external client could forget about the \R objects and classes at all, and think about input and output of data in terms of the \JSON schema, or the corresponding realization in the language of the client. -% -% Both sides come together in the context of an RPC service such as OpenCPU. OpenCPU exposes a HTTP API to let 3rd party clients call \R functions over HTTP. The function arguments are posted using \JSON and OpenCPU automatically converts these into \R objects to construct the \R function call. The return value of the function is then converted to \JSON and sent back to the client. To the client, the service works as a \JSON API, but it is implemented as standard \R function uses standard data structures for its arguments and return value. 
For this to work, the conversion between \JSON data and \R objects must be consistent and unambiguous. In the design of our mapping we have pursued the following requirements: -% -% \begin{itemize} -% \item{Recognize and comply with existing conventions of encoding common data structures in \JSON, in particular (relational) data sets.} -% \item{Consistently use a particular schema for a class of objects, including edge cases.} -% \item{Avoid R-specific peculiarities to minimize opportunities for misinterpretation.} -% \item{Mapping should optimally be reversible, but at least coercible for the standard classes.} -% \item{Robustness principle: be strict on output but tolerant on input.} -% \end{itemize} - - -\section[Converting between JSON data and R classes]{Converting between \JSON data and \R classes} - -This section lists examples of how the common \R classes are represented in \JSON. As explained before, the \toJSON function relies on method dispatch, which means that objects get encoded according to their \texttt{class} attribute. If an object has multiple \texttt{class} values, \R uses the first occurring class which has a \toJSON method. If none of the classes of an object has a \toJSON method, an error is raised. - -\subsection{Atomic vectors} - -The most basic data type in \R is the atomic vector. Atomic vectors hold an ordered, homogeneous set of values of type \texttt{logical} (booleans), \texttt{character} (strings), \texttt{raw} (bytes), \texttt{numeric} (doubles), \texttt{complex} (complex numbers with a real and imaginary part), or \texttt{integer}. Because \R is fully vectorized, there is no user level notion of a primitive: a scalar value is considered a vector of length 1. 
Atomic vectors map to \JSON arrays: - -<<>>= -x <- c(1, 2, pi) -toJSON(x) -@ - -The \JSON array is the only appropriate structure to encode a vector: even though vectors in \R are homogeneous whereas the \JSON array is actually heterogeneous, \JSON does not make this distinction. - -\subsubsection{Missing values} - -A typical domain specific problem when working with statistical data is presented by missing values: a concept foreign to many other languages. Besides regular values, each vector type in \R except for \texttt{raw} can hold \texttt{NA} as a value. Vectors of type \texttt{double} and \texttt{complex} define three additional types of non finite values: \texttt{NaN}, \texttt{Inf} and \texttt{-Inf}. The \JSON format does not natively support any of these types; therefore such values need to be encoded in some other way. There are two obvious approaches. The first one is to use the \JSON \texttt{null} type. For example: - -<<>>= -x <- c(TRUE, FALSE, NA) -toJSON(x) -@ - -The other option is to encode missing values as strings by wrapping them in double quotes: - -<<>>= -x <- c(1,2,NA,NaN,Inf,10) -toJSON(x) -@ - -Both methods result in valid \JSON, but both have a limitation: the problem with the \texttt{null} type is that it is impossible to distinguish between different types of missing data, which could be a problem for numeric vectors. The values \texttt{Inf}, \texttt{-Inf}, \texttt{NA} and \texttt{NaN} carry different meanings, and these should not get lost in the encoding. The problem with encoding missing values as strings is that this method can not be used for character vectors, because the consumer won't be able to distinguish the actual string \texttt{"NA"} and the missing value \texttt{NA}. This would create a likely source of bugs, where clients mistakenly interpret \texttt{"NA"} as an actual string value, which is a common problem with text-based formats such as \texttt{CSV}. 
For this reason, \jsonlite uses the following defaults: - -\begin{itemize} - \item Missing values in non-numeric vectors (\texttt{logical}, \texttt{character}) are encoded as \texttt{null}. - \item Missing values in numeric vectors (\texttt{double}, \texttt{integer}, \texttt{complex}) are encoded as strings. -\end{itemize} - -We expect that these conventions are most likely to result in the correct interpretation of missing values. Some examples: - -<<>>= -toJSON(c(TRUE, NA, NA, FALSE)) -toJSON(c("FOO", "BAR", NA, "NA")) -toJSON(c(3.14, NA, NaN, 21, Inf, -Inf)) - -#Non-default behavior -toJSON(c(3.14, NA, NaN, 21, Inf, -Inf), na="null") -@ - -\subsubsection{Special vector types: dates, times, factor, complex} - -Besides missing values, \JSON also lacks native support for some of the basic vector types in \R that frequently appear in data sets. These include vectors of class \texttt{Date}, \texttt{POSIXt} (timestamps), \texttt{factors} and \texttt{complex} vectors. By default, the \jsonlite package coerces these types to strings (using \texttt{as.character}): - -<<>>= -toJSON(Sys.time() + 1:3) -toJSON(as.Date(Sys.time()) + 1:3) -toJSON(factor(c("foo", "bar", "foo"))) -toJSON(complex(real=runif(3), imaginary=rnorm(3))) -@ - -When parsing such \JSON strings, these values will appear as character vectors. In order to obtain the original types, the user needs to manually coerce them back to the desired type using the corresponding \texttt{as} function, e.g. \code{as.POSIXct}, \code{as.Date}, \code{as.factor} or \code{as.complex}. In this respect, \JSON is subject to the same limitations as text based formats such as \texttt{CSV}. - -\subsubsection{Special cases: vectors of length 0 or 1} - -Two edge cases deserve special attention: vectors of length 0 and vectors of length 1. 
In \jsonlite these are encoded respectively as an empty array, and an array of length 1: - -<<>>= -#vectors of length 0 and 1 -toJSON(vector()) -toJSON(pi) - -#vectors of length 0 and 1 in a named list -toJSON(list(foo=vector())) -toJSON(list(foo=pi)) - -#vectors of length 0 and 1 in an unnamed list -toJSON(list(vector())) -toJSON(list(pi)) -@ - -This might seem obvious but these cases result in very different behavior between different \JSON packages. This is probably caused by the fact that \R does not have a scalar type, and some package authors decided to treat vectors of length 1 as if they were a scalar. For example, in the current implementations, both \RJSONIO and \pkg{rjson} encode a vector of length one as a \JSON primitive when it appears within a list: - -<<>>= -# Other packages make different choices: -cat(rjson::toJSON(list(n = c(1)))) -cat(rjson::toJSON(list(n = c(1, 2)))) -@ - -When encoding a single dataset this seems harmless, but in the context of dynamic data this inconsistency is almost guaranteed to cause bugs. For example, imagine an \R web service which lets the user fit a linear model and sends back the fitted parameter estimates as a \JSON array. The client code then parses the \JSON, and iterates over the array of coefficients to display them in a \texttt{GUI}. All goes well, until the user decides to fit a model with only one predictor. If the \JSON encoder suddenly returns a primitive value where the client is expecting an array, the application will likely break. Therefore, any consumer or client would need to be aware of the special case where the vector becomes a primitive, and explicitly take this exception into account when processing the result. When the client fails to do so and proceeds as usual, it will probably call an iterator or loop method on a primitive value, resulting in the obvious errors. To avoid this, \jsonlite uses consistent encoding schemes which do not depend on variable object properties such as its length. 
Hence, a vector is always encoded as an array, even when it is of length 0 or 1. - -\subsection{Matrices} - -Arguably one of the strongest sides of \R is its ability to interface libraries for basic linear algebra subprograms \citep{lawson1979basic} such as \texttt{LAPACK} \citep{anderson1999lapack}. These libraries provide well tuned, high performance implementations of important linear algebra operations to calculate anything from inner products and eigen values to singular value decompositions, which are in turn building blocks of statistical methods such as linear regression or principal component analysis. Linear algebra methods operate on \emph{matrices}, making the matrix one of the most central data classes in \R. Conceptually, a matrix consists of a 2 dimensional structure of homogeneous values. It is indexed using 2 numbers (or vectors), representing the rows and columns of the matrix respectively. - -<<>>= -x <- matrix(1:12, nrow=3, ncol=4) -print(x) -print(x[2,4]) -@ - - A matrix is stored in memory as a single atomic vector with an attribute called \texttt{"dim"} defining the dimensions of the matrix. The product of the dimensions is equal to the length of the vector. - -<<>>= -attributes(volcano) -length(volcano) -@ - - Even though the matrix is stored as a single vector, the way it is printed and indexed makes it conceptually a 2 dimensional structure. In \jsonlite a matrix maps to an array of equal-length subarrays: - -<<>>= -x <- matrix(1:12, nrow=3, ncol=4) -toJSON(x) -@ - -We expect this representation will be the most intuitive to interpret, also within languages that do not have a native notion of a matrix. Note that even though \R stores matrices in \emph{column major} order, \jsonlite encodes matrices in \emph{row major} order. This is a more conventional and intuitive way to represent matrices and is consistent with the row-based encoding of data frames discussed in the next section. 
When the \JSON string is properly indented (recall that white space and line breaks are optional in \JSON), it looks very similar to the way \R prints matrices: - -\begin{verbatim} -[ [ 1, 4, 7, 10 ], - [ 2, 5, 8, 11 ], - [ 3, 6, 9, 12 ] ] -\end{verbatim} - - Because the matrix is implemented in \R as an atomic vector, it automatically inherits the conventions mentioned earlier with respect to edge cases and missing values: - -<<>>= -x <- matrix(c(1,2,4,NA), nrow=2) -toJSON(x) -toJSON(x, na="null") -toJSON(matrix(pi)) -@ - - -\subsubsection{Matrix row and column names} - -Besides the \texttt{"dim"} attribute, the matrix class has an additional, optional attribute: \texttt{"dimnames"}. This attribute holds names for the rows and columns in the matrix. However, we decided not to include this information in the default \JSON mapping for matrices for several reasons. First of all, because this attribute is optional, either row or column names or both could be \texttt{NULL}. This makes it difficult to define a practical mapping that covers all cases with and without row and/or column names. Secondly, the names in matrices are mostly there for annotation only; they are not actually used in calculations. The linear algebra subroutines mentioned before completely ignore them, and never include any names in their output. So there is often little purpose of setting names in the first place, other than annotation. - -When row or column names of a matrix seem to contain vital information, we might want to transform the data into a more appropriate structure. \cite{tidydata} calls this \emph{``tidying''} the data and outlines best practices on storing statistical data in its most appropriate form. He lists the issue where \emph{``column headers are values, not variable names''} as the most common source of untidy data. This often happens when the structure is optimized for presentation (e.g. printing), rather than computation. 
In the following example taken from Wickham, the predictor variable (treatment) is stored in the column headers rather than the actual data. As a result, these values do not get included in the \JSON output: - -<<>>= -x <- matrix(c(NA,1,2,5,NA,3), nrow=3) -row.names(x) <- c("Joe", "Jane", "Mary"); -colnames(x) <- c("Treatment A", "Treatment B") -print(x) -toJSON(x) -@ - -Wickham recommends that the data be \emph{melted} into its \emph{tidy} form. Once the data is tidy, the \JSON encoding will naturally contain the treatment values: - -<<>>= -library(reshape2) -y <- melt(x, varnames=c("Subject", "Treatment")) -print(y) -toJSON(y, pretty=TRUE) -@ - -In some other cases, the column headers actually do contain variable names, and melting is inappropriate. For data sets with records consisting of a set of named columns (fields), \R has a more natural and flexible class: the data-frame. The \toJSON method for data frames (described later) is more suitable when we want to refer to rows or fields by their name. Any matrix can easily be converted to a data-frame using the \code{as.data.frame} function: - -<<>>= -toJSON(as.data.frame(x), pretty=TRUE) -@ - -For some cases this results in the desired output, but in this example melting seems more appropriate. - -\subsection{Lists} - -The \texttt{list} is the most general purpose data structure in \R. It holds an ordered set of elements, including other lists, each of arbitrary type and size. Two types of lists are distinguished: named lists and unnamed lists. A list is considered a named list if it has an attribute called \texttt{"names"}. 
In practice, a named list is any list for which we can access an element by its name, whereas elements of an unnamed lists can only be accessed using their index number: - -<<>>= -mylist1 <- list("foo" = 123, "bar"= 456) -print(mylist1$bar) -mylist2 <- list(123, 456) -print(mylist2[[2]]) -@ - -\subsubsection{Unnamed lists} - -Just like vectors, an unnamed list maps to a \JSON array: - -<<>>= -toJSON(list(c(1,2), "test", TRUE, list(c(1,2)))) -@ - -Note that even though both vectors and lists are encoded using \JSON arrays, they can be distinguished from their contents: an \R vector results in a \JSON array containing only primitives, whereas a list results in a \JSON array containing only objects and arrays. This allows the \JSON parser to reconstruct the original type from encoded vectors and arrays: - -<<>>= -x <- list(c(1,2,NA), "test", FALSE, list(foo="bar")) -identical(fromJSON(toJSON(x)), x) -@ - - The only exception is the empty list and empty vector, which are both encoded as \texttt{[ ]} and therefore indistinguishable, but this is rarely a problem in practice. - -\subsubsection{Named lists} - -A named list in \R maps to a \JSON \emph{object}: - -<<>>= -toJSON(list(foo=c(1,2), bar="test")) -@ - - Because a list can contain other lists, this works recursively: - -<>= -toJSON(list(foo=list(bar=list(baz=pi)))) -@ - - Named lists map almost perfectly to \JSON objects with one exception: list elements can have empty names: - -<<>>= -x <- list(foo=123, "test", TRUE) -attr(x, "names") -x$foo -x[[2]] -@ - - In a \JSON object, each element in an object must have a valid name. To ensure this property, \jsonlite uses the same solution as the \code{print} method, which is to fall back on indices for elements that do not have a proper name: - -<<>>= -x <- list(foo=123, "test", TRUE) -print(x) -toJSON(x) -@ - - This behavior ensures that all generated \JSON is valid, however named lists with empty names should be avoided where possible. 
When actually designing \R objects that should be interoperable, it is recommended that each list element is given a proper name. - -\subsection{Data frame} - -The \texttt{data frame} is perhaps the most central data structure in \R from the user point of view. This class holds tabular data in which each column is named and (usually) homogeneous. Conceptually it is very similar to a table in relational data bases such as \texttt{MySQL}, where \emph{fields} are referred to as \emph{column names}, and \emph{records} are called \emph{rows}. Like a matrix, a data frame can be subsetted with two indices, to extract certain rows and columns of the data: - -<<>>= -is(iris) -names(iris) -print(iris[1:3, c(1,5)]) -print(iris[1:3, c("Sepal.Width", "Species")]) -@ - - For the previously discussed classes such as vectors and matrices, behavior of \jsonlite was quite similar to the other available packages that implement \toJSON and \fromJSON functions, with only minor differences for missing values and edge cases. But when it comes to data frames, \jsonlite takes a completely different approach. The behavior of \jsonlite is designed for compatibility with conventional ways of encoding table-like structures outside the \R community. The implementation is more involved, but results in a powerful and more natural way of representing data frames in \JSON. - -\subsubsection{Column based versus row based tables} - -Generally speaking, tabular data structures can be implemented in two different ways: in a column based, or row based fashion. A column based structure consists of a named collection of equal-length, homogeneous arrays representing the table columns. In a row-based structure on the other hand, the table is implemented as a set of heterogeneous associative arrays representing table rows with field values for each particular record. 
Even though most languages provide flexible and abstracted interfaces that hide these implementation details from the user, they can have huge implications for performance. A column based structure is efficient for inserting or extracting certain columns of the data, but it is inefficient for manipulating individual rows. For example to insert a single row somewhere in the middle, each of the columns has to be sliced and stitched back together. For row-based implementations, it is the exact other way around: we can easily manipulate a particular record, but to insert/extract a whole column we would need to iterate over all records in the table and read/modify the appropriate field in each of them. - -The data frame class in \R is implemented in a column based fashion: it consists of a \texttt{named list} of equal-length vectors. Thereby the columns in the data frame naturally inherit the properties from atomic vectors discussed before, such as homogeneity, missing values, etc. Another argument for column-based implementation is that statistical methods generally operate on columns. For example, the \code{lm} function fits a \emph{linear regression} by extracting the columns from a data frame as specified by the \texttt{formula} argument. \R simply binds the specified columns together into a matrix $X$ and calls out to a highly optimized \proglang{FORTRAN} subroutine to calculate the OLS estimates $\hat{\beta} = (X^TX)^{-1}X^Ty$ using the $QR$ factorization of $X$. Many other statistical modeling functions follow similar steps, and are computationally efficient because of the column-based data storage in \R. - -Unfortunately \R is an exception in its preference for column-based storage: most languages, systems, databases, \API's, etc, are optimized for record based operations. For this reason, the conventional way to store and communicate tabular data in \JSON seems to be almost exclusively row based. 
This discrepancy presents various complications when converting between data frames and \JSON. The remainder of this section discusses details and challenges of consistently mapping record based \JSON data as frequently encountered on the web, into column-based data frames which are convenient for statistical computing. - -\subsubsection{Row based data frame encoding} - -The encoding of data frames is one of the major differences between \jsonlite and implementations from other currently available packages. Instead of using the column-based encoding also used for lists, \jsonlite maps data frames by default to an array of records: - -<<>>= -toJSON(iris[1:2,], pretty=TRUE) -@ - - This output looks a bit like a list of named lists. However, there is one major difference: the individual records contain \JSON primitives, whereas lists always contain \JSON objects or arrays: - -<<>>= -toJSON(list(list(Species="Foo", Width=21)), pretty=TRUE) -@ - - This leads to the following convention: when encoding \R objects, \JSON primitives only appear in vectors and data-frame rows. Primitives within a \JSON array indicate a vector, and primitives appearing inside a \JSON object indicate a data-frame row. A \JSON encoded \texttt{list} (named or unnamed) will never contain \JSON primitives. This is a subtle but important convention that helps to distinguish \R classes from their \JSON representation, without explicitly encoding any metadata. - -\subsubsection{Missing values in data frames} - -The section on atomic vectors discussed two methods of encoding missing data appearing in a vector: either using strings or using the \JSON \texttt{null} type. 
When a missing value appears in a data frame, there is a third option: simply not include this field in the \JSON record: - -<<>>= -x <- data.frame(foo=c(FALSE, TRUE,NA,NA), bar=c("Aladdin", NA, NA, "Mario")) -print(x) -toJSON(x, pretty=TRUE) -@ - - The default behavior of \jsonlite is to omit missing data from records in a data frame. This seems to be the most conventional method used on the web, and we expect this encoding will most likely lead to the correct interpretation of \emph{missingness}, even in languages without an explicit notion of \texttt{NA}. - -\subsubsection{Relational data: nested records} - -Nested datasets are somewhat unusual in \R, but frequently encountered in \JSON. Such structures do not really fit the vector based paradigm which makes them harder to manipulate in \R. However, nested structures are too common in \JSON to ignore, and with a little work most cases still map to a data frame quite nicely. The most common scenario is a dataset in which a certain field within each record contains a \emph{subrecord} with additional fields. The \jsonlite implementation maps these subrecords to a nested data frame. Whereas the data frame class usually consists of vectors, technically a column can also be a list or another data frame with matching dimension (this stretches the meaning of the word ``column'' a bit): - -<>= -options(stringsAsFactors=FALSE) -x <- data.frame(driver = c("Bowser", "Peach"), occupation = c("Koopa", "Princess")) -x$vehicle <- data.frame(model = c("Piranha Prowler", "Royal Racer")) -x$vehicle$stats <- data.frame(speed = c(55, 34), weight = c(67, 24), drift = c(35, 32)) -str(x) -toJSON(x, pretty=TRUE) -myjson <- toJSON(x) -y <- fromJSON(myjson) -identical(x,y) -@ - - When encountering \JSON data containing nested records on the web, chances are that these data were generated from a \emph{relational} database. The \JSON field containing a subrecord represents a \emph{foreign key} pointing to a record in an external table. 
For the purpose of encoding these into a single \JSON structure, the tables were joined into a nested structure. The directly nested subrecord represents a \emph{one-to-one} or \emph{many-to-one} relation between the parent and child table, and is most naturally stored in \R using a nested data frame. In the example above, the \texttt{vehicle} field points to a table of vehicles, which in turn contains a \texttt{stats} field pointing to a table of stats. When there is no more than one subrecord for each record, we easily \emph{flatten} the structure into a single non-nested data frame. - -<<>>= -y <- fromJSON(myjson, flatten=TRUE) -str(y) -@ - -\subsubsection{Relational data: nested tables} - -The one-to-one relation discussed above is relatively easy to store in \R, because each record contains at most one subrecord. Therefore we can use either a nested data frame, or flatten the data frame. However, things get more difficult when \JSON records contain a field with a nested array. Such a structure appears in relational data in case of a \emph{one-to-many} relation. A standard textbook illustration is the relation between authors and titles. For example, a field can contain an array of values: - -<>= -x <- data.frame(author = c("Homer", "Virgil", "Jeroen")) -x$poems <- list(c("Iliad", "Odyssey"), c("Eclogues", "Georgics", "Aeneid"), vector()); -names(x) -toJSON(x, pretty = TRUE) -@ - - As can be seen from the example, the way to store this in a data frame is using a list of character vectors. This works, and although unconventional, we can still create and read such structures in \R relatively easily. However, in practice the one-to-many relation is often more complex. It results in fields containing a \emph{set of records}. 
In \R, the only way to model this is as a column containing a list of data frames, one separate data frame for each row: - -<>= -x <- data.frame(author = c("Homer", "Virgil", "Jeroen")) -x$poems <- list( - data.frame(title=c("Iliad", "Odyssey"), year=c(-1194, -800)), - data.frame(title=c("Eclogues", "Georgics", "Aeneid"), year=c(-44, -29, -19)), - data.frame() -) -toJSON(x, pretty=TRUE) -@ - - Because \R doesn't have native support for relational data, there is no natural class to store such structures. The best we can do is a column containing a list of sub-dataframes. This does the job, and allows the \R user to access or generate nested \JSON structures. However, a data frame like this cannot be flattened, and the class does not guarantee that each of the individual nested data frames contain the same fields, as would be the case in an actual relational data base. - - -\section{Structural consistency and type safety in dynamic data} - -Systems that automatically exchange information over some interface, protocol or \API require well defined and unambiguous meaning and arrangement of data. In order to process and interpret input and output, contents must obey a steady structure. Such structures are usually described either informally in documentation or more formally in a schema language. The previous section emphasized the importance of consistency in the mapping between \JSON data and \R classes. This section takes a higher level view and explains the importance of structure consistency for dynamic data. This topic can be a bit subtle because it refers to consistency among different instantiations of a \JSON structure, rather than a single case. We try to clarify by breaking down the concept into two important parts, and illustrate with analogies and examples from \R. - -\subsection{Classes, types and data} - -Most object-oriented languages are designed with the idea that all objects of a certain class implement the same fields and methods. 
In strongly typed languages such as \proglang{S4} or \proglang{Java}, names and types of the fields are formally declared in a class definition. In other languages such as \proglang{S3} or \proglang{JavaScript}, the fields are not enforced by the language but rather at the discretion of the programmer. One way or another they assume that members of a certain class agree on field names and types, so that the same methods can be applied to any object of a particular class. This basic principle holds for dynamic data exactly the same way as for objects. Software that processes dynamic data can only work reliably if the various elements of the data have consistent names and structure. Consensus must exist between the different parties on data that is exchanged as part of an interface or protocol. This requires the structure to follow some sort of template that specifies which attributes can appear in the data, what they mean and how they are composed. Thereby each possible scenario can be accounted for in the software so that data can be interpreted and processed appropriately with no exceptions during run-time. - -Some data interchange formats such as \texttt{XML} or \texttt{Protocol Buffers} take a formal approach to this matter, and have well established \emph{schema languages} and \emph{interface description languages}. Using such a meta language it is possible to define the exact structure, properties and actions of data interchange in a formal arrangement. However, in \JSON, such formal definitions are relatively uncommon. Some initiatives for \JSON schema languages exist \citep{jsonschema}, but they are not very well established and rarely seen in practice. One reason for this might be that defining and implementing formal schemas is complicated and a lot of work which defeats the purpose of using a lightweight format such as \JSON in the first place. But another reason is that it is often simply not necessary to be overly formal. 
The \JSON format is simple and intuitive, and under some general conventions, a well chosen example can suffice to characterize the structure. This section describes two important rules that are required to ensure that data exchange using \JSON is type safe. - -\subsection{Rule 1: Fixed keys} - -When using \JSON without a schema, there are no restrictions on the keys (field names) that can appear in a particular object. However, a source of data that returns a different set of keys every time it is called makes it very difficult to write software to process these data. Hence, the first rule is to limit \JSON interfaces to a finite set of keys that are known \emph{a priori} by all parties. It can be helpful to think about this in analogy with for example a relational database. Here, the database model separates the data from metadata. At run time, records can be inserted or deleted, and a certain query might return different content each time it is executed. But for a given query, each execution will return exactly the same \emph{field names}; hence as long as the table definitions are unchanged, the \emph{structure} of the output is consistent. Client software needs this structure to validate input, optimize implementation, and process each part of the data appropriately. In \JSON, data and metadata are not formally separated as in a database, but similar principles that hold for fields in a database apply to keys in dynamic \JSON data. - -A beautiful example of this in practice was given by Mike Dewar at the New York Open Statistical Programming Meetup on Jan. 12, 2012 \citep{jsonkeys}. In his talk he emphasizes using \JSON keys only for \emph{names}, and not for \emph{data}. He refers to this principle as the ``golden rule'', and explains how he learned his lesson the hard way. In one of his early applications, timeseries data was encoded by using the epoch timestamp as the \JSON key. 
Therefore the keys are different each time the query is executed: - -\begin{verbatim} -[ - { "1325344443" : 124 }, - { "1325344456" : 131 }, - { "1325344478" : 137 } -] -\end{verbatim} - - Even though this is valid \JSON, dynamic keys as in the example above are likely to introduce trouble. Most software will have great difficulty processing these values if we cannot specify the keys in the code. Moreover when documenting the API, either informally or formally using a schema language, we need to describe for each property in the data what the value means and is composed of. Thereby a client or consumer can implement code that interprets and processes each element in the data in an appropriate manner. Both the documentation and interpretation of \JSON data rely on fixed keys with well defined meaning. Also note that the structure is difficult to extend in the future. If we want to add an additional property to each observation, the entire structure needs to change. In his talk, Dewar explains that life gets much easier when we switch to the following encoding: - -\begin{verbatim} -[ - { "time": "1325344443", "price": 124 }, - { "time": "1325344456", "price": 131 }, - { "time": "1325344478", "price": 137 } -] -\end{verbatim} - - This structure will play much nicer with existing software that assumes fixed keys. Moreover, the structure can easily be described in documentation, or captured in a schema. Even when we have no intention of writing documentation or a schema for a dynamic \JSON source, it is still wise to design the structure in such a way that it \emph{could} be described by a schema. When the keys are fixed, a well chosen example can provide all the information required for the consumer to implement client code. Also note that the new structure is extensible: additional properties can be added to each observation without breaking backward compatibility. 
- -In the context of \R, consistency of keys is closely related to Wickham's concept of \emph{tidy data} discussed earlier. Wickham states that the most common reason for messy data is column headers containing values instead of variable names. Column headers in tabular datasets become keys when converted to \JSON. Therefore, when headers are actually values, \JSON keys contain in fact data and can become unpredictable. The cure to inconsistent keys is almost always to tidy the data according to recommendations given by \cite{tidydata}. - -\subsection{Rule 2: Consistent types} - -In a strongly typed language, fields declare their class before any values are assigned. Thereby the type of a given field is identical in all objects of a particular class, and arrays only contain objects of a single type. The \proglang{S3} system in \R is weakly typed and puts no formal restrictions on the class of certain properties, or the types of objects that can be combined into a collection. For example, the list below contains a character vector, a numeric vector and a list: - -<<>>= -#Heterogeneous lists are bad! -x <- list("FOO", 1:3, list("bar"=pi)) -toJSON(x) -@ - - However even though it is possible to generate such \JSON, it is bad practice. Fields or collections with ambiguous object types are difficult to describe, interpret and process in the context of inter-system communication. When using \JSON to exchange dynamic data, it is important that each property and array is \emph{type consistent}. In dynamically typed languages, the programmer needs to make sure that properties are of the correct type before encoding into \JSON. For \R, this means that the \texttt{unnamed list} type is best avoided when designing interoperable structures because this type is not homogeneous. - - Note that consistency is somewhat subjective as it refers to the \emph{meaning} of the elements; they do not necessarily have precisely the same structure. 
What is important is to keep in mind that the consumer of the data can interpret and process each element identically, e.g. iterate over the elements in the collection and apply the same method to each of them. To illustrate this, let's take the example of the data frame: - -<<>>= -#conceptually homogeneous array -x <- data.frame(name=c("Jay", "Mary", NA, NA), gender=c("M", NA, NA, "F")) -toJSON(x, pretty=TRUE) -@ - -The \JSON array above has 4 elements, each of which is a \JSON object. However, due to the \texttt{NA} values, some records have more fields than others. But as long as they are conceptually the same type (e.g. a person), the consumer can iterate over the elements to process each person in the set according to a predefined action. For example each element could be used to construct a \texttt{Person} object. A collection of different object classes should be separated and organized using a named list: - -<>= -x <- list( - humans = data.frame(name = c("Jay", "Mary"), married = c(TRUE, FALSE)), - horses = data.frame(name = c("Star", "Dakota"), price = c(5000, 30000)) -) -toJSON(x, pretty=TRUE) -@ - - This might seem obvious, but dynamic languages such as \R can make it dangerously tempting to generate data containing mixed-type collections. Such inconsistent typing makes it very difficult to consume the data and creates a likely source of nasty bugs. Using consistent field names/types and homogeneous \JSON arrays is a strong convention among public \JSON \API's, for good reasons. We recommend \R users to respect these conventions when generating \JSON data in \R. 
- - -%references -\bibliographystyle{plainnat} -\bibliography{references} - -%end -\end{document} diff --git a/src/library/jsonlite/vignettes/json-mapping.pdf.asis b/src/library/jsonlite/vignettes/json-mapping.pdf.asis deleted file mode 100644 index 1b7eb6494..000000000 --- a/src/library/jsonlite/vignettes/json-mapping.pdf.asis +++ /dev/null @@ -1,6 +0,0 @@ -%\VignetteIndexEntry{A mapping between JSON data and R objects} -%\VignetteEngine{R.rsp::asis} -%\VignetteKeyword{PDF} -%\VignetteKeyword{HTML} -%\VignetteKeyword{vignette} -%\VignetteKeyword{package} diff --git a/src/library/jsonlite/vignettes/json-opencpu.Rnw.orig b/src/library/jsonlite/vignettes/json-opencpu.Rnw.orig deleted file mode 100644 index a3df48e00..000000000 --- a/src/library/jsonlite/vignettes/json-opencpu.Rnw.orig +++ /dev/null @@ -1,132 +0,0 @@ -%\VignetteEngine{knitr::knitr} -%\VignetteIndexEntry{Simple JSON RPC with OpenCPU} - -%This is a template. -%Actual text goes in sources/content.Rnw -\documentclass{article} -\author{Jeroen Ooms} - -%useful packages -\usepackage{url} -\usepackage{fullpage} -\usepackage{xspace} -\usepackage{hyperref} -\usepackage{fancyvrb} - -%for table positioning -\usepackage{float} -\restylefloat{table} - -%support for accents -\usepackage[utf8]{inputenc} - -%support for ascii art -\usepackage{pmboxdraw} - -%use vspace instead of indentation for paragraphs -\usepackage{parskip} - -%extra line spacing -\usepackage{setspace} -\setstretch{1.25} - -%knitr style verbatim blocks -\newenvironment{codeblock}{ - \VerbatimEnvironment - \definecolor{shadecolor}{rgb}{0.95, 0.95, 0.95}\color{fgcolor} - \color{black} - \begin{kframe} - \begin{BVerbatim} -}{ - \end{BVerbatim} - \end{kframe} -} - -%placeholders for JSS/RJournal -\newcommand{\pkg}[1]{\texttt{#1}} -\newcommand{\code}[1]{\texttt{#1}} -\newcommand{\file}[1]{\texttt{#1}} -\newcommand{\dfn}[1]{\emph{#1}} -\newcommand{\proglang}[1]{\texttt{#1}} - -%shorthands -\newcommand{\JSON}{\texttt{JSON}\xspace} 
-\newcommand{\R}{\texttt{R}\xspace} -\newcommand{\C}{\texttt{C}\xspace} -\newcommand{\toJSON}{\texttt{toJSON}\xspace} -\newcommand{\fromJSON}{\texttt{fromJSON}\xspace} -\newcommand{\XML}{\pkg{XML}\xspace} -\newcommand{\jsonlite}{\pkg{jsonlite}\xspace} -\newcommand{\RJSONIO}{\pkg{RJSONIO}\xspace} -\newcommand{\API}{\texttt{API}\xspace} -\newcommand{\JavaScript}{\texttt{JavaScript}\xspace} - -%trick for using same content file as chatper and article -\newcommand{\maintitle}[1]{ - \title{#1} - \maketitle -} - -%actual document -\begin{document} - - - -\section*{Simple \JSON RPC with OpenCPU} - -The \jsonlite package is used by \texttt{OpenCPU} to convert between \JSON data and \R objects. Thereby clients can retrieve \R objects, or remotely call \R functions using \JSON where the function arguments as well as function return value are \JSON objects. For example to download the \texttt{Boston} data from the \texttt{MASS} package:\\ - -\begin{tabular}{|l|l|} - \hline - \textbf{Command in R} & \textbf{Example URL on OpenCPU} \\ - \hline - \texttt{toJSON(Boston, digits=4)} & \url{https://cran.ocpu.io/MASS/data/Boston/json?digits=4} \\ - \hline - \texttt{toJSON(Boston, dataframe="col")} & \url{https://cran.ocpu.io/MASS/data/Boston/json?dataframe=col} \\ - \hline - \texttt{toJSON(Boston, pretty=FALSE)} & \url{https://cran.ocpu.io/MASS/data/Boston/json?pretty=false} \\ - \hline -\end{tabular} -\newline - -To calculate the variance of some the numbers \texttt{1:9} in the command line using using \texttt{curl}: - -\begin{Verbatim}[frame=single] -curl https://cran.ocpu.io/stats/R/var/json -d "x=[1,2,3,4,5,6,7,8,9]" -\end{Verbatim} - -Or equivalently post the entire body in \JSON format: - -\begin{Verbatim}[frame=single] -curl https://cran.ocpu.io/stats/R/var/json -H "Content-Type: application/json" \ --d "{\"x\":[1,2,3,4,5,6,7,8,9]}" -\end{Verbatim} - -Below an example where we call the \texttt{melt} function from the \texttt{reshape2} package using some example rows from the 
\texttt{airquality} data. Here both input and output consist of a data frame. - -\begin{Verbatim}[frame=single] -curl https://cran.ocpu.io/reshape2/R/melt/json -d 'id=["Month", "Day"]&data=[ - { "Ozone" : 41, "Solar.R" : 190, "Wind" : 7.4, "Temp" : 67, "Month" : 5, "Day" : 1 }, - { "Ozone" : 36, "Solar.R" : 118, "Wind" : 8, "Temp" : 72, "Month" : 5, "Day" : 2 } ]' -\end{Verbatim} - -Or equivalently: - -\begin{Verbatim}[frame=single] -curl https://cran.ocpu.io/reshape2/R/melt/json -H "Content-Type: application/json" \ - -d '{"id" : ["Month", "Day"], "data" : [ - { "Ozone" : 41, "Solar.R" : 190, "Wind" : 7.4, "Temp" : 67, "Month" : 5, "Day" : 1 }, - { "Ozone" : 36, "Solar.R" : 118, "Wind" : 8, "Temp" : 72, "Month" : 5, "Day" : 2 } - ] }' -\end{Verbatim} - -This request basically executes the following \R code: - -<>= -mydata <- airquality[1:2,] -y <- reshape2::melt(data = mydata, id = c("Month", "Day")) -toJSON(y) -@ - -%end -\end{document} diff --git a/src/library/jsonlite/vignettes/json-opencpu.pdf.asis b/src/library/jsonlite/vignettes/json-opencpu.pdf.asis deleted file mode 100644 index 3c2c86c8f..000000000 --- a/src/library/jsonlite/vignettes/json-opencpu.pdf.asis +++ /dev/null @@ -1,6 +0,0 @@ -%\VignetteIndexEntry{Simple JSON RPC with OpenCPU} -%\VignetteEngine{R.rsp::asis} -%\VignetteKeyword{PDF} -%\VignetteKeyword{HTML} -%\VignetteKeyword{vignette} -%\VignetteKeyword{package} diff --git a/src/library/jsonlite/vignettes/json-paging.Rmd b/src/library/jsonlite/vignettes/json-paging.Rmd deleted file mode 100644 index 669cd1dea..000000000 --- a/src/library/jsonlite/vignettes/json-paging.Rmd +++ /dev/null @@ -1,122 +0,0 @@ ---- -title: "Combining pages of JSON data with jsonlite" -date: "2022-01-16" -output: - html_document -vignette: > - %\VignetteIndexEntry{Combining pages of JSON data with jsonlite} - %\VignetteEngine{knitr::rmarkdown} - \usepackage[utf8]{inputenc} ---- - - - - - - -The [jsonlite](https://cran.r-project.org/package=jsonlite) package is a `JSON` 
parser/generator for R which is optimized for pipelines and web APIs. It is used by the OpenCPU system and many other packages to get data in and out of R using the `JSON` format. - -## A bidirectional mapping - -One of the main strengths of `jsonlite` is that it implements a bidirectional [mapping](https://arxiv.org/abs/1403.2805) between JSON and data frames. Thereby it can convert nested collections of JSON records, as they often appear on the web, immediately into the appropriate R structure. For example to grab some data from ProPublica we can simply use: - - -```r -library(jsonlite) -mydata <- fromJSON("https://projects.propublica.org/forensics/geos.json", flatten = TRUE) -View(mydata) -``` - -The `mydata` object is a data frame which can be used directly for modeling or visualization, without the need for any further complicated data manipulation. - -## Paging with jsonlite - -A question that comes up frequently is how to combine pages of data. Most web APIs limit the amount of data that can be retrieved per request. If the client needs more data than what can fits in a single request, it needs to break down the data into multiple requests that each retrieve a fragment (page) of data, not unlike pages in a book. In practice this is often implemented using a `page` parameter in the API. 
Below an example from the [ProPublica Nonprofit Explorer API](https://projects.propublica.org/nonprofits/api) where we retrieve the first 3 pages of tax-exempt organizations in the USA, ordered by revenue: - - -```r -baseurl <- "https://projects.propublica.org/nonprofits/api/v2/search.json?order=revenue&sort_order=desc" -mydata0 <- fromJSON(paste0(baseurl, "&page=0"), flatten = TRUE) -mydata1 <- fromJSON(paste0(baseurl, "&page=1"), flatten = TRUE) -mydata2 <- fromJSON(paste0(baseurl, "&page=2"), flatten = TRUE) - -#The actual data is in the organizations element -mydata0$organizations[1:10, c("name", "city", "strein")] -``` - -``` - name city strein -1 0 DEBT EDUCATION INC SANTA ROSA 46-4744976 -2 0 TOLERANCE INC SUWANEE 27-2620044 -3 00 MOVEMENT INC PENSACOLA 82-4704419 -4 00006 LOCAL MEDIA 22-6062777 -5 0003 POSTAL FAMILY CINCINNATI 31-0240910 -6 0005 GA HEPHZIBAH 58-1514574 -7 0005 WRIGHT-PATT CREDIT UNION BEAVERCREEK 31-0278870 -8 0009 DE GREENWOOD 26-4507405 -9 0011 CALIFORNIA REDWAY 36-4654777 -10 00141 LOCAL MEDIA 94-0507697 -``` - -To analyze or visualize these data, we need to combine the pages into a single dataset. We can do this with the `rbind_pages` function. Note that in this example, the actual data is contained by the `organizations` field: - - -```r -#Rows per data frame -nrow(mydata0$organizations) -``` - -``` -[1] 100 -``` - -```r -#Combine data frames -organizations <- rbind_pages( - list(mydata0$organizations, mydata1$organizations, mydata2$organizations) -) - -#Total number of rows -nrow(organizations) -``` - -``` -[1] 300 -``` - -## Automatically combining many pages - -We can write a simple loop that automatically downloads and combines many pages. 
For example to retrieve the first 20 pages with non-profits from the example above: - - -```r -#store all pages in a list first -baseurl <- "https://projects.propublica.org/nonprofits/api/v2/search.json?order=revenue&sort_order=desc" -pages <- list() -for(i in 0:20){ - mydata <- fromJSON(paste0(baseurl, "&page=", i)) - message("Retrieving page ", i) - pages[[i+1]] <- mydata$organizations -} - -#combine all into one -organizations <- rbind_pages(pages) - -#check output -nrow(organizations) -``` - -``` -[1] 2100 -``` - -```r -colnames(organizations) -``` - -``` - [1] "ein" "strein" "name" "sub_name" "city" "state" "ntee_code" "raw_ntee_code" - [9] "subseccd" "has_subseccd" "have_filings" "have_extracts" "have_pdfs" "score" -``` - -From here, we can go straight to analyzing the organizations data without any further tedious data manipulation. diff --git a/src/library/jsonlite/vignettes/json-paging.Rmd.orig b/src/library/jsonlite/vignettes/json-paging.Rmd.orig deleted file mode 100644 index 7a21e7dd6..000000000 --- a/src/library/jsonlite/vignettes/json-paging.Rmd.orig +++ /dev/null @@ -1,92 +0,0 @@ ---- -title: "Combining pages of JSON data with jsonlite" -date: "`r Sys.Date()`" -output: - html_document -vignette: > - %\VignetteIndexEntry{Combining pages of JSON data with jsonlite} - %\VignetteEngine{knitr::rmarkdown} - \usepackage[utf8]{inputenc} ---- - - -```{r echo=FALSE} -library(knitr) -opts_chunk$set(comment="") - -#this replaces tabs by spaces because latex-verbatim doesn't like tabs -toJSON <- function(...){ - gsub("\t", " ", jsonlite::toJSON(...), fixed=TRUE); -} -``` - -```{r echo=FALSE, message=FALSE} -library(jsonlite) -``` - -The [jsonlite](https://cran.r-project.org/package=jsonlite) package is a `JSON` parser/generator for R which is optimized for pipelines and web APIs. It is used by the OpenCPU system and many other packages to get data in and out of R using the `JSON` format. 
- -## A bidirectional mapping - -One of the main strengths of `jsonlite` is that it implements a bidirectional [mapping](http://arxiv.org/abs/1403.2805) between JSON and data frames. Thereby it can convert nested collections of JSON records, as they often appear on the web, immediately into the appropriate R structure. For example to grab some data from ProPublica we can simply use: - -```{r eval=FALSE} -library(jsonlite) -mydata <- fromJSON("https://projects.propublica.org/forensics/geos.json", flatten = TRUE) -View(mydata) -``` - -The `mydata` object is a data frame which can be used directly for modeling or visualization, without the need for any further complicated data manipulation. - -## Paging with jsonlite - -A question that comes up frequently is how to combine pages of data. Most web APIs limit the amount of data that can be retrieved per request. If the client needs more data than what can fits in a single request, it needs to break down the data into multiple requests that each retrieve a fragment (page) of data, not unlike pages in a book. In practice this is often implemented using a `page` parameter in the API. Below an example from the [ProPublica Nonprofit Explorer API](https://projects.propublica.org/nonprofits/api) where we retrieve the first 3 pages of tax-exempt organizations in the USA, ordered by revenue: - -```{r} -baseurl <- "https://projects.propublica.org/nonprofits/api/v2/search.json?order=revenue&sort_order=desc" -mydata0 <- fromJSON(paste0(baseurl, "&page=0"), flatten = TRUE) -mydata1 <- fromJSON(paste0(baseurl, "&page=1"), flatten = TRUE) -mydata2 <- fromJSON(paste0(baseurl, "&page=2"), flatten = TRUE) - -#The actual data is in the organizations element -mydata0$organizations[1:10, c("name", "city", "strein")] -``` - -To analyze or visualize these data, we need to combine the pages into a single dataset. We can do this with the `rbind_pages` function. 
Note that in this example, the actual data is contained by the `organizations` field: - -```{r} -#Rows per data frame -nrow(mydata0$organizations) - -#Combine data frames -organizations <- rbind_pages( - list(mydata0$organizations, mydata1$organizations, mydata2$organizations) -) - -#Total number of rows -nrow(organizations) -``` - -## Automatically combining many pages - -We can write a simple loop that automatically downloads and combines many pages. For example to retrieve the first 20 pages with non-profits from the example above: - -```{r, message=FALSE} -#store all pages in a list first -baseurl <- "https://projects.propublica.org/nonprofits/api/v2/search.json?order=revenue&sort_order=desc" -pages <- list() -for(i in 0:20){ - mydata <- fromJSON(paste0(baseurl, "&page=", i)) - message("Retrieving page ", i) - pages[[i+1]] <- mydata$organizations -} - -#combine all into one -organizations <- rbind_pages(pages) - -#check output -nrow(organizations) -colnames(organizations) -``` - -From here, we can go straight to analyzing the organizations data without any further tedious data manipulation. diff --git a/src/library/jsonlite/vignettes/precompile.R b/src/library/jsonlite/vignettes/precompile.R deleted file mode 100644 index 3d952b0be..000000000 --- a/src/library/jsonlite/vignettes/precompile.R +++ /dev/null @@ -1,5 +0,0 @@ -#Vignettes that depend on internet access have been precompiled: - -library(knitr) -knit("vignettes/json-apis.Rmd.orig", "vignettes/json-apis.Rmd") -knit("vignettes/json-paging.Rmd.orig", "vignettes/json-paging.Rmd") diff --git a/src/library/jsonlite/vignettes/references.bib b/src/library/jsonlite/vignettes/references.bib deleted file mode 100644 index 5e235046a..000000000 --- a/src/library/jsonlite/vignettes/references.bib +++ /dev/null @@ -1,150 +0,0 @@ -@manual{jsonschema, - title={{JSON Schema: Core Definitions and Terminology}}, - organization={Internet Engineering Task Force (IETF)}, - author={F. Galiegue and K. 
Zyp}, - year={2013}, - url={https://tools.ietf.org/html/draft-zyp-json-schema-04}, -} - -@manual{msgpack, - title={{MessagePack: It's Like JSON. But Fast and Small}}, - author={Sadayuki Furuhashi}, - year={2014}, - url={http://msgpack.org/}, -} - -@BOOK{chodorow2013mongodb, - title={MongoDB: The Definitive Guide}, - author={Kristina Chodorow}, - publisher={O'Reilly Media}, - year={2013}, - month={5}, - edition={Second}, - isbn={9781449344689}, - url={http://amazon.com/o/ASIN/1449344682/}, - price={$39.99}, - totalpages={432}, - timestamp={2014.05.05}, -} - -@misc{jsonkeys, - title={First Steps in Data Visualisation Using \texttt{d3.js}}, - author={Mike Dewar}, - organization={bit.ly}, - year={2012}, - note={New York Open Statistical Programming Meetup on Jan. 12, 2012}, - url={http://vimeo.com/35005701#t=7m17s} -} - -@article{lawson1979basic, - author = {Lawson, C. L. and Hanson, R. J. and Kincaid, D. R. and Krogh, F. T.}, - title = {Basic Linear Algebra Subprograms for Fortran Usage}, - journal = {ACM Transactions on Mathematical Software}, - issue_date = {Sept. 1979}, - volume = {5}, - number = {3}, - month = sep, - year = {1979}, - issn = {0098-3500}, - pages = {308--323}, - numpages = {16}, - url = {http://doi.acm.org/10.1145/355841.355847}, - doi = {10.1145/355841.355847}, - acmid = {355847}, - publisher = {ACM}, - address = {New York, NY, USA}, -} - -@BOOK{anderson1999lapack, - title={LAPACK Users' Guide (Software, Environments and Tools)}, - author={E. Anderson and Z. Bai and C. Bischof and S. Blackford and J. Demmel and J. Dongarra and J. Du Croz and A. Greenbaum and S. Hammarling and A. McKenney and D. 
Sorensen}, - publisher={Society for Industrial and Applied Mathematics}, - year={1987}, - month={1}, - edition={3}, - isbn={9780898714470}, - url={http://amazon.com/o/ASIN/0898714478/}, - price={$65.00}, - totalpages={429}, - timestamp={2014.05.05}, -} - -@Manual{R, - title = {R: A Language and Environment for Statistical Computing}, - author = {{R Core Team}}, - organization = {R Foundation for Statistical Computing}, - address = {Vienna, Austria}, - year = {2014}, - url = {http://www.R-project.org/}, -} - -@Manual{RJSONIO, - title = {{\pkg{RJSONIO}: Serialize \R Objects to \JSON, \JavaScript Object Notation}}, - author = {Duncan {Temple Lang}}, - year = {2013}, - note = {\R package version 1.0-3}, - url = {http://CRAN.R-project.org/package=RJSONIO}, -} - -@Manual{rjson, - title = {{\pkg{rjson}: \JSON for \R}}, - author = {Alex Couture-Beil}, - year = {2013}, - note = {\R package version 0.2.13}, - url = {http://CRAN.R-project.org/package=rjson}, -} - -@Manual{jsonlite, - title = {{\pkg{jsonlite}: A Smarter \JSON Encoder for \R}}, - author = {Jeroen Ooms and Duncan Temple Lang and Jonathan Wallace}, - note = {\R package version 0.9.8}, - url = {http://github.com/jeroenooms/jsonlite#readme}, - year = {2014} -} - -@misc{crockford2006application, - author="D. 
Crockford", - title="{The \texttt{application/json} Media Type for \JavaScript Object Notation (\JSON)}", - series="Request for Comments", - number="4627", - howpublished="RFC 4627 (Informational)", - publisher="IETF", - organization="Internet Engineering Task Force", - year=2006, - month=jul, - note="Obsoleted by RFCs 7158, 7159", - url="http://www.ietf.org/rfc/rfc4627.txt", -} - -@article{ecma1999262, - title={{\proglang{ECMAScript} Language Specification}}, - author={{Ecma International}}, - journal={{European Association for Standardizing Information and Communication Systems}}, - year={1999}, - url={http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-262.pdf} -} - -@article{tidydata, - title={{Tidy Data}}, - author={Wickham, Hadley}, - journal={Under review}, - year={2014}, - url={http://vita.had.co.nz/papers/tidy-data.pdf} -} - -@inproceedings{crockford2006json, - title={{JSON: The Fat-free Alternative to XML}}, - author={Crockford, Douglas}, - booktitle={Proceedings of XML}, - volume={2006}, - year={2006}, - url={http://www.json.org/fatfree.html} -} - -@book{nolan2014xml, - title={XML and Web Technologies for Data Sciences with \R}, - author={Nolan, Deborah and Temple Lang, Duncan}, - year={2014}, - publisher={Springer-Verlag}, - url={http://link.springer.com/book/10.1007/978-1-4614-7900-0} -} \ No newline at end of file diff --git a/src/library/pkgcache.patch b/src/library/pkgcache.patch new file mode 100644 index 000000000..98cbe6c40 --- /dev/null +++ b/src/library/pkgcache.patch @@ -0,0 +1,21 @@ +diff --git a/src/library/pkgcache/R/compat-vctrs.R b/src/library/pkgcache/R/compat-vctrs.R +index 34860cf4..51681766 100644 +--- a/src/library/pkgcache/R/compat-vctrs.R ++++ b/src/library/pkgcache/R/compat-vctrs.R +@@ -1,4 +1,4 @@ +- ++# nocov start + compat_vctrs <- local({ + + # Modified from https://github.com/r-lib/rlang/blob/master/R/compat-vctrs.R +--- a/src/library/pkgcache/R/metadata-cache.R ++++ b/src/library/pkgcache/R/metadata-cache.R +@@ 
-389,7 +389,7 @@ cmc_cleanup <- function(self, private, force) { + } + + local_cache_dir <- private$get_cache_files("replica") +- unlink(local_cache_dir, recursive = TRUE, force = TRUE) ++ unlink(local_cache_dir$root, recursive = TRUE, force = TRUE) + private$data <- NULL + private$data_messaged <- NULL + cli::cli_alert_info("Cleaning up cache directory {.path {cache_dir}}.") diff --git a/src/library/pkgcache/R/compat-vctrs.R b/src/library/pkgcache/R/compat-vctrs.R index 34860cf4a..516817664 100644 --- a/src/library/pkgcache/R/compat-vctrs.R +++ b/src/library/pkgcache/R/compat-vctrs.R @@ -1,4 +1,4 @@ - +# nocov start compat_vctrs <- local({ # Modified from https://github.com/r-lib/rlang/blob/master/R/compat-vctrs.R diff --git a/src/library/pkgcache/R/metadata-cache.R b/src/library/pkgcache/R/metadata-cache.R index f7cede07d..368070cb0 100644 --- a/src/library/pkgcache/R/metadata-cache.R +++ b/src/library/pkgcache/R/metadata-cache.R @@ -389,7 +389,7 @@ cmc_cleanup <- function(self, private, force) { } local_cache_dir <- private$get_cache_files("replica") - unlink(local_cache_dir, recursive = TRUE, force = TRUE) + unlink(local_cache_dir$root, recursive = TRUE, force = TRUE) private$data <- NULL private$data_messaged <- NULL cli::cli_alert_info("Cleaning up cache directory {.path {cache_dir}}.") diff --git a/src/library/pkgcache/R/platform.R b/src/library/pkgcache/R/platform.R index fa909acd4..c7de8360b 100644 --- a/src/library/pkgcache/R/platform.R +++ b/src/library/pkgcache/R/platform.R @@ -1,4 +1,3 @@ - #' R platforms #' #' @details @@ -57,7 +56,6 @@ #' @export #' @examples #' current_r_platform() - current_r_platform <- function() { current_r_platform_data()$platform } @@ -92,16 +90,20 @@ forced_platform <- function() { stop("The `pkg.current_platform` option must be a string scalar.") } if (!valid_platform_string(opt)) { - stop("The pkg.current_platform` option must be a valid platform ", - "triple: `cpu-vendor-os`. 
\"", opt, "\" is not.") + stop( + "The pkg.current_platform` option must be a valid platform ", + "triple: `cpu-vendor-os`. \"", opt, "\" is not." + ) } return(opt) } env <- Sys.getenv("PKG_CURRENT_PLATFORM") if (env != "") { if (is.na(env) || !valid_platform_string(env)) { - stop("The `PKG_CURRENT_PLATFORM` environment variable must be a valid ", - "platform triple: \"cpu-vendor-os\". \"", env, "\" is not.") + stop( + "The `PKG_CURRENT_PLATFORM` environment variable must be a valid ", + "platform triple: \"cpu-vendor-os\". \"", env, "\" is not." + ) } return(env) } @@ -124,7 +126,6 @@ get_platform <- function(forced = TRUE) { #' @export #' @examples #' default_platforms() - default_platforms <- function() { unique(c(current_r_platform(), "source")) } @@ -144,8 +145,10 @@ parse_platform <- function(x) { get_cran_extension <- function(platform) { res <- rep(NA_character_, length(platform)) res[platform == "source"] <- ".tar.gz" - res[platform %in% c("windows", "i386+x86_64-w64-mingw32", - "x86_64-w64-mingw32", "i386-w64-mingw32")] <- ".zip" + res[platform %in% c( + "windows", "i386+x86_64-w64-mingw32", + "x86_64-w64-mingw32", "i386-w64-mingw32" + )] <- ".zip" res[platform == "macos"] <- ".tgz" dtl <- parse_platform(platform) @@ -167,7 +170,10 @@ get_all_package_dirs <- function(platforms, rversions) { rversion = character(), contriburl = character() ) - res <- lapply(res, function(x) { colnames(x) <- names(empty); x }) + res <- lapply(res, function(x) { + colnames(x) <- names(empty) + x + }) res <- c(list(empty), res) mat <- do.call(rbind, c(res, list(stringsAsFactors = FALSE))) @@ -190,17 +196,17 @@ get_package_dirs_for_platform <- function(pl, minors) { if (pl == "source") { return(cbind("source", "*", "src/contrib")) - } - if (pl %in% c("x86_64-w64-mingw32", "i386-w64-mingw32", - "i386+x86_64-w64-mingw32")) { + if (pl %in% c( + "x86_64-w64-mingw32", "i386-w64-mingw32", + "i386+x86_64-w64-mingw32" + )) { return(cbind( pl, minors, paste0("bin/windows/contrib/", 
minors) )) - } if (pl == "windows") { @@ -222,7 +228,7 @@ get_package_dirs_for_platform <- function(pl, minors) { } else { "x86_64" } - rpl <- rpl[prpl$cpu == target_cpu,, drop = FALSE ] + rpl <- rpl[prpl$cpu == target_cpu, , drop = FALSE] if (nrow(rpl)) { cbind(rpl$platform, v, paste0( "bin/macosx/", @@ -233,12 +239,11 @@ get_package_dirs_for_platform <- function(pl, minors) { } }) return(do.call(rbind, res1)) - } ## Which R versions match this platform on CRAN? mcp <- macos_cran_platforms - cranmrv <- mcp[mcp$platform == pl & mcp$rversion %in% minors,] + cranmrv <- mcp[mcp$platform == pl & mcp$rversion %in% minors, ] rbind( if (nrow(cranmrv)) { @@ -258,7 +263,7 @@ macos_cran_platforms <- read.table( header = TRUE, stringsAsFactors = FALSE, textConnection( - "rversion platform subdir + "rversion platform subdir 3.1.3 x86_64-apple-darwin10.8.0 mavericks 3.2.0 x86_64-apple-darwin13.4.0 mavericks 3.2.1 x86_64-apple-darwin13.4.0 mavericks @@ -299,7 +304,9 @@ macos_cran_platforms <- read.table( 4.3.0 aarch64-apple-darwin20 big-sur-arm64 4.4.0 x86_64-apple-darwin20 big-sur-x86_64 4.4.0 aarch64-apple-darwin20 big-sur-arm64 -")) +" + ) +) # For now we only use the minor version number, because the CRAN OS version # does not change for a patch version. 
@@ -315,7 +322,7 @@ macos_cran_platforms$rversion <- get_minor_r_version( macos_cran_platforms <- unique(macos_cran_platforms) get_cran_macos_platform <- function(v) { - macos_cran_platforms[macos_cran_platforms$rversion %in% v,,drop = FALSE] + macos_cran_platforms[macos_cran_platforms$rversion %in% v, , drop = FALSE] } #' Query the default CRAN repository for this session @@ -332,7 +339,6 @@ get_cran_macos_platform <- function(v) { #' @export #' @examples #' default_cran_mirror() - default_cran_mirror <- function() { mirror <- getOption("repos")["CRAN"] if (is.null(mirror) || is.na(mirror) || mirror == "@CRAN@") { @@ -362,7 +368,6 @@ default_cran_mirror <- function() { #' bioc_version() #' bioc_version("4.0") #' bioc_version("4.1") - bioc_version <- function(r_version = getRversion(), forget = FALSE) { bioconductor$get_bioc_version(r_version, forget) } @@ -382,7 +387,6 @@ bioc_version <- function(r_version = getRversion(), forget = FALSE) { #' @export #' @examplesIf pkgcache:::run_examples() #' bioc_version_map() - bioc_version_map <- function(forget = FALSE) { as_data_frame(bioconductor$get_version_map(forget)) } @@ -397,7 +401,6 @@ bioc_version_map <- function(forget = FALSE) { #' @export #' @examplesIf pkgcache:::run_examples() #' bioc_devel_version() - bioc_devel_version <- function(forget = FALSE) { bioconductor$get_devel_version(forget) } @@ -412,7 +415,6 @@ bioc_devel_version <- function(forget = FALSE) { #' @export #' @examplesIf pkgcache:::run_examples() #' bioc_release_version() - bioc_release_version <- function(forget = FALSE) { bioconductor$get_release_version(forget) } @@ -433,7 +435,6 @@ bioc_release_version <- function(forget = FALSE) { #' @export #' @examplesIf pkgcache:::run_examples() #' bioc_repos() - bioc_repos <- function(bioc_version = "auto", forget = FALSE) { bioconductor$get_repos(bioc_version, forget) } diff --git a/src/library/pkgcache/R/utils.R b/src/library/pkgcache/R/utils.R index 20ef2438a..ad1f5814e 100644 --- 
a/src/library/pkgcache/R/utils.R +++ b/src/library/pkgcache/R/utils.R @@ -1,4 +1,3 @@ - repoman_data <- new.env(parent = emptyenv()) `%||%` <- function(l, r) if (is.null(l)) r else l @@ -31,13 +30,16 @@ mapx <- function(...) { lens <- setdiff(unique(viapply(data, length)), 1L) if (any(lens == 0)) { - data <- lapply(data, function(x) { length(x) <- 0; x }) + data <- lapply(data, function(x) { + length(x) <- 0 + x + }) lens <- 0 } if (length(lens) > 1) { stop( "Incompatible data lengths in `mapx()`: ", - paste(lens, collapse = ", ") + paste(lens, collapse = ", ") ) } @@ -47,8 +49,8 @@ mapx <- function(...) { ) } -lapply_rows <- function(df, fun, ...) { - lapply(seq_len(nrow(df)), function(i) fun(df[i,], ...)) +lapply_rows <- function(df, fun, ...) { + lapply(seq_len(nrow(df)), function(i) fun(df[i, ], ...)) } zip_vecs <- function(...) { @@ -86,16 +88,12 @@ interpret_dependencies <- function(dp) { res <- if (isTRUE(dp)) { list(c(hard, "Suggests"), hard) - } else if (identical(dp, FALSE)) { list(character(), character()) - } else if (is_na_scalar(dp)) { list(hard, hard) - } else if (is.list(dp) && all(names(dp) == c("direct", "indirect"))) { dp - } else { list(dp, dp) } @@ -111,7 +109,7 @@ interpret_dependencies <- function(dp) { base_packages <- function() { if (is.null(repoman_data$base_packages)) { repoman_data$base_packages <- - parse_installed(.Library, priority="base")$Package + parse_installed(.Library, priority = "base")$Package } repoman_data$base_packages } @@ -135,8 +133,8 @@ is_na_scalar <- function(x) { identical(x, NA) } -drop_nulls <- function(x) { - x[! vlapply(x, is.null)] +drop_nulls <- function(x) { + x[!vlapply(x, is.null)] } null2na <- function(x) { @@ -151,7 +149,7 @@ shasum256 <- function(x) { cli::hash_file_sha256(x) } -file.size <- function (...) { +file.size <- function(...) 
{ file.info(...)$size } @@ -170,7 +168,7 @@ run_examples <- function() { # If this is not a check, then OK TRUE } else if (identical(Sys.getenv("NOT_CRAN"), "true") && - isTRUE(as.logical(Sys.getenv("CI")))) { + isTRUE(as.logical(Sys.getenv("CI")))) { # If NOT_CRAN is set and we are on the CI, then we run examples TRUE } else { @@ -206,7 +204,7 @@ gzip_decompress <- function(from, chunk_size = 5 * 1000 * 1000) { pieces <- list() while (1) { pieces[[length(pieces) + 1]] <- readBin(con, what = "raw", n = chunk_size) - if (length(pieces[[length(pieces)]]) == 0) break; + if (length(pieces[[length(pieces)]]) == 0) break } do.call("c", pieces) } diff --git a/src/library/pkgsearch.patch b/src/library/pkgsearch.patch new file mode 100644 index 000000000..62410a92f --- /dev/null +++ b/src/library/pkgsearch.patch @@ -0,0 +1,10 @@ +diff --git a/src/library/pkgsearch/R/compat-vctrs.R b/src/library/pkgsearch/R/compat-vctrs.R +index 49dd03f9..09f68ba1 100644 +--- a/src/library/pkgsearch/R/compat-vctrs.R ++++ b/src/library/pkgsearch/R/compat-vctrs.R +@@ -1,4 +1,4 @@ +- ++# nocov start + compat_vctrs <- local({ + + # Modified from https://github.com/r-lib/rlang/blob/main/R/compat-vctrs.R diff --git a/src/library/pkgsearch/R/api.R b/src/library/pkgsearch/R/api.R index 9d8e7c869..11f7d10f1 100644 --- a/src/library/pkgsearch/R/api.R +++ b/src/library/pkgsearch/R/api.R @@ -1,4 +1,3 @@ - ## ---------------------------------------------------------------------- s_data <- new.env(parent = emptyenv()) @@ -62,11 +61,11 @@ s_data <- new.env(parent = emptyenv()) #' # See the underlying data frame #' ps("ropensci") #' ps()[] - pkg_search <- function(query = NULL, format = c("short", "long"), from = 1, size = 10) { - - if (is.null(query)) return(pkg_search_again()) + if (is.null(query)) { + return(pkg_search_again()) + } format <- match.arg(format) server <- Sys.getenv("R_PKG_SEARCH_SERVER", "search.r-pkg.org") port <- as.integer(Sys.getenv("R_PKG_SEARCH_PORT", "80")) @@ -80,12 +79,15 @@ 
pkg_search <- function(query = NULL, format = c("short", "long"), ps <- pkg_search make_pkg_search <- function(query, format, from, size, server, port) { - qry <- make_query(query = query) - rsp <- do_query(qry, server = server, port = port, from = from, - size = size) - rst <- format_result(rsp, query = query, format = format, from = from, - size = size, server = server, port = port) + rsp <- do_query(qry, + server = server, port = port, from = from, + size = size + ) + rst <- format_result(rsp, + query = query, format = format, from = from, + size = size, server = server, port = port + ) s_data$prev_q <- list(type = "simple", result = rst) @@ -111,7 +113,6 @@ more <- function(format = NULL, size = NULL) { server = meta(rst)$server, port = meta(rst)$port ) - } else if (s_data$prev_q$type == "advanced") { advanced_search( json = meta(rst)$qstr, @@ -119,18 +120,18 @@ more <- function(format = NULL, size = NULL) { from = meta(rst)$from + meta(rst)$size, size = size %||% meta(rst)$size ) - } else { throw(new_error("Unknown search type, internal pkgsearch error :(")) } } make_query <- function(query) { - check_string(query) - fields <- c("Package^20", "Title^10", "Description^2", - "Author^5", "Maintainer^6", "_all") + fields <- c( + "Package^20", "Title^10", "Description^2", + "Author^5", "Maintainer^6", "_all" + ) query_object <- list( query = list( @@ -140,36 +141,36 @@ make_query <- function(query) { field_value_factor = list( field = "revdeps", modifier = "sqrt", - factor = 1) + factor = 1 + ) ) ), - query = list( bool = list( ## This is simply word by work match, scores add up for fields must = list( list(multi_match = list( - query = query, - type = "most_fields" - )) + query = query, + type = "most_fields" + )) ), should = list( ## This is matching the complete phrase, so it takes priority list(multi_match = list( - query = query, - fields = c("Title^10", "Description^2", "_all"), - type = "phrase", - analyzer = "english_and_synonyms", - boost = 10 - )), + 
query = query, + fields = c("Title^10", "Description^2", "_all"), + type = "phrase", + analyzer = "english_and_synonyms", + boost = 10 + )), ## This is if all words match (but not as a phrase) list(multi_match = list( - query = query, - fields = fields, - operator = "and", - analyzer = "english_and_synonyms", - boost = 5 - )) + query = query, + fields = fields, + operator = "and", + analyzer = "english_and_synonyms", + boost = 5 + )) ) ) ) @@ -184,7 +185,6 @@ make_query <- function(query) { } do_query <- function(query, server, port, from, size) { - check_count(from) check_count(size) @@ -192,8 +192,10 @@ do_query <- function(query, server, port, from, size) { "/package/_search?from=" %+% as.character(from - 1) %+% "&size=" %+% as.character(size) result <- http_post( - url, body = query, - headers = c("Content-Type" = "application/json")) + url, + body = query, + headers = c("Content-Type" = "application/json") + ) chain_error( http_stop_for_status(result), new_query_error(result, "search server failure") @@ -220,15 +222,18 @@ print.pkgsearch_query_error <- function(x, ...) { err$print_this(x, ...) # error message from Elastic, if any - tryCatch({ - rsp <- x$response - cnt <- jsonlite::fromJSON(rawToChar(rsp$content), simplifyVector = FALSE) - if ("error" %in% names(cnt) && + tryCatch( + { + rsp <- x$response + cnt <- jsonlite::fromJSON(rawToChar(rsp$content), simplifyVector = FALSE) + if ("error" %in% names(cnt) && "root_cause" %in% names(cnt$error) && "reason" %in% names(cnt$error$root_cause[[1]])) { - cat("", cnt$error$root_cause[[1]]$reason, "", sep = "\n") - } - }, error = function(x) NULL) + cat("", cnt$error$root_cause[[1]]$reason, "", sep = "\n") + } + }, + error = function(x) NULL + ) # parent error(s) err$print_parents(x, ...) 
diff --git a/src/library/pkgsearch/R/compat-vctrs.R b/src/library/pkgsearch/R/compat-vctrs.R index 49dd03f93..09f68ba13 100644 --- a/src/library/pkgsearch/R/compat-vctrs.R +++ b/src/library/pkgsearch/R/compat-vctrs.R @@ -1,4 +1,4 @@ - +# nocov start compat_vctrs <- local({ # Modified from https://github.com/r-lib/rlang/blob/main/R/compat-vctrs.R diff --git a/tests/common.R b/tests/common.R new file mode 100644 index 000000000..2f7b5044b --- /dev/null +++ b/tests/common.R @@ -0,0 +1,132 @@ +options( + cranyon.enabled = FALSE, + cli.hyperlink = FALSE, + cli.hyperlink_run = FALSE, + cli.hyperlink_help = FALSE, + cli.hyperlink_vignette = FALSE, + cli.dynamic = FALSE, + cli.unicode = FALSE, + cli.condition_width = Inf, + cli.num_colors = 1L, + useFancyQuotes = FALSE, + lifecycle_verbosity = "warning", + OutDec = ".", + rlang_interactive = FALSE, + max.print = 99999 +) +Sys.unsetenv("RSTUDIO") + + +if (Sys.getenv("PAK_EXTRA_TESTS") != "true") { + stop("Set PAK_EXTRA_TESTS=true to run these tests") +} + +attach(asNamespace("pak"), name = "pak-internals") +library(pak) + +if (Sys.which("docker") == "") { + stop("No 'docker', giving up now") +} + +cnt <- system2("docker", c("ps", "-q", "--filter", "name=fake"), stdout = TRUE) +if (length(cnt) == 1) { + message("Fake container already running") +} else { + message("Starting fake container") + system2("docker", c("rm", "fake")) + if (system2("docker", c( + "run", "-d", "-p", "3100:3100", "-p", "3101:3101", + "-p", "3102:3102", "-p", "3103:3103", "-p", "3104:3104", + "-p", "3105:3105", "-p", "3106:3106", "--name", "fake", "fake" + )) != 0) { + stop("Could not start docker container") + } +} + +Sys.setenv("R_PKG_CACHE_DIR" = tempfile()) + +test_that <- function(label, code) { + tryCatch( + withCallingHandlers( + { + code + message(" OK ") + }, + error = function(cnd) { + message(" FAIL") + } + ), + skip = function(cnd) { + message(" SKIP: ", conditionMessage(cnd)) + } + ) +} + +expect_equal <- function(object, expected) { + 
stopifnot(all.equal(object, expected)) +} + +expect_true <- function(expr) { + stopifnot(isTRUE(expr)) +} + +expect_false <- function(expr) { + stopifnot(identical(expr, FALSE)) +} + +expect_snapshot <- function(x, error = FALSE, transform = NULL) { + if (error) { + try(x) + } else { + x + } +} + +skip_on_cran <- function(...) { + # do nothing +} + +skip <- function(message = "Skipping") { + message <- paste0(message, collapse = "\n") + cond <- structure(list(message = paste0("Reason: ", message)), + class = c("skip", "condition") + ) + stop(cond) +} + +setup_fake_apps <- function(...) { + options( + repos = c(CRAN = "http://127.0.0.1:3100"), + cran_metadata_url = "http://127.0.0.1:3100" + ) + Sys.setenv( + R_PKG_CRAN_METADATA_URL = "http://127.0.0.1:3100", + R_BIOC_CONFIG_URL = "http://127.0.0.1:3101/config.yaml", + R_BIOC_MIRROR = "http://127.0.0.1:3101", + R_PKG_GITHUB_API_URL = "http://127.0.0.1:3105" + ) + Sys.unsetenv("R_BIOC_VERSION") +} + +stub <- function(...) { + # need to skip for now + cond <- structure( + list(message = "no mockery"), + class = c("skip", "condition") + ) + stop(cond) +} + +expect_error <- function(object, regexp, ...) 
{ + tryCatch( + object, + error = function(cnd) { + stopifnot(grepl(regexp, conditionMessage(cnd), ...)) + } + ) +} + +local_tempdir <- function() { + dir.create(tmp <- tempfile(), showWarnings = FALSE, recursive = TRUE) + tmp +} diff --git a/tests/test-cache.R b/tests/test-cache.R new file mode 100644 index 000000000..e1582b85e --- /dev/null +++ b/tests/test-cache.R @@ -0,0 +1,2 @@ +source("common.R") +source("testthat/test-cache.R") diff --git a/tests/test-confirmation-1.R b/tests/test-confirmation-1.R new file mode 100644 index 000000000..34a12a0c2 --- /dev/null +++ b/tests/test-confirmation-1.R @@ -0,0 +1,2 @@ +source("common.R") +source("testthat/test-confirmation-1.R") diff --git a/tests/testthat.R b/tests/testthat.R index 65d5e8e27..b33c52c67 100644 --- a/tests/testthat.R +++ b/tests/testthat.R @@ -1,4 +1,3 @@ - do <- function() { old <- Sys.getenv("R_PKG_CACHE_DIR", NA_character_) if (is.na(old)) { @@ -13,6 +12,7 @@ do <- function() { test_check("pak", reporter = "summary") } -if (Sys.getenv("NOT_CRAN") == "true") { +if (Sys.getenv("NOT_CRAN") == "true" && + Sys.getenv("PAK_TESTS", "true") != "false") { do() } diff --git a/tests/testthat/_snaps/confirmation-1.md b/tests/testthat/_snaps/confirmation-1.md new file mode 100644 index 000000000..3ebc7e0eb --- /dev/null +++ b/tests/testthat/_snaps/confirmation-1.md @@ -0,0 +1,143 @@ +# print_install_details + + Code + print_install_details(sol, local, character()) + Message + + > Will install 2 packages. + > Will download 2 CRAN packages (). + + pkg1 1.0.0 [bld][dl] () + + pkg2 1.0.0 [bld][dl] () + +--- + + Code + print_install_details(sol, local, character()) + Message + + > Will update 1 package. + > Will download 1 CRAN package (). + + pkg1 0.0.0 > 1.0.0 [bld][dl] () + +--- + + Code + print_install_details(sol, local, character()) + Message + + > Will install 1 package. + > The package () is cached. 
+ + pkg1 1.0.0 [bld] + +--- + + Code + print_install_details(sol, local, character()) + Message + + > Will install 2 packages. + > All 2 packages () are cached. + + pkg1 1.0.0 [bld] + + pkg2 1.0.0 [bld] + +--- + + Code + print_install_details(sol, local, character()) + Message + + > Will install 1 package. + > Will download 1 package with unknown size. + + pkgu 1.0.0 [bld][dl] + +--- + + Code + print_install_details(sol, local, character()) + Message + + > Will install 3 packages. + > Will download 1 package (), cached: 2 (). + + pkg1 1.0.0 [bld] + + pkg2 1.0.0 [bld] + + pkg3 1.0.0 [bld][dl] () + +--- + + Code + print_install_details(sol, local, character()) + Message + + > Will install 4 packages. + > Will download 1 CRAN package (), cached: 2 (). + > Will download 1 package with unknown size. + + pkg1 1.0.0 [bld] + + pkg2 1.0.0 [bld] + + pkg3 1.0.0 [bld][dl] () + + pkgu 1.0.0 [bld][dl] + +# get_answer + + Code + res <- get_answer(c("this", "bar")) + Output + ? Your choice [this]: foo + ? Your choice [this]: bar + +# offer_restart + + Code + offer_restart() + Message + + ! pak had to unload some packages before installation, and the + current R session may be unstable. It is best to restart R now. + + +--- + + Code + offer_restart() + Message + + ! pak had to unload some packages before installation, and the + current R session may be unstable. It is best to restart R now. + + 1. Restart R without saving data. + 2. Save data to `.RData` and restart R. + 3. Do not restart R. + + Output + [1] "restart" + +--- + + Code + offer_restart() + Message + + ! pak had to unload some packages before installation, and the + current R session may be unstable. It is best to restart R now. + + 1. Restart R without saving data. + 2. Save data to `.RData` and restart R. + 3. Do not restart R. + + Saving workspace to .RData... + Output + [1] "save-restart" + +--- + + Code + expect_equal(offer_restart(), "OK") + Message + + ! 
pak had to unload some packages before installation, and the + current R session may be unstable. It is best to restart R now. + + 1. Restart R without saving data. + 2. Save data to `.RData` and restart R. + 3. Do not restart R. + + diff --git a/tests/testthat/_snaps/confirmation-2.md b/tests/testthat/_snaps/confirmation-2.md new file mode 100644 index 000000000..36135e831 --- /dev/null +++ b/tests/testthat/_snaps/confirmation-2.md @@ -0,0 +1,27 @@ +# print_sysreqs_details + + Code + print_sysreqs_details(prop) + Message + v All system requirements are already installed. + Output + NULL + +--- + + Code + print_sysreqs_details(prop) + Message + > Will install 1 system package: + Output + NULL + +--- + + Code + print_sysreqs_details(prop) + Message + x Missing 1 system package. You'll probably need to install it manually: + Output + NULL + diff --git a/tests/testthat/_snaps/confirmation-3.md b/tests/testthat/_snaps/confirmation-3.md new file mode 100644 index 000000000..cc24f90b0 --- /dev/null +++ b/tests/testthat/_snaps/confirmation-3.md @@ -0,0 +1,11 @@ +# print_install_details, warn_for_loaded_packages on windows + + Code + print_install_details(sol, local, character()) + Message + + > Will install 2 packages. + > Will download 2 CRAN packages (). 
+ + pkg1 1.0.0 [bld][dl] () + + pkg2 1.0.0 [bld][dl] () + diff --git a/tests/testthat/_snaps/deps-explain.md b/tests/testthat/_snaps/deps-explain.md new file mode 100644 index 000000000..5b26339f7 --- /dev/null +++ b/tests/testthat/_snaps/deps-explain.md @@ -0,0 +1,14 @@ +# pkg_deps_explain + + Code + pkg_deps_explain("pkg3", "pkg1") + Output + pkg3 -> pkg2 -> pkg1 + +--- + + Code + pkg_deps_explain("pkg1", "pkg3") + Output + x pkg3 + diff --git a/tests/testthat/_snaps/docs.md b/tests/testthat/_snaps/docs.md new file mode 100644 index 000000000..b5acc7aa8 --- /dev/null +++ b/tests/testthat/_snaps/docs.md @@ -0,0 +1,159 @@ +# doc_config + + Code + writeLines(doc_config()) + Output + \itemize{\item \sQuote{build_vignettes}: (Env var: \code{PKG_BUILD_VIGNETTES}, option: \code{pkg.build_vignettes}.) Whether to build vignettes for package trees. + This is only used if the package is obtained from a package tree, + and not from a source (or binary) package archive. By default + vignettes are not built in this case. If you set this to \code{TRUE}, + then you need to make sure that the vignette builder packages are + available, as these are not installed by default currently. + \item \sQuote{cache_dir}: (Env var: \code{PKG_CACHE_DIR}, option: \code{pkg.cache_dir}.) Directory to download the packages to. Defaults to a temporary + directory within the R session temporary directory, see + \code{\link[base:tempfile]{base::tempdir()}}. + \item \sQuote{cran_mirror}: (Env var: \code{PKG_CRAN_MIRROR}, option: \code{pkg.cran_mirror}.) CRAN mirror to use. Defaults to the \code{repos} option + (see \code{\link[base:options]{base::options()}}), if that's not set then + \verb{https://cran.rstudio.com}. See also \code{\link[pak:repo_add]{pak::repo_add()}} and + \code{\link[pak:repo_get]{pak::repo_get()}} + \item \sQuote{include_linkingto}: (Env var: \code{PKG_INCLUDE_LINKINGTO}, option: \code{pkg.include_linkingto}.) 
Whether to always include \code{LinkingTo} dependencies in the solution + of and installation, even if they are needed because the packages + are installed from binaries. This is sometimes useful, see e.g. + \url{https://github.com/r-lib/pak/issues/485} for an example use case. + \item \sQuote{library}: (Env var: \code{PKG_LIBRARY}, option: \code{pkg.library}.) Package library to install packages to. It is also used for + already installed packages when considering dependencies. + \item \sQuote{metadata_cache_dir}: (Env var: \code{PKG_METADATA_CACHE_DIR}, option: \code{pkg.metadata_cache_dir}.) Location of metadata replica of + \code{\link[pkgcache:cranlike_metadata_cache]{pkgcache::cranlike_metadata_cache}}. Defaults to a temporary + directory within the R session temporary directory, see + \code{\link[base:tempfile]{base::tempdir()}}. + \item \sQuote{metadata_update_after}: (Env var: \code{PKG_METADATA_UPDATE_AFTER}, option: \code{pkg.metadata_update_after}.) A time interval as a \link{difftime} object. pak will update the + metadata cache if it is older than this. The default is one day. + The \code{PKG_METADATA_UPDATE_AFTER} environment variable may be set + in seconds (\code{s} suffix), minutes (\code{m} suffix), hours (\code{h} suffix), + or days (\code{d} suffix). E.g: \verb{1d} means one day. + \item \sQuote{package_cache_dir}: (Env var: \code{PKG_PACKAGE_CACHE_DIR}, option: \code{pkg.package_cache_dir}.) Location of the package cache on the disk. See + \code{\link[pak:cache]{pak::cache_summary()}}. Default is selected by pkgcache. + \item \sQuote{platforms}: (Env var: \code{PKG_PLATFORMS}, option: \code{pkg.platforms}.) Character vector of platforms to \emph{download} or \emph{install} packages + for. See \code{\link[pkgdepends:default_platforms]{pkgdepends::default_platforms()}} for possible platform + names. Defaults to the platform of the current R session, plus + \code{"source"}. 
+ \item \sQuote{r_versions}: (Env var: \code{PKG_R_VERSIONS}, option: \code{pkg.r_versions}.) Character vector, R versions to download or install + packages for. It defaults to the current R version. + \item \sQuote{sysreqs}: (Env var: \code{PKG_SYSREQS}, option: \code{pkg.sysreqs}.) Whether to automatically look up and install system requirements. + If \code{TRUE}, then pkgdepends will try to install required + system packages. If \code{FALSE}, then system requirements are still + printed (including OS packages on supported platforms), but they + are not installed. + By default it is \code{TRUE} on supported platforms, + if the current user is the root user or password-less \code{sudo} is + configured for the current user. + \item \sQuote{sysreqs_db_update}: (Env var: \code{PKG_SYSREQS_DB_UPDATE}, option: \code{pkg.sysreqs_db_update}.) Whether to try to update the system requirements database from + GitHub. If the update fails, then the cached or the build-in + database if used. Defaults to TRUE. + \item \sQuote{sysreqs_db_update_timeout}: (Env var: \code{PKG_SYSREQS_DB_UPDATE_TIMEOUT}, option: \code{pkg.sysreqs_db_update_timeout}.) Timeout for the system requirements database update. + Defaults to five seconds. + \item \sQuote{sysreqs_dry_run}: (Env var: \code{PKG_SYSREQS_DRY_RUN}, option: \code{pkg.sysreqs_dry_run}.) If \code{TRUE}, then pak only prints the system commands to + install system requirements, but does not execute them. + \item \sQuote{sysreqs_platform}: (Env var: \code{PKG_SYSREQS_PLATFORM}, option: \code{pkg.sysreqs_platform}.) The platform to use for system requirements lookup. On Linux, where + system requirements are currently supported, it must be a string + containing the distribution name and release, separated by a dash. + E.g.: \code{"ubuntu-22.04"}, or \code{"rhel-9"}. + \item \sQuote{sysreqs_rspm_repo_id}: (Env var: \code{PKG_SYSREQS_RSPM_REPO_ID}, option: \code{pkg.sysreqs_rspm_repo_id}.) 
Posit Package Manager (formerly RStudio Package Manager) repository + id to use for CRAN system requirements lookup. Defaults to the + \code{RSPM_REPO_ID} environment variable, if set. If not set, then it + defaults to \code{1}. + \item \sQuote{sysreqs_rspm_url}: (Env var: \code{PKG_SYSREQS_RSPM_URL}, option: \code{pkg.sysreqs_rspm_url}.) Root URL of Posit Package Manager (formerly RStudio Package + Manager) for system requirements lookup. By default the \code{RSPM_ROOT} + environment variable is used, if set. If not set, + it defaults to \verb{https://packagemanager.posit.co}. + \item \sQuote{sysreqs_sudo}: (Env var: \code{PKG_SYSREQS_SUDO}, option: \code{pkg.sysreqs_sudo}.) Whether to use \code{sudo} to install system requirements, + on Unix. By default it is \code{TRUE} on Linux if the effective user id + of the current process is not the \code{root} user. + \item \sQuote{sysreqs_update}: (Env var: \code{PKG_SYSREQS_UPDATE}, option: \code{pkg.sysreqs_update}.) Whether to try to update system packages that are already installed. + It defaults to \code{TRUE} on CI systems: if the \code{CI} environment + variable is set to \code{true}. + \item \sQuote{sysreqs_verbose}: (Env var: \code{PKG_SYSREQS_VERBOSE}, option: \code{pkg.sysreqs_verbose}.) Whether to echo the output of system requirements installation. + Defaults to \code{TRUE} if the \code{CI} environment variable is set. + \item \sQuote{use_bioconductor}: (Env var: \code{PKG_USE_BIOCONDUCTOR}, option: \code{pkg.use_bioconductor}.) Whether to automatically use the Bioconductor repositories. + Defaults to \code{TRUE}. + \item \sQuote{windows_archs}: (Env var: \code{PKG_WINDOWS_ARCHS}, option: \code{pkg.windows_archs}.) Character scalar specifying which architectures + to download/install for on Windows. Its possible values are: + \itemize{ + \item \code{"prefer-x64"}: Generally prefer x64 binaries. If the current R + session is \code{x64}, then we download/install x64 packages. 
+ (These packages might still be multi-architecture binaries!) + If the current R session is \code{i386}, then we download/install + packages for both architectures. This might mean compiling + packages from source if the binary packages are for \code{x64} only, + like the CRAN Windows binaries for R 4.2.x currently. + \code{"prefer-x64"} is the default for R 4.2.0 and later. + \item \code{"both"}: Always download/install packages for both \code{i386} and + \code{x64} architectures. This might need compilation from source + if the available binaries are for \code{x64} only, like the CRAN + Windows binaries for R 4.2.x currently. \code{"both"} is the default + for R 4.2.0 and earlier. + }} + +# include_docs + + Code + writeLines(include_docs("pkgdepends", "docs/lib-status-return.rds")) + Output + It always has columns: + \itemize{ + \item \code{biocviews}: the corresponding field from \code{DESCRIPTION}, it must be + present for all Bioconductor packages, other packages typically don't + have it. + \item \code{built}: the \code{Built} field from \code{DESCRIPTION}. + \item \code{depends}, \code{suggests}, \code{imports}, \code{linkingto}, \code{enhances}: the corresponding + fields from the \code{DESCRIPTION} files. + \item \code{deps}: A list of data frames, the dependencies of the package. It has + columns: \code{ref}, \code{type} (dependency type in lowercase), \code{package} + (dependent package, or \code{R}), \code{op} and \code{version}, the last two are for the + version requirement. \code{op} can be \code{>=}, \code{>}, \code{==} or \code{<=}, although + only the first one is common in practice. + \item \code{library}: path to the package library containing the package. + \item \code{license}: from \code{DESCRIPTION}. + \item \code{md5sum}: from \code{DESCRIPTION}, typically \code{NA}, except on Windows. + \item \code{needscompilation}: from \code{DESCRIPTION}, this column is logical. + \item \code{package}: package name. 
+ \item \code{platform}: from the \code{Built} field in \code{DESCRIPTION}, the current platform + if missing from \code{DESCRIPTION}. + \item \code{priority}: from \code{DESCRIPTION}, usually \code{base}, \code{recommended}, or missing. + \item \code{ref}: the corresponding \verb{installed::*} package reference. + \item \code{repository}: from \code{DESCRIPTION}. For packages from a CRAN repository this + is \code{CRAN}, some other repositories, e.g. R-universe adds the repository + URL here. + \item \code{repotype}: \code{cran}, \code{bioc} or missing. + \item \code{rversion}: from the \code{Built} field. If no such field, then the current + R version. + \item \code{sysreqs}: the \code{SystemRequirements} field from \code{DESCRIPTION}. + \item \code{title}: package title. + \item \code{type}: always \code{installed}. + \item \code{version}: package version (as string). + } + + Most of these columns are unchanged from \code{DESCRIPTION}, but + pak also adds a couple. + \subsection{Notes:}{ + \itemize{ + \item In addition, it also has all \verb{remote*} and \verb{config/needs/*} entries from + the \code{DESCRIPTION} files. (Case insensitive.) + \item All columns are of type \code{character}, except for \code{needscompilation}, which + is logical and \code{deps}, which is a list column. + \item If an entry is missing for a package, it is set to \code{NA}. + \item Note that column names are lowercase, even if the corresponding entries + are not in \code{DESCRIPTION}. + \item The order of the columns is not deterministic, so don't assume any order. + \item Additional columns might be present, these are internal for + pak and should not be used in user code. 
+ } + } + +# man_config_link + + Code + man_config_link("configuration option") + Output + [1] "\\link[=pak-config]{configuration option}" + diff --git a/tests/testthat/_snaps/extra.md b/tests/testthat/_snaps/extra.md new file mode 100644 index 000000000..681e2cea0 --- /dev/null +++ b/tests/testthat/_snaps/extra.md @@ -0,0 +1,35 @@ +# extra_paalkages + + Code + extra_packages() + Output + [1] "pillar" + +# pak_install_extra + + Code + pak_install_extra() + Message + i installing extra package: `pillar`. + +# load_extra + + Code + load_extra("foobar") + Message + ! Optional package `foobar` is not available for pak. + Use `pak::pak_install_extra()` to install optional packages. + Use `options(pak.no_extra_messages = TRUE)` to suppress this message. + +--- + + Code + load_extra("foobar") + +--- + + Code + load_extra("foobar") + Output + NULL + diff --git a/tests/testthat/_snaps/global-handler.md b/tests/testthat/_snaps/global-handler.md new file mode 100644 index 000000000..223a8ef78 --- /dev/null +++ b/tests/testthat/_snaps/global-handler.md @@ -0,0 +1,56 @@ +# handle_package_not_found + + Code + handle_package_not_found(err = list(package = "foo", lib.loc = "/lib")) + Message + + x Failed to load package foo. Do you want to install it into the default + library at '/lib'? + + 1. Yes, install it. + 2. No, stop now, and I'll handle it myself. + + Output + + NULL + +--- + + Code + handle_package_not_found(err = list(package = "foo", lib.loc = "/lib")) + Message + + x Failed to load package foo. Do you want to install it into the default + library at '/lib'? + + 1. Yes, install it. + 2. No, stop now, and I'll handle it myself. + + Output + + Message + -- start installation ---------------------------------------------------------- + Installing... + -- end installation ------------------------------------------------------------ + + +--- + + Code + handle_package_not_found(err = list(package = "foo", lib.loc = "/lib")) + Message + + x Failed to load package foo. 
Do you want to install it into the default + library at '/lib'? + + 1. Yes, install it, and continue the original computation. + 2. No, stop now, and I'll handle it myself. + + Output + + Message + -- start installation ---------------------------------------------------------- + Installing... + -- end installation ------------------------------------------------------------ + + diff --git a/tests/testthat/_snaps/local.md b/tests/testthat/_snaps/local.md new file mode 100644 index 000000000..3ab91d89e --- /dev/null +++ b/tests/testthat/_snaps/local.md @@ -0,0 +1,52 @@ +# local_deps & co + + Code + local_deps(file.path(dld, "pkg4"))$package + Output + [1] "pkg4" "pkg1" "pkg2" + +--- + + Code + local_dev_deps(file.path(dld, "pkg4"))$package + Output + [1] "pkg4" "pkg1" "pkg2" "pkg3" + +--- + + Code + local_deps_tree(file.path(dld, "pkg4")) + Output + local:://pkg4 1.0.0 [new][bld][dl] (unknown size) + \-pkg2 1.0.0 [new][bld][dl] () + \-pkg1 1.0.0 [new][bld][dl] () + + Key: [new] new | [dl] download | [bld] build + +--- + + Code + local_dev_deps_tree(file.path(dld, "pkg4")) + Output + local:://pkg4 1.0.0 [new][bld][dl] (unknown size) + +-pkg2 1.0.0 [new][bld][dl] () + | \-pkg1 1.0.0 [new][bld][dl] () + \-pkg3 1.0.0 [new][bld][dl] () + \-pkg2 + + Key: [new] new | [dl] download | [bld] build + +--- + + Code + local_deps_explain("pkg1", file.path(dld, "pkg4")) + Output + pkg4 -> pkg2 -> pkg1 + +--- + + Code + local_dev_deps_explain("pkg3", file.path(dld, "pkg4")) + Output + pkg4 -> pkg3 + diff --git a/tests/testthat/_snaps/lockfile.md b/tests/testthat/_snaps/lockfile.md new file mode 100644 index 000000000..d0446e5d1 --- /dev/null +++ b/tests/testthat/_snaps/lockfile.md @@ -0,0 +1,226 @@ +# lockfile_create, lockfile_install + + Code + writeLines(readLines("deps.lock")) + Output + { + "lockfile_version": 1, + "os": "", + "r_version": "", + "platform": "", + "packages": [ + { + "ref": "deps::.", + "package": "pkg4-deps", + "version": "1.0.0", + "type": "deps", + "direct": 
true, + "binary": false, + "dependencies": ["pkg2"], + "vignettes": false, + "needscompilation": false, + "metadata": { + "RemotePkgRef": "deps::.", + "RemoteType": "deps" + }, + "sources": [], + "target": "src/contrib/pkg4-deps_1.0.0.tar.gz", + "platform": "", + "rversion": "*", + "directpkg": true, + "dep_types": ["Depends", "Imports", "LinkingTo"], + "params": [], + "install_args": "", + "sysreqs": "" + }, + { + "ref": "pkg1", + "package": "pkg1", + "version": "1.0.0", + "type": "standard", + "direct": false, + "binary": false, + "dependencies": [], + "vignettes": false, + "needscompilation": false, + "metadata": { + "RemoteType": "standard", + "RemotePkgRef": "pkg1", + "RemoteRef": "pkg1", + "RemoteRepos": "http://127.0.0.1:/", + "RemotePkgPlatform": "source", + "RemoteSha": "1.0.0" + }, + "sources": ["http://127.0.0.1://src/contrib/pkg1_1.0.0.tar.gz", "http://127.0.0.1://src/contrib/Archive/pkg1/pkg1_1.0.0.tar.gz"], + "target": "src/contrib/pkg1_1.0.0.tar.gz", + "platform": "", + "rversion": "*", + "directpkg": false, + "sha256": "", + "filesize": 9999, + "dep_types": ["Depends", "Imports", "LinkingTo"], + "params": [], + "install_args": "", + "repotype": "cran" + }, + { + "ref": "pkg2", + "package": "pkg2", + "version": "1.0.0", + "type": "standard", + "direct": false, + "binary": false, + "dependencies": ["pkg1"], + "vignettes": false, + "needscompilation": false, + "metadata": { + "RemoteType": "standard", + "RemotePkgRef": "pkg2", + "RemoteRef": "pkg2", + "RemoteRepos": "http://127.0.0.1:/", + "RemotePkgPlatform": "source", + "RemoteSha": "1.0.0" + }, + "sources": ["http://127.0.0.1://src/contrib/pkg2_1.0.0.tar.gz", "http://127.0.0.1://src/contrib/Archive/pkg2/pkg2_1.0.0.tar.gz"], + "target": "src/contrib/pkg2_1.0.0.tar.gz", + "platform": "", + "rversion": "*", + "directpkg": false, + "sha256": "", + "filesize": 9999, + "dep_types": ["Depends", "Imports", "LinkingTo"], + "params": [], + "install_args": "", + "repotype": "cran" + } + ] + } + +--- + + Code + 
writeLines(readLines("dev.lock")) + Output + { + "lockfile_version": 1, + "os": "", + "r_version": "", + "platform": "", + "packages": [ + { + "ref": "deps::.", + "package": "pkg4-deps", + "version": "1.0.0", + "type": "deps", + "direct": true, + "binary": false, + "dependencies": ["pkg2"], + "vignettes": false, + "needscompilation": false, + "metadata": { + "RemotePkgRef": "deps::.", + "RemoteType": "deps" + }, + "sources": [], + "target": "src/contrib/pkg4-deps_1.0.0.tar.gz", + "platform": "", + "rversion": "*", + "directpkg": true, + "dep_types": ["Depends", "Imports", "LinkingTo", "Suggests"], + "params": [], + "install_args": "", + "sysreqs": "" + }, + { + "ref": "pkg1", + "package": "pkg1", + "version": "1.0.0", + "type": "standard", + "direct": false, + "binary": false, + "dependencies": [], + "vignettes": false, + "needscompilation": false, + "metadata": { + "RemoteType": "standard", + "RemotePkgRef": "pkg1", + "RemoteRef": "pkg1", + "RemoteRepos": "http://127.0.0.1:/", + "RemotePkgPlatform": "source", + "RemoteSha": "1.0.0" + }, + "sources": ["http://127.0.0.1://src/contrib/pkg1_1.0.0.tar.gz", "http://127.0.0.1://src/contrib/Archive/pkg1/pkg1_1.0.0.tar.gz"], + "target": "src/contrib/pkg1_1.0.0.tar.gz", + "platform": "", + "rversion": "*", + "directpkg": false, + "sha256": "", + "filesize": 9999, + "dep_types": ["Depends", "Imports", "LinkingTo"], + "params": [], + "install_args": "", + "repotype": "cran" + }, + { + "ref": "pkg2", + "package": "pkg2", + "version": "1.0.0", + "type": "standard", + "direct": false, + "binary": false, + "dependencies": ["pkg1"], + "vignettes": false, + "needscompilation": false, + "metadata": { + "RemoteType": "standard", + "RemotePkgRef": "pkg2", + "RemoteRef": "pkg2", + "RemoteRepos": "http://127.0.0.1:/", + "RemotePkgPlatform": "source", + "RemoteSha": "1.0.0" + }, + "sources": ["http://127.0.0.1://src/contrib/pkg2_1.0.0.tar.gz", "http://127.0.0.1://src/contrib/Archive/pkg2/pkg2_1.0.0.tar.gz"], + "target": 
"src/contrib/pkg2_1.0.0.tar.gz", + "platform": "", + "rversion": "*", + "directpkg": false, + "sha256": "", + "filesize": 9999, + "dep_types": ["Depends", "Imports", "LinkingTo"], + "params": [], + "install_args": "", + "repotype": "cran" + }, + { + "ref": "pkg3", + "package": "pkg3", + "version": "1.0.0", + "type": "standard", + "direct": false, + "binary": false, + "dependencies": ["pkg2"], + "vignettes": false, + "needscompilation": false, + "metadata": { + "RemoteType": "standard", + "RemotePkgRef": "pkg3", + "RemoteRef": "pkg3", + "RemoteRepos": "http://127.0.0.1:/", + "RemotePkgPlatform": "source", + "RemoteSha": "1.0.0" + }, + "sources": ["http://127.0.0.1://src/contrib/pkg3_1.0.0.tar.gz", "http://127.0.0.1://src/contrib/Archive/pkg3/pkg3_1.0.0.tar.gz"], + "target": "src/contrib/pkg3_1.0.0.tar.gz", + "platform": "", + "rversion": "*", + "directpkg": false, + "sha256": "", + "filesize": 9999, + "dep_types": ["Depends", "Imports", "LinkingTo"], + "params": [], + "install_args": "", + "repotype": "cran" + } + ] + } + diff --git a/tests/testthat/_snaps/name-check.md b/tests/testthat/_snaps/name-check.md new file mode 100644 index 000000000..119738d01 --- /dev/null +++ b/tests/testthat/_snaps/name-check.md @@ -0,0 +1,92 @@ +# pkg_name_check + + Code + pkg_name_check("tools") + Output + +------------------------------------------------------------------------------+ + | --*-- tools --*-- | + +------------------------------------------------------------------------------+ + +------------------------------------------------------------------------------+ + | v valid name x CRAN v Bioconductor v not a profanity | + +------------------------------------------------------------------------------+ + + Wikipedia -------------------------------------------------------------------+ + | Tool (from Tools) A tool is an object that can extend an individual's | + | ability to modify features of the surrounding environment or help them | + | accomplish a particular task. 
Although many animals use simple tools, only | + | human beings, whose use of stone tools dates back hundreds of millennia, | + | have been observed using tools to make other tools. | + | | + | ... | + +------------------------------------------ https://en.wikipedia.org/wiki/Tool + + + Wiktionary ------------------------------------------------------------------+ + | tools Noun: tools | + | plural of tool | + | Verb: tools | + | third-person singular simple present indicative form of tool | + | Anagrams: loots, lotos, sloot, sotol, stool, tosol | + +---------------------------------------- https://en.wiktionary.org/wiki/tools + + +------------------------------------------------------------------------------+ + | Sentiment: :| (0) | + +------------------------------------------------------------------------------+ + +--- + + Code + pkg_name_check("tools", "urban") + Output + +------------------------------------------------------------------------------+ + | --*-- tools --*-- | + +------------------------------------------------------------------------------+ + +------------------------------------------------------------------------------+ + | v valid name x CRAN v Bioconductor v not a profanity | + +------------------------------------------------------------------------------+ + + Urban dictionary ------------------------------------------------------------+ + | slang for the [hypodermic] [needles] used to [inject] drugs. 
| + +-------------------------------------------- http://tools.urbanup.com/1443484 + + +# format.pak_pkg_name_check, print.pak_pkg_name_check + + Code + print(ret[[1]]) + Output + +------------------------------------------------------------------------------+ + | --*-- tools --*-- | + +------------------------------------------------------------------------------+ + +------------------------------------------------------------------------------+ + | v valid name x CRAN v Bioconductor v not a profanity | + +------------------------------------------------------------------------------+ + + Wikipedia -------------------------------------------------------------------+ + | Tool (from Tools) A tool is an object that can extend an individual's | + | ability to modify features of the surrounding environment or help them | + | accomplish a particular task. Although many animals use simple tools, only | + | human beings, whose use of stone tools dates back hundreds of millennia, | + | have been observed using tools to make other tools. | + | | + | ... 
| + +------------------------------------------ https://en.wikipedia.org/wiki/Tool + + + Wiktionary ------------------------------------------------------------------+ + | tools Noun: tools | + | plural of tool | + | Verb: tools | + | third-person singular simple present indicative form of tool | + | Anagrams: loots, lotos, sloot, sotol, stool, tosol | + +---------------------------------------- https://en.wiktionary.org/wiki/tools + + +------------------------------------------------------------------------------+ + | Sentiment: :| (0) | + +------------------------------------------------------------------------------+ + +--- + + Code + print(ret[[2]]) + Output + +------------------------------------------------------------------------------+ + | --*-- tools --*-- | + +------------------------------------------------------------------------------+ + +------------------------------------------------------------------------------+ + | v valid name x CRAN v Bioconductor v not a profanity | + +------------------------------------------------------------------------------+ + + Urban dictionary ------------------------------------------------------------+ + | slang for the [hypodermic] [needles] used to [inject] drugs. 
| + +-------------------------------------------- http://tools.urbanup.com/1443484 + + diff --git a/tests/testthat/_snaps/package.md b/tests/testthat/_snaps/package.md new file mode 100644 index 000000000..d20f3c238 --- /dev/null +++ b/tests/testthat/_snaps/package.md @@ -0,0 +1,10 @@ +# pkg_deps_tree + + Code + print(pdt) + Output + pkg2 1.0.0 [new][bld][dl] () + \-pkg1 1.0.0 [new][bld][dl] () + + Key: [new] new | [dl] download | [bld] build + diff --git a/tests/testthat/_snaps/pak-cleanup.md b/tests/testthat/_snaps/pak-cleanup.md new file mode 100644 index 000000000..4e22fa434 --- /dev/null +++ b/tests/testthat/_snaps/pak-cleanup.md @@ -0,0 +1,28 @@ +# pak_cleanup_package_cache + + Code + pak_cleanup_package_cache(force = FALSE) + Message + > Package cache is in '' (0 B) + +--- + + Code + pak_cleanup_package_cache(force = TRUE) + Message + v Cleaned up package cache + +# pak_cleanup_metadata_cache + + Code + pak_cleanup_metadata_cache(force = FALSE) + Message + > Metadata cache is in '' (0 B) + +--- + + Code + pak_cleanup_metadata_cache(force = TRUE) + Message + v Cleaned up metadata cache + diff --git a/tests/testthat/_snaps/pak-update.md b/tests/testthat/_snaps/pak-update.md new file mode 100644 index 000000000..1827f59df --- /dev/null +++ b/tests/testthat/_snaps/pak-update.md @@ -0,0 +1,387 @@ +# detect_platform + + Code + detect_platform() + Output + $os + [1] "darwin20" + + $arch + [1] "aarch64" + + $rver + [1] "4.3" + + +--- + + Code + detect_platform() + Output + $os + [1] "linux" + + $arch + [1] "x86_64" + + $rver + [1] "4.3" + + +# pak_repo + + Code + pak_repo("devel") + Output + [1] "https://r-lib.github.io/p/pak/devel/" + Code + pak_repo("rc") + Output + [1] "https://r-lib.github.io/p/pak/rc/" + Code + pak_repo("stable") + Output + [1] "https://r-lib.github.io/p/pak/stable/" + +# pak_repo_metadata + + Code + pak_repo_metadata(meta_path) + Output + Package Version Depends Imports License + 1 pak 0.7.1.9000 R (>= 4.1), R (<= 4.1.99) tools, utils GPL-3 + 
2 pak 0.7.1.9000 R (>= 4.2), R (<= 4.2.99) tools, utils GPL-3 + 3 pak 0.7.1.9000 R (>= 4.0), R (<= 4.0.99) tools, utils GPL-3 + 4 pak 0.7.1.9000 R (>= 4.1), R (<= 4.1.99) tools, utils GPL-3 + 5 pak 0.7.1.9000 R (>= 4.2), R (<= 10.0.0) tools, utils GPL-3 + 6 pak 0.7.1.9000 R (>= 3.5), R (<= 3.5.99) tools, utils GPL-3 + 7 pak 0.7.1.9000 R (>= 3.6), R (<= 10.0.0) tools, utils GPL-3 + 8 pak 0.7.1.9000 R (>= 3.5), R (<= 3.5.99) tools, utils GPL-3 + 9 pak 0.7.1.9000 R (>= 3.6), R (<= 3.6.99) tools, utils GPL-3 + 10 pak 0.7.1.9000 R (>= 4.0), R (<= 4.0.99) tools, utils GPL-3 + 11 pak 0.7.1.9000 R (>= 4.1), R (<= 4.1.99) tools, utils GPL-3 + 12 pak 0.7.1.9000 R (>= 4.2), R (<= 4.2.99) tools, utils GPL-3 + 13 pak 0.7.1.9000 R (>= 3.5), R (<= 3.5.99) tools, utils GPL-3 + 14 pak 0.7.1.9000 R (>= 3.6), R (<= 3.6.99) tools, utils GPL-3 + 15 pak 0.7.1.9000 R (>= 4.0), R (<= 4.0.99) tools, utils GPL-3 + 16 pak 0.7.1.9000 R (>= 4.1), R (<= 4.1.99) tools, utils GPL-3 + 17 pak 0.7.1.9000 R (>= 4.2), R (<= 4.2.99) tools, utils GPL-3 + 18 pak 0.7.1.9000 R (>= 3.6), R (<= 3.6.99) tools, utils GPL-3 + 19 pak 0.7.1.9000 R (>= 4.2), R (<= 4.2.99) tools, utils GPL-3 + 20 pak 0.7.1.9000 R (>= 4.0), R (<= 4.0.99) tools, utils GPL-3 + 21 pak 0.7.1.9000 R (>= 4.1), R (<= 4.1.99) tools, utils GPL-3 + 22 pak 0.7.1.9000 R (>= 3.5), R (<= 3.5.99) tools, utils GPL-3 + 23 pak 0.7.1.9000 R (>= 4.3), R (<= 4.3.99) tools, utils GPL-3 + 24 pak 0.7.1.9000 R (>= 4.3), R (<= 4.3.99) tools, utils GPL-3 + 25 pak 0.7.1.9000 R (>= 4.3), R (<= 4.3.99) tools, utils GPL-3 + 26 pak 0.7.1.9000 R (>= 4.3), R (<= 4.3.99) tools, utils GPL-3 + 27 pak 0.7.1.9000 R (>= 4.4), R (<= 10.0.0) tools, utils GPL-3 + 28 pak 0.7.1.9000 R (>= 4.4), R (<= 10.0.0) tools, utils GPL-3 + 29 pak 0.7.1.9000 R (>= 4.4), R (<= 10.0.0) tools, utils GPL-3 + 30 pak 0.7.1.9000 R (>= 4.4), R (<= 10.0.0) tools, utils GPL-3 + 31 pak 0.7.1.9000 R (>= 4.3), R (<= 4.3.99) tools, utils GPL-3 + 32 pak 0.7.1.9000 R (>= 4.4), R (<= 10.0.0) tools, utils 
GPL-3 + MD5sum + 1 d6be0de2e85aa8342c757a888115adc9 + 2 77a9de8a5bd3815c95cab2979003c5a8 + 3 328e252deffd659aec6e43064a2e2bf2 + 4 787a119f7993533006d76a33dbd1dfcf + 5 7abf05aa4c5357ddd040acbe726f5acb + 6 b80dfe3b102a3237710d879ed3d44940 + 7 a86277023b0022c60e08b7e94c74428f + 8 6c6bbc82ef3af1b89ec48901e0f9b63f + 9 712df4dedd91ad73f142cab1c85f3dae + 10 770fdac818ca34c5c96f72cadbf19663 + 11 a56088c5c5bc08d849fd5fc9dc94094f + 12 e191788534de3151a8f188c04ebd5693 + 13 02fc35953e31c457c819153711c28e5e + 14 20b7fcd3fef5a55b24068d064a4cd17a + 15 eb447fc72541669ee093ee05ba7d6b36 + 16 d34be60769b8fdbee82ebb05dc009c38 + 17 5fdfd38a5f46f61defea709749595fcb + 18 2208b9b4b800a5f0d44efa1de205bf59 + 19 9a88655ad28ec6529c11c416fd21c915 + 20 6d3be1c70d7088901a54e0dddb708e9c + 21 06cdd31a21ff0c2335bc883af50bc5c0 + 22 5850a1f5e38cab8a7db9773f2de4e11a + 23 1444df2f169ff3c7af48d7bd3a529704 + 24 0b3a03b26e40dbc89403f0f8892775e7 + 25 6f96832d6476a0db54bdedf0443ebaa9 + 26 0fccc61c0d0b12bd2c5564e51670bfae + 27 cb096e6d657d7bfc4088822ee3982f22 + 28 706e755e6e1501b90e06a8b8c5a60d11 + 29 83aef98253311517a0511377a6fd6626 + 30 4c2bc6b4c490f087e02e2b404f3e74b9 + 31 28a94a92b84a1d21c8221a2c8c050006 + 32 a006f1cc31c50cae59cbe5608a389654 + Sha256 + 1 85207a14088d3994a2630ed16fb83beab551a95442bf2a0ed66c5dafe1bdfb97 + 2 25efeb747b7993a3819d67528ba5f1a8290fbc24427a38fa52d24b1cfb6a3c74 + 3 0cba9e8b54ceccdc42d08cc2e40b911b663cf03049eba020f5e792589ccd2a49 + 4 fb09d65e9e54cad4e41ace6facbf5faae91b6f6a737877541e212bd1cda63a0d + 5 611305de3c47ae6bd7c15cfe3496c818d7b0ab782eed51eef92d390712a4fa0e + 6 8658957a1c533ad283410a8a31ce02acbf9158affe683ee07ba184bc65859e3e + 7 ffabce7c4ee411e0edb54914828a7d395aa7b54cb53371cb3b6c7c9b0586692a + 8 bc92fe327c62c6dd8ac230beacbc6e3ed6539250f3a8b2fef948d7eb46e2aa06 + 9 8abda8c649b75dae0315a82ba3c0e1a795a0976ed75aa66883e4d7c3e47e0bae + 10 9b10dbea740c713b62832f4356e5a16d34617a70bfe843d1ec89c9a304ac1bf1 + 11 d24f1bf6a3ae0703168106985dd87ab178192659cbc8da02f01996f9c1e37cb2 + 12 
3cd642c60cb2bf9151227b1304052f6ab5adc9de5e162bc9a74dc78793267427 + 13 3b22ed3c2c3720d54b98a1e0e2099e787aa72d75ba1ac0e5f8f757cbeaf02bcd + 14 9a8dadc27c79a36517733992695e4b4f81bf3bb9426ac32008879b3a4fd3b92b + 15 c9055eccf9dbcc1e24a80e16471cfa0a10768af6f5683523f4995967ebef9dc9 + 16 97dbb9c973408dc0fd845ae12b5f18f99c98b45e3f1cbb5ccf24311fa4a2f39b + 17 fba55bdabaa50e659923cf6350061cd23ffae46d936fcb079bdcef7dadc756ad + 18 c3b99af557c8419dcef121c397f5b2c1f6b1f4bf63194342ccd0f4e7d1eeaf85 + 19 6dd8c002c066c8793f734eabc7ffb1b7fd8b12bbe9b96315398379223bb51110 + 20 b64bd7690c1e979f9ac45c2dbd6425f20566040b03728f220283a77bd54a4421 + 21 af45e6a5c251bd90a26549bd7019c086a0d1378bdcefebaf29ad109203042b5e + 22 c2658bf61db4c17452008560b4ae1b531057b6ca6c4e8e4134f59b27c9ad62b6 + 23 92b641a9be98723f784f1f61c4a9c0dd06676ee352b7d3a12c226617702f9d18 + 24 b89e0d3bc8306611eff5898558a87542d4901aaa8bc356462be725d4e1ed0dc4 + 25 587c0563f9f7438ccabba61395d80d441629deedb4905376e08e148bb7b40077 + 26 793ad956e592a2912c72218621654c03ed471d7383a013eb84294194fd643c70 + 27 1854f28e0933d30937cbba4cbb82c646a4e764a44d6ff3c5baa9bea58d31dd1d + 28 4f8a6c5a99b76844c056b1748c1f878145d32deff9c2c6a42463ff5480ef9d4b + 29 5df7a4803661d3b6d1f184161f6c92d81f6c14861edd3c6868b3430fa996cd6f + 30 9914a7a37fa38ac35170763ae20b16d6f50d2f302cd73ad71ad803b75de1e3d1 + 31 11aa764ad48c3fd6c1c65eac139ff0a621d1330a7fd0c0fc13b5d95396a126c6 + 32 b11077403d6aab4c7bb6ed12981d11a11b530cffa5c05df757f273b7185d4bdc + NeedsCompilation + 1 no + 2 no + 3 no + 4 no + 5 no + 6 no + 7 no + 8 no + 9 no + 10 no + 11 no + 12 no + 13 no + 14 no + 15 no + 16 no + 17 no + 18 no + 19 no + 20 no + 21 no + 22 no + 23 no + 24 no + 25 no + 26 no + 27 no + 28 no + 29 no + 30 no + 31 no + 32 no + Built + 1 R 4.1.3; aarch64-apple-darwin20; 2024-01-26 05:37:06 UTC; unix + 2 R 4.2.3; aarch64-apple-darwin20; 2024-01-26 05:39:11 UTC; unix + 3 R 4.0.5; x86_64-apple-darwin17.0; 2024-01-26 05:40:36 UTC; unix + 4 R 4.1.3; x86_64-apple-darwin17.0; 2024-01-26 05:41:58 
UTC; unix + 5 R 4.2.3; x86_64-apple-darwin17.0; 2024-01-26 05:43:21 UTC; unix + 6 R 3.5.3; x86_64-apple-darwin15.6.0; 2024-01-26 05:37:45 UTC; unix + 7 R 3.6.3; x86_64-apple-darwin15.6.0; 2024-01-26 05:39:06 UTC; unix + 8 R 3.5.3; x86_64-w64-mingw32; 2024-01-12 05:40:54 UTC; windows + 9 R 3.6.3; x86_64-w64-mingw32; 2024-01-26 05:37:15 UTC; windows + 10 R 4.0.5; x86_64-w64-mingw32; 2024-01-26 05:37:18 UTC; windows + 11 R 4.1.3; x86_64-w64-mingw32; 2024-01-26 05:41:13 UTC; windows + 12 R 4.2.3; x86_64-w64-mingw32; 2024-01-26 05:40:15 UTC; windows + 13 R 3.5.3; x86_64-pc-linux-gnu; 2023-12-30 05:34:53 UTC; unix + 14 R 3.6.3; x86_64-pc-linux-musl; 2024-01-26 05:34:35 UTC; unix + 15 R 4.0.5; x86_64-pc-linux-musl; 2024-01-26 05:34:36 UTC; unix + 16 R 4.1.3; x86_64-pc-linux-musl; 2024-01-26 05:34:36 UTC; unix + 17 R 4.2.3; x86_64-pc-linux-musl; 2024-01-26 05:34:41 UTC; unix + 18 R 3.6.3; aarch64-unknown-linux-musl; 2024-01-26 05:36:19 UTC; unix + 19 R 4.2.3; aarch64-unknown-linux-musl; 2024-01-26 05:36:41 UTC; unix + 20 R 4.0.5; aarch64-unknown-linux-musl; 2024-01-26 05:36:43 UTC; unix + 21 R 4.1.3; aarch64-unknown-linux-musl; 2024-01-26 05:36:42 UTC; unix + 22 R 3.5.3; aarch64-unknown-linux-gnu; 2023-12-30 05:36:34 UTC; unix + 23 R 4.3.2; aarch64-apple-darwin20; 2024-01-26 05:41:13 UTC; unix + 24 R 4.3.2; aarch64-unknown-linux-musl; 2024-01-26 05:36:55 UTC; unix + 25 R 4.3.2; x86_64-w64-mingw32; 2024-01-26 05:38:17 UTC; windows + 26 R 4.3.2; x86_64-pc-linux-musl; 2024-01-26 05:34:38 UTC; unix + 27 R 4.4.0; x86_64-pc-linux-musl; 2024-01-26 05:34:36 UTC; unix + 28 R 4.4.0; x86_64-w64-mingw32; 2024-01-26 05:39:41 UTC; windows + 29 R 4.4.0; aarch64-apple-darwin20; 2024-01-26 05:45:14 UTC; unix + 30 R 4.4.0; aarch64-unknown-linux-musl; 2024-01-26 05:36:40 UTC; unix + 31 R 4.3.2; x86_64-apple-darwin20; 2024-01-26 05:44:42 UTC; unix + 32 R 4.4.0; x86_64-apple-darwin20; 2024-01-26 05:46:03 UTC; unix + File + 1 pak_0.7.1.9000_R-4-1_aarch64-darwin20.tgz + 2 
pak_0.7.1.9000_R-4-2_aarch64-darwin20.tgz + 3 pak_0.7.1.9000_R-4-0_x86_64-darwin17.0.tgz + 4 pak_0.7.1.9000_R-4-1_x86_64-darwin17.0.tgz + 5 pak_0.7.1.9000_R-4-2_x86_64-darwin17.0.tgz + 6 pak_0.7.1.9000_R-3-5_x86_64-darwin15.6.0.tgz + 7 pak_0.7.1.9000_R-3-6_x86_64-darwin15.6.0.tgz + 8 pak_0.7.1.9000_R-3-5_x86_64-mingw32.zip + 9 pak_0.7.1.9000_R-3-6_x86_64-mingw32.zip + 10 pak_0.7.1.9000_R-4-0_x86_64-mingw32.zip + 11 pak_0.7.1.9000_R-4-1_x86_64-mingw32.zip + 12 pak_0.7.1.9000_R-4-2_x86_64-mingw32.zip + 13 pak_0.7.1.9000_R-3-5_x86_64-linux.tar.gz + 14 pak_0.7.1.9000_R-3-6_x86_64-linux.tar.gz + 15 pak_0.7.1.9000_R-4-0_x86_64-linux.tar.gz + 16 pak_0.7.1.9000_R-4-1_x86_64-linux.tar.gz + 17 pak_0.7.1.9000_R-4-2_x86_64-linux.tar.gz + 18 pak_0.7.1.9000_R-3-6_aarch64-linux.tar.gz + 19 pak_0.7.1.9000_R-4-2_aarch64-linux.tar.gz + 20 pak_0.7.1.9000_R-4-0_aarch64-linux.tar.gz + 21 pak_0.7.1.9000_R-4-1_aarch64-linux.tar.gz + 22 pak_0.7.1.9000_R-3-5_aarch64-linux.tar.gz + 23 pak_0.7.1.9000_R-4-3_aarch64-darwin20.tgz + 24 pak_0.7.1.9000_R-4-3_aarch64-linux.tar.gz + 25 pak_0.7.1.9000_R-4-3_x86_64-mingw32.zip + 26 pak_0.7.1.9000_R-4-3_x86_64-linux.tar.gz + 27 pak_0.7.1.9000_R-4-4_x86_64-linux.tar.gz + 28 pak_0.7.1.9000_R-4-4_x86_64-mingw32.zip + 29 pak_0.7.1.9000_R-4-4_aarch64-darwin20.tgz + 30 pak_0.7.1.9000_R-4-4_aarch64-linux.tar.gz + 31 pak_0.7.1.9000_R-4-3_x86_64-darwin20.tgz + 32 pak_0.7.1.9000_R-4-4_x86_64-darwin20.tgz + DownloadURL + 1 https://ghcr.io/v2/r-lib/pak/blobs/sha256:85207a14088d3994a2630ed16fb83beab551a95442bf2a0ed66c5dafe1bdfb97 + 2 https://ghcr.io/v2/r-lib/pak/blobs/sha256:25efeb747b7993a3819d67528ba5f1a8290fbc24427a38fa52d24b1cfb6a3c74 + 3 https://ghcr.io/v2/r-lib/pak/blobs/sha256:0cba9e8b54ceccdc42d08cc2e40b911b663cf03049eba020f5e792589ccd2a49 + 4 https://ghcr.io/v2/r-lib/pak/blobs/sha256:fb09d65e9e54cad4e41ace6facbf5faae91b6f6a737877541e212bd1cda63a0d + 5 https://ghcr.io/v2/r-lib/pak/blobs/sha256:611305de3c47ae6bd7c15cfe3496c818d7b0ab782eed51eef92d390712a4fa0e 
+ 6 https://ghcr.io/v2/r-lib/pak/blobs/sha256:8658957a1c533ad283410a8a31ce02acbf9158affe683ee07ba184bc65859e3e + 7 https://ghcr.io/v2/r-lib/pak/blobs/sha256:ffabce7c4ee411e0edb54914828a7d395aa7b54cb53371cb3b6c7c9b0586692a + 8 https://ghcr.io/v2/r-lib/pak/blobs/sha256:bc92fe327c62c6dd8ac230beacbc6e3ed6539250f3a8b2fef948d7eb46e2aa06 + 9 https://ghcr.io/v2/r-lib/pak/blobs/sha256:8abda8c649b75dae0315a82ba3c0e1a795a0976ed75aa66883e4d7c3e47e0bae + 10 https://ghcr.io/v2/r-lib/pak/blobs/sha256:9b10dbea740c713b62832f4356e5a16d34617a70bfe843d1ec89c9a304ac1bf1 + 11 https://ghcr.io/v2/r-lib/pak/blobs/sha256:d24f1bf6a3ae0703168106985dd87ab178192659cbc8da02f01996f9c1e37cb2 + 12 https://ghcr.io/v2/r-lib/pak/blobs/sha256:3cd642c60cb2bf9151227b1304052f6ab5adc9de5e162bc9a74dc78793267427 + 13 https://ghcr.io/v2/r-lib/pak/blobs/sha256:3b22ed3c2c3720d54b98a1e0e2099e787aa72d75ba1ac0e5f8f757cbeaf02bcd + 14 https://ghcr.io/v2/r-lib/pak/blobs/sha256:9a8dadc27c79a36517733992695e4b4f81bf3bb9426ac32008879b3a4fd3b92b + 15 https://ghcr.io/v2/r-lib/pak/blobs/sha256:c9055eccf9dbcc1e24a80e16471cfa0a10768af6f5683523f4995967ebef9dc9 + 16 https://ghcr.io/v2/r-lib/pak/blobs/sha256:97dbb9c973408dc0fd845ae12b5f18f99c98b45e3f1cbb5ccf24311fa4a2f39b + 17 https://ghcr.io/v2/r-lib/pak/blobs/sha256:fba55bdabaa50e659923cf6350061cd23ffae46d936fcb079bdcef7dadc756ad + 18 https://ghcr.io/v2/r-lib/pak/blobs/sha256:c3b99af557c8419dcef121c397f5b2c1f6b1f4bf63194342ccd0f4e7d1eeaf85 + 19 https://ghcr.io/v2/r-lib/pak/blobs/sha256:6dd8c002c066c8793f734eabc7ffb1b7fd8b12bbe9b96315398379223bb51110 + 20 https://ghcr.io/v2/r-lib/pak/blobs/sha256:b64bd7690c1e979f9ac45c2dbd6425f20566040b03728f220283a77bd54a4421 + 21 https://ghcr.io/v2/r-lib/pak/blobs/sha256:af45e6a5c251bd90a26549bd7019c086a0d1378bdcefebaf29ad109203042b5e + 22 https://ghcr.io/v2/r-lib/pak/blobs/sha256:c2658bf61db4c17452008560b4ae1b531057b6ca6c4e8e4134f59b27c9ad62b6 + 23 
https://ghcr.io/v2/r-lib/pak/blobs/sha256:92b641a9be98723f784f1f61c4a9c0dd06676ee352b7d3a12c226617702f9d18 + 24 https://ghcr.io/v2/r-lib/pak/blobs/sha256:b89e0d3bc8306611eff5898558a87542d4901aaa8bc356462be725d4e1ed0dc4 + 25 https://ghcr.io/v2/r-lib/pak/blobs/sha256:587c0563f9f7438ccabba61395d80d441629deedb4905376e08e148bb7b40077 + 26 https://ghcr.io/v2/r-lib/pak/blobs/sha256:793ad956e592a2912c72218621654c03ed471d7383a013eb84294194fd643c70 + 27 https://ghcr.io/v2/r-lib/pak/blobs/sha256:1854f28e0933d30937cbba4cbb82c646a4e764a44d6ff3c5baa9bea58d31dd1d + 28 https://ghcr.io/v2/r-lib/pak/blobs/sha256:4f8a6c5a99b76844c056b1748c1f878145d32deff9c2c6a42463ff5480ef9d4b + 29 https://ghcr.io/v2/r-lib/pak/blobs/sha256:5df7a4803661d3b6d1f184161f6c92d81f6c14861edd3c6868b3430fa996cd6f + 30 https://ghcr.io/v2/r-lib/pak/blobs/sha256:9914a7a37fa38ac35170763ae20b16d6f50d2f302cd73ad71ad803b75de1e3d1 + 31 https://ghcr.io/v2/r-lib/pak/blobs/sha256:11aa764ad48c3fd6c1c65eac139ff0a621d1330a7fd0c0fc13b5d95396a126c6 + 32 https://ghcr.io/v2/r-lib/pak/blobs/sha256:b11077403d6aab4c7bb6ed12981d11a11b530cffa5c05df757f273b7185d4bdc + OS Arch RVersion + 1 darwin20 aarch64 4.1 + 2 darwin20 aarch64 4.2 + 3 darwin17.0 x86_64 4.0 + 4 darwin17.0 x86_64 4.1 + 5 darwin17.0 x86_64 4.2 + 6 darwin15.6.0 x86_64 3.5 + 7 darwin15.6.0 x86_64 3.6 + 8 mingw32 x86_64 3.5 + 9 mingw32 x86_64 3.6 + 10 mingw32 x86_64 4.0 + 11 mingw32 x86_64 4.1 + 12 mingw32 x86_64 4.2 + 13 linux x86_64 3.5 + 14 linux x86_64 3.6 + 15 linux x86_64 4.0 + 16 linux x86_64 4.1 + 17 linux x86_64 4.2 + 18 linux aarch64 3.6 + 19 linux aarch64 4.2 + 20 linux aarch64 4.0 + 21 linux aarch64 4.1 + 22 linux aarch64 3.5 + 23 darwin20 aarch64 4.3 + 24 linux aarch64 4.3 + 25 mingw32 x86_64 4.3 + 26 linux x86_64 4.3 + 27 linux x86_64 4.4 + 28 mingw32 x86_64 4.4 + 29 darwin20 aarch64 4.4 + 30 linux aarch64 4.4 + 31 darwin20 x86_64 4.3 + 32 darwin20 x86_64 4.4 + +# pak_update + + Code + pak_update() + Message + pak has devel binaries for the following 
platforms: + * Windows (mingw32), x86_64, R 3.5.x, R 3.6.x, R 4.0.x, R 4.1.x, R 4.2.x, R 4.3.x, R 4.4.x + * linux, aarch64, R 3.5.x, R 3.6.x, R 4.0.x, R 4.1.x, R 4.2.x, R 4.3.x, R 4.4.x + * linux, x86_64, R 3.5.x, R 3.6.x, R 4.0.x, R 4.1.x, R 4.2.x, R 4.3.x, R 4.4.x + * macOS darwin15.6.0, x86_64, R 3.5.x, R 3.6.x + * macOS darwin17.0, x86_64, R 4.0.x, R 4.1.x, R 4.2.x + * macOS darwin20, aarch64, R 4.1.x, R 4.2.x, R 4.3.x, R 4.4.x + * macOS darwin20, x86_64, R 4.3.x, R 4.4.x + + Condition + Error in `pak_update_unsupported_platform()`: + ! pak is not available for linux, s390x, R 4.3.x. Aborting now + +--- + + Code + pak_update() + Condition + Warning in `pak_update()`: + `load_all()`-d pak package, updating in default library at + `` + Message + + Current version is the latest, no need to update. + +--- + + Code + pak_update() + Message + + Current version is the latest, no need to update. + +# pak_update_unsupported_platform + + Code + pak_update_unsupported_platform("devel", me, meta) + Message + pak has devel binaries for the following platforms: + * Windows (mingw32), x86_64, R 3.5.x, R 3.6.x, R 4.0.x, R 4.1.x, R 4.2.x, R 4.3.x, R 4.4.x + * linux, aarch64, R 3.5.x, R 3.6.x, R 4.0.x, R 4.1.x, R 4.2.x, R 4.3.x, R 4.4.x + * linux, x86_64, R 3.5.x, R 3.6.x, R 4.0.x, R 4.1.x, R 4.2.x, R 4.3.x, R 4.4.x + * macOS darwin15.6.0, x86_64, R 3.5.x, R 3.6.x + * macOS darwin17.0, x86_64, R 4.0.x, R 4.1.x, R 4.2.x + * macOS darwin20, aarch64, R 4.1.x, R 4.2.x, R 4.3.x, R 4.4.x + * macOS darwin20, x86_64, R 4.3.x, R 4.4.x + + Condition + Error in `pak_update_unsupported_platform()`: + ! pak is not available for Linux, s390x, R 4.3.x. Aborting now + +# check_mac_cran_r + + Code + check_mac_cran_r(me) + Condition + Error in `check_mac_cran_r()`: + ! pak only has binaries for the CRAN build of R, and this seems to be a brew or another non-CRAN build. 
+ +# should_update_to + + Code + expect_true(should_update_to()) + Message + + pak platform mismatch, trying to update to fix this... + diff --git a/tests/testthat/_snaps/ppm.md b/tests/testthat/_snaps/ppm.md new file mode 100644 index 000000000..6d81d3446 --- /dev/null +++ b/tests/testthat/_snaps/ppm.md @@ -0,0 +1,62 @@ +# ppm_platforms + + Code + as.data.frame(ppm_platforms()) + Output + name os binary_url distribution release binaries + 1 centos7 linux centos7 centos 7 TRUE + 2 centos8 linux centos8 centos 8 TRUE + 3 rhel9 linux rhel9 rockylinux 9 TRUE + 4 opensuse15 linux opensuse15 opensuse 15 TRUE + 5 opensuse152 linux opensuse152 opensuse 15.2 TRUE + 6 opensuse153 linux opensuse153 opensuse 15.3 TRUE + 7 opensuse154 linux opensuse154 opensuse 15.4 TRUE + 8 opensuse42 linux opensuse42 opensuse 42.3 TRUE + 9 rhel7 linux centos7 redhat 7 TRUE + 10 rhel8 linux centos8 redhat 8 TRUE + 11 rhel9 (unused alias) linux rhel9 redhat 9 TRUE + 12 sles12 linux opensuse42 sle 12.3 TRUE + 13 sles15 linux opensuse15 sle 15 TRUE + 14 sles152 linux opensuse152 sle 15.2 TRUE + 15 sles153 linux opensuse153 sle 15.3 TRUE + 16 sles154 linux opensuse154 sle 15.4 TRUE + 17 xenial linux xenial ubuntu 16.04 TRUE + 18 bionic linux bionic ubuntu 18.04 TRUE + 19 focal linux focal ubuntu 20.04 TRUE + 20 jammy linux jammy ubuntu 22.04 TRUE + 21 buster linux buster debian 10 FALSE + 22 bullseye linux bullseye debian 11 FALSE + 23 windows windows windows all TRUE + 24 macOS macOS macOS all FALSE + +# ppm_r_versions + + Code + as.data.frame(ppm_r_versions()) + Output + r_version + 1 3.5 + 2 3.6 + 3 4.0 + 4 4.1 + 5 4.2 + +# ppm_snapshots + + Code + ppm_snapshots() + Output + # A data frame: 10 x 2 + date id + + 1 2021-01-25 00:00:00 997643 + 2 2021-01-26 00:00:00 1014755 + 3 2021-01-27 00:00:00 1033374 + 4 2021-01-28 00:00:00 1053473 + 5 2021-01-29 00:00:00 1069075 + 6 2021-02-01 00:00:00 1123445 + 7 2021-02-02 00:00:00 1140568 + 8 2021-02-03 00:00:00 1160641 + 9 2021-02-04 00:00:00 1175516 + 
10 2021-02-05 00:00:00 1194160 + diff --git a/tests/testthat/_snaps/subprocess.md b/tests/testthat/_snaps/subprocess.md deleted file mode 100644 index 49b73a5ea..000000000 --- a/tests/testthat/_snaps/subprocess.md +++ /dev/null @@ -1,18 +0,0 @@ -# remote messages - - Code - invisible(remote(function() cli::cli_text("just once"))) - Message - just once - ---- - - Code - withCallingHandlers(invisible(remote(function() cli::cli_text("just once"))), - message = function(m) print(m)) - Output - - Message - just once - diff --git a/tests/testthat/_snaps/utils.md b/tests/testthat/_snaps/utils.md new file mode 100644 index 000000000..b06ab3224 --- /dev/null +++ b/tests/testthat/_snaps/utils.md @@ -0,0 +1,83 @@ +# vcapply + + Code + vcapply(letters, function(x) 1L) + Condition + Error in `vapply()`: + ! values must be type 'character', + but FUN(X[[1]]) result is type 'integer' + Code + vcapply(1:5, function(x) c("foo", "bar")) + Condition + Error in `vapply()`: + ! values must be length 1, + but FUN(X[[1]]) result is length 2 + +# vlapply + + Code + vlapply(letters, function(x) 1L) + Condition + Error in `vapply()`: + ! values must be type 'logical', + but FUN(X[[1]]) result is type 'integer' + Code + vlapply(1:5, function(x) c(TRUE, FALSE)) + Condition + Error in `vapply()`: + ! values must be length 1, + but FUN(X[[1]]) result is length 2 + +# viapply + + Code + viapply(letters, function(x) 1) + Condition + Error in `vapply()`: + ! values must be type 'integer', + but FUN(X[[1]]) result is type 'double' + Code + viapply(1:5, function(x) 1:2) + Condition + Error in `vapply()`: + ! values must be length 1, + but FUN(X[[1]]) result is length 2 + +# vdapply + + Code + vdapply(letters, function(x) "boo") + Condition + Error in `vapply()`: + ! values must be type 'double', + but FUN(X[[1]]) result is type 'character' + Code + vdapply(1:5, function(x) 1:2 / 2) + Condition + Error in `vapply()`: + ! 
values must be length 1, + but FUN(X[[1]]) result is length 2 + +# cat0 + + Code + cat0(c("foo", "bar"), "foobar") + Output + foobarfoobar + +# mkdirp + + Code + mkdirp(c("foo", "bar"), "Created these") + Message + i Created these: 'foo' and 'bar'. + +# base_packages + + Code + base_packages() + Output + [1] "base" "compiler" "datasets" "graphics" "grDevices" "grid" + [7] "methods" "parallel" "splines" "stats" "stats4" "tcltk" + [13] "tools" "utils" + diff --git a/tests/testthat/fixtures/git-repo.tar.gz b/tests/testthat/fixtures/git-repo.tar.gz new file mode 100644 index 000000000..07b1be4f1 Binary files /dev/null and b/tests/testthat/fixtures/git-repo.tar.gz differ diff --git a/tests/testthat/fixtures/metadata.json b/tests/testthat/fixtures/metadata.json new file mode 100644 index 000000000..142d2b7b5 --- /dev/null +++ b/tests/testthat/fixtures/metadata.json @@ -0,0 +1,482 @@ +[ + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.1), R (<= 4.1.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "d6be0de2e85aa8342c757a888115adc9", + "Sha256": "85207a14088d3994a2630ed16fb83beab551a95442bf2a0ed66c5dafe1bdfb97", + "NeedsCompilation": "no", + "Built": "R 4.1.3; aarch64-apple-darwin20; 2024-01-26 05:37:06 UTC; unix", + "File": "pak_0.7.1.9000_R-4-1_aarch64-darwin20.tgz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:85207a14088d3994a2630ed16fb83beab551a95442bf2a0ed66c5dafe1bdfb97", + "OS": "darwin20", + "Arch": "aarch64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.2), R (<= 4.2.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "77a9de8a5bd3815c95cab2979003c5a8", + "Sha256": "25efeb747b7993a3819d67528ba5f1a8290fbc24427a38fa52d24b1cfb6a3c74", + "NeedsCompilation": "no", + "Built": "R 4.2.3; aarch64-apple-darwin20; 2024-01-26 05:39:11 UTC; unix", + "File": "pak_0.7.1.9000_R-4-2_aarch64-darwin20.tgz", + "DownloadURL": 
"https://ghcr.io/v2/r-lib/pak/blobs/sha256:25efeb747b7993a3819d67528ba5f1a8290fbc24427a38fa52d24b1cfb6a3c74", + "OS": "darwin20", + "Arch": "aarch64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.0), R (<= 4.0.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "328e252deffd659aec6e43064a2e2bf2", + "Sha256": "0cba9e8b54ceccdc42d08cc2e40b911b663cf03049eba020f5e792589ccd2a49", + "NeedsCompilation": "no", + "Built": "R 4.0.5; x86_64-apple-darwin17.0; 2024-01-26 05:40:36 UTC; unix", + "File": "pak_0.7.1.9000_R-4-0_x86_64-darwin17.0.tgz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:0cba9e8b54ceccdc42d08cc2e40b911b663cf03049eba020f5e792589ccd2a49", + "OS": "darwin17.0", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.1), R (<= 4.1.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "787a119f7993533006d76a33dbd1dfcf", + "Sha256": "fb09d65e9e54cad4e41ace6facbf5faae91b6f6a737877541e212bd1cda63a0d", + "NeedsCompilation": "no", + "Built": "R 4.1.3; x86_64-apple-darwin17.0; 2024-01-26 05:41:58 UTC; unix", + "File": "pak_0.7.1.9000_R-4-1_x86_64-darwin17.0.tgz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:fb09d65e9e54cad4e41ace6facbf5faae91b6f6a737877541e212bd1cda63a0d", + "OS": "darwin17.0", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.2), R (<= 10.0.0)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "7abf05aa4c5357ddd040acbe726f5acb", + "Sha256": "611305de3c47ae6bd7c15cfe3496c818d7b0ab782eed51eef92d390712a4fa0e", + "NeedsCompilation": "no", + "Built": "R 4.2.3; x86_64-apple-darwin17.0; 2024-01-26 05:43:21 UTC; unix", + "File": "pak_0.7.1.9000_R-4-2_x86_64-darwin17.0.tgz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:611305de3c47ae6bd7c15cfe3496c818d7b0ab782eed51eef92d390712a4fa0e", + "OS": "darwin17.0", + "Arch": "x86_64" + }, + { + "Package": "pak", 
+ "Version": "0.7.1.9000", + "Depends": "R (>= 3.5), R (<= 3.5.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "b80dfe3b102a3237710d879ed3d44940", + "Sha256": "8658957a1c533ad283410a8a31ce02acbf9158affe683ee07ba184bc65859e3e", + "NeedsCompilation": "no", + "Built": "R 3.5.3; x86_64-apple-darwin15.6.0; 2024-01-26 05:37:45 UTC; unix", + "File": "pak_0.7.1.9000_R-3-5_x86_64-darwin15.6.0.tgz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:8658957a1c533ad283410a8a31ce02acbf9158affe683ee07ba184bc65859e3e", + "OS": "darwin15.6.0", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 3.6), R (<= 10.0.0)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "a86277023b0022c60e08b7e94c74428f", + "Sha256": "ffabce7c4ee411e0edb54914828a7d395aa7b54cb53371cb3b6c7c9b0586692a", + "NeedsCompilation": "no", + "Built": "R 3.6.3; x86_64-apple-darwin15.6.0; 2024-01-26 05:39:06 UTC; unix", + "File": "pak_0.7.1.9000_R-3-6_x86_64-darwin15.6.0.tgz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:ffabce7c4ee411e0edb54914828a7d395aa7b54cb53371cb3b6c7c9b0586692a", + "OS": "darwin15.6.0", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 3.5), R (<= 3.5.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "6c6bbc82ef3af1b89ec48901e0f9b63f", + "Sha256": "bc92fe327c62c6dd8ac230beacbc6e3ed6539250f3a8b2fef948d7eb46e2aa06", + "NeedsCompilation": "no", + "Built": "R 3.5.3; x86_64-w64-mingw32; 2024-01-12 05:40:54 UTC; windows", + "File": "pak_0.7.1.9000_R-3-5_x86_64-mingw32.zip", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:bc92fe327c62c6dd8ac230beacbc6e3ed6539250f3a8b2fef948d7eb46e2aa06", + "OS": "mingw32", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 3.6), R (<= 3.6.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "712df4dedd91ad73f142cab1c85f3dae", + 
"Sha256": "8abda8c649b75dae0315a82ba3c0e1a795a0976ed75aa66883e4d7c3e47e0bae", + "NeedsCompilation": "no", + "Built": "R 3.6.3; x86_64-w64-mingw32; 2024-01-26 05:37:15 UTC; windows", + "File": "pak_0.7.1.9000_R-3-6_x86_64-mingw32.zip", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:8abda8c649b75dae0315a82ba3c0e1a795a0976ed75aa66883e4d7c3e47e0bae", + "OS": "mingw32", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.0), R (<= 4.0.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "770fdac818ca34c5c96f72cadbf19663", + "Sha256": "9b10dbea740c713b62832f4356e5a16d34617a70bfe843d1ec89c9a304ac1bf1", + "NeedsCompilation": "no", + "Built": "R 4.0.5; x86_64-w64-mingw32; 2024-01-26 05:37:18 UTC; windows", + "File": "pak_0.7.1.9000_R-4-0_x86_64-mingw32.zip", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:9b10dbea740c713b62832f4356e5a16d34617a70bfe843d1ec89c9a304ac1bf1", + "OS": "mingw32", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.1), R (<= 4.1.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "a56088c5c5bc08d849fd5fc9dc94094f", + "Sha256": "d24f1bf6a3ae0703168106985dd87ab178192659cbc8da02f01996f9c1e37cb2", + "NeedsCompilation": "no", + "Built": "R 4.1.3; x86_64-w64-mingw32; 2024-01-26 05:41:13 UTC; windows", + "File": "pak_0.7.1.9000_R-4-1_x86_64-mingw32.zip", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:d24f1bf6a3ae0703168106985dd87ab178192659cbc8da02f01996f9c1e37cb2", + "OS": "mingw32", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.2), R (<= 4.2.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "e191788534de3151a8f188c04ebd5693", + "Sha256": "3cd642c60cb2bf9151227b1304052f6ab5adc9de5e162bc9a74dc78793267427", + "NeedsCompilation": "no", + "Built": "R 4.2.3; x86_64-w64-mingw32; 2024-01-26 05:40:15 UTC; windows", + "File": 
"pak_0.7.1.9000_R-4-2_x86_64-mingw32.zip", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:3cd642c60cb2bf9151227b1304052f6ab5adc9de5e162bc9a74dc78793267427", + "OS": "mingw32", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 3.5), R (<= 3.5.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "02fc35953e31c457c819153711c28e5e", + "Sha256": "3b22ed3c2c3720d54b98a1e0e2099e787aa72d75ba1ac0e5f8f757cbeaf02bcd", + "NeedsCompilation": "no", + "Built": "R 3.5.3; x86_64-pc-linux-gnu; 2023-12-30 05:34:53 UTC; unix", + "File": "pak_0.7.1.9000_R-3-5_x86_64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:3b22ed3c2c3720d54b98a1e0e2099e787aa72d75ba1ac0e5f8f757cbeaf02bcd", + "OS": "linux", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 3.6), R (<= 3.6.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "20b7fcd3fef5a55b24068d064a4cd17a", + "Sha256": "9a8dadc27c79a36517733992695e4b4f81bf3bb9426ac32008879b3a4fd3b92b", + "NeedsCompilation": "no", + "Built": "R 3.6.3; x86_64-pc-linux-musl; 2024-01-26 05:34:35 UTC; unix", + "File": "pak_0.7.1.9000_R-3-6_x86_64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:9a8dadc27c79a36517733992695e4b4f81bf3bb9426ac32008879b3a4fd3b92b", + "OS": "linux", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.0), R (<= 4.0.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "eb447fc72541669ee093ee05ba7d6b36", + "Sha256": "c9055eccf9dbcc1e24a80e16471cfa0a10768af6f5683523f4995967ebef9dc9", + "NeedsCompilation": "no", + "Built": "R 4.0.5; x86_64-pc-linux-musl; 2024-01-26 05:34:36 UTC; unix", + "File": "pak_0.7.1.9000_R-4-0_x86_64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:c9055eccf9dbcc1e24a80e16471cfa0a10768af6f5683523f4995967ebef9dc9", + "OS": "linux", + "Arch": "x86_64" + 
}, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.1), R (<= 4.1.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "d34be60769b8fdbee82ebb05dc009c38", + "Sha256": "97dbb9c973408dc0fd845ae12b5f18f99c98b45e3f1cbb5ccf24311fa4a2f39b", + "NeedsCompilation": "no", + "Built": "R 4.1.3; x86_64-pc-linux-musl; 2024-01-26 05:34:36 UTC; unix", + "File": "pak_0.7.1.9000_R-4-1_x86_64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:97dbb9c973408dc0fd845ae12b5f18f99c98b45e3f1cbb5ccf24311fa4a2f39b", + "OS": "linux", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.2), R (<= 4.2.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "5fdfd38a5f46f61defea709749595fcb", + "Sha256": "fba55bdabaa50e659923cf6350061cd23ffae46d936fcb079bdcef7dadc756ad", + "NeedsCompilation": "no", + "Built": "R 4.2.3; x86_64-pc-linux-musl; 2024-01-26 05:34:41 UTC; unix", + "File": "pak_0.7.1.9000_R-4-2_x86_64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:fba55bdabaa50e659923cf6350061cd23ffae46d936fcb079bdcef7dadc756ad", + "OS": "linux", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 3.6), R (<= 3.6.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "2208b9b4b800a5f0d44efa1de205bf59", + "Sha256": "c3b99af557c8419dcef121c397f5b2c1f6b1f4bf63194342ccd0f4e7d1eeaf85", + "NeedsCompilation": "no", + "Built": "R 3.6.3; aarch64-unknown-linux-musl; 2024-01-26 05:36:19 UTC; unix", + "File": "pak_0.7.1.9000_R-3-6_aarch64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:c3b99af557c8419dcef121c397f5b2c1f6b1f4bf63194342ccd0f4e7d1eeaf85", + "OS": "linux", + "Arch": "aarch64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.2), R (<= 4.2.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "9a88655ad28ec6529c11c416fd21c915", + 
"Sha256": "6dd8c002c066c8793f734eabc7ffb1b7fd8b12bbe9b96315398379223bb51110", + "NeedsCompilation": "no", + "Built": "R 4.2.3; aarch64-unknown-linux-musl; 2024-01-26 05:36:41 UTC; unix", + "File": "pak_0.7.1.9000_R-4-2_aarch64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:6dd8c002c066c8793f734eabc7ffb1b7fd8b12bbe9b96315398379223bb51110", + "OS": "linux", + "Arch": "aarch64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.0), R (<= 4.0.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "6d3be1c70d7088901a54e0dddb708e9c", + "Sha256": "b64bd7690c1e979f9ac45c2dbd6425f20566040b03728f220283a77bd54a4421", + "NeedsCompilation": "no", + "Built": "R 4.0.5; aarch64-unknown-linux-musl; 2024-01-26 05:36:43 UTC; unix", + "File": "pak_0.7.1.9000_R-4-0_aarch64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:b64bd7690c1e979f9ac45c2dbd6425f20566040b03728f220283a77bd54a4421", + "OS": "linux", + "Arch": "aarch64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.1), R (<= 4.1.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "06cdd31a21ff0c2335bc883af50bc5c0", + "Sha256": "af45e6a5c251bd90a26549bd7019c086a0d1378bdcefebaf29ad109203042b5e", + "NeedsCompilation": "no", + "Built": "R 4.1.3; aarch64-unknown-linux-musl; 2024-01-26 05:36:42 UTC; unix", + "File": "pak_0.7.1.9000_R-4-1_aarch64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:af45e6a5c251bd90a26549bd7019c086a0d1378bdcefebaf29ad109203042b5e", + "OS": "linux", + "Arch": "aarch64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 3.5), R (<= 3.5.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "5850a1f5e38cab8a7db9773f2de4e11a", + "Sha256": "c2658bf61db4c17452008560b4ae1b531057b6ca6c4e8e4134f59b27c9ad62b6", + "NeedsCompilation": "no", + "Built": "R 3.5.3; aarch64-unknown-linux-gnu; 2023-12-30 05:36:34 UTC; 
unix", + "File": "pak_0.7.1.9000_R-3-5_aarch64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:c2658bf61db4c17452008560b4ae1b531057b6ca6c4e8e4134f59b27c9ad62b6", + "OS": "linux", + "Arch": "aarch64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.3), R (<= 4.3.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "1444df2f169ff3c7af48d7bd3a529704", + "Sha256": "92b641a9be98723f784f1f61c4a9c0dd06676ee352b7d3a12c226617702f9d18", + "NeedsCompilation": "no", + "Built": "R 4.3.2; aarch64-apple-darwin20; 2024-01-26 05:41:13 UTC; unix", + "File": "pak_0.7.1.9000_R-4-3_aarch64-darwin20.tgz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:92b641a9be98723f784f1f61c4a9c0dd06676ee352b7d3a12c226617702f9d18", + "OS": "darwin20", + "Arch": "aarch64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.3), R (<= 4.3.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "0b3a03b26e40dbc89403f0f8892775e7", + "Sha256": "b89e0d3bc8306611eff5898558a87542d4901aaa8bc356462be725d4e1ed0dc4", + "NeedsCompilation": "no", + "Built": "R 4.3.2; aarch64-unknown-linux-musl; 2024-01-26 05:36:55 UTC; unix", + "File": "pak_0.7.1.9000_R-4-3_aarch64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:b89e0d3bc8306611eff5898558a87542d4901aaa8bc356462be725d4e1ed0dc4", + "OS": "linux", + "Arch": "aarch64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.3), R (<= 4.3.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "6f96832d6476a0db54bdedf0443ebaa9", + "Sha256": "587c0563f9f7438ccabba61395d80d441629deedb4905376e08e148bb7b40077", + "NeedsCompilation": "no", + "Built": "R 4.3.2; x86_64-w64-mingw32; 2024-01-26 05:38:17 UTC; windows", + "File": "pak_0.7.1.9000_R-4-3_x86_64-mingw32.zip", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:587c0563f9f7438ccabba61395d80d441629deedb4905376e08e148bb7b40077", + 
"OS": "mingw32", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.3), R (<= 4.3.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "0fccc61c0d0b12bd2c5564e51670bfae", + "Sha256": "793ad956e592a2912c72218621654c03ed471d7383a013eb84294194fd643c70", + "NeedsCompilation": "no", + "Built": "R 4.3.2; x86_64-pc-linux-musl; 2024-01-26 05:34:38 UTC; unix", + "File": "pak_0.7.1.9000_R-4-3_x86_64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:793ad956e592a2912c72218621654c03ed471d7383a013eb84294194fd643c70", + "OS": "linux", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.4), R (<= 10.0.0)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "cb096e6d657d7bfc4088822ee3982f22", + "Sha256": "1854f28e0933d30937cbba4cbb82c646a4e764a44d6ff3c5baa9bea58d31dd1d", + "NeedsCompilation": "no", + "Built": "R 4.4.0; x86_64-pc-linux-musl; 2024-01-26 05:34:36 UTC; unix", + "File": "pak_0.7.1.9000_R-4-4_x86_64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:1854f28e0933d30937cbba4cbb82c646a4e764a44d6ff3c5baa9bea58d31dd1d", + "OS": "linux", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.4), R (<= 10.0.0)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "706e755e6e1501b90e06a8b8c5a60d11", + "Sha256": "4f8a6c5a99b76844c056b1748c1f878145d32deff9c2c6a42463ff5480ef9d4b", + "NeedsCompilation": "no", + "Built": "R 4.4.0; x86_64-w64-mingw32; 2024-01-26 05:39:41 UTC; windows", + "File": "pak_0.7.1.9000_R-4-4_x86_64-mingw32.zip", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:4f8a6c5a99b76844c056b1748c1f878145d32deff9c2c6a42463ff5480ef9d4b", + "OS": "mingw32", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.4), R (<= 10.0.0)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": 
"83aef98253311517a0511377a6fd6626", + "Sha256": "5df7a4803661d3b6d1f184161f6c92d81f6c14861edd3c6868b3430fa996cd6f", + "NeedsCompilation": "no", + "Built": "R 4.4.0; aarch64-apple-darwin20; 2024-01-26 05:45:14 UTC; unix", + "File": "pak_0.7.1.9000_R-4-4_aarch64-darwin20.tgz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:5df7a4803661d3b6d1f184161f6c92d81f6c14861edd3c6868b3430fa996cd6f", + "OS": "darwin20", + "Arch": "aarch64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.4), R (<= 10.0.0)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "4c2bc6b4c490f087e02e2b404f3e74b9", + "Sha256": "9914a7a37fa38ac35170763ae20b16d6f50d2f302cd73ad71ad803b75de1e3d1", + "NeedsCompilation": "no", + "Built": "R 4.4.0; aarch64-unknown-linux-musl; 2024-01-26 05:36:40 UTC; unix", + "File": "pak_0.7.1.9000_R-4-4_aarch64-linux.tar.gz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:9914a7a37fa38ac35170763ae20b16d6f50d2f302cd73ad71ad803b75de1e3d1", + "OS": "linux", + "Arch": "aarch64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.3), R (<= 4.3.99)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "28a94a92b84a1d21c8221a2c8c050006", + "Sha256": "11aa764ad48c3fd6c1c65eac139ff0a621d1330a7fd0c0fc13b5d95396a126c6", + "NeedsCompilation": "no", + "Built": "R 4.3.2; x86_64-apple-darwin20; 2024-01-26 05:44:42 UTC; unix", + "File": "pak_0.7.1.9000_R-4-3_x86_64-darwin20.tgz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:11aa764ad48c3fd6c1c65eac139ff0a621d1330a7fd0c0fc13b5d95396a126c6", + "OS": "darwin20", + "Arch": "x86_64" + }, + { + "Package": "pak", + "Version": "0.7.1.9000", + "Depends": "R (>= 4.4), R (<= 10.0.0)", + "Imports": "tools, utils", + "License": "GPL-3", + "MD5sum": "a006f1cc31c50cae59cbe5608a389654", + "Sha256": "b11077403d6aab4c7bb6ed12981d11a11b530cffa5c05df757f273b7185d4bdc", + "NeedsCompilation": "no", + "Built": "R 4.4.0; x86_64-apple-darwin20; 
2024-01-26 05:46:03 UTC; unix", + "File": "pak_0.7.1.9000_R-4-4_x86_64-darwin20.tgz", + "DownloadURL": "https://ghcr.io/v2/r-lib/pak/blobs/sha256:b11077403d6aab4c7bb6ed12981d11a11b530cffa5c05df757f273b7185d4bdc", + "OS": "darwin20", + "Arch": "x86_64" + } +] \ No newline at end of file diff --git a/tests/testthat/fixtures/name-check.rds b/tests/testthat/fixtures/name-check.rds new file mode 100644 index 000000000..f2eb504d8 Binary files /dev/null and b/tests/testthat/fixtures/name-check.rds differ diff --git a/tests/testthat/fixtures/ppm-status.json b/tests/testthat/fixtures/ppm-status.json new file mode 100644 index 000000000..283189c39 --- /dev/null +++ b/tests/testthat/fixtures/ppm-status.json @@ -0,0 +1 @@ +{"version":"2022.11.2-18","build_date":"2022-11-18T20:02:59Z","metrics_enabled":true,"r_configured":true,"binaries_enabled":true,"display_ash":false,"custom_home":true,"ga_id":"UA-20375833-3","distros":[{"name":"centos7","os":"linux","binaryDisplay":"CentOS/RHEL 7","binaryURL":"centos7","display":"CentOS 7","distribution":"centos","release":"7","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"centos8","os":"linux","binaryDisplay":"CentOS/RHEL 8","binaryURL":"centos8","display":"CentOS 8","distribution":"centos","release":"8","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"rhel9","os":"linux","binaryDisplay":"Rocky Linux 9","binaryURL":"rhel9","display":"Rocky Linux 9","distribution":"rockylinux","release":"9","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"opensuse15","os":"linux","binaryDisplay":"OpenSUSE 15.1, SLES 15 SP1","binaryURL":"opensuse15","display":"OpenSUSE 15.1","distribution":"opensuse","release":"15","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"opensuse152","os":"linux","binaryDisplay":"OpenSUSE 15.2, SLES 15 SP2","binaryURL":"opensuse152","display":"OpenSUSE 
15.2","distribution":"opensuse","release":"15.2","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"opensuse153","os":"linux","binaryDisplay":"OpenSUSE 15.3, SLES 15 SP3","binaryURL":"opensuse153","display":"OpenSUSE 15.3","distribution":"opensuse","release":"15.3","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"opensuse154","os":"linux","binaryDisplay":"OpenSUSE 15.4, SLES 15 SP4","binaryURL":"opensuse154","display":"OpenSUSE 15.4","distribution":"opensuse","release":"15.4","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"opensuse42","os":"linux","binaryDisplay":"OpenSUSE 42.3, SLES 12 SP5","binaryURL":"opensuse42","display":"OpenSUSE 42.3","distribution":"opensuse","release":"42.3","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"rhel7","os":"linux","binaryDisplay":"CentOS/RHEL 7","binaryURL":"centos7","display":"Red Hat Enterprise Linux 7","distribution":"redhat","release":"7","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"rhel8","os":"linux","binaryDisplay":"RHEL 8","binaryURL":"centos8","display":"Red Hat Enterprise Linux 8","distribution":"redhat","release":"8","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"rhel9 (unused alias)","os":"linux","binaryDisplay":"RHEL 9","binaryURL":"rhel9","display":"Red Hat Enterprise Linux 9","distribution":"redhat","release":"9","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"sles12","os":"linux","binaryDisplay":"OpenSUSE 42.3, SLES 12 SP5","binaryURL":"opensuse42","display":"SLES 12 SP5","distribution":"sle","release":"12.3","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"sles15","os":"linux","binaryDisplay":"OpenSUSE 15.1, SLES 15 SP1","binaryURL":"opensuse15","display":"SLES 15 
SP1","distribution":"sle","release":"15","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"sles152","os":"linux","binaryDisplay":"OpenSUSE 15.2, SLES 15 SP2","binaryURL":"opensuse152","display":"SLES 15 SP2","distribution":"sle","release":"15.2","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"sles153","os":"linux","binaryDisplay":"OpenSUSE 15.3, SLES 15 SP3","binaryURL":"opensuse153","display":"SLES 15 SP3","distribution":"sle","release":"15.3","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"sles154","os":"linux","binaryDisplay":"OpenSUSE 15.4, SLES 15 SP4","binaryURL":"opensuse154","display":"SLES 15 SP4","distribution":"sle","release":"15.4","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"xenial","os":"linux","binaryDisplay":"Ubuntu 16.04 (Xenial)","binaryURL":"xenial","display":"Ubuntu 16.04 (Xenial)","distribution":"ubuntu","release":"16.04","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"bionic","os":"linux","binaryDisplay":"Ubuntu 18.04 (Bionic)","binaryURL":"bionic","display":"Ubuntu 18.04 (Bionic)","distribution":"ubuntu","release":"18.04","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"focal","os":"linux","binaryDisplay":"Ubuntu 20.04 (Focal)","binaryURL":"focal","display":"Ubuntu 20.04 (Focal)","distribution":"ubuntu","release":"20.04","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"jammy","os":"linux","binaryDisplay":"Ubuntu 22.04 (Jammy)","binaryURL":"jammy","display":"Ubuntu 22.04 (Jammy)","distribution":"ubuntu","release":"22.04","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"buster","os":"linux","binaryDisplay":"Debian 10 (Buster)","binaryURL":"buster","display":"Debian 10 
(Buster)","distribution":"debian","release":"10","sysReqs":true,"binaries":false,"hidden":true,"official_rspm":true},{"name":"bullseye","os":"linux","binaryDisplay":"Debian 11 (Bullseye)","binaryURL":"bullseye","display":"Debian 11 (Bullseye)","distribution":"debian","release":"11","sysReqs":true,"binaries":false,"hidden":true,"official_rspm":true},{"name":"windows","os":"windows","binaryDisplay":"","binaryURL":"","display":"Windows","distribution":"windows","release":"all","sysReqs":false,"binaries":true,"hidden":false,"official_rspm":true},{"name":"macOS","os":"macOS","binaryDisplay":"","binaryURL":"","display":"macOS","distribution":"macOS","release":"all","sysReqs":false,"binaries":false,"hidden":false,"official_rspm":true}],"cran_repo":"cran","bioc_versions":[{"bioc_version":"3.17","r_version":"4.3","cran_snapshot":"latest"},{"bioc_version":"3.16","r_version":"4.2","cran_snapshot":"latest"},{"bioc_version":"3.15","r_version":"4.2","cran_snapshot":"2022-11-02"},{"bioc_version":"3.14","r_version":"4.1","cran_snapshot":"2022-04-27"},{"bioc_version":"3.13","r_version":"4.1","cran_snapshot":"2021-10-27"},{"bioc_version":"3.12","r_version":"4.0","cran_snapshot":"2021-05-20"},{"bioc_version":"3.11","r_version":"4.0","cran_snapshot":"2020-10-28"},{"bioc_version":"3.10","r_version":"3.6","cran_snapshot":"2020-04-27"},{"bioc_version":"3.9","r_version":"3.6","cran_snapshot":"2019-10-28"},{"bioc_version":"3.8","r_version":"3.5","cran_snapshot":"2019-05-02"},{"bioc_version":"3.7","r_version":"3.5","cran_snapshot":"2018-10-29"},{"bioc_version":"3.6","r_version":"3.4","cran_snapshot":"2018-04-30"},{"bioc_version":"3.5","r_version":"3.4","cran_snapshot":"2017-10-30"},{"bioc_version":"3.4","r_version":"3.3","cran_snapshot":""},{"bioc_version":"3.3","r_version":"3.3","cran_snapshot":""},{"bioc_version":"3.2","r_version":"3.2","cran_snapshot":""},{"bioc_version":"3.1","r_version":"3.2","cran_snapshot":""}],"r_versions":["3.5","3.6","4.0","4.1","4.2"]} \ No newline at end of file 
diff --git a/tests/testthat/fixtures/ppm-versions.json b/tests/testthat/fixtures/ppm-versions.json new file mode 100644 index 000000000..b95480c75 --- /dev/null +++ b/tests/testthat/fixtures/ppm-versions.json @@ -0,0 +1 @@ +[{"id":1194160,"created":"2021-02-05T14:33:45.896477Z","completed":"2021-02-05T14:33:47.981439Z","type":3,"tag":"Sync on 2021-02-05 14:33:45.896125592 +0000 UTC m=+5774001.199074308","source_id":1,"guid":"","packages_added":35,"packages_updated":null,"packages_archived":30,"packages_removed":2,"action":"Sync","published_from":"2021-02-04T00:00:00Z","published_to":"2021-02-05T00:00:00Z","result":0},{"id":1175516,"created":"2021-02-04T13:33:26.567859Z","completed":"2021-02-04T13:33:28.671843Z","type":3,"tag":"Sync on 2021-02-04 13:33:26.567142757 +0000 UTC m=+5683985.862935156","source_id":1,"guid":"","packages_added":63,"packages_updated":null,"packages_archived":49,"packages_removed":0,"action":"Sync","published_from":"2021-02-03T00:00:00Z","published_to":"2021-02-04T00:00:00Z","result":0},{"id":1160641,"created":"2021-02-03T17:33:24.555283Z","completed":"2021-02-03T17:33:26.618373Z","type":3,"tag":"Sync on 2021-02-03 17:33:24.55753815 +0000 UTC m=+5611983.853330561","source_id":1,"guid":"","packages_added":54,"packages_updated":null,"packages_archived":56,"packages_removed":0,"action":"Sync","published_from":"2021-02-02T00:00:00Z","published_to":"2021-02-03T00:00:00Z","result":0},{"id":1140568,"created":"2021-02-02T14:33:36.516377Z","completed":"2021-02-02T14:33:38.591104Z","type":3,"tag":"Sync on 2021-02-02 14:33:36.516160202 +0000 UTC m=+5514795.811952613","source_id":1,"guid":"","packages_added":42,"packages_updated":null,"packages_archived":32,"packages_removed":0,"action":"Sync","published_from":"2021-02-01T00:00:00Z","published_to":"2021-02-02T00:00:00Z","result":0},{"id":1123445,"created":"2021-02-01T15:33:29.046485Z","completed":"2021-02-01T15:33:31.122849Z","type":3,"tag":"Sync on 2021-02-01 15:33:29.046711806 +0000 UTC 
m=+5431988.342504215","source_id":1,"guid":"","packages_added":92,"packages_updated":null,"packages_archived":88,"packages_removed":0,"action":"Sync","published_from":"2021-01-29T00:00:00Z","published_to":"2021-02-01T00:00:00Z","result":0},{"id":1069075,"created":"2021-01-29T14:33:14.305268Z","completed":"2021-01-29T14:33:16.404439Z","type":3,"tag":"Sync on 2021-01-29 14:33:14.305163772 +0000 UTC m=+5169173.600956173","source_id":1,"guid":"","packages_added":38,"packages_updated":null,"packages_archived":30,"packages_removed":0,"action":"Sync","published_from":"2021-01-28T00:00:00Z","published_to":"2021-01-29T00:00:00Z","result":0},{"id":1053473,"created":"2021-01-28T17:33:27.979661Z","completed":"2021-01-28T17:33:30.065975Z","type":3,"tag":"Sync on 2021-01-28 17:33:27.979306008 +0000 UTC m=+5093587.275098427","source_id":1,"guid":"","packages_added":81,"packages_updated":null,"packages_archived":73,"packages_removed":0,"action":"Sync","published_from":"2021-01-27T00:00:00Z","published_to":"2021-01-28T00:00:00Z","result":0},{"id":1033374,"created":"2021-01-27T14:33:56.561496Z","completed":"2021-01-27T14:33:58.631111Z","type":3,"tag":"Sync on 2021-01-27 14:33:56.559083264 +0000 UTC m=+4996415.854875681","source_id":1,"guid":"","packages_added":36,"packages_updated":null,"packages_archived":37,"packages_removed":0,"action":"Sync","published_from":"2021-01-26T00:00:00Z","published_to":"2021-01-27T00:00:00Z","result":0},{"id":1014755,"created":"2021-01-26T13:33:16.575111Z","completed":"2021-01-26T13:33:18.634306Z","type":3,"tag":"Sync on 2021-01-26 13:33:16.57552076 +0000 UTC m=+4906375.871313183","source_id":1,"guid":"","packages_added":69,"packages_updated":null,"packages_archived":46,"packages_removed":2,"action":"Sync","published_from":"2021-01-25T00:00:00Z","published_to":"2021-01-26T00:00:00Z","result":0},{"id":997643,"created":"2021-01-25T14:33:53.993192Z","completed":"2021-01-25T14:33:56.066895Z","type":3,"tag":"Sync on 2021-01-25 14:33:53.993727544 +0000 UTC 
m=+4823613.289519947","source_id":1,"guid":"","packages_added":110,"packages_updated":null,"packages_archived":96,"packages_removed":3,"action":"Sync","published_from":"2021-01-22T00:00:00Z","published_to":"2021-01-25T00:00:00Z","result":0}] diff --git a/tests/testthat/fixtures/r-versions.json b/tests/testthat/fixtures/r-versions.json new file mode 100644 index 000000000..20355cb37 --- /dev/null +++ b/tests/testthat/fixtures/r-versions.json @@ -0,0 +1 @@ +[{"version":"3.6.2","date":"2019-12-12T08:05:03.679160Z","nickname":"Dark and Stormy Night"},{"version":"3.6.3","date":"2020-02-29T08:05:16.744223Z","nickname":"Holding the Windsock"},{"version":"4.0.0","date":"2020-04-24T07:05:34.612930Z","nickname":"Arbor Day"},{"version":"4.0.1","date":"2020-06-06T07:05:16.469439Z","nickname":"See Things Now"},{"version":"4.0.2","date":"2020-06-22T07:05:19.236082Z","nickname":"Taking Off Again"},{"version":"4.0.3","date":"2020-10-10T07:05:24.661746Z","nickname":"Bunny-Wunnies Freak Out"}] diff --git a/tests/testthat/helper-apps.R b/tests/testthat/helper-apps.R new file mode 100644 index 000000000..08a4dea35 --- /dev/null +++ b/tests/testthat/helper-apps.R @@ -0,0 +1,644 @@ +# ------------------------------------------------------------------------- +# Dummy CRAN app + +cran_app <- function(...) { + asNamespace("pkgcache")$cran_app(...) +} + +bioc_app <- function(...) { + asNamespace("pkgcache")$bioc_app(...) +} + +dcf <- function(...) { + asNamespace("pkgcache")$dcf(...) +} + +fix_port <- function(...) { + asNamespace("pkgcache")$fix_port(...) 
+} + +cran_app_pkgs <- dcf(" + Package: pkg1 + Version: 1.0.0 + + Package: pkg1 + Version: 0.9.0 + + Package: pkg1 + Version: 0.8.0 + + Package: pkg2 + Version: 1.0.0 + Depends: pkg1 + + Package: pkg3 + Version: 1.0.0 + Depends: pkg2 + + Package: pkg3 + Version: 0.9.9 + + Package: pkg4 + Version: 1.0.0 + Imports: pkg2 + Suggests: pkg3 + + Package: crayon + Version: 1.0.0 + + Package: needspak + Imports: pak + + Package: pak + + Package: futurama + Depends: R (>= 3000.0) + + Package: needsfuturama + Imports: futurama + + Package: dplyr + Imports: tibble + Suggests: testthat + + Package: tibble + + Package: testthat + + Package: curl + SystemRequirements: libcurl: libcurl-devel (rpm) or libcurl4-openssl-dev (deb). +") + +fake_cran <- webfakes::local_app_process( + cran_app(cran_app_pkgs), + opts = webfakes::server_opts(num_threads = 3) +) + +bioc_app_pkgs <- dcf(" + Package: Biobase + Version: 1.2.3 + Depends: R (>= 2.10), BiocGenerics(>= 0.27.1), utils + Imports: methods + Suggests: tools, tkWidgets, ALL, RUnit, golubEsets +") + +fake_bioc <- webfakes::local_app_process( + bioc_app(bioc_app_pkgs), + opts = webfakes::server_opts(num_threads = 3) +) + +setup_fake_apps <- function( + cran_app = NULL, + bioc_app = NULL, + cran_repo = NULL, + bioc_repo = NULL, + cran_options = NULL, + bioc_options = NULL, + .local_envir = parent.frame()) { + cran_app <- if (!is.null(cran_app)) { + cran_app + } else if (!is.null(cran_repo)) { + app <- webfakes::local_app_process( + cran_app(cran_repo, options = as.list(cran_options)), + opts = webfakes::server_opts(num_threads = 3), + .local_envir = .local_envir + ) + assign(".cran_app", app, envir = .local_envir) + app + } else { + fake_cran + } + + bioc_app <- if (!is.null(bioc_app)) { + bioc_app + } else if (!is.null(bioc_repo)) { + app <- webfakes::local_app_process( + bioc_app(bioc_repo, options = as.list(bioc_options)), + opts = webfakes::server_opts(num_threads = 3), + .local_envir = .local_envir + ) + assign(".bioc_app", app, 
envir = .local_envir) + app + } else { + fake_bioc + } + + withr::local_options( + repos = c(CRAN = cran_app$url()), + pkg.cran_metadata_url = cran_app$url(), + .local_envir = .local_envir + ) + withr::local_envvar( + R_PKG_CRAN_METADATA_URL = cran_app$url(), + R_BIOC_CONFIG_URL = paste0(bioc_app$url(), "/config.yaml"), + R_BIOC_VERSION = NA_character_, + R_BIOC_MIRROR = bioc_app$url(), + .local_envir = .local_envir + ) +} + +# ------------------------------------------------------------------------- +# GH app + +gh_app_desc <- function(pkg) { + sprintf("Package: %s\nVersion: 1.0.0\n", pkg) +} + +random_sha <- function() { + paste( + sample(c(0:9, letters[1:6]), 64, replace = TRUE), + collapse = "" + ) +} + +gh_app_repos <- list( + users = list( + "r-lib" = list( + repos = list( + pak = list( + commits = list( + list( + sha = "111ef906acb58fe406370f7bc0a72cac55dbbb231ea687494c25742ca521255a", + branch = "main", + tag = "HEAD", + files = list("DESCRIPTION" = gh_app_desc("pak"), NAMESPACE = "") + ), + list( + sha = "a503fe843f11c279864f29d58137f8de319d115b239ce48ccc15406306019480", + branch = "main", + tag = "v0.1.2", + files = list("DESCRIPTION" = gh_app_desc("pak"), NAMESPACE = "") + ), + list( + sha = "e65de1e9630dbfcaf1044718b742bf806486b107239ce48ccc15406306019480", + branch = "main", + files = list("DESCRIPTION" = gh_app_desc("pak"), NAMESPACE = "") + ), + list( + sha = "b001d6ddeab1589ad367b62baabbeeb2af3b0ebac2e61d239df660c1d63e3232", + branch = "somebranch", + pull = 90, + files = list("DESCRIPTION" = gh_app_desc("pak"), NAMESPACE = "") + ), + list( + sha = "b001d6ddeab1589ad367b62baabbeeb2af3b0ebac2e61d239df660c1d63e3232", + latestRelease = TRUE, + tagName = "v1.2.3", + files = list("DESCRIPTION" = gh_app_desc("pak"), NAMESPACE = "") + ) + ) + ), + bad = list( + commits = list( + list( + sha = "546d9eab84b002c35302dda3822560950c7528cfc9ef1b916cecd9dbef3cf6b6", + tag = "HEAD", + branch = "main", + files = list( + DESCRIPTION = "this is not\na good file\n", + 
"bin/DESCRIPTION" = charToRaw("\xf0\xb0\xa0") + ) + ), + list( + sha = "546d9eab84b002c35302dda3822560950c7528cfc9ef1b916cecd9dbef3cf6b6", + pull = 100, + branch = "other", + files = list(DESCRIPTION = "this is not\na good file\n") + ) + ) + ), + crayon = list( + commits = list( + list( + sha = "bdd9a1bcf062396790c341cf1dba563eb0277f2ca0a6d524bc3da98a9a6f2975", + tag = "HEAD", + branch = "main", + files = list(DESCRIPTION = gh_app_desc("crayon"), NAMESPACE = "") + ), + list( + sha = "b5221ab024605019800ddea474f7a0981a4d53f719f5af2b1af627b34e0760b2", + branch = "b5221ab024605019800ddea474f7a0981a4d53f719f5af2b1af627b34e0760b2", + files = list(DESCRIPTION = gh_app_desc("crayon"), NAMESPACE = "") + ), + list( + sha = "9d93692f8f7c1d6b2308d0c4aa83cdc2d99ec1fd0097cede1d9aa1301247cb01", + branch = "pr61", + pull = 79, + files = list(DESCRIPTION = gh_app_desc("crayon"), NAMESPACE = "") + ) + ) + ), + pkgconfig = list( + commits = list( + list( + sha = "c9be9cde5e91ad771d1b5150781e6e8d32a7be0e9ab227bdf45cb41ad513004c", + branch = "pr7", + pull = 7, + files = list(DESCRIPTION = gh_app_desc("pkgconfig"), NAMESPACE = "") + ) + ) + ) + ) + ), + "wesm" = list( + repos = list( + "feather" = list( + commits = list( + list( + sha = "ec40c1eae1ac83b86fc41bb2f5cd916152d19015649c3d209f2c08115dd993b1", + tag = "HEAD", + branch = "main", + files = list("R/DESCRIPTION" = gh_app_desc("feather"), NAMESPACE = "") + ) + ) + ) + ) + ), + "gaborcsardi" = list( + repos = list( + "secret-test" = list( + commits = list( + list( + sha = "599cc5d745d2079eddf1ff582b83d381e885cd30f33bafebbe83e73d010cfa93", + tag = "HEAD", + branch = "main", + token = "b9984750bea6a170081ca98255c3b43fe5fb0978", + files = list("DESCRIPTION" = gh_app_desc("secret"), NAMESPACE = "") + ) + ) + ), + "secret" = list( + commits = list( + list( + sha = "7f9fb08e26015e05529cd4d7fc2a7edbd88c783d456ff83a96dcc58ace1d3ea5", + tag = "HEAD", + branch = "x", + files = list("DESCRIPTION" = gh_app_desc("secret"), NAMESPACE = "") + ) + 
) + ) + ) + ), + "tidyverse" = list( + repos = list( + "tidyverse.org" = list( + commits = list( + list( + sha = "d998eab68c66d862c31a6091f9e71200b13bb44ea754e3371d098dcaa20e51a4", + tag = "HEAD", + branch = "main", + files = list("foo" = "bar") + ) + ) + ) + ) + ), + "cran" = list( + repos = list( + "rJava" = list( + commits = list( + list( + sha = "dfb3b64b13343e07b2db038777d9dc2aba5d824c5eca8c891c87bd4fd38d7256", + tag = "HEAD", + branch = "master", + files = list( + DESCRIPTION = "Package: rJava\nVersion: 1.0-6\nSystemRequirements: Java JDK 1.2 or higher (for JRI/REngine JDK 1.4 or higher), GNU make\n", + NAMESPACE = "" + ) + ) + ) + ) + ) + ) + ) +) + +fake_gh <- webfakes::local_app_process( + gh_app(gh_app_repos), + opts = webfakes::server_opts(num_threads = 3) +) + +setup_fake_gh_app <- function(.local_envir = parent.frame()) { + withr::local_envvar( + .local_envir = .local_envir, + R_PKG_GITHUB_API_URL = fake_gh$url() + ) +} + + +# ------------------------------------------------------------------------- +# Name check app + +new_check_app <- function() { + `%||%` <- function(l, r) if (is.null(l)) r else l + + app <- webfakes::new_app() + + app$use(webfakes::mw_json()) + app$use(webfakes::mw_urlencoded()) + + app$get("/crandb", function(req, res) { + pkg <- sub("\"$", "", sub("^\"", "", req$query$key)) + if (pkg == "dbi") { + res$send_json(list( + total_rows = 20000, + offset = 14000, + rows = list(list(id = "DBI", key = "dbi", value = "DBI")) + ), auto_unbox = TRUE) + } else { + res$send_json(list( + total_rows = 20000, + offset = 14000, + rows = list() + )) + } + }) + + app$post("/wikipedia", function(req, res) { + titles <- strsplit(req$form$titles, "|", fixed = TRUE)[[1]] + Titles <- tools::toTitleCase(titles) + ret <- list(query = list( + normalized = list(list(from = titles, to = Titles)), + pages = list(`11178` = list( + pageid = 11178, + title = Titles, + extract = "The terms foobar (), foo, bar, and others are used ..." 
+ )) + )) + res$send_json(ret, auto_unbox = TRUE) + }) + + app$all(c("/echo", "/echo/define"), function(req, res) { + out <- list( + method = req$method, + query = req$query_string, + type = req$get_header("Content-Type") %||% NA_character_, + body = rawToChar(req$.body %||% raw()) + ) + res$send_json(out, auto_unbox = TRUE) + }) + + app$get("/sentiment", function(req, res) { + txt <- "abuse\t-3\nirony\t-1\nxo\t3\nxoxoxo\t4\n" + res$send(txt) + }) + + app$get("/bioc/a", function(req, res) { + res$send(paste0(collapse = "", c( + "hello nobody, this is httpd@ip-172-30-0-33 running gitolite3 v3.6.6-6-g7c8f0ab on git 2.28.0", + "", + " R \tpackages/a4", + " R \tpackages/a4Base", + " R \tpackages/a4Classif", + " R \tpackages/a4Core", + " R \tpackages/a4Preproc", + " R \tpackages/a4Reporting", + " R \tpackages/aCGH", + " R \tpackages/abseqR", + " R \tpackages/ag.db" + ), "\n")) + }) + + app$get("/bioc/A", function(req, res) { + res$send(paste0(collapse = "", c( + "hello nobody, this is httpd@ip-172-30-0-33 running gitolite3 v3.6.6-6-g7c8f0ab on git 2.28.0", + "", + " R \tpackages/ABAData", + " R \tpackages/ABAEnrichment", + " R \tpackages/ABSSeq", + " R \tpackages/AGDEX", + " R \tpackages/AHPathbankDbs", + " R \tpackages/AIMS", + " R \tpackages/ALDEx2", + " R \tpackages/ALL", + " R \tpackages/ALLMLL", + " R \tpackages/ALPS", + " R \tpackages/AMARETTO" + ), "\n")) + }) + + app$get("/biocann/src/contrib/PACKAGES.gz", function(req, res) { + tmp <- tempfile(fileext = ".gz") + on.exit(unlink(tmp), add = TRUE) + l <- c( + "Package: adme16cod.db", + "Version: 3.4.0", + "Depends: R (>= 2.7.0), methods, AnnotationDbi (>= 1.31.18),", + " org.Rn.eg.db (>= 3.2.1)", + "Imports: methods, AnnotationDbi", + "Suggests: annotate, RUnit", + "License: Artistic-2.0", + "MD5sum: 3902516a40a503302ef732143b2394b9", + "NeedsCompilation: no", + "", + "Package: ag.db", + "Version: 3.2.3", + "Depends: R (>= 2.7.0), methods, AnnotationDbi (>= 1.34.3),", + " org.At.tair.db (>= 3.3.0)", + "Imports: 
methods, AnnotationDbi", + "Suggests: DBI, annotate, RUnit", + "License: Artistic-2.0", + "MD5sum: e5913da38fe4487202306cacd885840d", + "NeedsCompilation: no", + "", + "Package: agcdf", + "Version: 2.18.0", + "Depends: utils", + "Imports: AnnotationDbi", + "License: LGPL", + "MD5sum: 5dd14bc6a6d2729f5e7b170105c78e48", + "NeedsCompilation: no" + ) + writeLines(l, con <- gzfile(tmp, open = "wb")) + close(con) + + # We don't use send_file, because of a webfakes bug on Windows + # with absolute paths. Webfakes prepends '/' to 'c:/...'. + blob <- readBin(tmp, what = "raw", n = 10000) + res$ + set_type("application/gzip")$ + send(blob) + }) + + app +} + + +new_sysreqs_app <- function() { + app <- webfakes::new_app() + app$use(webfakes::mw_json()) + app$use(webfakes::mw_urlencoded()) + + db <- list( + ubuntu = list( + "22.04" = list( + "java" = list( + install_scripts = list("apt-get install -y default-jdk"), + post_install = list(list(command = "R CMD javareconf")) + ), + "openssl" = list( + install_scripts = list("apt-get install -y libssl-dev") + ), + "libcurl" = list( + install_scripts = list("apt-get install -y libcurl4-openssl-dev") + ) + ), + "16.04" = list( + "\\bgeos\\b" = list( + pre_install = list( + list(command = "apt-get install -y software-properties-common"), + list(command = "add-apt-repository -y ppa:ubuntugis/ppa"), + list(command = "apt-get update") + ), + install_scripts = list("apt-get install -y libgeos-dev") + ) + ) + ) + ) + + app$post("/__api__/repos/:id/sysreqs", function(req, res) { + dist <- req$query$distribution + rele <- req$query$release + + dsc <- desc::desc(text = rawToChar(req$.body)) + pkgsrq <- trimws(dsc$get("SystemRequirements")) + if (is.na(pkgsrq)) pkgsrq <- "" + + if (dist == "ubuntu" && rele %in% c("16.04", "18.04", "20.04", "22.04")) { + mydb <- db[[dist]][[rele]] + srq <- lapply(names(mydb), function(nm) { + if (grepl(nm, pkgsrq)) mydb[[nm]] else NULL + }) + + bf <- unlist(lapply(srq, "[[", "pre_install"), recursive = FALSE) + 
is <- unlist(lapply(srq, "[[", "install_scripts")) + af <- unlist(lapply(srq, "[[", "post_install"), recursive = FALSE) + + res$send_json(list( + name = jsonlite::unbox("pkgdependssysreqs"), + pre_install = bf, + install_scripts = is, + post_install = af + )) + } else { + res$set_status(400) + res$send_json(list( + code = jsonlite::unbox(14), + error = jsonlite::unbox("Unsupported system"), + payload = jsonlite::unbox(NA) + )) + } + }) + + app +} + +fake_sysreqs <- webfakes::local_app_process(new_sysreqs_app()) + +setup_fake_sysreqs_app <- function(.local_envir = parent.frame()) { + withr::local_envvar( + .local_envir = .local_envir, + RSPM_ROOT = sub("/$", "", fake_sysreqs$url()) + ) +} + +transform_sysreqs_server <- function(x) { + x <- gsub("https://packagemanager.posit.co", "", x, fixed = TRUE) + x <- gsub("http://127.0.0.1:[0-9]+", "", x) + x <- gsub("http://localhost:[0-9]+", "", x) + x +} + +show_request <- function(req) { + x <- jsonlite::fromJSON(rawToChar(req$content)) + cat(toupper(x$method), " ", x$type, sep = "", "\n") + cat("Query string: ", x$query, sep = "", "\n") + cat("Body: ", x$body, sep = "", "\n") +} + +check_app <- webfakes::new_app_process( + new_check_app(), + opts = webfakes::server_opts(num_threads = 4) +) + +transform_no_srcref <- function(x) { + x <- sub("[ ]*at [-a-zA-Z0-9]+[.]R:[0-9]+:[0-9]+", "", x) + x <- sub("[ ]*at line [0-9]+", "", x) + x <- sub("\033[90m\033[39m", "", x, fixed = TRUE) + x <- sub("Caused by error: ", "Caused by error:", x, fixed = TRUE) + if (x[length(x)] == "") x <- x[-length(x)] + x +} + +transform_local_port <- function(x) { + gsub("127\\.0\\.0\\.1:[0-9]+", "127.0.0.1:", x) +} + +transform_bioc_version <- function(x) { + sub("3[.][0-9]+/bioc", "/bioc", x) +} + +transform_bytes <- function(x) { + gsub("[(][0-9]+ B[)]", "()", x) +} + +transform_ext <- function(x) { + x <- sub("[.](zip|tgz)", ".zip/.tgz/.tar.gz", x) + x <- sub("_R_[-_a-z0-9A-Z]+[.]tar[.]gz", ".zip/.tgz/.tar.gz", x) + x +} + +transform_sha <- 
function(x) { + gsub("[a-fA-F0-9]{64}", "", x) +} + +transform_hash <- function(x) { + x <- gsub("[a-f0-9]{32}", "", x) + x <- gsub("[a-f0-9]{10}", "", x) + x +} + +transform_etag <- function(x) { + sub("RemoteEtag: \"[a-z0-9]+\"", "RemoteEtag: \"\"", x) +} + +transform_tempdir <- function(x) { + x <- sub(tempdir(), "", x) + x <- sub(normalizePath(tempdir()), "", x) + x <- sub(normalizePath(tempdir(), winslash = "/"), "", x) + x <- sub("[\\\\/]file[a-zA-Z0-9]+", "/", x) + x <- sub("[A-Z]:.*Rtmp[a-zA-Z0-9]+/", "/", x) + x +} + +transform_show_cursor <- function(x) { + gsub("\033[?25h", "", x, fixed = TRUE) +} + +transform_no_links <- function(x) { + cli::ansi_strip(x, sgr = FALSE, csi = FALSE, link = TRUE) +} + +transform_installed_in_temp <- function(x) { + m <- regexpr("installed::.*$", x) + regmatches(x, m) <- paste0("installed::.../", long_basename(regmatches(x, m))) + x +} + +transform_lockfile <- function(x) { + x <- transform_local_port(x) + x <- sub("\"os\": \".*\"", "\"os\": \"\"", x) + x <- sub("\"r_version\": \".*\"", "\"r_version\": \"\"", x) + x <- sub("\"platform\": \".*\"", "\"platform\": \"\"", x) + x <- sub("\"filesize\": [0-9]+,", "\"filesize\": 9999,", x) + x <- sub("\"sha256\": \".*\"", "\"sha256\": \"\"", x) + x <- sub("\"install_args\": \".*\"", "\"install_args\": \"\"", x) + x +} + +fake_git <- local({ + dir.create(tmp <- tempfile()) + untar(testthat::test_path("fixtures/git-repo.tar.gz"), exdir = tmp) + app <- git_app(file.path(tmp, "repo")) + webfakes::local_app_process(app) +}) diff --git a/tests/testthat/helper-covr.R b/tests/testthat/helper-covr.R new file mode 100644 index 000000000..f27f6b895 --- /dev/null +++ b/tests/testthat/helper-covr.R @@ -0,0 +1,42 @@ +covr <- function(filter = NULL, pre_clean = TRUE, ...) 
{ + # TODO: check if dependencies were installed w/ coverage support + # Clean up trace files -------------------------------------------------- + if (pre_clean) { + rtrace <- list.files(pattern = "^covr_trace_") + unlink(rtrace) + gcda <- list.files(pattern = "[.]gcda$", recursive = TRUE) + gcov <- list.files(pattern = "[.]gcov$", recursive = TRUE) + unlink(c(gcda, gcov)) + asNamespace("covrlabs")$reset_counters() + } + + # Run tests ------------------------------------------------------------- + testthat::test_dir("tests/testthat", filter = filter, ...) + + # Save R coverage ------------------------------------------------------- + asNamespace("covrlabs")$save_trace() + + # Save C coverage ------------------------------------------------------- + # The rest do not have a gcov_flush hook + # TODO: add hook to jsonlite, lpSolve, filelock, zip, curl, ps, processx. + if (!is.null(pkg_data[["ns"]][["cli"]])) { + .Call(pkg_data$ns$cli$clic__gcov_flush) + .Call(pkg_data$ns$pkgcache$pkgcache__gcov_flush) + } + + # Load and merge trace files + message("Parsing and reading coverage") + cov <- asNamespace("covrlabs")$parse_coverage() + + if (!is.null(filter)) { + cov <- cov[grepl(filter, cov$file), ] + } + + print(summary(cov)) + invisible(cov) +} + +test <- function(filter = NULL) { + load_all_private() + testthat::test_dir("tests/testthat", filter = filter) +} diff --git a/tests/testthat/helper-ppm.R b/tests/testthat/helper-ppm.R new file mode 100644 index 000000000..375f56374 --- /dev/null +++ b/tests/testthat/helper-ppm.R @@ -0,0 +1,37 @@ +ppm_app <- function() { + app <- webfakes::new_app() + + app$get("/ppmversions", function(req, res) { + res$send_json( + text = readLines(testthat::test_path("fixtures/ppm-versions.json")) + ) + }) + + app$get("/ppmstatus", function(req, res) { + res$send_json( + text = readLines(testthat::test_path("fixtures/ppm-status.json")) + ) + }) + + app$get("/rversions", function(req, res) { + res$send_json( + text = 
readLines(testthat::test_path("fixtures/r-versions.json")) + ) + }) + + app$get("/crandb/:pkg", function(req, res) { + if (req$params$pkg == "dplyr") { + res$send_json( + text = readLines(gzfile(testthat::test_path("fixtures/dplyr.json.gz"))) + ) + } else if (req$params$pkg == "bad") { + res$send_status(401) + } else { + res$send_status(404) + } + }) + + app +} + +ppm <- webfakes::local_app_process(ppm_app()) diff --git a/tests/testthat/helpers.R b/tests/testthat/helpers.R index ee1b568a9..c117c6e5b 100644 --- a/tests/testthat/helpers.R +++ b/tests/testthat/helpers.R @@ -1,4 +1,3 @@ - if_fail <- function(expr, fn) { withCallingHandlers(expr, expectation_failure = fn) } @@ -11,7 +10,8 @@ test_temp_file <- function(fileext = "", pattern = "test-file-", } else { withr::defer( try(unlink(tmp, recursive = TRUE, force = TRUE), silent = TRUE), - envir = envir) + envir = envir + ) } if (create) { cat("", file = tmp) @@ -48,3 +48,19 @@ skip_if_offline <- function() { skip_on_cran() if (is_offline()) skip("Offline") } + +local_tempdir <- function(...) { + withr::local_tempdir(...) 
+} + +stub <- function(where, what, how, depth = 1) { + # where_name <- deparse(substitute(where)) + cl <- as.call(list( + quote(mockery::stub), + substitute(where), + what, + how, + depth + )) + eval(cl, parent.frame()) +} diff --git a/tests/testthat/test-cache.R b/tests/testthat/test-cache.R new file mode 100644 index 000000000..1281de5b3 --- /dev/null +++ b/tests/testthat/test-cache.R @@ -0,0 +1,70 @@ +test_that("cache_summary", { + cs <- cache_summary() + expect_equal(names(cs), c("cachepath", "files", "size")) + expect_true(is_string(cs[["cachepath"]])) + expect_true(is_count(cs[["files"]])) + expect_true(is_count(cs[["size"]])) +}) + +test_that("cache_list, cache_delete", { + skip_on_cran() + cache_clean() + cl <- cache_list() + expect_equal(class(cl), c("tbl", "data.frame")) + expect_equal(nrow(cl), 0L) + + local <- withr::local_tempdir() + setup_fake_apps() + suppressMessages(pkg_download("pkg2", local, dependencies = TRUE)) + cl <- cache_list() + expect_equal(sort(cl$package), sort(c("pkg1", "pkg2"))) + expect_true(all(file.exists(cl$fullpath))) + + cache_delete(package = "pkg1") + cl <- cache_list() + expect_equal(cl$package, "pkg2") + + cache_clean() + cl <- cache_list() + expect_equal(nrow(cl), 0L) +}) + +test_that("meta_list, meta_summary, meta_update", { + skip_on_cran() + suppressMessages(meta_clean(force = TRUE)) + ms <- meta_summary() + expect_equal( + names(ms), + c("cachepath", "current_db", "raw_files", "db_files", "size") + ) + expect_equal(ms$size, 0L) + + local <- withr::local_tempdir() + setup_fake_apps() + ml <- meta_list() + expect_equal(class(ml), c("tbl", "data.frame")) + expect_true("pkg3" %in% ml$package) + + if (Sys.getenv("PAK_EXTRA_TESTS") != "true") { + repo <- dcf(" + Package: pkgx + Version: 1.0.0 + ") + setup_fake_apps(cran_repo = repo) + } else { + opt <- options( + repos = c(CRAN = "http://127.0.0.1:3106"), + pkg.cran_metadata_url = "http://127.0.0.1:3106" + ) + on.exit(options(opt), add = TRUE) + } + 
suppressMessages(meta_update()) + ml <- meta_list() + expect_true("pkgx" %in% ml$package) + expect_false("pkg3" %in% ml$package) +}) + +test_that("meta_clean confirmation", { + stub(meta_clean, "get_confirmation2", FALSE) + expect_error(meta_clean(), "aborted") +}) diff --git a/tests/testthat/test-confirmation-1.R b/tests/testthat/test-confirmation-1.R new file mode 100644 index 000000000..e39feaf2b --- /dev/null +++ b/tests/testthat/test-confirmation-1.R @@ -0,0 +1,168 @@ +test_that("should_ask_confirmation", { + expect_true(should_ask_confirmation( + list(lib_status = c("foo", "bar", "update")) + )) + expect_false(should_ask_confirmation( + list(lib_status = c("foo", "bar", "foobar")) + )) + expect_false(should_ask_confirmation( + list(lib_status = character()) + )) +}) + +test_that("print_install_details", { + skip_on_cran() + local <- local_tempdir() + setup_fake_apps() + cache_clean() + load_all_private() + pkgdepends <- pkg_data[["ns"]][["pkgdepends"]] + config <- list(library = local) + mkdirp(local) + + sol <- pkgdepends$new_pkg_installation_proposal("pkg2", config) + suppressMessages(sol$solve()) + expect_snapshot( + print_install_details(sol, local, character()), + transform = transform_bytes + ) + + # updates + mkdirp(file.path(local, "pkg1")) + writeLines( + c("Package: pkg1", "Version: 0.0.0"), + file.path(local, "pkg1", "DESCRIPTION") + ) + sol <- pkgdepends$new_pkg_installation_proposal("pkg1", config) + suppressMessages(sol$solve()) + expect_snapshot( + print_install_details(sol, local, character()), + transform = transform_bytes + ) + unlink(file.path(local, "pkg1"), recursive = TRUE) + + # cached package, or packages + dl <- local_tempdir() + suppressMessages(pkg_download("pkg2", dl, dependencies = TRUE)) + sol <- pkgdepends$new_pkg_installation_proposal("pkg1", config) + suppressMessages(sol$solve()) + expect_snapshot( + print_install_details(sol, local, character()), + transform = transform_bytes + ) + sol <- 
pkgdepends$new_pkg_installation_proposal("pkg2", config) + suppressMessages(sol$solve()) + expect_snapshot( + print_install_details(sol, local, character()), + transform = transform_bytes + ) + + # unknown download size + if (Sys.getenv("PAK_EXTRA_TESTS") == "true") { + skip("Can't run in extra tests.") + } + xpkgs <- dcf("Package: pkgu\nVersion: 1.0.0\n") + xrepo <- webfakes::local_app_process(cran_app( + xpkgs, + options = list(no_metadata = TRUE) + )) + withr::local_options(repos = c(getOption("repos"), XTRA = xrepo$url())) + + sol <- pkgdepends$new_pkg_installation_proposal("pkgu", config) + suppressMessages(sol$solve()) + expect_snapshot( + print_install_details(sol, local, character()), + transform = transform_bytes + ) + + # some cached, some not + sol <- pkgdepends$new_pkg_installation_proposal(c("pkg2", "pkg3"), config) + suppressMessages(sol$solve()) + expect_snapshot( + print_install_details(sol, local, character()), + transform = transform_bytes + ) + + # some cached, some not, some sizes unknown + sol <- pkgdepends$new_pkg_installation_proposal( + c("pkg2", "pkg3", "pkgu"), + config + ) + suppressMessages(sol$solve()) + expect_snapshot( + print_install_details(sol, local, character()), + transform = transform_bytes + ) +}) + +test_that("get_confirmation", { + stub(get_confirmation, "readline", "") + expect_silent(get_confirmation("yes")) + stub(get_confirmation, "readline", "y") + expect_silent(get_confirmation("yes")) + stub(get_confirmation, "readline", "Y") + expect_silent(get_confirmation("yes")) + stub(get_confirmation, "readline", "yes") + expect_silent(get_confirmation("yes")) + + stub(get_confirmation, "readline", "n") + expect_error(get_confirmation("yes", msg = "nope"), "nope") +}) + +test_that("get_confirmation2", { + stub(get_confirmation2, "readline", "") + expect_true(get_confirmation2()) + stub(get_confirmation2, "readline", "y") + expect_true(get_confirmation2()) + stub(get_confirmation2, "readline", "Y") + 
expect_true(get_confirmation2()) + stub(get_confirmation2, "readline", "yes") + expect_true(get_confirmation2()) + + stub(get_confirmation2, "readline", "n") + expect_false(get_confirmation2()) +}) + +test_that("get_answer", { + stub(get_answer, "readline", "foo") + expect_equal(get_answer(c("foo", "bar")), "foo") + + ans <- c("foo", "bar") + idx <- 1 + stub(get_answer, "readline", function(prompt = "") { + cat(prompt) + cat(ans[idx], "\n", sep = "") + idx <<- idx + 1L + ans[idx - 1L] + }) + expect_snapshot( + res <- get_answer(c("this", "bar")) + ) + expect_equal(res, "bar") +}) + +test_that("offer_restart", { + stub( + offer_restart, + "rstudio_detect", + list(type = "not_rstudio") + ) + expect_snapshot(offer_restart()) + + stub( + offer_restart, + "rstudio_detect", + list(type = "rstudio_console") + ) + stub(offer_restart, "get_answer", "1") + stub(offer_restart, "rstudioapi::restartSession", "restart") + expect_snapshot(offer_restart()) + + stub(offer_restart, "get_answer", "2") + stub(offer_restart, "save.image", NULL) + stub(offer_restart, "rstudioapi::restartSession", "save-restart") + expect_snapshot(offer_restart()) + + stub(offer_restart, "get_answer", "3") + expect_snapshot(expect_equal(offer_restart(), "OK")) +}) diff --git a/tests/testthat/test-confirmation-2.R b/tests/testthat/test-confirmation-2.R new file mode 100644 index 000000000..940cf76cb --- /dev/null +++ b/tests/testthat/test-confirmation-2.R @@ -0,0 +1,25 @@ +test_that("print_sysreqs_details", { + # no sysreqs + expect_silent(print_sysreqs_details(list(get_sysreqs = function() NULL))) + + # sysreqs already installed + srq <- list(miss = character(), upd = character(), inst = "libcurl") + prop <- list( + get_sysreqs = function() { + srq + }, + get_config = function() { + list(get = function(...) 
TRUE) + }, + show_sysreqs = function() {} + ) + expect_snapshot(print_sysreqs_details(prop)) + + # sysreqs will be installed + srq$miss <- "libxml2" + expect_snapshot(print_sysreqs_details(prop)) + + # sysreqs needed, but won't be installed + prop$get_config <- function() list(get = function(...) FALSE) + expect_snapshot(print_sysreqs_details(prop)) +}) diff --git a/tests/testthat/test-confirmation-3.R b/tests/testthat/test-confirmation-3.R new file mode 100644 index 000000000..c79b1e829 --- /dev/null +++ b/tests/testthat/test-confirmation-3.R @@ -0,0 +1,25 @@ +test_that("print_install_details, warn_for_loaded_packages on windows", { + skip_on_cran() + local <- withr::local_tempdir() + setup_fake_apps() + cache_clean() + load_all_private() + pkgdepends <- pkg_data[["ns"]][["pkgdepends"]] + config <- list(library = local) + mkdirp(local) + + sol <- pkgdepends$new_pkg_installation_proposal("pkg2", config) + suppressMessages(sol$solve()) + expect_snapshot( + print_install_details(sol, local, character()), + transform = transform_bytes + ) + + mockery::stub(print_install_details, "get_os", "win") + mockery::stub(print_install_details, "warn_for_loaded_packages", "foo") + + expect_equal( + suppressMessages(print_install_details(sol, local, NULL)$loaded_status), + "foo" + ) +}) diff --git a/tests/testthat/test-deps-explain.R b/tests/testthat/test-deps-explain.R new file mode 100644 index 000000000..32b4454a6 --- /dev/null +++ b/tests/testthat/test-deps-explain.R @@ -0,0 +1,9 @@ +test_that("pkg_deps_explain", { + skip_on_cran() + local <- withr::local_tempdir() + setup_fake_apps() + + suppressMessages(meta_list()) + expect_snapshot(pkg_deps_explain("pkg3", "pkg1")) + expect_snapshot(pkg_deps_explain("pkg1", "pkg3")) +}) diff --git a/tests/testthat/test-docs.R b/tests/testthat/test-docs.R new file mode 100644 index 000000000..87307e2ce --- /dev/null +++ b/tests/testthat/test-docs.R @@ -0,0 +1,30 @@ +test_that("doc_config", { + expect_snapshot(writeLines(doc_config())) + + 
mockery::stub(doc_config, "file.exists", FALSE) + expect_match(doc_config(), "Cannot look up documentation") +}) + +test_that("include_docs", { + expect_snapshot( + writeLines(include_docs("pkgdepends", "docs/lib-status-return.rds")) + ) + expect_match( + include_docs("pkgdepends", "foo.rds"), + "Error: cannot load pkgdepends docs" + ) + expect_match( + include_docs("pkgdepends", "foo.rds", top = TRUE), + "Cannot load pkgdepends docs" + ) +}) + +test_that("pak_or_pkgdepends", { + expect_equal(pak_or_pkgdepends(), "pak") +}) + +test_that("man_config_link", { + expect_snapshot( + man_config_link("configuration option") + ) +}) diff --git a/tests/testthat/test-extra.R b/tests/testthat/test-extra.R new file mode 100644 index 000000000..1169dfa9b --- /dev/null +++ b/tests/testthat/test-extra.R @@ -0,0 +1,62 @@ +test_that("extra_paalkages", { + expect_snapshot(extra_packages()) +}) + +test_that("pak_install_extra", { + pkg <- NULL + mockery::stub( + pak_install_extra, + "pak::pkg_install", + function(x, ...) 
pkg <<- x + ) + expect_snapshot(pak_install_extra()) + expect_equal(pkg, extra_packages()) +}) + +test_that("load_extra", { + mockery::stub(load_extra, "requireNamespace", FALSE) + mockery::stub(load_extra, "show_extra", TRUE) + mockery::stub(load_extra, "once_per_session", function(x) x) + expect_snapshot(load_extra("foobar")) + + mockery::stub(load_extra, "show_extra", FALSE) + expect_snapshot(load_extra("foobar")) + + mockery::stub(load_extra, "show_extra", TRUE) + mockery::stub(load_extra, "once_per_session", function(x) NULL) + expect_snapshot(load_extra("foobar")) +}) + +test_that("show_extra", { + withr::local_envvar("CI" = "true") + expect_false(show_extra()) + + withr::local_envvar("CI" = NA_character_) + withr::local_options(pak.no_extra_messages = TRUE) + expect_false(show_extra()) + + withr::local_envvar("CI" = NA_character_) + withr::local_options(pak.no_extra_messages = NULL) + expect_true(show_extra()) +}) + +test_that("hash", { + # hash differes on different locale, R version, so mild check + h <- hash(1:10) + expect_match(h, "^[0-9a-f]{32}$", perl = TRUE) + expect_equal(hash(1:10), h) +}) + +test_that("once_per_session", { + environment(once_per_session)$seen <- character() + expect_equal(once_per_session(1L + 1L + 1L), 3L) + expect_null(once_per_session(1L + 1L + 1L)) +}) + +test_that("pkg_is_installed", { + rpkg <- basename(tempfile()) + expect_equal( + pkg_is_installed(c("pak", "stats", rpkg)), + structure(c(TRUE, TRUE, FALSE), names = c("pak", "stats", rpkg)) + ) +}) diff --git a/tests/testthat/test-find-package-root.R b/tests/testthat/test-find-package-root.R new file mode 100644 index 000000000..848ddde15 --- /dev/null +++ b/tests/testthat/test-find-package-root.R @@ -0,0 +1,48 @@ +test_that("find_package_root", { + tmp <- tempfile() + on.exit(unlink(tmp, recursive = TRUE), add = TRUE) + mkdirp(file.path(tmp, "a", "b", "c", "d")) + lns <- "Package: this" + writeLines(lns, file.path(tmp, "DESCRIPTION")) + + expect_equal( + 
readLines(file.path(find_package_root(tmp), "DESCRIPTION")), + lns + ) + + expect_equal( + readLines(file.path( + find_package_root(file.path(tmp, "a")), "DESCRIPTION" + )), + lns + ) + + expect_equal( + readLines(file.path( + find_package_root(file.path(tmp, "a", "b", "c", "d")), "DESCRIPTION" + )), + lns + ) + + wd <- getwd() + on.exit(setwd(wd), add = TRUE) + setwd(file.path(tmp, "a", "b", "c")) + expect_equal( + readLines(file.path(find_package_root("."), "DESCRIPTION")), + lns + ) +}) + +test_that("find_package_root errors", { + expect_error( + find_package_root(basename(tempfile())), + "Path does not exist" + ) + + if (!file.exists("/DESCRIPTION")) { + expect_error( + find_package_root("/"), + "Could not find R package" + ) + } +}) diff --git a/tests/testthat/test-global-handler.R b/tests/testthat/test-global-handler.R new file mode 100644 index 000000000..c12183550 --- /dev/null +++ b/tests/testthat/test-global-handler.R @@ -0,0 +1,42 @@ +test_that("handle_package_not_found", { + mockery::stub(handle_package_not_found, "is_interactive", FALSE) + expect_silent(handle_package_not_found()) + + mockery::stub(handle_package_not_found, "is_interactive", TRUE) + mockery::stub(handle_package_not_found, "sink.number", 1) + expect_silent(handle_package_not_found()) + + mockery::stub(handle_package_not_found, "sink.number", 0) + mockery::stub(handle_package_not_found, "findRestart", NULL) + mockery::stub(handle_package_not_found, "get_answer", "2") + expect_snapshot( + handle_package_not_found(err = list(package = "foo", lib.loc = "/lib")) + ) + + mockery::stub(handle_package_not_found, "get_answer", "1") + mockery::stub( + handle_package_not_found, + "pkg_install", + function(x, ...) 
{ + message("Installing...") + pkg <<- x + } + ) + pkg <- NULL + expect_snapshot( + handle_package_not_found(err = list(package = "foo", lib.loc = "/lib")) + ) + expect_equal(pkg, "foo") + + mockery::stub(handle_package_not_found, "findRestart", "foo") + mockery::stub( + handle_package_not_found, + "invokeRestart", + function(...) restart <<- TRUE + ) + restart <- NULL + expect_snapshot( + handle_package_not_found(err = list(package = "foo", lib.loc = "/lib")) + ) + expect_true(restart) +}) diff --git a/tests/testthat/test-json.R b/tests/testthat/test-json.R new file mode 100644 index 000000000..474434d0e --- /dev/null +++ b/tests/testthat/test-json.R @@ -0,0 +1,144 @@ +test_that("JSON is standalone", { + ## baseenv() makes sure that the remotes package env is not used + env <- new.env(parent = baseenv()) + env$json <- json + stenv <- env$json$.internal + objs <- ls(stenv, all.names = TRUE) + funs <- Filter(function(x) is.function(stenv[[x]]), objs) + funobjs <- mget(funs, stenv) + + expect_message( + mapply(codetools::checkUsage, funobjs, funs, + MoreArgs = list(report = message) + ), + NA + ) +}) + +test_that("JSON parser scalars", { + expect_equal(json$parse('"foobar"'), "foobar") + expect_equal(json$parse('""'), "") + + expect_equal(json$parse("42"), 42) + expect_equal(json$parse("-42"), -42) + expect_equal(json$parse("42.42"), 42.42) + expect_equal(json$parse("1e2"), 1e2) + expect_equal(json$parse("-0.1e-2"), -0.1e-2) + + expect_equal(json$parse("null"), NULL) + expect_equal(json$parse("true"), TRUE) + expect_equal(json$parse("false"), FALSE) +}) + +test_that("JSON parser arrays", { + cases <- list( + list("[1,2,3]", list(1, 2, 3)), + list("[1]", list(1)), + list("[]", list()), + list('["foo"]', list("foo")), + list('["foo", 1, "bar", true]', list("foo", 1, "bar", TRUE)) + ) + + for (c in cases) { + r <- json$parse(c[[1]]) + expect_equal(r, c[[2]], info = c[[1]]) + } +}) + +test_that("JSON parser nested arrays", { + cases <- list( + list('[1,2, ["foo", "bar"], 
3]', list(1, 2, list("foo", "bar"), 3)), + list("[ [ [ 1 ] ] ]", list(list(list(1)))), + list("[ [ [ ] ] ]", list(list(list()))) + ) + + for (c in cases) { + r <- json$parse(c[[1]]) + expect_equal(r, c[[2]], info = c[[1]]) + } +}) + +test_that("JSON parser, real examples", { + inp <- ' +{ + "sha": "e183ccdc515bbb8e7f32d8d16586aed9eea6de0b", + "commit": { + "author": { + "name": "Hadley Wickham", + "email": "h.wickham@gmail.com", + "date": "2015-03-30T13:55:18Z" + }, + "committer": { + "name": "Hadley Wickham", + "email": "h.wickham@gmail.com", + "date": "2015-03-30T13:55:18Z" + }, + "message": "Merge pull request #22 from paulstaab/HEAD\\n\\nImprove error message for assertions of length 0", + "tree": { + "sha": "f2e840b7a134fbc118597842992aa50048e0fa04", + "url": "https://api.github.com/repos/hadley/assertthat/git/trees/f2e840b7a134fbc118597842992aa50048e0fa04" + }, + "url": "https://api.github.com/repos/hadley/assertthat/git/commits/e183ccdc515bbb8e7f32d8d16586aed9eea6de0b", + "comment_count": 0 + } +}' + + exp <- list( + sha = "e183ccdc515bbb8e7f32d8d16586aed9eea6de0b", + commit = list( + author = list( + name = "Hadley Wickham", + email = "h.wickham@gmail.com", + date = "2015-03-30T13:55:18Z" + ), + committer = list( + name = "Hadley Wickham", + email = "h.wickham@gmail.com", + date = "2015-03-30T13:55:18Z" + ), + message = "Merge pull request #22 from paulstaab/HEAD\\n\\nImprove error message for assertions of length 0", + tree = list( + sha = "f2e840b7a134fbc118597842992aa50048e0fa04", + url = "https://api.github.com/repos/hadley/assertthat/git/trees/f2e840b7a134fbc118597842992aa50048e0fa04" + ), + url = "https://api.github.com/repos/hadley/assertthat/git/commits/e183ccdc515bbb8e7f32d8d16586aed9eea6de0b", + comment_count = 0 + ) + ) + + expect_equal(json$parse(inp), exp) +}) + +test_that("JSON parser, errors", { + expect_error( + json$parse("[1,2,3,"), + "EXPECTED value GOT EOF" + ) + + expect_error( + json$parse('{ 123: "foo" }'), + "EXPECTED string GOT 123" 
+ ) + + expect_error( + json$parse('{ "foo" "foobar" }'), + 'EXPECTED : GOT "foobar"' + ) + + expect_error( + json$parse('{ "foo": "foobar" "foo2": "foobar2" }'), + 'EXPECTED , or } GOT "foo2"' + ) + + expect_error( + json$parse("[1,2,3 4]"), + "EXPECTED , GOT 4" + ) +}) + +test_that("parse_file", { + local <- withr::local_tempdir() + path <- file.path(local, "test.json") + writeLines("[1,2,3,4]", path) + expect_equal(json$parse_file(path), as.list(1:4)) +}) diff --git a/tests/testthat/test-load-all-private.R b/tests/testthat/test-load-all-private.R new file mode 100644 index 000000000..946469515 --- /dev/null +++ b/tests/testthat/test-load-all-private.R @@ -0,0 +1,18 @@ +test_that("load_all_private", { + # we need to fix a finalizer, probably in cli for this to work, + # otherwise it crashes on older R versions + skip("may cause crash") + pkg_data[["ns"]] <- NULL + load_all_private() + expect_equal( + embedded_call("pkgdepends", "lib_status"), + pkg_data[["ns"]][["pkgdepends"]][["lib_status"]] + ) +}) + +test_that("embedded_call", { + expect_equal( + embedded_call("pkgdepends", "lib_status"), + pkg_data[["ns"]][["pkgdepends"]][["lib_status"]] + ) +}) diff --git a/tests/testthat/test-local.R b/tests/testthat/test-local.R new file mode 100644 index 000000000..6f72d49a1 --- /dev/null +++ b/tests/testthat/test-local.R @@ -0,0 +1,87 @@ +test_that("local_install", { + skip_on_cran() + lib <- withr::local_tempdir() + dld <- withr::local_tempdir() + cache_clean() + setup_fake_apps() + + suppressMessages(dl <- pkg_download("pkg3", dld)) + file.copy(dl$fulltarget, dld) + pkg <- file.path(dld, basename(dl$fulltarget)) + utils::untar(pkg, exdir = dld) + mkdirp(lib) + suppressMessages(local_install(file.path(dld, "pkg3"), lib = lib)) + expect_true(file.exists(file.path(lib, "pkg1"))) + expect_true(file.exists(file.path(lib, "pkg2"))) + expect_true(file.exists(file.path(lib, "pkg3"))) +}) + +test_that("local_install_deps", { + skip_on_cran() + lib <- withr::local_tempdir() + 
dld <- withr::local_tempdir() + cache_clean() + setup_fake_apps() + + suppressMessages(dl <- pkg_download("pkg4", dld)) + file.copy(dl$fulltarget, dld) + pkg <- file.path(dld, basename(dl$fulltarget)) + utils::untar(pkg, exdir = dld) + suppressMessages(local_install_deps( + file.path(dld, "pkg4"), + lib = lib + )) + expect_true(file.exists(file.path(lib, "pkg1"))) + expect_true(file.exists(file.path(lib, "pkg2"))) + expect_false(file.exists(file.path(lib, "pkg3"))) + expect_false(file.exists(file.path(lib, "pkg4"))) +}) + +test_that("local_install_dev_deps", { + skip_on_cran() + lib <- withr::local_tempdir() + dld <- withr::local_tempdir() + cache_clean() + setup_fake_apps() + + suppressMessages(dl <- pkg_download("pkg4", dld)) + file.copy(dl$fulltarget, dld) + pkg <- file.path(dld, basename(dl$fulltarget)) + utils::untar(pkg, exdir = dld) + suppressMessages(local_install_dev_deps( + file.path(dld, "pkg4"), + lib = lib + )) + expect_true(file.exists(file.path(lib, "pkg1"))) + expect_true(file.exists(file.path(lib, "pkg2"))) + expect_true(file.exists(file.path(lib, "pkg3"))) + expect_false(file.exists(file.path(lib, "pkg4"))) +}) + +test_that("local_deps & co", { + skip_on_cran() + lib <- withr::local_tempdir() + dld <- withr::local_tempdir() + cache_clean() + setup_fake_apps() + + suppressMessages(dl <- pkg_download("pkg4", dld)) + file.copy(dl$fulltarget, dld) + pkg <- file.path(dld, basename(dl$fulltarget)) + utils::untar(pkg, exdir = dld) + expect_snapshot(local_deps(file.path(dld, "pkg4"))$package) + expect_snapshot(local_dev_deps(file.path(dld, "pkg4"))$package) + + # otherwise long temp dir might be cut off + withr::local_options(cli.width = 500) + expect_snapshot( + local_deps_tree(file.path(dld, "pkg4")), + transform = function(x) transform_tempdir(transform_bytes(x)) + ) + expect_snapshot( + local_dev_deps_tree(file.path(dld, "pkg4")), + transform = function(x) transform_tempdir(transform_bytes(x)) + ) + expect_snapshot(local_deps_explain("pkg1", 
file.path(dld, "pkg4"))) + expect_snapshot(local_dev_deps_explain("pkg3", file.path(dld, "pkg4"))) +}) diff --git a/tests/testthat/test-lockfile.R b/tests/testthat/test-lockfile.R new file mode 100644 index 000000000..ca6b661f5 --- /dev/null +++ b/tests/testthat/test-lockfile.R @@ -0,0 +1,37 @@ +test_that("lockfile_create, lockfile_install", { + skip_on_cran() + dld <- withr::local_tempdir() + withr::local_options(pkg.sysreqs_platform = "", pkg.sysreqs = FALSE) + cache_clean() + setup_fake_apps() + + suppressMessages(dl <- pkg_download("pkg4", dld)) + file.copy(dl$fulltarget, dld) + pkg <- file.path(dld, basename(dl$fulltarget)) + utils::untar(pkg, exdir = dld) + withr::local_dir(file.path(dld, "pkg4")) + lockfile_create(lockfile = "deps.lock") + expect_snapshot( + writeLines(readLines("deps.lock")), + transform = transform_lockfile + ) + + lockfile_create(lockfile = "dev.lock", dependencies = TRUE) + expect_snapshot( + writeLines(readLines("dev.lock")), + transform = transform_lockfile + ) + + lib <- withr::local_tempdir() + suppressMessages(lockfile_install("deps.lock", lib = lib)) + expect_true(file.exists(file.path(lib, "pkg1"))) + expect_true(file.exists(file.path(lib, "pkg2"))) + expect_false(file.exists(file.path(lib, "pkg3"))) + expect_false(file.exists(file.path(lib, "pkg4"))) + + suppressMessages(lockfile_install("dev.lock", lib = lib)) + expect_true(file.exists(file.path(lib, "pkg1"))) + expect_true(file.exists(file.path(lib, "pkg2"))) + expect_true(file.exists(file.path(lib, "pkg3"))) + expect_false(file.exists(file.path(lib, "pkg4"))) +}) diff --git a/tests/testthat/test-name-check.R b/tests/testthat/test-name-check.R new file mode 100644 index 000000000..dc9248dff --- /dev/null +++ b/tests/testthat/test-name-check.R @@ -0,0 +1,23 @@ +test_that("pkg_name_check", { + ret <- readRDS(test_path("fixtures/name-check.rds")) + mockery::stub( + pkg_name_check, + "embedded_call", + function(...) function(...) 
ret[[1]] + ) + expect_snapshot(pkg_name_check("tools")) + mockery::stub( + pkg_name_check, + "embedded_call", + function(...) function(...) ret[[2]] + ) + expect_snapshot(pkg_name_check("tools", "urban")) +}) + +test_that("format.pak_pkg_name_check, print.pak_pkg_name_check", { + ret <- readRDS(test_path("fixtures/name-check.rds")) + class(ret[[1]]) <- c("pak_pkg_name_check", class(ret[[1]])) + expect_snapshot(print(ret[[1]])) + class(ret[[2]]) <- c("pak_pkg_name_check", class(ret[[2]])) + expect_snapshot(print(ret[[2]])) +}) diff --git a/tests/testthat/test-package.R b/tests/testthat/test-package.R new file mode 100644 index 000000000..55d2dbab4 --- /dev/null +++ b/tests/testthat/test-package.R @@ -0,0 +1,68 @@ +test_that("pkg_install", { + skip_on_cran() + local <- withr::local_tempdir() + setup_fake_apps() + suppressMessages(pkg_install("pkg2", lib = local)) + expect_true(file.exists(file.path(local, "pkg2"))) + # depedency is installed as well + expect_true(file.exists(file.path(local, "pkg1"))) +}) + +test_that("pkg_status", { + ps <- pkg_status("stats", lib = .Library) + expect_equal(ps$priority, "base") + + ps <- pkg_status(c("stats", "utils"), lib = .Library) + expect_equal(ps$priority, c("base", "base")) +}) + +test_that("pkg_remove", { + skip_on_cran() + local <- withr::local_tempdir() + setup_fake_apps() + suppressMessages(pkg_install("pkg2", lib = local)) + expect_true(file.exists(file.path(local, "pkg2"))) + # depedency is installed as well + expect_true(file.exists(file.path(local, "pkg1"))) + pkg_remove("pkg2", lib = local) + expect_false(file.exists(file.path(local, "pkg2"))) +}) + +test_that("pkg_deps", { + skip_on_cran() + local <- withr::local_tempdir() + setup_fake_apps() + suppressMessages(pd <- pkg_deps("pkg2")) + expect_equal(sort(pd$package), sort(c("pkg1", "pkg2"))) +}) + +test_that("pkg_deps_tree", { + skip_on_cran() + local <- withr::local_tempdir() + setup_fake_apps() + cache_clean() + suppressMessages(pdt <- pkg_deps_tree("pkg2")) + 
expect_equal(sort(pdt$package), sort(c("pkg1", "pkg2"))) + expect_snapshot(print(pdt), transform = transform_bytes) + # [] will drop the tree, keeps the data + expect_false("pak_pkg_deps_tree" %in% class(pdt[])) + expect_equal(sort(pdt$package), sort(c("pkg1", "pkg2"))) +}) + +test_that("pkg_list", { + skip_on_cran() + local <- withr::local_tempdir() + setup_fake_apps() + suppressMessages(pkg_install("pkg2", lib = local)) + pl <- pkg_list(lib = local) + expect_equal(sort(pl$package), sort(c("pkg1", "pkg2"))) +}) + +test_that("pkg_download", { + skip_on_cran() + local <- withr::local_tempdir() + setup_fake_apps() + suppressMessages(ret <- pkg_download("pkg2", local, dependencies = TRUE)) + expect_equal(sort(ret$package), sort(c("pkg1", "pkg2"))) + expect_true(all(file.exists(ret$fulltarget))) +}) diff --git a/tests/testthat/test-pak-cleanup.R b/tests/testthat/test-pak-cleanup.R new file mode 100644 index 000000000..0e601f465 --- /dev/null +++ b/tests/testthat/test-pak-cleanup.R @@ -0,0 +1,43 @@ +test_that("pak_cleanup", { + mockery::stub(pak_cleanup, "interactive", FALSE) + expect_error(pak_cleanup(), "Refused to clean up") + + mockery::stub(pak_cleanup, "pak_cleanup_package_cache", NULL) + mockery::stub(pak_cleanup, "pak_cleanup_metadata_cache", NULL) + expect_silent(pak_cleanup(force = TRUE)) + + mockery::stub(pak_cleanup, "interactive", TRUE) + expect_silent(pak_cleanup()) +}) + +test_that("pak_cleanup_package_cache", { + cache_clean() + mockery::stub(pak_cleanup_package_cache, "get_confirmation2", FALSE) + expect_snapshot( + pak_cleanup_package_cache(force = FALSE), + transform = function(x) sub("in '.*'", "in ''", x) + ) + + cache <- cache_summary()[["cachepath"]] + root <- dirname(cache) + mockery::stub(pak_cleanup_package_cache, "get_confirmation2", TRUE) + expect_snapshot(pak_cleanup_package_cache(force = TRUE)) + expect_false(file.exists(cache)) + expect_true(file.exists(root)) +}) + +test_that("pak_cleanup_metadata_cache", { + 
suppressMessages(meta_clean(force = TRUE)) + mockery::stub(pak_cleanup_metadata_cache, "get_confirmation2", FALSE) + expect_snapshot( + pak_cleanup_metadata_cache(force = FALSE), + transform = function(x) sub("in '.*'", "in ''", x) + ) + + cache <- meta_summary()[["cachepath"]] + root <- dirname(cache) + mockery::stub(pak_cleanup_metadata_cache, "get_confirmation2", TRUE) + expect_snapshot(pak_cleanup_metadata_cache(force = TRUE)) + expect_false(file.exists(cache)) + expect_true(file.exists(root)) +}) diff --git a/tests/testthat/test-pak-install.R b/tests/testthat/test-pak-install.R new file mode 100644 index 000000000..7f367489a --- /dev/null +++ b/tests/testthat/test-pak-install.R @@ -0,0 +1,3 @@ +test_that("pak_setup", { + expect_warning(pak_setup(), "deprecated") +}) diff --git a/tests/testthat/test-pak-sitrep.R b/tests/testthat/test-pak-sitrep.R new file mode 100644 index 000000000..8be2ffd07 --- /dev/null +++ b/tests/testthat/test-pak-sitrep.R @@ -0,0 +1,24 @@ +test_that("pak_sitrep", { + skip_on_cran() + sitrep <- capture_output(pak_sitrep()) + expect_match(sitrep, "pak version:", fixed = TRUE) + expect_match( + sitrep, + asNamespace("pak")[[".__NAMESPACE__."]][["spec"]][["version"]], + fixed = TRUE + ) + expect_match(sitrep, "pak platform:", fixed = TRUE) + expect_match(sitrep, "Optional packages installed:", fixed = TRUE) + expect_match(sitrep, "Library path:", fixed = TRUE) + expect_match(sitrep, .libPaths()[1], fixed = TRUE) + + mockery::stub(pak_sitrep, "pkg_is_installed", FALSE) + mockery::stub(pak_sitrep, "is_load_all", TRUE) + sitrep <- capture_output(pak_sitrep()) + expect_match(sitrep, "Optional packages missing", fixed = TRUE) + expect_match(sitrep, "Using `load_all()` from", fixed = TRUE) + + mockery::stub(pak_sitrep, "is_load_all", FALSE) + sitrep <- capture_output(pak_sitrep()) + expect_match(sitrep, "pak is installed at", fixed = TRUE) +}) diff --git a/tests/testthat/test-pak-update.R b/tests/testthat/test-pak-update.R new file mode 100644 
index 000000000..67acb8490 --- /dev/null +++ b/tests/testthat/test-pak-update.R @@ -0,0 +1,154 @@ +test_that("detect_platform", { + macos <- list( + platform = "aarch64-apple-darwin20", + arch = "aarch64", + os = "darwin20", + system = "aarch64, darwin20", + status = "", + major = "4", + minor = "3.2", + year = "2023", + month = "10", + ay = "31", + `svn rev` = "85441", + language = "R", + version.string = "R version 4.3.2 (2023-10-31)", + nickname = "Eye Holes" + ) + mockery::stub(detect_platform, "R.Version", macos) + mockery::stub(detect_platform, "getRversion", package_version("4.3.2")) + expect_snapshot(detect_platform()) + + linux <- list( + platform = "x86_64-pc-linux-gnu", + arch = "x86_64", + os = "linux-gnu", + system = "x86_64, linux-gnu", + status = "", + major = "4", + minor = "3.2", + year = "2023", + month = "10", + day = "31", + `svn rev` = "85441", + language = "R", + version.string = "R version 4.3.2 (2023-10-31)", + nickname = "Eye Holes" + ) + mockery::stub(detect_platform, "R.Version", linux) + mockery::stub(detect_platform, "getRversion", package_version("4.3.2")) + expect_snapshot(detect_platform()) +}) + +test_that("pak_stream", { + expect_equal(pak_stream("foo"), "foo") + mockery::stub(pak_stream, "utils::packageVersion", "0.7.1") + expect_equal(pak_stream(), "stable") + mockery::stub(pak_stream, "utils::packageVersion", "0.7.1.9999") + expect_equal(pak_stream(), "rc") + mockery::stub(pak_stream, "utils::packageVersion", "0.7.1.9000") + expect_equal(pak_stream(), "devel") + mockery::stub(pak_stream, "utils::packageVersion", "0.7.1.9001") + expect_equal(pak_stream(), "devel") +}) + +test_that("pak_repo", { + expect_snapshot({ + pak_repo("devel") + pak_repo("rc") + pak_repo("stable") + }) +}) + +test_that("pak_repo_metadata", { + meta_path <- paste0("file:///", normalizePath(test_path("fixtures")), "/") + expect_snapshot(pak_repo_metadata(meta_path)) +}) + +test_that("pak_update", { + mockery::stub( + pak_update, + "pak_repo", + 
paste0("file:///", normalizePath(test_path("fixtures")), "/") + ) + macos <- list(os = "darwin20", arch = "aarch64", rver = "4.3") + s390x <- list(os = "linux", arch = "s390x", rver = "4.3") + mockery::stub(pak_update, "detect_platform", s390x) + mockery::stub(pak_update, "is_load_all", FALSE) + expect_snapshot(error = TRUE, pak_update()) + + # no need to update, load_all() warning + mockery::stub(pak_update, "detect_platform", macos) + mockery::stub(pak_update, "check_mac_cran_r", TRUE) + mockery::stub(pak_update, "should_update_to", FALSE) + mockery::stub(pak_update, "is_load_all", TRUE) + transform_lib <- function(x) { + sub(.libPaths()[1], "", x, fixed = TRUE) + } + expect_snapshot(pak_update(), transform = transform_lib) + + # same, w/o load_all() warning + mockery::stub(pak_update, "is_load_all", FALSE) + expect_snapshot(pak_update(), transform = transform_lib) + + mockery::stub(pak_update, "should_update_to", TRUE) +}) + +test_that("pak_update_unsupported_platform", { + meta_path <- paste0("file:///", normalizePath(test_path("fixtures")), "/") + meta <- pak_repo_metadata(meta_path) + me <- list(os = "Linux", arch = "s390x", rver = "4.3") + expect_snapshot( + pak_update_unsupported_platform("devel", me, meta), + error = TRUE + ) +}) + +test_that("check_mac_cran_r", { + expect_silent(check_mac_cran_r(list(os = "linux"))) + mockery::stub(check_mac_cran_r, "platform_pkgtype", "source") + me <- list(os = "darwin20") + expect_snapshot( + error = TRUE, + check_mac_cran_r(me) + ) +}) + +test_that("platform_pkgtype", { + expect_equal(platform_pkgtype(), .Platform$pkgType) +}) + +test_that("should_update_to", { + mockery::stub( + should_update_to, + "utils::packageDescription", + list( + Version = utils::packageVersion("pak"), + Built = "R 4.3.2; aarch64-apple-darwin20; 2024-01-25 11:40:41 UTC; unix" + ) + ) + expect_true(should_update_to(list(Version = "1000.0.0"))) + expect_true(should_update_to(list( + Version = utils::packageVersion("pak"), + Built = "R 4.3.2; 
aarch64-apple-darwin20; 2124-01-25 11:40:41 UTC; unix" + ))) + expect_false(should_update_to(list( + Version = utils::packageVersion("pak"), + Built = "R 4.3.2; aarch64-apple-darwin20; 2000-01-25 11:40:41 UTC; unix" + ))) + + mockery::stub( + should_update_to, + "R.Version", + list(platform = "s390x-pc-linux-gnu") + ) + expect_snapshot(expect_true(should_update_to())) +}) + +test_that("get_built_date", { + expect_equal(get_built_date(NULL), NA_character_) + expect_equal( + get_built_date("R 4.3.2; aarch64-apple-darwin20; 2024-01-25 11:40:41 UTC; unix"), + "2024-01-25 11:40:41 UTC" + ) +}) diff --git a/tests/testthat/test-pak.R b/tests/testthat/test-pak.R new file mode 100644 index 000000000..519548b82 --- /dev/null +++ b/tests/testthat/test-pak.R @@ -0,0 +1,21 @@ +test_that("pak", { + mode <- NULL + mockery::stub( + pak, + "local_install_dev_deps", + function(...) mode <<- list("li", ...) + ) + mockery::stub( + pak, + "pkg_install", + function(...) mode <<- list("pi", ...) + ) + pak() + expect_equal(mode, list("li")) + pak(arg = "arg") + expect_equal(mode, list("li", arg = "arg")) + pak("pkg1") + expect_equal(mode, list("pi", "pkg1")) + pak("pkg1", arg = "arg") + expect_equal(mode, list("pi", "pkg1", arg = "arg")) +}) diff --git a/tests/testthat/test-ppm.R b/tests/testthat/test-ppm.R new file mode 100644 index 000000000..87e095f71 --- /dev/null +++ b/tests/testthat/test-ppm.R @@ -0,0 +1,43 @@ +test_that("ppm_has_binaries", { + expect_silent(ppm_has_binaries()) +}) + +test_that("ppm_platforms", { + withr::local_envvar(PKGCACHE_PPM_STATUS_URL = ppm$url("/ppmstatus")) + expect_snapshot(as.data.frame(ppm_platforms())) +}) + +test_that("ppm_r_versions", { + withr::local_envvar(PKGCACHE_PPM_STATUS_URL = ppm$url("/ppmstatus")) + expect_snapshot(as.data.frame(ppm_r_versions())) +}) + +test_that("ppm_repo_url", { + withr::local_envvar( + PKGCACHE_PPM_URL = NA_character_, + PKGCACHE_RSPM_URL = my <- "https://my.rspm/repo" + ) + expect_equal(ppm_repo_url(), my) +}) + 
+test_that("ppm_repo_url 2", { + withr::local_envvar( + PKGCACHE_PPM_URL = NA_character_, + PKGCACHE_RSPM_URL = NA_character_ + ) + withr::local_options( + repos = c( + RSPM = "https://packagemanager.rstudio.com/all/__linux__/jammy/latest", + CRAN = "https://cran.rstudio.com" + ) + ) + + expect_equal(ppm_repo_url(), "https://packagemanager.rstudio.com/all") +}) + +test_that("ppm_snapshots", { + withr::local_envvar( + PKGCACHE_PPM_TRANSACTIONS_URL = ppm$url("/ppmversions") + ) + expect_snapshot(ppm_snapshots()) +}) diff --git a/tests/testthat/test-private-lib.R b/tests/testthat/test-private-lib.R index ab41d001d..39a4b26af 100644 --- a/tests/testthat/test-private-lib.R +++ b/tests/testthat/test-private-lib.R @@ -1,64 +1,23 @@ - -test_that("loading package from private lib", { - skip_on_cran() - on.exit(pkg_data$ns <- list(), add = TRUE) - pkg_data$ns$processx <- NULL - gc() - - ## Load - load_private_package("processx", "c_") - pkgdir <- normalizePath(pkg_data$ns$processx[["__pkg-dir__"]]) - - ## Check if loaded - expect_true(is.function(pkg_data$ns$processx$run)) - expect_true(file.exists(pkgdir)) - paths <- normalizePath(sapply(.dynLibs(), "[[", "path")) - expect_true(any(grepl(pkgdir, paths, fixed = TRUE))) +test_that("private_lib_dir", { + mockery::stub(private_lib_dir, "file.path", "foobar") + mockery::stub(private_lib_dir, "file.exists", TRUE) + expect_equal(private_lib_dir(), c(embedded = "foobar")) }) -test_that("cleanup of temp files", { - skip("cleanup not working") - skip_on_cran() - on.exit(pkg_data$ns <- list(), add = TRUE) - pkg_data$ns$processx <- NULL - gc() - - ## Load - load_private_package("processx", "c_") - pkgdir <- normalizePath(pkg_data$ns$processx[["__pkg-dir__"]]) - - ## Check if loaded - expect_true(is.function(pkg_data$ns$processx$run)) - expect_true(file.exists(pkgdir)) - paths <- normalizePath(sapply(.dynLibs(), "[[", "path")) - expect_true(any(grepl(pkgdir, paths, fixed = TRUE))) - - pkg_data <- asNamespace("pak")$pkg_data - 
pkg_data$ns$processx <- NULL - gc(); gc() - - expect_false(file.exists(pkgdir)) - paths <- sapply(.dynLibs(), "[[", "path") - expect_false(any(grepl(pkgdir, paths, fixed = TRUE))) +test_that("private_lib_dir 2", { + mockery::stub(private_lib_dir, "file.exists", FALSE) + withr::local_envvar(c(PAK_PRIVATE_LIBRARY = "foobar2")) + expect_equal(private_lib_dir(), "foobar2") }) -test_that("no interference", { - skip_on_cran() - on.exit(pkg_data$ns <- list(), add = TRUE) - pkg_data$ns$processx <- NULL - gc() - - asNamespace("ps") - expect_true("ps" %in% loadedNamespaces()) - expect_true("ps" %in% sapply(.dynLibs(), "[[", "name")) - - load_private_package("ps") - expect_true(is.function(pkg_data$ns$ps$ps)) - expect_true(is.function(asNamespace("ps")$ps)) - - pkg_data$ns$ps <- NULL - gc(); gc() - - expect_true("ps" %in% loadedNamespaces()) - expect_true("ps" %in% sapply(.dynLibs(), "[[", "name")) +test_that("private_lib_dir 3", { + mockery::stub(private_lib_dir, "file.exists", FALSE) + withr::local_envvar(c(PAK_PRIVATE_LIBRARY = NA_character_)) + mockery::stub(private_lib_dir, "user_cache_dir", "cached-dir") + mockery::stub(private_lib_dir, "get_minor_r_version", "4.3") + mockery::stub(private_lib_dir, "R.Version", list(arch = "arm64")) + expect_true(private_lib_dir() %in% c( + "cached-dir\\lib\\4.3\\arm64", + "cached-dir/lib/4.3/arm64" + )) }) diff --git a/tests/testthat/test-subprocess.R b/tests/testthat/test-subprocess.R deleted file mode 100644 index 637d65779..000000000 --- a/tests/testthat/test-subprocess.R +++ /dev/null @@ -1,45 +0,0 @@ - -test_that("no dependencies are loaded with pak", { - - skip_on_cran() - - ## Skip this is covr, because covr loads a bunch of other packages - ## for some reason - skip_if(Sys.getenv("R_COVR", "") == "true", "not run in covr") - - new_pkgs <- callr::r( - function() { - withr::with_options(list(pkg.subprocess = FALSE), { - orig <- loadedNamespaces() - library(pak) - new <- loadedNamespaces() - }) - setdiff(new, orig) - }, - timeout 
= 5 - ) - - if_fail( - expect_true(all(new_pkgs %in% c("pak", "rstudioapi", base_packages()))), - function(e) print(new_pkgs) - ) -}) - -test_that("remote", { - pid <- remote(function() Sys.getpid()) - expect_equal(pid, pkg_data$remote$get_pid()) - expect_equal(remote(function() 4 + 4), 8) -}) - -test_that("remote messages", { - skip_on_cran() - expect_snapshot( - invisible(remote(function() cli::cli_text("just once"))) - ) - expect_snapshot( - withCallingHandlers( - invisible(remote(function() cli::cli_text("just once"))), - message = function(m) print(m) - ) - ) -}) diff --git a/tests/testthat/test-utils.R b/tests/testthat/test-utils.R index 944873464..ae82f8e2b 100644 --- a/tests/testthat/test-utils.R +++ b/tests/testthat/test-utils.R @@ -1,16 +1,352 @@ +test_that("%||%", { + expect_equal("foo" %||% "bar", "foo") + expect_equal(NULL %||% "bar", "bar") + expect_equal(NULL %||% NULL, NULL) +}) + +test_that("vcapply", { + expect_equal( + vcapply(character(0), function(x) "foo"), + structure(character(), names = character()) + ) + expect_equal( + vcapply("foo", function(x) x), + c(foo = "foo") + ) + expect_equal( + vcapply(c(bar = "foo"), function(x) x), + c(bar = "foo") + ) + expect_equal( + vcapply(letters, function(x) x), + structure(letters, names = letters) + ) + expect_snapshot(error = TRUE, { + vcapply(letters, function(x) 1L) + vcapply(1:5, function(x) c("foo", "bar")) + }) +}) + +test_that("vlapply", { + expect_equal( + vlapply(character(0), function(x) TRUE), + structure(logical(), names = character()) + ) + expect_equal( + vlapply("foo", function(x) TRUE), + c(foo = TRUE) + ) + expect_equal( + vlapply(c(bar = "foo"), function(x) TRUE), + c(bar = TRUE) + ) + expect_equal( + vlapply(letters, function(x) match(x, letters) %% 2 == 0), + structure(seq_along(letters) %% 2 == 0, names = letters) + ) + expect_snapshot(error = TRUE, { + vlapply(letters, function(x) 1L) + vlapply(1:5, function(x) c(TRUE, FALSE)) + }) +}) + +test_that("viapply", { + expect_equal( + 
viapply(character(0), function(x) 1L), + structure(integer(), names = character()) + ) + expect_equal( + viapply("foo", function(x) 1L), + c(foo = 1L) + ) + expect_equal( + viapply(c(bar = "foo"), function(x) 1L), + c(bar = 1L) + ) + expect_equal( + viapply(letters, function(x) match(x, letters)), + structure(seq_along(letters), names = letters) + ) + expect_snapshot(error = TRUE, { + viapply(letters, function(x) 1.0) + viapply(1:5, function(x) 1:2) + }) +}) + +test_that("vdapply", { + expect_equal( + vdapply(character(0), function(x) 1.0), + structure(double(), names = character()) + ) + expect_equal( + vdapply("foo", function(x) 1.0), + c(foo = 1.0) + ) + expect_equal( + vdapply(c(bar = "foo"), function(x) 1.0), + c(bar = 1.0) + ) + expect_equal( + vdapply(letters, function(x) match(x, letters) / 2), + structure(seq_along(letters) / 2, names = letters) + ) + expect_snapshot(error = TRUE, { + vdapply(letters, function(x) "boo") + vdapply(1:5, function(x) 1:2 / 2) + }) +}) + +test_that("str_trim", { + expect_equal( + str_trim(c( + "x", " x", "x ", " x ", " x ", "\nx\n", "\tx\t", + " \u00a0x\u00a0\n" + )), + rep("x", 8) + ) +}) + +test_that("get_minor_r_version", { + expect_equal(get_minor_r_version("4.3.2"), "4.3") + expect_equal(get_minor_r_version("4.3.0"), "4.3") + expect_equal(get_minor_r_version("3.6.3.2"), "3.6") + expect_equal(get_minor_r_version("4.3"), "4.3") +}) + +test_that("get_os", { + expect_true(get_os() %in% c("win", "mac", "unix", "unknown")) +}) + +test_that("user_cache_dir", { + withr::local_envvar(R_PKG_CACHE_DIR = tmp <- basename(tempfile())) + expect_equal(user_cache_dir(), tmp) + + withr::local_envvar(R_PKG_CACHE_DIR = NA_character_) + withr::local_envvar(R_USER_CACHE_DIR = tmp <- basename(tempfile())) + # file.path() expands the path on windows??? 
+ expect_equal(user_cache_dir("pak"), paste(tmp, "R", "pak", sep = "/")) + + withr::local_envvar(R_USER_CACHE_DIR = NA_character_) + mockery::stub(user_cache_dir, "get_os", "win") + withr::local_envvar(LOCALAPPDATA = "windir") + expect_equal( + user_cache_dir("pak"), + file_path("windir", "R", "Cache", "pak") + ) + + mockery::stub(user_cache_dir, "get_os", "mac") + expect_equal( + user_cache_dir("pak"), + path.expand( + file_path("~/Library/Caches", "org.R-project.R", "R", "pak") + ) + ) + + mockery::stub(user_cache_dir, "get_os", "unix") + withr::local_envvar(XDG_CACHE_HOME = "unixdir") + expect_equal( + user_cache_dir("pak"), + file_path("unixdir", "R", "pak") + ) + withr::local_envvar(XDG_CACHE_HOME = NA_character_) + expect_equal( + user_cache_dir("pak"), + path.expand(file_path("~/.cache", "R", "pak")) + ) + + mockery::stub(user_cache_dir, "get_os", "unknown") + mockery::stub(user_cache_dir, "tempdir", "tempdir") + expect_equal( + user_cache_dir("pak"), + file_path("tempdir", "r-pkg-cache", "pak") + ) +}) + +test_that("file_path", { + expect_equal( + file_path(c("foo", "bar", tmp <- basename(tempfile()))), + normalizePath(file.path("foo", "bar", tmp), mustWork = FALSE) + ) +}) + +test_that("win_path_local", { + withr::local_envvar(LOCALAPPDATA = "localappdatadir") + withr::local_envvar(USERPROFILE = "userdir") + expect_equal(win_path_local(), "localappdatadir") + + withr::local_envvar(LOCALAPPDATA = NA_character_) + expect_equal(win_path_local(), file.path("userdir", "AppData", "Local")) + + withr::local_envvar(USERPROFILE = NA_character_) + mockery::stub(win_path_local, "tempdir", "tempdir") + expect_equal(win_path_local(), file.path("tempdir", "r-pkg-cache")) +}) + +test_that("cat0", { + expect_snapshot({ + cat0(c("foo", "bar"), "foobar") + }) +}) + +test_that("mkdirp", { + tmp <- tempfile() + on.exit(unlink(tmp, recursive = TRUE), add = TRUE) + + dir <- file.path(tmp, "sub", "dir") + expect_silent(mkdirp(dir)) + expect_true(dir.exists(dir)) + + 
withr::local_dir(dir) + expect_snapshot( + mkdirp(c("foo", "bar"), "Created these") + ) + expect_true(dir.exists("foo")) + expect_true(dir.exists("bar")) +}) + +test_that("fix_macos_path_in_rstudio", { + # This is to restore PATH at the end of the test case + withr::local_path() + + get_path <- function() { + strsplit(Sys.getenv("PATH"), .Platform$path.sep)[[1]] + } + + mockery::stub(fix_macos_path_in_rstudio, "readLines", "extraaaa") + + withr::local_envvar(RSTUDIO = NA_character_) + fix_macos_path_in_rstudio() + expect_false("extraaaa" %in% get_path()) + + withr::local_envvar(RSTUDIO = "1") + mockery::stub(fix_macos_path_in_rstudio, "Sys.info", c(sysname = "Linux")) + fix_macos_path_in_rstudio() + expect_false("extraaaa" %in% get_path()) + + mockery::stub(fix_macos_path_in_rstudio, "Sys.info", c(sysname = "Darwin")) + mockery::stub(fix_macos_path_in_rstudio, "file.exists", FALSE) + fix_macos_path_in_rstudio() + expect_false("extraaaa" %in% get_path()) + + mockery::stub(fix_macos_path_in_rstudio, "file.exists", TRUE) + fix_macos_path_in_rstudio() + expect_true("extraaaa" %in% get_path()) +}) + +test_that("rimraf", { + tmp <- tempfile() + on.exit(unlink(tmp, recursive = TRUE), add = TRUE) + + mkdirp(file.path(tmp, "sub", "dir")) + expect_true(dir.exists(file.path(tmp, "sub", "dir"))) + + rimraf(file.path(tmp)) + expect_false(dir.exists(file.path(tmp, "sub", "dir"))) + expect_false(dir.exists(tmp)) +}) + +test_that("is_interactive", { + withr::local_options(rlib_interactive = TRUE) + expect_true(is_interactive()) + + withr::local_options(rlib_interactive = FALSE) + expect_false(is_interactive()) + + withr::local_options(rlib_interactive = NULL) + withr::local_options(knitr.in.progress = TRUE) + expect_false(is_interactive()) + + withr::local_options(knitr.in.progress = NULL) + withr::local_options(rstudio.notebook.executing = TRUE) + expect_false(is_interactive()) + + withr::local_options(rstudio.notebook.executing = NULL) + withr::local_envvar(TESTTHAT = "true") + 
expect_false(is_interactive()) + + withr::local_envvar(TESTTHAT = NULL) + mockery::stub(is_interactive, "interactive", FALSE) + expect_false(is_interactive()) + + mockery::stub(is_interactive, "interactive", TRUE) + expect_true(is_interactive()) +}) + +test_that("loaded_packages", { + pkgs <- loaded_packages(.Library) + expect_false("base" %in% pkgs) +}) + +test_that("lapply_with_names", { + expect_equal( + lapply_with_names(character(), identity), + structure(list(), names = character()) + ) + expect_equal( + lapply_with_names(letters, function(x) paste(x, x)), + structure(as.list(paste(letters, letters)), names = letters) + ) +}) + +test_that("na_omit", { + expect_equal(na_omit(integer()), integer()) + expect_equal(na_omit(letters), letters) + expect_equal(na_omit(c(1, NA, 2)), c(1, 2)) +}) + +test_that("base_packages", { + expect_snapshot( + base_packages() + ) +}) + +test_that("is_flag", { + bad <- list( + list(1), + 1L, + logical(), + c(TRUE, FALSE), + NA + ) + for (b in bad) expect_false(is_flag(b)) + + expect_true(is_flag(TRUE)) + expect_true(is_flag(FALSE)) + expect_true(is_flag(c(x = TRUE))) +}) + +test_that("is_string", { + bad <- list( + list(1), + 1L, + character(), + c("foo", "bar"), + NA_character_ + ) + for (b in bad) expect_false(is_string(b)) + + expect_true(is_string("foo")) + expect_true(is_string(c(x = "foo"))) +}) + +test_that("map_named", { + expect_equal( + map_named(c(a = 1, b = 2), list), + list(a = list("a", 1), b = list("b", 2)) + ) +}) + +test_that("cisort", { + vec <- c("abc", "ABD", "abE") + expect_equal(cisort(sample(vec)), vec) +}) -test_that("merge_new", { - expect_identical(merge_new("x", "y"), "y") - expect_identical(merge_new("x", "y", "replace"), "y") - expect_identical(merge_new("x", "y", "prepend"), c("y", "x")) - expect_identical(merge_new("x", "y", "append"), c("x", "y")) - expect_error(merge_new("x", "y", "foobar")) - - ## Some special values - expect_identical(merge_new("x", NULL), NULL) - expect_identical(merge_new(NULL, 
"x"), "x") - expect_identical(merge_new("x", NULL, "append"), "x") - expect_identical(merge_new(NULL, "x", "append"), "x") - expect_identical(merge_new("x", NULL, "prepend"), "x") - expect_identical(merge_new(NULL, "x", "prepend"), "x") +test_that("read_char", { + local <- withr::local_tempdir() + path <- file.path(local, "foo") + file.create(path) + expect_equal(read_char(path), "") + writeBin(charToRaw("foo\nbar\n\u00f0\nfoobar\n"), path) + expect_equal(read_char(path), "foo\nbar\n\u00f0\nfoobar\n") + expect_equal(Encoding(read_char(path)), "UTF-8") }) diff --git a/tools/build/fake/Dockerfile b/tools/build/fake/Dockerfile new file mode 100644 index 000000000..f981e08f0 --- /dev/null +++ b/tools/build/fake/Dockerfile @@ -0,0 +1,34 @@ +FROM rhub/r-minimal + +# webfakes + some optional dependencies + packages used in run.R +RUN installr -d -t "linux-headers curl-dev" \ + callr cli desc jsonlite pkgcache processx webfakes withr zip + +# for the git app +RUN apk add --no-cache git git-daemon + +COPY R /R +WORKDIR /R + +CMD ["R", "-s", "-f", "/R/run.R"] + +# CRAN +EXPOSE 3100 + +# BIOC +EXPOSE 3101 + +# PPM +EXPOSE 3102 + +# Name check app +EXPOSE 3103 + +# git +EXPOSE 3104 + +# GH +EXPOSE 3105 + +# CRAN2 +EXPOSE 3106 diff --git a/tools/build/fake/R/bioc-app.R b/tools/build/fake/R/bioc-app.R new file mode 100644 index 000000000..3be68c716 --- /dev/null +++ b/tools/build/fake/R/bioc-app.R @@ -0,0 +1,15 @@ +bioc_app <- function(...) { + asNamespace("pkgcache")$bioc_app(...) +} + +dcf <- function(...) { + asNamespace("pkgcache")$dcf(...) 
+} + +bioc_app_pkgs <- dcf(" + Package: Biobase + Version: 1.2.3 + Depends: R (>= 2.10), BiocGenerics(>= 0.27.1), utils + Imports: methods + Suggests: tools, tkWidgets, ALL, RUnit, golubEsets +") diff --git a/tools/build/fake/R/cran-app.R b/tools/build/fake/R/cran-app.R new file mode 100644 index 000000000..f96814edc --- /dev/null +++ b/tools/build/fake/R/cran-app.R @@ -0,0 +1,67 @@ +# ------------------------------------------------------------------------- +# Dummy CRAN app + +cran_app <- function(...) { + asNamespace("pkgcache")$cran_app(...) +} + +dcf <- function(...) { + asNamespace("pkgcache")$dcf(...) +} + +cran_app_pkgs <- dcf(" + Package: pkg1 + Version: 1.0.0 + + Package: pkg1 + Version: 0.9.0 + + Package: pkg1 + Version: 0.8.0 + + Package: pkg2 + Version: 1.0.0 + Depends: pkg1 + + Package: pkg3 + Version: 1.0.0 + Depends: pkg2 + + Package: pkg3 + Version: 0.9.9 + + Package: pkg4 + Version: 1.0.0 + Imports: pkg2 + Suggests: pkg3 + + Package: crayon + Version: 1.0.0 + + Package: needspak + Imports: pak + + Package: pak + + Package: futurama + Depends: R (>= 3000.0) + + Package: needsfuturama + Imports: futurama + + Package: dplyr + Imports: tibble + Suggests: testthat + + Package: tibble + + Package: testthat + + Package: curl + SystemRequirements: libcurl: libcurl-devel (rpm) or libcurl4-openssl-dev (deb). 
+") + +cran_app_pkgs2 <- dcf(" +Package: pkgx +Version: 1.0.0 +") diff --git a/tools/build/fake/R/fixtures/dplyr.json.gz b/tools/build/fake/R/fixtures/dplyr.json.gz new file mode 100644 index 000000000..bd37488b0 Binary files /dev/null and b/tools/build/fake/R/fixtures/dplyr.json.gz differ diff --git a/tools/build/fake/R/fixtures/git-repo.tar.gz b/tools/build/fake/R/fixtures/git-repo.tar.gz new file mode 100644 index 000000000..07b1be4f1 Binary files /dev/null and b/tools/build/fake/R/fixtures/git-repo.tar.gz differ diff --git a/tools/build/fake/R/fixtures/ppm-status.json b/tools/build/fake/R/fixtures/ppm-status.json new file mode 100644 index 000000000..283189c39 --- /dev/null +++ b/tools/build/fake/R/fixtures/ppm-status.json @@ -0,0 +1 @@ +{"version":"2022.11.2-18","build_date":"2022-11-18T20:02:59Z","metrics_enabled":true,"r_configured":true,"binaries_enabled":true,"display_ash":false,"custom_home":true,"ga_id":"UA-20375833-3","distros":[{"name":"centos7","os":"linux","binaryDisplay":"CentOS/RHEL 7","binaryURL":"centos7","display":"CentOS 7","distribution":"centos","release":"7","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"centos8","os":"linux","binaryDisplay":"CentOS/RHEL 8","binaryURL":"centos8","display":"CentOS 8","distribution":"centos","release":"8","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"rhel9","os":"linux","binaryDisplay":"Rocky Linux 9","binaryURL":"rhel9","display":"Rocky Linux 9","distribution":"rockylinux","release":"9","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"opensuse15","os":"linux","binaryDisplay":"OpenSUSE 15.1, SLES 15 SP1","binaryURL":"opensuse15","display":"OpenSUSE 15.1","distribution":"opensuse","release":"15","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"opensuse152","os":"linux","binaryDisplay":"OpenSUSE 15.2, SLES 15 SP2","binaryURL":"opensuse152","display":"OpenSUSE 
15.2","distribution":"opensuse","release":"15.2","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"opensuse153","os":"linux","binaryDisplay":"OpenSUSE 15.3, SLES 15 SP3","binaryURL":"opensuse153","display":"OpenSUSE 15.3","distribution":"opensuse","release":"15.3","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"opensuse154","os":"linux","binaryDisplay":"OpenSUSE 15.4, SLES 15 SP4","binaryURL":"opensuse154","display":"OpenSUSE 15.4","distribution":"opensuse","release":"15.4","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"opensuse42","os":"linux","binaryDisplay":"OpenSUSE 42.3, SLES 12 SP5","binaryURL":"opensuse42","display":"OpenSUSE 42.3","distribution":"opensuse","release":"42.3","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"rhel7","os":"linux","binaryDisplay":"CentOS/RHEL 7","binaryURL":"centos7","display":"Red Hat Enterprise Linux 7","distribution":"redhat","release":"7","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"rhel8","os":"linux","binaryDisplay":"RHEL 8","binaryURL":"centos8","display":"Red Hat Enterprise Linux 8","distribution":"redhat","release":"8","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"rhel9 (unused alias)","os":"linux","binaryDisplay":"RHEL 9","binaryURL":"rhel9","display":"Red Hat Enterprise Linux 9","distribution":"redhat","release":"9","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"sles12","os":"linux","binaryDisplay":"OpenSUSE 42.3, SLES 12 SP5","binaryURL":"opensuse42","display":"SLES 12 SP5","distribution":"sle","release":"12.3","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"sles15","os":"linux","binaryDisplay":"OpenSUSE 15.1, SLES 15 SP1","binaryURL":"opensuse15","display":"SLES 15 
SP1","distribution":"sle","release":"15","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"sles152","os":"linux","binaryDisplay":"OpenSUSE 15.2, SLES 15 SP2","binaryURL":"opensuse152","display":"SLES 15 SP2","distribution":"sle","release":"15.2","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"sles153","os":"linux","binaryDisplay":"OpenSUSE 15.3, SLES 15 SP3","binaryURL":"opensuse153","display":"SLES 15 SP3","distribution":"sle","release":"15.3","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"sles154","os":"linux","binaryDisplay":"OpenSUSE 15.4, SLES 15 SP4","binaryURL":"opensuse154","display":"SLES 15 SP4","distribution":"sle","release":"15.4","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"xenial","os":"linux","binaryDisplay":"Ubuntu 16.04 (Xenial)","binaryURL":"xenial","display":"Ubuntu 16.04 (Xenial)","distribution":"ubuntu","release":"16.04","sysReqs":true,"binaries":true,"hidden":true,"official_rspm":true},{"name":"bionic","os":"linux","binaryDisplay":"Ubuntu 18.04 (Bionic)","binaryURL":"bionic","display":"Ubuntu 18.04 (Bionic)","distribution":"ubuntu","release":"18.04","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"focal","os":"linux","binaryDisplay":"Ubuntu 20.04 (Focal)","binaryURL":"focal","display":"Ubuntu 20.04 (Focal)","distribution":"ubuntu","release":"20.04","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"jammy","os":"linux","binaryDisplay":"Ubuntu 22.04 (Jammy)","binaryURL":"jammy","display":"Ubuntu 22.04 (Jammy)","distribution":"ubuntu","release":"22.04","sysReqs":true,"binaries":true,"hidden":false,"official_rspm":true},{"name":"buster","os":"linux","binaryDisplay":"Debian 10 (Buster)","binaryURL":"buster","display":"Debian 10 
(Buster)","distribution":"debian","release":"10","sysReqs":true,"binaries":false,"hidden":true,"official_rspm":true},{"name":"bullseye","os":"linux","binaryDisplay":"Debian 11 (Bullseye)","binaryURL":"bullseye","display":"Debian 11 (Bullseye)","distribution":"debian","release":"11","sysReqs":true,"binaries":false,"hidden":true,"official_rspm":true},{"name":"windows","os":"windows","binaryDisplay":"","binaryURL":"","display":"Windows","distribution":"windows","release":"all","sysReqs":false,"binaries":true,"hidden":false,"official_rspm":true},{"name":"macOS","os":"macOS","binaryDisplay":"","binaryURL":"","display":"macOS","distribution":"macOS","release":"all","sysReqs":false,"binaries":false,"hidden":false,"official_rspm":true}],"cran_repo":"cran","bioc_versions":[{"bioc_version":"3.17","r_version":"4.3","cran_snapshot":"latest"},{"bioc_version":"3.16","r_version":"4.2","cran_snapshot":"latest"},{"bioc_version":"3.15","r_version":"4.2","cran_snapshot":"2022-11-02"},{"bioc_version":"3.14","r_version":"4.1","cran_snapshot":"2022-04-27"},{"bioc_version":"3.13","r_version":"4.1","cran_snapshot":"2021-10-27"},{"bioc_version":"3.12","r_version":"4.0","cran_snapshot":"2021-05-20"},{"bioc_version":"3.11","r_version":"4.0","cran_snapshot":"2020-10-28"},{"bioc_version":"3.10","r_version":"3.6","cran_snapshot":"2020-04-27"},{"bioc_version":"3.9","r_version":"3.6","cran_snapshot":"2019-10-28"},{"bioc_version":"3.8","r_version":"3.5","cran_snapshot":"2019-05-02"},{"bioc_version":"3.7","r_version":"3.5","cran_snapshot":"2018-10-29"},{"bioc_version":"3.6","r_version":"3.4","cran_snapshot":"2018-04-30"},{"bioc_version":"3.5","r_version":"3.4","cran_snapshot":"2017-10-30"},{"bioc_version":"3.4","r_version":"3.3","cran_snapshot":""},{"bioc_version":"3.3","r_version":"3.3","cran_snapshot":""},{"bioc_version":"3.2","r_version":"3.2","cran_snapshot":""},{"bioc_version":"3.1","r_version":"3.2","cran_snapshot":""}],"r_versions":["3.5","3.6","4.0","4.1","4.2"]} \ No newline at end of file 
diff --git a/tools/build/fake/R/fixtures/ppm-versions.json b/tools/build/fake/R/fixtures/ppm-versions.json new file mode 100644 index 000000000..b95480c75 --- /dev/null +++ b/tools/build/fake/R/fixtures/ppm-versions.json @@ -0,0 +1 @@ +[{"id":1194160,"created":"2021-02-05T14:33:45.896477Z","completed":"2021-02-05T14:33:47.981439Z","type":3,"tag":"Sync on 2021-02-05 14:33:45.896125592 +0000 UTC m=+5774001.199074308","source_id":1,"guid":"","packages_added":35,"packages_updated":null,"packages_archived":30,"packages_removed":2,"action":"Sync","published_from":"2021-02-04T00:00:00Z","published_to":"2021-02-05T00:00:00Z","result":0},{"id":1175516,"created":"2021-02-04T13:33:26.567859Z","completed":"2021-02-04T13:33:28.671843Z","type":3,"tag":"Sync on 2021-02-04 13:33:26.567142757 +0000 UTC m=+5683985.862935156","source_id":1,"guid":"","packages_added":63,"packages_updated":null,"packages_archived":49,"packages_removed":0,"action":"Sync","published_from":"2021-02-03T00:00:00Z","published_to":"2021-02-04T00:00:00Z","result":0},{"id":1160641,"created":"2021-02-03T17:33:24.555283Z","completed":"2021-02-03T17:33:26.618373Z","type":3,"tag":"Sync on 2021-02-03 17:33:24.55753815 +0000 UTC m=+5611983.853330561","source_id":1,"guid":"","packages_added":54,"packages_updated":null,"packages_archived":56,"packages_removed":0,"action":"Sync","published_from":"2021-02-02T00:00:00Z","published_to":"2021-02-03T00:00:00Z","result":0},{"id":1140568,"created":"2021-02-02T14:33:36.516377Z","completed":"2021-02-02T14:33:38.591104Z","type":3,"tag":"Sync on 2021-02-02 14:33:36.516160202 +0000 UTC m=+5514795.811952613","source_id":1,"guid":"","packages_added":42,"packages_updated":null,"packages_archived":32,"packages_removed":0,"action":"Sync","published_from":"2021-02-01T00:00:00Z","published_to":"2021-02-02T00:00:00Z","result":0},{"id":1123445,"created":"2021-02-01T15:33:29.046485Z","completed":"2021-02-01T15:33:31.122849Z","type":3,"tag":"Sync on 2021-02-01 15:33:29.046711806 +0000 UTC 
m=+5431988.342504215","source_id":1,"guid":"","packages_added":92,"packages_updated":null,"packages_archived":88,"packages_removed":0,"action":"Sync","published_from":"2021-01-29T00:00:00Z","published_to":"2021-02-01T00:00:00Z","result":0},{"id":1069075,"created":"2021-01-29T14:33:14.305268Z","completed":"2021-01-29T14:33:16.404439Z","type":3,"tag":"Sync on 2021-01-29 14:33:14.305163772 +0000 UTC m=+5169173.600956173","source_id":1,"guid":"","packages_added":38,"packages_updated":null,"packages_archived":30,"packages_removed":0,"action":"Sync","published_from":"2021-01-28T00:00:00Z","published_to":"2021-01-29T00:00:00Z","result":0},{"id":1053473,"created":"2021-01-28T17:33:27.979661Z","completed":"2021-01-28T17:33:30.065975Z","type":3,"tag":"Sync on 2021-01-28 17:33:27.979306008 +0000 UTC m=+5093587.275098427","source_id":1,"guid":"","packages_added":81,"packages_updated":null,"packages_archived":73,"packages_removed":0,"action":"Sync","published_from":"2021-01-27T00:00:00Z","published_to":"2021-01-28T00:00:00Z","result":0},{"id":1033374,"created":"2021-01-27T14:33:56.561496Z","completed":"2021-01-27T14:33:58.631111Z","type":3,"tag":"Sync on 2021-01-27 14:33:56.559083264 +0000 UTC m=+4996415.854875681","source_id":1,"guid":"","packages_added":36,"packages_updated":null,"packages_archived":37,"packages_removed":0,"action":"Sync","published_from":"2021-01-26T00:00:00Z","published_to":"2021-01-27T00:00:00Z","result":0},{"id":1014755,"created":"2021-01-26T13:33:16.575111Z","completed":"2021-01-26T13:33:18.634306Z","type":3,"tag":"Sync on 2021-01-26 13:33:16.57552076 +0000 UTC m=+4906375.871313183","source_id":1,"guid":"","packages_added":69,"packages_updated":null,"packages_archived":46,"packages_removed":2,"action":"Sync","published_from":"2021-01-25T00:00:00Z","published_to":"2021-01-26T00:00:00Z","result":0},{"id":997643,"created":"2021-01-25T14:33:53.993192Z","completed":"2021-01-25T14:33:56.066895Z","type":3,"tag":"Sync on 2021-01-25 14:33:53.993727544 +0000 UTC 
m=+4823613.289519947","source_id":1,"guid":"","packages_added":110,"packages_updated":null,"packages_archived":96,"packages_removed":3,"action":"Sync","published_from":"2021-01-22T00:00:00Z","published_to":"2021-01-25T00:00:00Z","result":0}] diff --git a/tools/build/fake/R/fixtures/r-versions.json b/tools/build/fake/R/fixtures/r-versions.json new file mode 100644 index 000000000..20355cb37 --- /dev/null +++ b/tools/build/fake/R/fixtures/r-versions.json @@ -0,0 +1 @@ +[{"version":"3.6.2","date":"2019-12-12T08:05:03.679160Z","nickname":"Dark and Stormy Night"},{"version":"3.6.3","date":"2020-02-29T08:05:16.744223Z","nickname":"Holding the Windsock"},{"version":"4.0.0","date":"2020-04-24T07:05:34.612930Z","nickname":"Arbor Day"},{"version":"4.0.1","date":"2020-06-06T07:05:16.469439Z","nickname":"See Things Now"},{"version":"4.0.2","date":"2020-06-22T07:05:19.236082Z","nickname":"Taking Off Again"},{"version":"4.0.3","date":"2020-10-10T07:05:24.661746Z","nickname":"Bunny-Wunnies Freak Out"}] diff --git a/tools/build/fake/R/gh-app.R b/tools/build/fake/R/gh-app.R new file mode 100644 index 000000000..d69f66c1d --- /dev/null +++ b/tools/build/fake/R/gh-app.R @@ -0,0 +1,563 @@ +# ------------------------------------------------------------------------- +# GH app + +mkdirp <- function(x) { + dir.create(x, showWarnings = FALSE, recursive = TRUE) +} + +str_starts_with <- function(x, pre) { + substring(x, 1, nchar(pre)) == pre +} + +gr_response_headers_graphql <- function(upd = NULL) { + list( + server = "GitHub.com", + `content-type` = "application/json; charset=utf-8", + `x-oauth-scopes` = "delete:packages, delete_repo, read:org, repo, workflow, write:packages", + `x-accepted-oauth-scopes` = "repo", + `x-github-media-type` = "github.v3; format=json", + `x-ratelimit-limit` = "5000", + `x-ratelimit-remaining` = "4998", + `x-ratelimit-reset` = as.integer(Sys.time() + as.difftime(1, units = "hours")), + `x-ratelimit-used` = "2", + `x-ratelimit-resource` = "graphql", + 
`access-control-expose-headers` = "ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, X-GitHub-Request-Id, Deprecation, Sunset", + `access-control-allow-origin` = "*", + `strict-transport-security` = "max-age=31536000; includeSubdomains; preload", + `x-frame-options` = "deny", + `x-content-type-options` = "nosniff", + `x-xss-protection` = "0", + `referrer-policy` = "origin-when-cross-origin, strict-origin-when-cross-origin", + `content-security-policy` = "default-src 'none'", + vary = "Accept-Encoding, Accept, X-Requested-With", + `x-github-request-id` = basename(tempfile()) + ) +} + +make_dummy_zip <- function(commit) { + mkdirp(tmp <- tempfile()) + old <- getwd() + on.exit(setwd(old), add = TRUE) + setwd(tmp) + root <- paste0(commit$repo, "-", commit$branch) + mkdirp(root) + setwd(root) + for (i in seq_along(commit$files)) { + nm <- names(commit$files)[[i]] + ct <- commit$files[[i]] + mkdirp(dirname(nm)) + if (is.raw(ct)) { + writeBin(ct, nm) + } else { + writeLines(ct, nm) + } + } + setwd(tmp) + zip::zip(paste0(root, ".zip"), root) + file.path(tmp, paste0(root, ".zip")) +} + +re_gh_auth <- function() { + paste0( + "^token (gh[pousr]_[A-Za-z0-9_]{36,251}|", + "[[:xdigit:]]{40})$" + ) +} + +process_repos <- function(repos) { + for (i in seq_along(repos$users)) { + u <- names(repos$users)[i] + repos$users[[i]]$user <- u + for (j in seq_along(repos$users[[i]]$repos)) { + r <- names(repos$users[[i]]$repos)[j] + repos$users[[i]]$repos[[j]]$user <- u + for (k in seq_along(repos$users[[i]]$repos[[j]]$commits)) { + repos$users[[i]]$repos[[j]]$commits[[k]]$user <- u + repos$users[[i]]$repos[[j]]$commits[[k]]$repo <- r + } + } + } + repos +} + +gh_fmt_desc <- function(dsc) { + if (is.null(dsc)) { + return(NA) + } else if (is.raw(dsc)) { + list( + isBinary = TRUE, + text = NA + ) + } 
else {
+    list(
+      isBinary = FALSE,
+      text = dsc
+    )
+  }
+}
+
+gh_app <- function(repos = NULL, log = interactive(), options = list()) {
+  app <- webfakes::new_app()
+
+  # Log requests by default
+  if (log) app$use("logger" = webfakes::mw_log())
+
+  # Parse JSON body, even if no content-type header is sent
+  app$use("json body parser" = webfakes::mw_json(
+    type = c(
+      "",
+      "application/json",
+      "application/json; charset=utf-8"
+    )
+  ))
+
+  # app$use("text body parser" = webfakes::mw_text(type = c("text/plain", "application/json")))
+  # app$use("multipart body parser" = webfakes::mw_multipart())
+  # app$use("URL encoded body parser" = webfakes::mw_urlencoded())
+
+  # Add etags by default
+  app$use("add etag" = webfakes::mw_etag())
+
+  # Add date by default
+  app$use("add date" = function(req, res) {
+    res$set_header("Date", as.character(Sys.time()))
+    "next"
+  })
+
+  app$locals$repos <- process_repos(repos)
+  app$locals$data <- list()
+
+  app$use(function(req, res) {
+    auth <- req$get_header("Authorization")
+    if (is.null(auth)) {
+      return("next")
+    }
+    if (!grepl(re_gh_auth(), auth)) {
+      res$set_status(401)
+      res$send_json(
+        auto_unbox = TRUE,
+        list(
+          message = "Bad credentials",
+          documentation_url = "https://docs.github.com/graphql"
+        )
+      )
+    } else {
+      req$.token <- sub("^token ", "", auth)
+      "next"
+    }
+  })
+
+  app$post("/404/graphql", function(req, res) {
+    res$send_status(404)
+  })
+
+  app$post("/graphql", function(req, res) {
+    re_ref <- paste0(
+      "owner:[ ]*\"(?<user>[^\"]+)\"", "(?s:.)*",
+      "name:[ ]*\"(?<repo>[^\"]+)\"", "(?s:.)*",
+      "description:[ ]*object[(]expression:[ ]*\"[^:]+:(?<path>[^\"]+)\"", "(?s:.)*",
+      "sha:[ ]*object[(]expression:[ ]*\"(?<ref>[^\"]+)\""
+    )
+
+    psd <- re_match(req$json$query, re_ref)
+    if (is.na(psd$.match)) {
+      return("next")
+    }
+
+    if (!psd$user %in% names(app$locals$repos$users)) {
+      send_user_not_found(res, psd)
+      return()
+    }
+    if (!psd$repo %in% names(app$locals$repos$users[[psd$user]]$repos)) {
+      send_repo_not_found(res, psd)
+      return()
+    }
+
+    commits <- app$locals$repos$users[[psd$user]]$repos[[psd$repo]]$commits
+    for (cmt in commits) {
+      if ((!is.null(cmt$tag) && cmt$tag == psd$ref) ||
+          (!is.null(cmt$branch) && cmt$branch == psd$ref) ||
+          str_starts_with(cmt$sha, psd$ref)) {
+        add_gh_headers(res)
+        dsc <- cmt$files[[psd$path]]
+        if (!is.null(cmt$token) &&
+            (is.null(req$.token) || req$.token != cmt$token)) {
+          send_repo_not_found(res, psd)
+          return()
+        }
+        res$send_json(
+          auto_unbox = TRUE,
+          list(data = list(repository = list(
+            description = gh_fmt_desc(dsc),
+            sha = list(oid = cmt$sha)
+          )))
+        )
+        return()
+      }
+    }
+
+    res$set_status(200)
+    res$send_json(
+      auto_unbox = TRUE,
+      list(data = list(repository = list(
+        description = NA,
+        sha = NA
+      )))
+    )
+  })
+
+  app$post("/graphql", function(req, res) {
+    re_pull <- paste0(
+      "owner:[ ]*\"(?<user>[^\"]+)\"", "(?s:.)*",
+      "name:[ ]*\"(?<repo>[^\"]+)\"", "(?s:.)*",
+      "pullRequest[(]number:[ ]*(?<pull>[0-9]+)[)]", "(?s:.)*",
+      "file[(]path:[ ]*\"(?<path>.*)\""
+    )
+
+    psd <- re_match(req$json$query, re_pull)
+    if (is.na(psd$.match)) {
+      return("next")
+    }
+
+    if (!psd$user %in% names(app$locals$repos$users)) {
+      send_user_not_found(res, psd)
+      return()
+    }
+    if (!psd$repo %in% names(app$locals$repos$users[[psd$user]]$repos)) {
+      send_repo_not_found(res, psd)
+      return()
+    }
+
+    commits <- app$locals$repos$users[[psd$user]]$repos[[psd$repo]]$commits
+    for (cmt in commits) {
+      if (!is.null(cmt$pull) && cmt$pull == psd$pull) {
+        add_gh_headers(res)
+        dsc <- cmt$files[[psd$path]]
+        res$send_json(
+          auto_unbox = TRUE,
+          list(data = list(repository = list(pullRequest = list(
+            headRefOid = cmt$sha,
+            headRef = list(target = list(file = list(object = gh_fmt_desc(dsc))))
+          ))))
+        )
+        return()
+      }
+    }
+
+    send_pull_not_found(res, psd)
+  })
+
+  # @*release
+  app$post("/graphql", function(req, res) {
+    re_release <- paste0(
+      "owner:[ ]*\"(?<user>[^\"]+)\"", "(?s:.)*",
+      "name:[ ]*\"(?<repo>[^\"]+)\"", "(?s:.)*",
+      "file[(]path:[ ]*\"(?<path>.*)\""
+    )
+
+    psd <-
re_match(req$json$query, re_release) + if (is.na(psd$.match)) { + return("next") + } + + commits <- app$locals$repos$users[[psd$user]]$repos[[psd$repo]]$commits + for (cmt in commits) { + if (isTRUE(cmt$latestRelease)) { + add_gh_headers(res) + dsc <- cmt$files[[psd$path]] + res$send_json( + auto_unbox = TRUE, + list(data = list(repository = list(latestRelease = list( + tagName = cmt$tagName, + tagCommit = list( + oid = cmt$sha, + file = list(object = gh_fmt_desc(dsc)) + ) + )))) + ) + return() + } + } + + send_no_releases(res, psd) + }) + + app$get("/repos/:user/:repo/zipball/:sha", function(req, res) { + if (!req$params$user %in% names(app$locals$repos$users)) { + send_user_not_found(res, req$params) + return() + } + if (!req$params$repo %in% names(app$locals$repos$users[[req$params$user]]$repos)) { + send_repo_not_found(res, req$params) + return() + } + + commits <- app$locals$repos$users[[req$params$user]]$repos[[req$params$repo]]$commits + shas <- vapply(commits, "[[", "", "sha") + if (!req$params$sha %in% shas) { + send_sha_not_found(res, req$params) + return() + } + + cmt <- commits[[which(shas == req$params$sha)]] + z <- make_dummy_zip(cmt) + res$send_file(z, root = "/") + }) + + app +} + +add_gh_headers <- function(res) { + headers <- gr_response_headers_graphql() + for (i in seq_along(headers)) { + res$set_header(names(headers)[i], headers[i]) + } +} + +send_user_not_found <- function(res, psd) { + res$set_status(200) + res$send_json( + auto_unbox = TRUE, + list( + data = list(repository = NA), + errors = list( + list( + type = "NOT_FOUND", + path = list("repository"), + locations = list( + list( + line = 2, + column = 3 + ) + ), + message = sprintf( + "Could not resolve to a Repository with the name '%s'.", + paste0(psd$user, "/", psd$repo) + ) + ) + ) + ) + ) +} + +send_repo_not_found <- function(res, psd) { + send_user_not_found(res, psd) +} + +send_ref_not_found <- function(res, psd) { + res$send_status(404) +} + +send_pull_not_found <- function(res, 
psd) { + res$set_status(200) + res$send_json( + auto_unbox = TRUE, + list( + data = list(repository = list(pullRequest = NA)), + errors = list( + list( + type = "NOT_FOUND", + path = list("repository", "pullRequest"), + locations = list( + list( + line = 3L, + column = 5L + ) + ), + message = sprintf( + "Could not resolve to a PullRequest with the number of %s.", + psd$pull + ) + ) + ) + ) + ) +} + +send_sha_not_found <- function(res, psd) { + # TODO + res$send_status(404) +} + +send_no_releases <- function(res, psd) { + res$set_status(200) + res$send_json( + auto_unbox = TRUE, + list( + data = list(repository = list(latestRelease = NA)) + ) + ) +} + +gh_app_desc <- function(pkg) { + sprintf("Package: %s\nVersion: 1.0.0\n", pkg) +} + +random_sha <- function() { + paste( + sample(c(0:9, letters[1:6]), 64, replace = TRUE), + collapse = "" + ) +} + +gh_app_repos <- list( + users = list( + "r-lib" = list( + repos = list( + pak = list( + commits = list( + list( + sha = "111ef906acb58fe406370f7bc0a72cac55dbbb231ea687494c25742ca521255a", + branch = "main", + tag = "HEAD", + files = list("DESCRIPTION" = gh_app_desc("pak"), NAMESPACE = "") + ), + list( + sha = "a503fe843f11c279864f29d58137f8de319d115b239ce48ccc15406306019480", + branch = "main", + tag = "v0.1.2", + files = list("DESCRIPTION" = gh_app_desc("pak"), NAMESPACE = "") + ), + list( + sha = "e65de1e9630dbfcaf1044718b742bf806486b107239ce48ccc15406306019480", + branch = "main", + files = list("DESCRIPTION" = gh_app_desc("pak"), NAMESPACE = "") + ), + list( + sha = "b001d6ddeab1589ad367b62baabbeeb2af3b0ebac2e61d239df660c1d63e3232", + branch = "somebranch", + pull = 90, + files = list("DESCRIPTION" = gh_app_desc("pak"), NAMESPACE = "") + ), + list( + sha = "b001d6ddeab1589ad367b62baabbeeb2af3b0ebac2e61d239df660c1d63e3232", + latestRelease = TRUE, + tagName = "v1.2.3", + files = list("DESCRIPTION" = gh_app_desc("pak"), NAMESPACE = "") + ) + ) + ), + bad = list( + commits = list( + list( + sha = 
"546d9eab84b002c35302dda3822560950c7528cfc9ef1b916cecd9dbef3cf6b6", + tag = "HEAD", + branch = "main", + files = list( + DESCRIPTION = "this is not\na good file\n", + "bin/DESCRIPTION" = charToRaw("\xf0\xb0\xa0") + ) + ), + list( + sha = "546d9eab84b002c35302dda3822560950c7528cfc9ef1b916cecd9dbef3cf6b6", + pull = 100, + branch = "other", + files = list(DESCRIPTION = "this is not\na good file\n") + ) + ) + ), + crayon = list( + commits = list( + list( + sha = "bdd9a1bcf062396790c341cf1dba563eb0277f2ca0a6d524bc3da98a9a6f2975", + tag = "HEAD", + branch = "main", + files = list(DESCRIPTION = gh_app_desc("crayon"), NAMESPACE = "") + ), + list( + sha = "b5221ab024605019800ddea474f7a0981a4d53f719f5af2b1af627b34e0760b2", + branch = "b5221ab024605019800ddea474f7a0981a4d53f719f5af2b1af627b34e0760b2", + files = list(DESCRIPTION = gh_app_desc("crayon"), NAMESPACE = "") + ), + list( + sha = "9d93692f8f7c1d6b2308d0c4aa83cdc2d99ec1fd0097cede1d9aa1301247cb01", + branch = "pr61", + pull = 79, + files = list(DESCRIPTION = gh_app_desc("crayon"), NAMESPACE = "") + ) + ) + ), + pkgconfig = list( + commits = list( + list( + sha = "c9be9cde5e91ad771d1b5150781e6e8d32a7be0e9ab227bdf45cb41ad513004c", + branch = "pr7", + pull = 7, + files = list(DESCRIPTION = gh_app_desc("pkgconfig"), NAMESPACE = "") + ) + ) + ) + ) + ), + "wesm" = list( + repos = list( + "feather" = list( + commits = list( + list( + sha = "ec40c1eae1ac83b86fc41bb2f5cd916152d19015649c3d209f2c08115dd993b1", + tag = "HEAD", + branch = "main", + files = list("R/DESCRIPTION" = gh_app_desc("feather"), NAMESPACE = "") + ) + ) + ) + ) + ), + "gaborcsardi" = list( + repos = list( + "secret-test" = list( + commits = list( + list( + sha = "599cc5d745d2079eddf1ff582b83d381e885cd30f33bafebbe83e73d010cfa93", + tag = "HEAD", + branch = "main", + token = "b9984750bea6a170081ca98255c3b43fe5fb0978", + files = list("DESCRIPTION" = gh_app_desc("secret"), NAMESPACE = "") + ) + ) + ), + "secret" = list( + commits = list( + list( + sha = 
"7f9fb08e26015e05529cd4d7fc2a7edbd88c783d456ff83a96dcc58ace1d3ea5", + tag = "HEAD", + branch = "x", + files = list("DESCRIPTION" = gh_app_desc("secret"), NAMESPACE = "") + ) + ) + ) + ) + ), + "tidyverse" = list( + repos = list( + "tidyverse.org" = list( + commits = list( + list( + sha = "d998eab68c66d862c31a6091f9e71200b13bb44ea754e3371d098dcaa20e51a4", + tag = "HEAD", + branch = "main", + files = list("foo" = "bar") + ) + ) + ) + ) + ), + "cran" = list( + repos = list( + "rJava" = list( + commits = list( + list( + sha = "dfb3b64b13343e07b2db038777d9dc2aba5d824c5eca8c891c87bd4fd38d7256", + tag = "HEAD", + branch = "master", + files = list( + DESCRIPTION = "Package: rJava\nVersion: 1.0-6\nSystemRequirements: Java JDK 1.2 or higher (for JRI/REngine JDK 1.4 or higher), GNU make\n", + NAMESPACE = "" + ) + ) + ) + ) + ) + ) + ) +) diff --git a/tools/build/fake/R/git-app.R b/tools/build/fake/R/git-app.R new file mode 100644 index 000000000..88e882d7b --- /dev/null +++ b/tools/build/fake/R/git-app.R @@ -0,0 +1,201 @@ +dir.create(git_root <- tempfile()) +untar("fixtures/git-repo.tar.gz", exdir = git_root) + +# TODO: allow restriting to dumb, v1, v2 protocol + +git_app <- function(git_root, + git_timeout = as.difftime(1, units = "mins"), + filter = TRUE, + cleanup = TRUE) { + + app <- webfakes::new_app() + app$locals$git_root <- git_root + app$locals$git_timeout <- as.double(git_timeout, units = "secs") * 1000 + app$locals$git_config <- tempfile() + + reg.finalizer(app, function(app0) unlink(app$locals$git_config), TRUE) + writeLines( + c( + "[uploadpack]", + paste0("\tallowFilter = ", if (isTRUE(filter)) "true" else "false") + ), + app$locals$git_config + ) + + if (cleanup) { + reg.finalizer( + app, + function(app) unlink(app$locals$git_root, recursive = TRUE), + TRUE + ) + } + + app$get(webfakes::new_regexp("^(?.*)$"), function(req, res) { + out <- tempfile() + err <- tempfile() + on.exit(unlink(c(out, err)), add = TRUE) + px <- processx::process$new( + "git", + 
"http-backend", + env = git_env_vars(req), + stdout = out, + stderr = err + ) + px$wait(req$app$locals$git_timeout) + parse_cgi_output(px, out, err, res) + }) + + app$post(webfakes::new_regexp("^(?.*)$"), function(req, res) { + tmp <- tempfile() + out <- tempfile() + err <- tempfile() + on.exit(unlink(c(out, err, tmp)), add = TRUE) + writeBin(req$.body, tmp) + px <- processx::process$new( + "git", + "http-backend", + env = git_env_vars(req), + stdin = tmp, + stdout = out, + stderr = err + ) + px$wait(req$app$locals$git_timeout) + parse_cgi_output(px, out, err, res) + }) + + app +} + +git_env_vars <- function(req) { + url <- parse_url(req$url) + c( + "current", + + # For git + GIT_CONFIG_GLOBAL = req$app$locals$git_config, + GIT_HTTP_EXPORT_ALL = "true", + GIT_PROJECT_ROOT = req$app$locals$git_root, + GIT_PROTOCOL = req$get_header("Git-Protocol") %||% "", + HTTP_GIT_PROTOCOL = req$get_header("Git-Protocol") %||% "", + + # general CGI + CONTENT_LENGTH = if (length(req$.body)) length(req$.body), + CONTENT_TYPE = req$get_header("content-type") %||% "", + GATEWAY_INTERFACE = "CGI/1.1", + PATH_INFO = req$path, + QUERY_STRING = req$query_string, + REMOTE_ADDR = req$remote_addr, + REMOTE_HOST = req$remote_addr, + REMOTE_USER = "anonymous", + REQUEST_METHOD = toupper(req$method), + SERVER_NAME = url$host, + SERVER_PORT = url$port, + SERVER_PROTOCOL = paste0("http/", req$http_version), + SERVER_SOFTWARE = "https://github.com/r-lib/webfakes" + ) +} + +parse_cgi_output <- function(px, out, err, res) { + if (px$is_alive() || px$get_exit_status() != 0) { + px$kill() + res$ + set_status(500)$ + send(paste0("Internal git error: ", err)) + } + + out <- read_bin(out) + err <- read_char(err) + + cgi_res <- split_cgi_output(out) + headers <- cgi_res$headers + + for (idx in seq_along(headers)) { + if (tolower(names(headers)[idx]) == "status") { + res$set_status(parse_status(headers[[idx]])) + } else { + res$set_header(names(headers)[idx], headers[[idx]]) + } + } + + if (! 
"status" %in% names(headers)) { + res$set_status(200L) + } + + res$send(cgi_res$body) +} + +split_cgi_output <- function(x) { + nlnl <- grepRaw("\r?\n\r?\n", x)[1] + if (is.na(nlnl)) { + stop("Invalid response from git cgi, no headers?") + } + + headers <- parse_headers(rawToChar(x[1:(nlnl - 1L)])) + + body <- x[nlnl:length(x)] + ndrop <- 1L + while (body[ndrop] != 0x0a) ndrop <- ndrop + 1L + ndrop <- ndrop + 1L + while (body[ndrop] != 0x0a) ndrop <- ndrop + 1L + body <- utils::tail(body, -ndrop) + + list(headers = headers, body = body) +} + +parse_status <- function(x) { + status <- as.integer(strsplit(x, " ", fixed = TRUE)[[1]][1]) + if (is.na(status)) { + stop("Invalid status from git cgi: ", x) + } +} + +read_bin <- function(path) { + readBin(path, "raw", file.info(path)$size) +} + +parse_headers <- function (txt) { + headers <- grep(":", parse_headers0(txt), fixed = TRUE, value = TRUE) + out <- lapply(headers, split_header) + names <- tolower(vapply(out, `[[`, character(1), 1)) + values <- lapply(lapply(out, `[[`, 2), trimws) + names(values) <- names + values +} + +parse_headers0 <- function (txt, multiple = FALSE) { + if (!length(txt)) + return(NULL) + if (is.raw(txt)) { + txt <- rawToChar(txt) + } + stopifnot(is.character(txt)) + if (length(txt) > 1) { + txt <- paste(txt, collapse = "\n") + } + sets <- strsplit(txt, "\\r\\n\\r\\n|\\n\\n|\\r\\r")[[1]] + headers <- strsplit(sets, "\\r\\n|\\n|\\r") + if (multiple) { + headers + } + else { + headers[[length(headers)]] + } +} + +split_header <- function(x) { + pos <- grepRaw(":", x, fixed = TRUE)[1] + if (is.na(pos)) { + stop("Invalid response header from git cgi: ", x) + } + c(substr(x, 1, pos - 1L), substr(x, pos + 1L, nchar(x))) +} + +parse_url <- function(url) { + re_url <- paste0( + "^(?[a-zA-Z0-9]+)://", + "(?:(?[^@/:]+)(?::(?[^@/]+))?@)?", + "(?[^/]+)", + "(?.*)$" # don't worry about query params here... 
+ ) + re_match(url, re_url)$groups +} diff --git a/tools/build/fake/R/name-check-app.R b/tools/build/fake/R/name-check-app.R new file mode 100644 index 000000000..132ed0aa4 --- /dev/null +++ b/tools/build/fake/R/name-check-app.R @@ -0,0 +1,136 @@ +# ------------------------------------------------------------------------- +# Name check app + +name_check_app <- function() { + `%||%` <- function(l, r) if (is.null(l)) r else l + + app <- webfakes::new_app() + + app$use(webfakes::mw_json()) + app$use(webfakes::mw_urlencoded()) + + app$get("/crandb", function(req, res) { + pkg <- sub("\"$", "", sub("^\"", "", req$query$key)) + if (pkg == "dbi") { + res$send_json(list( + total_rows = 20000, + offset = 14000, + rows = list(list(id = "DBI", key = "dbi", value = "DBI")) + ), auto_unbox = TRUE) + } else { + res$send_json(list( + total_rows = 20000, + offset = 14000, + rows = list() + )) + } + }) + + app$post("/wikipedia", function(req, res) { + titles <- strsplit(req$form$titles, "|", fixed = TRUE)[[1]] + Titles <- tools::toTitleCase(titles) + ret <- list(query = list( + normalized = list(list(from = titles, to = Titles)), + pages = list(`11178` = list( + pageid = 11178, + title = Titles, + extract = "The terms foobar (), foo, bar, and others are used ..." 
+ )) + )) + res$send_json(ret, auto_unbox = TRUE) + }) + + app$all(c("/echo", "/echo/define"), function(req, res) { + out <- list( + method = req$method, + query = req$query_string, + type = req$get_header("Content-Type") %||% NA_character_, + body = rawToChar(req$.body %||% raw()) + ) + res$send_json(out, auto_unbox = TRUE) + }) + + app$get("/sentiment", function(req, res) { + txt <- "abuse\t-3\nirony\t-1\nxo\t3\nxoxoxo\t4\n" + res$send(txt) + }) + + app$get("/bioc/a", function(req, res) { + res$send(paste0(collapse = "", c( + "hello nobody, this is httpd@ip-172-30-0-33 running gitolite3 v3.6.6-6-g7c8f0ab on git 2.28.0", + "", + " R \tpackages/a4", + " R \tpackages/a4Base", + " R \tpackages/a4Classif", + " R \tpackages/a4Core", + " R \tpackages/a4Preproc", + " R \tpackages/a4Reporting", + " R \tpackages/aCGH", + " R \tpackages/abseqR", + " R \tpackages/ag.db" + ), "\n")) + }) + + app$get("/bioc/A", function(req, res) { + res$send(paste0(collapse = "", c( + "hello nobody, this is httpd@ip-172-30-0-33 running gitolite3 v3.6.6-6-g7c8f0ab on git 2.28.0", + "", + " R \tpackages/ABAData", + " R \tpackages/ABAEnrichment", + " R \tpackages/ABSSeq", + " R \tpackages/AGDEX", + " R \tpackages/AHPathbankDbs", + " R \tpackages/AIMS", + " R \tpackages/ALDEx2", + " R \tpackages/ALL", + " R \tpackages/ALLMLL", + " R \tpackages/ALPS", + " R \tpackages/AMARETTO" + ), "\n")) + }) + + app$get("/biocann/src/contrib/PACKAGES.gz", function(req, res) { + tmp <- tempfile(fileext = ".gz") + on.exit(unlink(tmp), add = TRUE) + l <- c( + "Package: adme16cod.db", + "Version: 3.4.0", + "Depends: R (>= 2.7.0), methods, AnnotationDbi (>= 1.31.18),", + " org.Rn.eg.db (>= 3.2.1)", + "Imports: methods, AnnotationDbi", + "Suggests: annotate, RUnit", + "License: Artistic-2.0", + "MD5sum: 3902516a40a503302ef732143b2394b9", + "NeedsCompilation: no", + "", + "Package: ag.db", + "Version: 3.2.3", + "Depends: R (>= 2.7.0), methods, AnnotationDbi (>= 1.34.3),", + " org.At.tair.db (>= 3.3.0)", + "Imports: 
methods, AnnotationDbi", + "Suggests: DBI, annotate, RUnit", + "License: Artistic-2.0", + "MD5sum: e5913da38fe4487202306cacd885840d", + "NeedsCompilation: no", + "", + "Package: agcdf", + "Version: 2.18.0", + "Depends: utils", + "Imports: AnnotationDbi", + "License: LGPL", + "MD5sum: 5dd14bc6a6d2729f5e7b170105c78e48", + "NeedsCompilation: no" + ) + writeLines(l, con <- gzfile(tmp, open = "wb")) + close(con) + + # We don't use send_file, because of a webfakes bug on Windows + # with absolute paths. Webfakes prepends '/' to 'c:/...'. + blob <- readBin(tmp, what = "raw", n = 10000) + res$ + set_type("application/gzip")$ + send(blob) + }) + + app +} diff --git a/tools/build/fake/R/ppm-app.R b/tools/build/fake/R/ppm-app.R new file mode 100644 index 000000000..2288466bb --- /dev/null +++ b/tools/build/fake/R/ppm-app.R @@ -0,0 +1,35 @@ +ppm_app <- function() { + app <- webfakes::new_app() + + app$get("/ppmversions", function(req, res) { + res$send_json( + text = readLines("fixtures/ppm-versions.json") + ) + }) + + app$get("/ppmstatus", function(req, res) { + res$send_json( + text = readLines("fixtures/ppm-status.json") + ) + }) + + app$get("/rversions", function(req, res) { + res$send_json( + text = readLines("fixtures/r-versions.json") + ) + }) + + app$get("/crandb/:pkg", function(req, res) { + if (req$params$pkg == "dplyr") { + res$send_json( + text = readLines(gzfile("fixtures/dplyr.json.gz")) + ) + } else if (req$params$pkg == "bad") { + res$send_status(401) + } else { + res$send_status(404) + } + }) + + app +} diff --git a/tools/build/fake/R/run.R b/tools/build/fake/R/run.R new file mode 100644 index 000000000..d65373dda --- /dev/null +++ b/tools/build/fake/R/run.R @@ -0,0 +1,64 @@ +message("Starting up fake apps") + +env <- new.env() +apps <- list() + +source("cran-app.R", local = env, verbose = FALSE) +apps[["cran"]] <- webfakes::new_app_process( + env$cran_app(env$cran_app_pkgs), + port = as.integer(Sys.getenv("CRAN_APP_PORT", "3100")), + opts = 
webfakes::server_opts(num_threads = 4, interface = "0.0.0.0")
+)
+
+source("bioc-app.R", local = env, verbose = FALSE)
+apps[["bioc"]] <- webfakes::new_app_process(
+  env$bioc_app(env$bioc_app_pkgs),
+  port = as.integer(Sys.getenv("BIOC_APP_PORT", "3101")),
+  opts = webfakes::server_opts(num_threads = 4, interface = "0.0.0.0")
+)
+
+source("ppm-app.R", local = env, verbose = FALSE)
+apps[["ppm"]] <- webfakes::local_app_process(
+  env$ppm_app(),
+  port = as.integer(Sys.getenv("PPM_APP_PORT", "3102")),
+  opts = webfakes::server_opts(num_threads = 3, interface = "0.0.0.0")
+)
+
+source("name-check-app.R", local = env, verbose = FALSE)
+apps[["name-check"]] <- webfakes::local_app_process(
+  env$name_check_app(),
+  port = as.integer(Sys.getenv("NAME_CHECK_APP_PORT", "3103")),
+  opts = webfakes::server_opts(num_threads = 3, interface = "0.0.0.0")
+)
+
+source("git-app.R", local = env, verbose = FALSE)
+# NOTE(review): git-app.R just defined its own git_app() in `env`; confirm
+# that webfakes::git_app (and not env$git_app) is really intended here.
+apps[["git"]] <- webfakes::local_app_process(
+  webfakes::git_app(env$git_root),
+  port = as.integer(Sys.getenv("GIT_APP_PORT", "3104")),
+  opts = webfakes::server_opts(num_threads = 3, interface = "0.0.0.0")
+)
+
+source("gh-app.R", local = env, verbose = FALSE)
+apps[["gh"]] <- webfakes::local_app_process(
+  env$gh_app(env$gh_app_repos),
+  port = as.integer(Sys.getenv("GH_APP_PORT", "3105")),
+  opts = webfakes::server_opts(num_threads = 3, interface = "0.0.0.0")
+)
+
+apps[["cran2"]] <- webfakes::new_app_process(
+  env$cran_app(env$cran_app_pkgs2),
+  port = as.integer(Sys.getenv("CRAN2_APP_PORT", "3106")),
+  opts = webfakes::server_opts(num_threads = 4, interface = "0.0.0.0")
+)
+
+message("CRAN app: ", apps$cran$url())
+message("BIOC app: ", apps$bioc$url())
+message("PPM app: ", apps$ppm$url())
+message("Name check app: ", apps[["name-check"]]$url())
+message("GIT app: ", apps$git$url())
+message("GH app: ", apps$gh$url())
+message("CRAN2 app: ", apps$cran2$url())
+
+while (TRUE) {
+  Sys.sleep(10000)
+}