Apply styler::style_pkg(scope = "tokens")
florisvdh committed Nov 20, 2023
1 parent ac6721f commit 6a3b0ee
Showing 9 changed files with 81 additions and 61 deletions.
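For context, a diff like this one is normally generated rather than hand-written: styler is run over the package sources and the result is committed. A minimal sketch of that workflow (not part of the commit itself; it assumes the styler package is installed):

# Restyle every R source file of the package, run from the package root.
# scope = "tokens" is styler's most invasive scope: on top of spacing,
# indentation and line-break rules it also rewrites tokens themselves,
# which is what this diff shows: '...' quotes become "...", bare function
# names in pipes gain explicit parentheses (distinct %>% ... becomes
# distinct() %>% ...), and single-line if/else branches are wrapped in braces.
library(styler)
style_pkg(scope = "tokens")
# Review the result (e.g. with git diff), rerun the package checks, then commit.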
R/GRTSmh.R (14 changes: 8 additions & 6 deletions)
@@ -61,7 +61,7 @@ convert_dec_to_base4frac <-
collapse = ""
)
},
- '0'
+ "0"
)
) / 10^13
)
@@ -149,9 +149,11 @@ convert_dec_to_base4frac <-
#' # vector, level 5:
#' convert_base4frac_to_dec(c(NA, 0.1010101010101), level = 5)
#' # same vector, all sensible levels computed:
- #' sapply(0:12, function(i) convert_base4frac_to_dec(c(NA, 0.1010101010101),
- #' level = i
- #' ))
+ #' sapply(0:12, function(i) {
+ #' convert_base4frac_to_dec(c(NA, 0.1010101010101),
+ #' level = i
+ #' )
+ #' })
#' options(oldoption)
#'
#' @export
@@ -175,15 +177,15 @@ convert_base4frac_to_dec <-
a <- x * 10^level2
a <- round(a - floor(a), 13 - level2)
a <- a %>%
- as.character %>%
+ as.character() %>%
str_sub(start = 3) %>%
str_pad(
width = 13 - level2,
side = "right",
pad = "0"
) %>%
str_split("", simplify = TRUE) %>%
- as.numeric
+ as.numeric()
t(a) %*% multipliers
})
}
R/datawrangling.R (20 changes: 14 additions & 6 deletions)
@@ -140,7 +140,7 @@ expand_types <- function(x,
} else {
x %>%
nest(data = -!!(group_vars(x))) %>%
- ungroup %>%
+ ungroup() %>%
mutate(newdata = map(.data$data,
expand_types_plain,
type_var = type_var,
@@ -221,7 +221,9 @@ expand_types_plain <- function(x,
by = c("type" = "orig_abcd")
) %>%
group_by(.data$main_type) %>%
- summarise(add = if (strict) all(!is.na(.data$present)) else {
+ summarise(add = if (strict) {
+ all(!is.na(.data$present))
+ } else {
any(!is.na(.data$present))
}) %>%
filter(.data$add) %>%
@@ -264,9 +266,11 @@ expand_types_plain <- function(x,
factor(.data$main_type_abcd,
levels = levels(.data$orig_abcd)
)
- } else .data$main_type_abcd) %>%
+ } else {
+ .data$main_type_abcd
+ }) %>%
select(-.data$main_type_abcd) %>%
- distinct %>%
+ distinct() %>%
set_colnames(gsub("orig_abcd", type_var, colnames(.))) %>%
bind_rows(x_expanded, .)
)
@@ -327,7 +331,9 @@ convertdf_enc <- function(x,
is_chfact <- function(vec) {
if (is.factor(vec)) {
is.character(levels(vec))
- } else FALSE
+ } else {
+ FALSE
+ }
}

conv_levels <- function(fact, from, to, sub) {
@@ -359,6 +365,8 @@ convertdf_enc <- function(x,
to = to,
sub = sub
))
- } else .
+ } else {
+ .
+ }
}
}
R/filemanagement.R (18 changes: 10 additions & 8 deletions)
@@ -146,7 +146,7 @@ download_zenodo <- function(doi,
record <- str_remove(doi, fixed("10.5281/zenodo."))

# Retrieve file name by records call
- base_url <- 'https://zenodo.org/api/records/'
+ base_url <- "https://zenodo.org/api/records/"
req <- curl::curl_fetch_memory(paste0(base_url, record))
content <- jsonlite::fromJSON(rawToChar(req$content))

@@ -208,12 +208,14 @@ download_zenodo <- function(doi,
md5 <- unname(tools::md5sum(destfile))
zenodo_md5 <- str_split(file_md5[i], ":")[[1]][2]
if (identical(md5, zenodo_md5)) {
- if (!quiet) message(
- filename,
- " was downloaded and its integrity verified (md5sum: ",
- md5,
- ")"
- )
+ if (!quiet) {
+ message(
+ filename,
+ " was downloaded and its integrity verified (md5sum: ",
+ md5,
+ ")"
+ )
+ }
} else {
warning(
"Incorrect download! md5sum ",
@@ -255,7 +257,7 @@ human_filesize <- function(x) {
assert_that(all(x %% 1 == 0 & x >= 0))
magnitude <-
log(x, base = 1024) %>%
- floor %>%
+ floor() %>%
pmin(8)
unit <- factor(magnitude,
levels = 0:8,
R/read_ecoregions.R (2 changes: 1 addition & 1 deletion)
@@ -58,7 +58,7 @@ read_ecoregions <-

er_levels <-
ecoregions %>%
- st_drop_geometry %>%
+ st_drop_geometry() %>%
select(-.data$district_name)

ecoregions <-
R/read_habitatdata.R (22 changes: 13 additions & 9 deletions)
@@ -688,7 +688,9 @@ read_watersurfaces <-
{
if (version == "watersurfaces_v1.2") {
rename(., water_level_management = .data$PEILBEHEER)
- } else .
+ } else {
+ .
+ }
} %>%
select(
polygon_id = .data$WVLC,
@@ -752,7 +754,9 @@ read_watersurfaces <-
watersurfaces <-
watersurfaces %>%
{
- if (version != "watersurfaces_v1.2") . else {
+ if (version != "watersurfaces_v1.2") {
+ .
+ } else {
mutate(., area_name = ifelse(.data$area_name == "<Null>",
NA,
.data$area_name
@@ -1368,8 +1372,8 @@ read_habitatmap_terr <-
#' hs2 <- read_habitatstreams(source_text = TRUE)
#' hs2
#' all.equal(
- #' hs %>% st_drop_geometry,
- #' hs2$lines %>% st_drop_geometry
+ #' hs %>% st_drop_geometry(),
+ #' hs2$lines %>% st_drop_geometry()
#' )
#' }
#'
@@ -1435,7 +1439,7 @@ read_habitatstreams <-
type = "3260" %>%
factor(levels = read_types() %>%
.$type %>%
- levels)
+ levels())
) %>%
select(
.data$river_name,
@@ -1446,15 +1450,15 @@ read_habitatstreams <-
if (source_text) {
sources <-
habitatstreams %>%
- st_drop_geometry %>%
+ st_drop_geometry() %>%
distinct(
source_id = .data$BRON,
source_text = .data$OMSCHR
) %>%
mutate(
source_id = factor(.data$source_id,
levels = lines %>% .$source_id %>%
- levels
+ levels()
),
source_text = fct_reorder(
.data$source_text,
@@ -1609,7 +1613,7 @@ read_habitatsprings <-
typelevels <-
read_types() %>%
.$type %>%
- levels
+ levels()

habitatsprings <-
read_sf(file) %>%
@@ -1858,7 +1862,7 @@ read_habitatquarries <-
typelevels <-
read_types() %>%
.$type %>%
- levels
+ levels()

habitatquarries <-
suppressWarnings(
R/read_soilmap.R (16 changes: 10 additions & 6 deletions)
@@ -233,10 +233,10 @@
#' soilmap_simple
#' soilmap_simple %>%
#' filter(!is.na(bsm_mo_substr)) %>%
- #' glimpse
+ #' glimpse()
#' soilmap_simple %>%
#' filter(bsm_converted) %>%
- #' glimpse
+ #' glimpse()
#' }
#'
#' @importFrom assertthat
@@ -448,14 +448,14 @@ read_soilmap <-
keys <- list()
soilmap_df <-
soilmap %>%
- st_drop_geometry
+ st_drop_geometry()
for (i in keyvars) {
temp_df <-
soilmap_df %>%
select(matches(str_c(i, ".*"))) %>%
select(1:2) %>%
filter_at(1, function(x) !is.na(x)) %>%
- distinct
+ distinct()
keys[[i]] <-
setNames(
temp_df %>% pull(2),
@@ -586,7 +588,9 @@ read_soilmap <-
soilmap <-
soilmap %>%
{
- if (standardize_coastalplain) . else {
+ if (standardize_coastalplain) {
+ .
+ } else {
mutate(.,
bsm_converted = NA
)
@@ -614,7 +616,9 @@ read_soilmap <-
if (explan) . else select(., -matches("_explan"))
} %>%
{
- if (standardize_coastalplain) . else {
+ if (standardize_coastalplain) {
+ .
+ } else {
select(., -.data$bsm_converted)
}
}
R/read_textdata.R (28 changes: 14 additions & 14 deletions)
@@ -71,7 +71,7 @@ read_namelist <-
attr(result, "source") <- NULL

result %>%
- as_tibble
+ as_tibble()
}


@@ -102,7 +102,7 @@ pkgdatasource_path <-
system.file(str_c(file, extension),
package = "n2khab"
) %>%
- dirname
+ dirname()
}


@@ -303,7 +303,7 @@ read_types <-

suppressMessages(suppressWarnings({
type_levels <-
- tibble(codelevel = types_base$type %>% levels) %>%
+ tibble(codelevel = types_base$type %>% levels()) %>%
left_join(namelist,
by = c("codelevel" = "code")
) %>%
@@ -313,7 +313,7 @@
)

typeclass_levels <-
- tibble(codelevel = types_base$typeclass %>% levels) %>%
+ tibble(codelevel = types_base$typeclass %>% levels()) %>%
left_join(namelist %>% select(-.data$shortname),
by = c("codelevel" = "code")
) %>%
@@ -328,7 +328,7 @@
mutate(
type = factor(.data$type,
levels = types_base$type %>%
- levels
+ levels()
),
type_name =
.data$type %>%
@@ -416,7 +416,7 @@ read_types <-
9, 22:23,
10, 24:25
) %>%
- as_tibble
+ as_tibble()
}))
}

@@ -574,7 +574,7 @@ read_env_pressures <-
ep_abbrev = .data$shortname
) %>%
mutate(ep_code = .data$ep_code %>%
- factor(levels = env_pressures_base$ep_code %>% levels))
+ factor(levels = env_pressures_base$ep_code %>% levels()))
)

ep_levels <-
@@ -587,7 +587,7 @@
arrange(.data$ep_code)

ep_class_levels <-
- tibble(codelevel = env_pressures_base$ep_class %>% levels) %>%
+ tibble(codelevel = env_pressures_base$ep_class %>% levels()) %>%
left_join(namelist %>% select(-.data$shortname),
by = c("codelevel" = "code")
) %>%
@@ -615,7 +615,7 @@ read_env_pressures <-
remarks = .data$shortname
) %>%
mutate(ep_code = .data$ep_code %>%
- factor(levels = env_pressures_base$ep_code %>% levels)) %>%
+ factor(levels = env_pressures_base$ep_code %>% levels())) %>%
select(
.data$ep_code,
.data$ep_abbrev,
@@ -625,7 +625,7 @@ read_env_pressures <-
.data$explanation,
.data$remarks
) %>%
- as_tibble
+ as_tibble()
}


@@ -834,7 +834,7 @@ read_schemes <-
.data$notes,
contains("tag")
) %>%
- as_tibble
+ as_tibble()
)
}

@@ -1063,9 +1063,9 @@ read_scheme_types <- function(path = pkgdatasource_path("textdata/scheme_types",
levels =
read_vc(file = file, root = path) %>%
pull(.data$type) %>%
- levels
+ levels()
)) %>%
- as_tibble
+ as_tibble()
} else {
scheme_types %>%
mutate(
@@ -1077,6 +1077,6 @@ read_scheme_types <- function(path = pkgdatasource_path("textdata/scheme_types",
codelist = namelist
)
) %>%
- as_tibble
+ as_tibble()
}
}