Commit 1ead814: gcp class
wlandau-lilly committed Aug 25, 2023
1 parent: d7180c5
Showing 6 changed files with 154 additions and 57 deletions.
24 changes: 10 additions & 14 deletions R/class_aws.R
@@ -139,8 +139,8 @@ store_read_object.tar_aws <- function(store) {
  endpoint = store_aws_endpoint(path),
  version = store_aws_version(path),
  args = store$resources$aws$args,
- seconds_interval = store$resources$network$seconds_interval %|||% 0L,
- seconds_timeout = store$resources$network$seconds_timeout %|||% 0L,
+ seconds_interval = store$resources$network$seconds_interval %|||% 1L,
+ seconds_timeout = store$resources$network$seconds_timeout %|||% 60L,
  max_tries = store$resources$network$max_tries %|||% 5L,
  verbose = store$resources$network$verbose %|||% TRUE
  )
@@ -157,8 +157,8 @@ store_exist_object.tar_aws <- function(store, name = NULL) {
  endpoint = store_aws_endpoint(path),
  version = store_aws_version(path),
  args = store$resources$aws$args,
- seconds_interval = store$resources$network$seconds_interval %|||% 0L,
- seconds_timeout = store$resources$network$seconds_timeout %|||% 0L,
+ seconds_interval = store$resources$network$seconds_interval %|||% 1L,
+ seconds_timeout = store$resources$network$seconds_timeout %|||% 60L,
  max_tries = store$resources$network$max_tries %|||% 5L,
  verbose = store$resources$network$verbose %|||% TRUE
  )
@@ -188,8 +188,8 @@ store_delete_object.tar_aws <- function(store, name = NULL) {
  endpoint = endpoint,
  version = version,
  args = store$resources$aws$args,
- seconds_interval = store$resources$network$seconds_interval %|||% 0L,
- seconds_timeout = store$resources$network$seconds_timeout %|||% 0L,
+ seconds_interval = store$resources$network$seconds_interval %|||% 1L,
+ seconds_timeout = store$resources$network$seconds_timeout %|||% 60L,
  max_tries = store$resources$network$max_tries %|||% 5L,
  verbose = store$resources$network$verbose %|||% TRUE
  ),
@@ -208,10 +208,6 @@ store_upload_object.tar_aws <- function(store) {
  store_upload_object_aws <- function(store) {
  key <- store_aws_key(store$file$path)
  bucket <- store_aws_bucket(store$file$path)
- seconds_interval <- store$resources$network$seconds_interval %|||% 1
- seconds_timeout <- store$resources$network$seconds_timeout %|||% 30
- max_tries <- store$resources$network$max_tries %|||% Inf
- verbose <- store$resources$network$verbose %|||% TRUE
  head <- if_any(
  file_exists_stage(store$file),
  aws_s3_upload(
@@ -223,8 +219,8 @@ store_upload_object_aws <- function(store) {
  metadata = list("targets-hash" = store$file$hash),
  part_size = store$resources$aws$part_size %|||% (5 * (2 ^ 20)),
  args = store$resources$aws$args,
- seconds_interval = store$resources$network$seconds_interval %|||% 0L,
- seconds_timeout = store$resources$network$seconds_timeout %|||% 0L,
+ seconds_interval = store$resources$network$seconds_interval %|||% 1L,
+ seconds_timeout = store$resources$network$seconds_timeout %|||% 60L,
  max_tries = store$resources$network$max_tries %|||% 5L,
  verbose = store$resources$network$verbose %|||% TRUE
  ),
@@ -265,8 +261,8 @@ store_aws_hash <- function(store) {
  endpoint = store_aws_endpoint(path),
  version = store_aws_version(path),
  args = store$resources$aws$args,
- seconds_interval = store$resources$network$seconds_interval %|||% 0L,
- seconds_timeout = store$resources$network$seconds_timeout %|||% 0L,
+ seconds_interval = store$resources$network$seconds_interval %|||% 1L,
+ seconds_timeout = store$resources$network$seconds_timeout %|||% 60L,
  max_tries = store$resources$network$max_tries %|||% 5L,
  verbose = store$resources$network$verbose %|||% TRUE
  )
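Note on the new defaults in R/class_aws.R: each `%|||%` above supplies a fallback when the corresponding network resource is unset, so retries now wait 1 second between attempts with a 60-second timeout instead of 0/0. A hedged sketch of how a user could override these fallbacks, assuming tar_resources_network() exposes these fields in this version of targets (values are placeholders):

library(targets)
tar_option_set(
  resources = tar_resources(
    network = tar_resources_network(
      seconds_interval = 2,   # overrides the %|||% 1L fallback above
      seconds_timeout = 120,  # overrides the %|||% 60L fallback above
      max_tries = 10L,
      verbose = FALSE
    )
  )
)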
4 changes: 2 additions & 2 deletions R/class_aws_file.R
@@ -66,8 +66,8 @@ store_read_object.tar_aws_file <- function(store) {
  region = store_aws_region(path),
  version = store_aws_version(path),
  args = store$resources$aws$args,
- seconds_interval = store$resources$network$seconds_interval %|||% 0L,
- seconds_timeout = store$resources$network$seconds_timeout %|||% 0L,
+ seconds_interval = store$resources$network$seconds_interval %|||% 1L,
+ seconds_timeout = store$resources$network$seconds_timeout %|||% 60L,
  max_tries = store$resources$network$max_tries %|||% 5L,
  verbose = store$resources$network$verbose %|||% TRUE
  )
31 changes: 18 additions & 13 deletions R/class_database.R
@@ -8,33 +8,35 @@ database_init <- function(
  resources = tar_options$get_resources()
  ) {
  memory <- memory_init()
+ key <- file.path(resources[[repository]]$prefix, subkey)
  switch(
  repository,
  local = database_local_new(
  memory = memory,
  path = path,
- subkey = subkey,
+ key = basename(path),
  header = header,
  list_columns = list_columns,
- list_column_modes = list_column_modes
+ list_column_modes = list_column_modes,
+ resources = resources
  ),
  aws = database_aws_new(
  memory = memory,
  path = path,
- subkey = subkey,
+ key = key,
  header = header,
  list_columns = list_columns,
  list_column_modes = list_column_modes,
- resources = resources$aws
+ resources = resources
  ),
  gcp = database_gcp_new(
  memory = memory,
  path = path,
- subkey = subkey,
+ key = key,
  header = header,
  list_columns = list_columns,
  list_column_modes = list_column_modes,
- resources = resources$gcp
+ resources = resources
  ),
  default = tar_throw_validate(
  "unsupported repository \"",
@@ -52,26 +54,29 @@ database_class <- R6::R6Class(
  public = list(
  memory = NULL,
  path = NULL,
- subkey = NULL,
+ key = NULL,
  header = NULL,
  list_columns = NULL,
  list_column_modes = NULL,
+ resources = NULL,
  queue = NULL,
  initialize = function(
  memory = NULL,
  path = NULL,
- subkey = NULL,
+ key = NULL,
  header = NULL,
  list_columns = NULL,
  list_column_modes = NULL,
+ resources = NULL,
  queue = NULL
  ) {
  self$memory <- memory
  self$path <- path
- self$subkey <- subkey
+ self$key <- key
  self$header <- header
  self$list_columns <- list_columns
  self$list_column_modes <- list_column_modes
+ self$resources <- resources
  self$queue <- queue
  },
  get_row = function(name) {
@@ -321,10 +326,10 @@ database_class <- R6::R6Class(
  tar_assert_scalar(self$path)
  tar_assert_none_na(self$path)
  tar_assert_nzchar(self$path)
- tar_assert_chr(self$subkey)
- tar_assert_scalar(self$subkey)
- tar_assert_none_na(self$subkey)
- tar_assert_nzchar(self$subkey)
+ tar_assert_chr(self$key)
+ tar_assert_scalar(self$key)
+ tar_assert_none_na(self$key)
+ tar_assert_nzchar(self$key)
  tar_assert_chr(self$header)
  tar_assert_chr(self$list_columns)
  }
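For reference, the new key plumbing in database_init() works roughly as sketched below: the local class keys on the file name, while the cloud classes key on a bucket prefix plus the metadata subkey. All values here are hypothetical and for illustration only.

# Sketch of the key computed in database_init() above.
resources <- list(aws = list(prefix = "my-pipeline/_targets"))  # stand-in for tar_option_get("resources")
repository <- "aws"
subkey <- "meta/meta"
file.path(resources[[repository]]$prefix, subkey)  # "my-pipeline/_targets/meta/meta" (cloud key)
basename("_targets/meta/meta")                     # "meta" (key used by the local class)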
65 changes: 40 additions & 25 deletions R/class_database_aws.R
@@ -1,7 +1,7 @@
  database_aws_new <- function(
  memory = NULL,
  path = NULL,
- subkey = NULL,
+ key = NULL,
  header = NULL,
  list_columns = NULL,
  list_column_modes = NULL,
@@ -11,7 +11,7 @@ database_aws_new <- function(
  database_aws_class$new(
  memory = memory,
  path = path,
- subkey = subkey,
+ key = key,
  header = header,
  list_columns = list_columns,
  list_column_modes = list_column_modes,
@@ -27,28 +27,6 @@ database_aws_class <- R6::R6Class(
  portable = FALSE,
  cloneable = FALSE,
  public = list(
- resources = NULL,
- initialize = function(
- memory = NULL,
- path = NULL,
- subkey = NULL,
- header = NULL,
- list_columns = NULL,
- list_column_modes = NULL,
- queue = NULL,
- resources = NULL
- ) {
- super$initialize(
- memory = memory,
- path = path,
- subkey = subkey,
- header = header,
- list_columns = list_columns,
- list_column_modes = list_column_modes,
- queue = queue
- )
- self$resources <- resources
- },
  validate = function() {
  super$validate()
  tar_assert_inherits(
@@ -60,6 +38,43 @@
  )
  )
  resources_validate(self$resources)
- }
+ },
+ download = function() {
+ aws <- self$resources$aws
+ network <- self$resources$network
+ file <- file_init(path = path)
+ file_ensure_hash(file)
+ aws_s3_download(
+ file = self$path,
+ key = self$key,
+ bucket = aws$bucket,
+ region = aws$region,
+ endpoint = aws$endpoint,
+ args = aws$args,
+ seconds_interval = network$seconds_interval %|||% 1,
+ seconds_timeout = network$seconds_timeout %|||% 60,
+ max_tries = network$max_tries %|||% 5L,
+ verbose = network$verbose %|||% TRUE
+ )
+ invisible()
+ },
+ upload = function() {
+ aws <- self$resources$aws
+ network <- self$resources$network
+ aws_s3_upload(
+ file = self$path,
+ key = self$key,
+ bucket = aws$bucket,
+ region = aws$region,
+ endpoint = aws$endpoint,
+ part_size = aws$part_size,
+ args = aws$args,
+ seconds_interval = network$seconds_interval %|||% 1,
+ seconds_timeout = network$seconds_timeout %|||% 60,
+ max_tries = network$max_tries %|||% 5L,
+ verbose = network$verbose %|||% TRUE
+ )
+ invisible()
+ },
  )
  )
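With the new download() and upload() methods above, the AWS metadata database can sync self$path with the S3 object at self$key, and validate() appears to require AWS resources to be set. A minimal configuration sketch so those resources exist (bucket and prefix are placeholders; a sketch, not the package's documented setup):

library(targets)
tar_option_set(
  repository = "aws",
  resources = tar_resources(
    aws = tar_resources_aws(
      bucket = "my-targets-bucket",    # placeholder bucket name
      prefix = "my-pipeline/_targets"  # placeholder key prefix
    )
  )
)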
74 changes: 74 additions & 0 deletions R/class_database_gcp.R
@@ -0,0 +1,74 @@
database_gcp_new <- function(
memory = NULL,
path = NULL,
key = NULL,
header = NULL,
list_columns = NULL,
list_column_modes = NULL,
queue = NULL,
resources = NULL
) {
database_gcp_class$new(
memory = memory,
path = path,
key = key,
header = header,
list_columns = list_columns,
list_column_modes = list_column_modes,
queue = queue,
resources = resources
)
}

database_gcp_class <- R6::R6Class(
classname = "tar_database_gcp",
inherit = database_class,
class = FALSE,
portable = FALSE,
cloneable = FALSE,
public = list(
validate = function() {
super$validate()
tar_assert_inherits(
self$resources,
"tar_resources_gcp",
msg = paste(
"gcp resources must be supplied to the {targets} gcp ",
"database class. Set resources with tar_option_set()"
)
)
resources_validate(self$resources)
},
download = function() {
gcp <- self$resources$gcp
network <- self$resources$network
file <- file_init(path = path)
file_ensure_hash(file)
gcp_gcs_download(
file = self$path,
key = self$key,
bucket = gcp$bucket,
seconds_interval = network$seconds_interval %|||% 1,
seconds_timeout = network$seconds_timeout %|||% 60,
max_tries = network$max_tries %|||% 5L,
verbose = network$verbose %|||% TRUE
)
invisible()
},
upload = function() {
gcp <- self$resources$gcp
network <- self$resources$network
gcp_gcs_upload(
file = self$path,
key = self$key,
bucket = gcp$bucket,
predefined_acl = gcp$predefined_acl %|||% "private",
seconds_interval = network$seconds_interval %|||% 1,
seconds_timeout = network$seconds_timeout %|||% 60,
max_tries = network$max_tries %|||% 5L,
verbose = network$verbose %|||% TRUE
)
invisible()
},
)
)
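As the validation message in this new gcp class indicates, gcp resources must be supplied through tar_option_set(). A minimal configuration sketch mirroring the AWS example above (bucket and prefix are placeholders; assumes tar_resources_gcp() accepts these arguments in this version):

library(targets)
tar_option_set(
  repository = "gcp",
  resources = tar_resources(
    gcp = tar_resources_gcp(
      bucket = "my-targets-bucket",    # placeholder bucket name
      prefix = "my-pipeline/_targets"  # placeholder key prefix
    )
  )
)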
13 changes: 10 additions & 3 deletions R/class_database_local.R
@@ -1,19 +1,21 @@
  database_local_new <- function(
  memory = NULL,
  path = NULL,
- subkey = NULL,
+ key = NULL,
  header = NULL,
  list_columns = NULL,
  list_column_modes = NULL,
+ resources = NULL,
  queue = NULL
  ) {
  database_local_class$new(
  memory = memory,
  path = path,
- subkey = subkey,
+ key = key,
  header = header,
  list_columns = list_columns,
  list_column_modes = list_column_modes,
+ resources = resources,
  queue = queue
  )
  }
@@ -24,5 +26,10 @@ database_local_class <- R6::R6Class(
  class = FALSE,
  portable = FALSE,
  cloneable = FALSE,
- public = list()
+ public = list(
+ upload = function() {
+ },
+ download = function() {
+ }
+ )
  )
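Design note (an inference, not stated in the commit): the empty upload() and download() methods appear to give the local class the same interface as the new AWS and GCP database classes, so shared code can call them unconditionally. A hypothetical caller sketch:

# Hypothetical caller: the same code path works for every repository,
# because upload()/download() are harmless no-ops for local storage.
database$download()  # fetch metadata from the bucket for cloud repositories; no-op locally
# ... read or update the file at database$path ...
database$upload()    # push metadata back to the bucket; no-op locally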
