Title: | Human 'Connectome' Project Interface |
---|---|
Description: | Downloads and reads data from Human 'Connectome' Project <https://db.humanconnectome.org> using Amazon Web Services ('AWS') 'S3' buckets. |
Authors: | John Muschelli [aut, cre], Adi Gherman [ctb] |
Maintainer: | John Muschelli <[email protected]> |
License: | GPL-2 |
Version: | 0.10.0 |
Built: | 2024-11-21 05:05:47 UTC |
Source: | https://github.com/muschellij2/neurohcp |
Lists the buckets available with the given access key/secret key pair
bucketlist(region = "us-east-1", access_key = NULL, secret_key = NULL, ...)
region | Region of S3 Bucket |
access_key | Amazon S3 Access Key |
secret_key | Amazon S3 Secret Key |
... | Additional arguments to pass to the underlying GET request |
List of Buckets
if (have_aws_key()) { bucketlist() }
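As a further sketch (not from the package's own examples), the same call with credentials passed explicitly; reading them from the standard AWS environment variables is an assumption here:

if (have_aws_key()) {
  # assumes keys live in the conventional AWS environment variables
  bucketlist(
    region = "us-east-1",
    access_key = Sys.getenv("AWS_ACCESS_KEY_ID"),
    secret_key = Sys.getenv("AWS_SECRET_ACCESS_KEY")
  )
}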
Downloads a directory/folder from the HCP database
download_hcp_dir(prefix, delimiter = "", outdir = tempfile(), verbose = TRUE, ...)

download_fcp_dir(...)

download_openneuro_dir(...)
prefix | Folder to download |
delimiter | Delimiter for files |
outdir | Output directory |
verbose | Should diagnostic values be printed? |
... | Additional arguments to pass to hcp_list_files |
List with the result of calling hcp_list_files, the output directory, and all destination files (not subset to those that actually downloaded).
if (have_aws_key()) {
  prefix = "HCP/100307/release-notes"
  res = download_hcp_dir(prefix = prefix, verbose = FALSE)
}
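A hedged sketch using only the documented arguments: download the same folder into a chosen output directory instead of the default tempfile():

if (have_aws_key()) {
  outdir = file.path(tempdir(), "release_notes")
  res = download_hcp_dir(
    prefix = "HCP/100307/release-notes",
    outdir = outdir,
    verbose = FALSE
  )
  list.files(outdir)  # files written locally
}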
Downloads a file from the HCP S3 Bucket
download_hcp_file(path_to_file, destfile = NULL, verbose = TRUE, error = TRUE, ...)

download_fcp_file(...)

download_openneuro_file(...)
path_to_file | Path to file on HCP S3 Bucket |
destfile | Destination filename |
verbose | Should download progress be printed? |
error | Should the function error if the request failed? |
... | Arguments to pass to get_hcp_file |
Output filename that was downloaded
if (have_aws_key()) {
  path_to_file <- "HCP_900/100206/MNINonLinear/100206.164k_fs_LR.wb.spec"
  download_hcp_file(path_to_file = path_to_file)
}
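A hedged sketch using only the documented arguments: save to an explicit destination and avoid erroring on a bad request:

if (have_aws_key()) {
  dest = tempfile(fileext = ".wb.spec")
  download_hcp_file(
    path_to_file = "HCP_900/100206/MNINonLinear/100206.164k_fs_LR.wb.spec",
    destfile = dest,
    verbose = FALSE,
    error = FALSE
  )
  file.exists(dest)
}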
Wraps make_aws_call in a GET request to retrieve the file
get_hcp_file(path_to_file = "/", ..., verbose = TRUE, verb = "GET")

head_hcp_file(...)

get_fcp_file(...)

head_fcp_file(...)

get_openneuro_file(...)

head_openneuro_file(...)
path_to_file | Path to file on HCP S3 Bucket |
... | Arguments to pass to make_aws_call |
verbose | Should the URL be printed? |
verb | httr VERB to be used (e.g. "GET" or "HEAD") |
Result of GET
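This entry ships no example, so here is a minimal sketch: a HEAD request checks that a file exists without downloading its body (httr::status_code is the standard httr accessor):

if (have_aws_key()) {
  res = head_hcp_file("HCP_900/100206/MNINonLinear/100206.164k_fs_LR.wb.spec")
  httr::status_code(res)  # 200 if the object exists
}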
A data.frame with all the available scanning information for the HCP 1200 data.

hcp_1200_scanning_info

An object of class tbl_df (inherits from tbl, data.frame) with 69615 rows and 18 columns.
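These bundled tables need no AWS key; below is a minimal, structure-agnostic sketch of inspecting one (the column names are not listed in this entry, so the sketch only queries them). The same pattern applies to hcp_900_scanning_info and hcp_scanning_info below.

library(neurohcp)
dim(hcp_1200_scanning_info)       # 69615 rows, 18 columns
colnames(hcp_1200_scanning_info)  # the 18 available fields
head(hcp_1200_scanning_info)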
A data.frame with all the available scanning information for the HCP 900 data.

hcp_900_scanning_info

An object of class tbl_df (inherits from tbl, data.frame) with 34406 rows and 18 columns.
Constructs the URL string for the AWS S3 Bucket to pass in for HCP downloading
hcp_aws_url(
  path_to_file = "",
  bucket = "hcp-openaccess",
  region = "us-east-1",
  access_key = NULL,
  secret_key = NULL,
  lifetime_minutes = 20,
  query = NULL,
  verb = "GET",
  sign = TRUE
)
path_to_file | Path to file on HCP S3 Bucket |
bucket | Bucket to download from |
region | Region of S3 Bucket |
access_key | Amazon S3 Access Key |
secret_key | Amazon S3 Secret Key |
lifetime_minutes | Time (in minutes) that the connection can be open |
query | Additional query arguments to add to the URL |
verb | httr VERB to be used |
sign | Should the URL be signed? |
Character string of the URL to be passed to httr VERBs
if (have_aws_key()) {
  path_to_file <- "HCP_900/100206/MNINonLinear/100206.164k_fs_LR.wb.spec"
  hcp_aws_url(path_to_file)
}
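A hedged sketch of consuming the returned URL: since the URL is signed for the given verb (GET by default), it can be fetched directly with httr:

if (have_aws_key()) {
  url = hcp_aws_url("HCP_900/100206/MNINonLinear/100206.164k_fs_LR.wb.spec")
  res = httr::GET(url)
  httr::status_code(res)  # 200 on success
}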
Returns the list of IDs of folders in the HCP database
hcp_ids(group = c("HCP", "HCP_900", "HCP_1200", "HCP_Retest"))
group | Group of IDs to return |
Character vector
if (have_aws_key()) {
  head(hcp_ids("HCP"))
  head(hcp_ids("HCP_900"))
}
Lists a set of files from an HCP bucket for a specific sub-folder
hcp_list_files(prefix = "", delimiter = NULL, query = NULL, marker = NULL, ...)

fcp_list_files(prefix = "", delimiter = NULL, query = NULL, marker = NULL, ...)

openneuro_list_files(prefix = "", delimiter = NULL, query = NULL, marker = NULL, ...)

hcp_list_dirs(prefix = "HCP/", ...)

fcp_list_dirs(prefix = "data/Projects/", ...)

openneuro_list_dirs(prefix = NULL, ...)
prefix | Directory/folder in which to list files. If "" (the default), the top level of the bucket is listed. |
delimiter | Delimiter to list files. For example, ".nii.gz" for gzipped NIfTI files |
query | Additional query arguments |
marker | The marker at which to start the listing, needed for pagination of results |
... | Additional arguments passed to the underlying GET request |
List with the result of the GET command, the parsed result, and the content from the result.
if (have_aws_key()) {
  x = hcp_list_files(
    prefix = "HCP/100307/unprocessed/3T/Diffusion",
    delimiter = "bval"
  )
  stopifnot(x$parsed_result$ListBucketResult$Name[[1]] == "hcp-openaccess")
  t1_niis = hcp_list_files(prefix = "HCP/100307/T1w", delimiter = ".nii.gz")
  all_dirs = hcp_list_dirs("HCP/")
}

if (have_aws_key()) {
  res = hcp_list_dirs("HCP/")
  projects = unlist(parse_list_files(res)$prefixes)
  projects = unname(projects)
  head(projects)
  head(basename(projects))
  stopifnot("100307" %in% basename(projects))
}

if (have_aws_key()) {
  res = fcp_list_dirs()
  projects = unlist(parse_list_files(res)$prefixes)
  projects = unname(projects)
  head(projects)
  head(basename(projects))
  stopifnot("ABIDE" %in% basename(projects))
}

res = openneuro_list_dirs()
projects = unlist(parse_list_files(res)$prefixes)
projects = unname(projects)
head(projects)
if (length(projects) > 0) {
  head(basename(projects))
  stopifnot("ds000002" %in% basename(projects))
}
A data.frame with all the available scanning information for the HCP data.

hcp_scanning_info

An object of class tbl_df (inherits from tbl, data.frame) with 34406 rows and 18 columns.
Constructs the GET information string for the AWS S3 Bucket
make_aws_call(
  path_to_file = "/",
  bucket = "hcp-openaccess",
  region = "us-east-1",
  access_key = NULL,
  secret_key = NULL,
  lifetime_minutes = 5,
  query = NULL,
  verb = "GET",
  sign = TRUE
)
path_to_file | Path to file on HCP S3 Bucket |
bucket | Bucket to download from |
region | Region of S3 Bucket |
access_key | Amazon S3 Access Key |
secret_key | Amazon S3 Secret Key |
lifetime_minutes | Time (in minutes) that the connection can be open |
query | Additional query arguments to add to the URL |
verb | httr VERB to be used |
sign | Should the URL be signed? |
Character string of the URL to be passed to httr VERBs
if (have_aws_key()) {
  path_to_file <- paste0(
    "HCP_900/100206/MNINonLinear/",
    "100206.164k_fs_LR.wb.spec")
  result = make_aws_call(path_to_file)
  # don't want this stuff printed because it has keys
  result$headers$secret_key = NULL
  result$headers$access_key = NULL
  result$query$AWSAccessKeyId = NULL
  result$query$Signature = NULL
  result
}
This parses the result from hcp_list_files and organizes the files into data.frames.
parse_list_files(ret)
ret | Object with element parsed_result, as returned by hcp_list_files |

List of two data.frames: the contents and the commonprefixes elements from the list.
if (have_aws_key()) {
  ret = hcp_list_files(prefix = "HCP/100307/unprocessed/3T/Diffusion")
  parsed = parse_list_files(ret)
  stopifnot(!is.null(parsed$contents))
}
Sets and returns the AWS keys. This will error if not all are specified.
set_aws_api_key(
  access_key = NULL,
  secret_key = NULL,
  default_region = "us-east-1",
  error = TRUE
)

have_aws_key()
access_key | Amazon access key. If NULL, it is taken from the AWS_ACCESS_KEY_ID environment variable |
secret_key | Amazon secret key. If NULL, it is taken from the AWS_SECRET_ACCESS_KEY environment variable |
default_region | Amazon default region. If NULL, it is taken from the AWS_DEFAULT_REGION environment variable |
error | Should this function error if things are not specified? |
List of access_key, secret_key, and default_region.
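This entry ships no example, so here is a minimal sketch; the key values are placeholders, and real keys should come from your credentials store, not source code:

have_aws_key()  # check for keys without erroring

keys = set_aws_api_key(
  access_key = "MY_ACCESS_KEY",  # placeholder
  secret_key = "MY_SECRET_KEY",  # placeholder
  default_region = "us-east-1",
  error = FALSE
)
names(keys)  # access_key, secret_key, default_region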