-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathcmr.R
More file actions
160 lines (134 loc) · 4.99 KB
/
cmr.R
File metadata and controls
160 lines (134 loc) · 4.99 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
# Authors: Alex Mandel, Aniruddha Ghosh, Robert J. Hijmans
# July 2019
# Version 0.1
# Licence GPL v3
# Converted from the NASA official pyCMR
# https://github.com/nasa/pyCMR
#AUTH_HOST = 'urs.earthdata.nasa.gov'
.get_search_results <- function(url, limit, kwargs){
  # Query a CMR search endpoint, paging until `limit` rows have been
  # collected or the server returns no more results.
  # :param url: full CMR search URL
  # :param limit: maximum number of result rows to fetch
  # :param kwargs: named list of search parameters sent as the query string
  # :return: data.frame of accumulated results, or NULL if nothing was found
  page_num <- 1
  results <- NULL
  # NROW() is 0 for NULL and the row count for a data.frame.
  # (length() on a data.frame returns the number of COLUMNS, so the
  # original condition compared ncol to `limit` and paged far past it.)
  while (NROW(results) < limit){
    response <- httr::GET(
      url=url,
      # TODO: fix next line to take all possible args passed via ...
      httr::add_headers(Accept="text/csv"),
      query=c(kwargs, page_num=page_num)
    )
    # Abort on HTTP error statuses
    httr::stop_for_status(response)
    if (httr::http_type(response) == "text/csv"){
      # Per httr docs: test for the expected content type, then parse manually
      p <- utils::read.csv(text=httr::content(response, as="text"),
                           check.names=FALSE, stringsAsFactors=FALSE)
      # [[ on a data.frame returns NULL for a missing column (no error),
      # so no tryCatch is needed; stop paging when the URL column is
      # missing or empty.
      urls <- p[["Online Access URLs"]]
      if (is.null(urls) || length(urls) == 0){
        break
      }
      # Append the full table of results and advance to the next page
      results <- rbind(results, p)
      page_num <- page_num + 1
    } else {
      # The response was not CSV; stop rather than loop forever
      break
    }
  }
  return(results)
}
.searchCollection <- function(cmr_host="https://cmr.earthdata.nasa.gov", limit=100, ...){
  # Search the CMR collections endpoint.
  # :param cmr_host: base URL of the CMR host
  # :param limit: maximum number of results
  # :param ...: named search parameters forwarded to the CMR query
  # :return: data.frame of results
  SEARCH_COLLECTION_URL <- paste0(cmr_host, "/search/collections")
  # Collect ... into a named list for the `kwargs` argument.
  # (.get_search_results has no ... parameter, so forwarding the dots
  # directly raised an "unused argument" error for any search parameter.)
  results <- .get_search_results(url=SEARCH_COLLECTION_URL, limit=limit,
                                 kwargs=list(...))
  return(results)
}
.cmr_download_one <- function(url, path, netrc, overwrite, ...){
  # Download a single granule URL into `path`.
  # :param url: download URL
  # :param path: destination directory
  # :param netrc: path to a .netrc file with Earthdata credentials, or NULL
  # :param overwrite: overwrite an existing local file?
  # :return: the local file path
  outfile <- file.path(path, basename(url))
  # Skip the download when the file already exists, unless overwrite is set
  if (!file.exists(outfile) || overwrite){
    if (!is.null(netrc)){
      # Authenticated download through httr using the netrc credentials.
      # (httr:: namespacing added for consistency with the rest of the
      # file and so this works without library(httr) attached.)
      httr::GET(url,
                httr::write_disk(outfile, overwrite = overwrite),
                httr::progress(),
                httr::config(netrc = TRUE, netrc_file = netrc),
                httr::set_cookies("LC" = "cookies"))
    } else {
      # Unauthenticated fallback; download.file() errors on failure.
      # (The original returned download.file's status code here instead
      # of the path; both branches now return `outfile` consistently —
      # the only caller ignores the non-error value.)
      utils::download.file(url, outfile, mode = "wb")
    }
  }
  return(outfile)
}
.get_netrc <- function(username, password) {
  # Locate — or create — a .netrc file holding NASA Earthdata Login
  # credentials for urs.earthdata.nasa.gov.
  # :param username: Earthdata Login username
  # :param password: Earthdata Login password
  # :return: path to the .netrc file
  usr <- file.path(Sys.getenv("USERPROFILE"))  # home dir on Windows
  if (usr == "") {
    usr <- Sys.getenv("HOME")  # fall back to HOME elsewhere
  }
  netrc <- file.path(usr, ".netrc", fsep = .Platform$file.sep)
  # (Re)write the file unless it already starts with the Earthdata entry.
  # isTRUE() guards the case of an existing-but-empty file, where
  # readLines(netrc)[1] is NA and the original `grepl(...) == FALSE`
  # comparison produced NA, making if() error out.
  if (!file.exists(netrc) ||
      !isTRUE(grepl("urs.earthdata.nasa.gov", readLines(netrc)[1]))) {
    netrc_conn <- file(netrc)
    writeLines(c("machine urs.earthdata.nasa.gov",
                 sprintf("login %s", username),
                 sprintf("password %s", password)), netrc_conn)
    close(netrc_conn)
  }
  return(netrc)
}
.cmr_download <- function(urls, path, username, password, overwrite, ...){
  # Download a vector of granule URLs into `path`.
  # :param urls: character vector of download URLs
  # :param path: destination directory
  # :param username, password: Earthdata credentials (written to .netrc)
  # :param overwrite: overwrite existing local files?
  # :return: character vector the same length as `urls`; the URL on
  #          success, "" where the download failed
  netrc <- .get_netrc(username, password)  # ensure the .netrc file exists
  files <- rep("", length(urls))
  # seq_along() is safe for zero-length input (1:length(urls) would
  # iterate over c(1, 0))
  for (i in seq_along(urls)) {
    f <- tryCatch(
      .cmr_download_one(urls[i], path, netrc, overwrite),
      error = function(e){e}
    )
    if (inherits(f, "error")) {
      warning("failure: ", urls[i])
      # Remove a zero-byte partial download, if any. The local file name
      # is the basename of the URL; the original joined the FULL url onto
      # `path`, producing a path that never matched the real file.
      partial <- file.path(path, basename(urls[i]))
      if (isTRUE(file.info(partial)$size < 1)) file.remove(partial)
    } else {
      files[i] <- urls[i]
    }
  }
  cat("\n")
  return(files)
}
.searchGranules <- function(product="MOD09A1", start_date, end_date, extent, limit=100, datesuffix = "T00:00:00Z", ...){
  # Search the CMR granules endpoint for a product over a date range and
  # spatial extent.
  # :param product: CMR short_name of the product
  # :param start_date, end_date: anything coercible by as.Date()
  # :param extent: spatial extent, resolved through .getExtent()
  # :param limit: maximum number of results
  # :param datesuffix: time-of-day suffix appended to each date
  # :param ...: additional CMR search parameters
  # :return: data.frame of results
  bbox <- .getExtent(extent)
  # as.Date() doubles as validation of the date inputs
  temporal <- paste0(as.Date(start_date), datesuffix, ",",
                     as.Date(end_date), datesuffix)
  # Base query parameters plus any extras from ... (c() with an empty
  # list is a no-op, so no length check is needed)
  params <- c(
    list(short_name=product, temporal=temporal,
         downloadable="true", bounding_box=bbox),
    list(...)
  )
  cmr_host <- "https://cmr.earthdata.nasa.gov"
  url <- file.path(cmr_host, "search/granules")
  return(.get_search_results(url=url, limit=limit, kwargs=params))
}
# CMR download attempt