Advertisement
Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
# Download the latest NARRE model run from NOMADS and print accumulated
# precipitation (APCP) at the surface for two points of interest near
# Chapel Hill, NC.
library(rNOMADS)

# Get the latest model instance (depth = 1 returns only the newest run).
urls.out <- CrawlModels(abbrev = "narre", depth = 1)[1]

# Get the available predictions for that run.
model.parameters <- ParseModelPage(urls.out[1])

# Pick the 3-hour forecast ("f03"); use the first match if several exist.
pred.ind <- which(grepl("f03", model.parameters$pred))
latest.pred <- model.parameters$pred[pred.ind[1]]

levels <- c("surface")
variables <- c("APCP")           # accumulated precipitation
domain <- c(-81, -77, 38, 34)    # small area around Chapel Hill

# Download the GRIB file restricted to the requested subset.
grb.info <- GribGrab(urls.out, latest.pred, levels, variables,
                     model.domain = domain)

# Read the GRIB file into a data structure with lon/lat/value vectors.
grb.data <- ReadGrib(grb.info$file.name, levels, variables)

# Points we want values for.
lon <- c(-79.052104, -78.010020)
lat <- c(35.907553, 36.000200)

for (k in seq_along(lon)) {
  # BUG FIX: measure the distance from EVERY grid node to target point k.
  # The original computed (grb.data$lon[k] - lon), subtracting the whole
  # target vector from a single grid node picked by the loop index, which
  # finds the wrong "nearest" cell.
  dist <- sqrt((grb.data$lon - lon[k])^2 + (grb.data$lat - lat[k])^2)
  # which.min returns exactly one index even when several nodes tie,
  # so the printed line always contains a single value.
  v.i <- which.min(dist)
  print(paste("LAT", lat[k], "LON", lon[k], levels[1], variables[1], ":",
              grb.data$value[v.i]))
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement