Merge pull request #5 from OCHA-DAP/ECMWF-DroughtPrioritization
ECMWF Zonal & drought prioritization
caldwellst authored Aug 14, 2024
2 parents 17d58a3 + 871e7f7 commit 4e8d10a
Showing 18 changed files with 2,995 additions and 0 deletions.
16 changes: 16 additions & 0 deletions .gitignore
@@ -3,15 +3,31 @@
.RData
.Ruserdata

_targets/*
.scrap/*

*.html
exploration/rsconnect/*
src/.ipynb_checkpoints
exploration/rsconnect/*
exploration/rsconnect_rpubsupload.R
AFG_AdminBoundaries_AGCHO_20211117/*
pub_rpubs/*

*.pyc
.venv-afg-drought/*
.vscode/*
.ipynb_checkpoints/
*.grib
*.idx
*.html

src.egg-info/
*.tif
*.gif
*.zip
*.csv
*.parquet

.python-version

55 changes: 55 additions & 0 deletions R/blob_connect.R
@@ -0,0 +1,55 @@

#' @export
load_proj_containers <- function() {
es <- azure_endpoint_url()
# storage endpoint
se <- AzureStor::storage_endpoint(es, sas = Sys.getenv("DSCI_AZ_SAS_DEV"))
# storage container
sc_global <- AzureStor::storage_container(se, "global")
sc_projects <- AzureStor::storage_container(se, "projects")
list(
GLOBAL_CONT = sc_global,
PROJECTS_CONT = sc_projects
)
}

#' @export
azure_endpoint_url <- function(
service = c("blob", "file"),
stage = c("dev", "prod"),
storage_account = "imb0chd0") {
blob_url <- "https://{storage_account}{stage}.{service}.core.windows.net/"
service <- rlang::arg_match(service)
stage <- rlang::arg_match(stage)
  storage_account <- rlang::arg_match(storage_account)
endpoint <- glue::glue(blob_url)
return(endpoint)
}
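
# Usage sketch, assuming the defaults above: the dev blob endpoint resolves to
# "https://imb0chd0dev.blob.core.windows.net/".
example_endpoint <- azure_endpoint_url()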


#' proj_blob_paths
#' @description
#' Convenience function to load the blob paths required for the project.
#' Built up on an as-needed basis.
#'
#' @export
proj_blob_paths <- function(){
proj_root <- "ds-aa-afg-drought/"
raw_root <- paste0(proj_root, "raw/")
processed_root <- paste0(proj_root, "processed/")

vector_raw <- paste0(raw_root, "vector/")
vector_processed <- paste0(processed_root, "vector/")

list(
GDF_ADM1 = paste0(vector_raw, "afg_admbnda_agcho_adm1.parquet"),
GDF_ADM2 = paste0(vector_raw, "afg_admbnda_agcho_adm2.parquet"),
DF_ADM2_CHIRPS_WFP = paste0(vector_raw, "wfp-chirps-adm2.csv"),
DF_ADM2_NDVI_WFP = paste0(vector_raw, "wfp-ndvi-adm2.csv"),
    DF_ADM1_CHIRPS = paste0(vector_processed, "chirps_monthly_afg_adm1_historical.csv"),
DF_ADM1_MODIS_NDVI_CROPS = paste0(vector_processed, "modis_ndvi_crops_adm1.csv"),
DF_ADM1_MODIS_SNOW = paste0(vector_processed, "modis_snow_frac_monthly_afg_adm1_historical.csv"),
GIF_MODIS_NDVI_CROPS = paste0(processed_root, "modis_ndvi_crops_hirat_animation.gif"),
DIR_COGS = paste0(raw_root, "cogs/")
)
}
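
# Usage sketch: read_adm1_chirps_blob() is an illustrative helper showing how
# the containers and paths above fit together. It assumes DSCI_AZ_SAS_DEV is
# set and that the CHIRPS admin 1 CSV already exists in the "projects" container.
read_adm1_chirps_blob <- function() {
  pc <- load_proj_containers()
  bp <- proj_blob_paths()
  tf <- tempfile(fileext = ".csv")
  AzureStor::storage_download(
    container = pc$PROJECTS_CONT,
    src = bp$DF_ADM1_CHIRPS,
    dest = tf
  )
  readr::read_csv(tf)
}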
22 changes: 22 additions & 0 deletions R/load_funcs.R
@@ -0,0 +1,22 @@
#' Helper functions to load data sets

load_wfp_chirps <- function(){

  chirps_url <- "https://data.humdata.org/dataset/3b5e8a5c-e4e0-4c58-9c58-d87e33520e08/resource/a8c98023-e078-4684-ade3-9fdfd66a1361/download/afg-rainfall-adm2-full.csv"
  utils::download.file(chirps_url, tf <- tempfile("afg-rainfall-adm2-full.csv"))
  df_chirps_adm2 <- readr::read_csv(tf)

  # drop the first row (HXL hashtag tags) before cleaning names and re-typing columns
  df_chirps_adm2[-1, ] |>
    janitor::clean_names() |>
    readr::type_convert()
}

load_wfp_ndvi <- function(){
  url <- "https://data.humdata.org/dataset/fa36ae79-984e-4819-b0eb-a79fbb168f6c/resource/d79de660-6e50-418b-a971-e0dfaa02586f/download/afg-ndvi-adm2-full.csv"
  utils::download.file(url, tf <- tempfile("afg-ndvi-adm2-full.csv"))
  df_adm2 <- readr::read_csv(tf)

  # drop the first row (HXL hashtag tags) before cleaning names and re-typing columns
  df_adm2[-1, ] |>
    janitor::clean_names() |>
    readr::type_convert()
}
22 changes: 22 additions & 0 deletions R/raster_utils.R
@@ -0,0 +1,22 @@


source("R/blob_connect.R")
load_mars_stack <- function(){

  pc <- load_proj_containers()
  fbps <- proj_blob_paths()

  # list the ECMWF SEAS5 MARS COGs stored in the projects container
  cog_df <- AzureStor::list_blobs(
    container = pc$PROJECTS_CONT,
    prefix = "ds-aa-afg-drought/cogs/ecmwf_seas5_mars_"
  )

  # build GDAL /vsiaz/ virtual file system paths to the blobs
  container_vp <- "/vsiaz/projects/"
  urls <- paste0(container_vp, cog_df$name)

  # GDAL reads the SAS token and storage account name from these env vars
  Sys.setenv(AZURE_STORAGE_SAS_TOKEN = Sys.getenv("DSCI_AZ_SAS_DEV"))
  Sys.setenv(AZURE_STORAGE_ACCOUNT = Sys.getenv("DSCI_AZ_STORAGE_ACCOUNT"))

  r <- terra::rast(urls)
  r
}
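
# Usage sketch: count_mars_layers() is an illustrative helper showing how the
# stack is consumed. terra reads the COGs lazily over GDAL's /vsiaz/ virtual
# file system, so the env vars set in load_mars_stack() must remain valid when
# layers are accessed.
count_mars_layers <- function() {
  r_mars <- load_mars_stack()
  terra::nlyr(r_mars)
}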
222 changes: 222 additions & 0 deletions R/utils.R
@@ -0,0 +1,222 @@

#' Download shapefile and read
#'
#' Downloads a shapefile to a temporary file, unzipping zip archives if
#' necessary. Handles zipped geojson or gpkg files as well as shapefiles,
#' where unzipping returns a folder. The file is then read with `sf::st_read()`.
#'
#' @param url URL to download
#' @param layer Layer to read
#' @param iso3 `character` string of ISO3 code to add to the file.
#' @param boundary_source `character` name of source for the admin 0 boundaries
#'     layer. If supplied, a column named "boundary_source"
#'     will be added to the sf object with the specified input. If `NULL` (default),
#'     no column is added.
#'
#' @returns sf object
#'
#' @export
download_shapefile <- function(
url,
layer = NULL,
iso3 = NULL,
boundary_source = NULL
) {
if (stringr::str_ends(url, ".zip")) {
utils::download.file(
url = url,
destfile = zf <- tempfile(fileext = ".zip"),
quiet = TRUE
)

utils::unzip(
zipfile = zf,
exdir = td <- tempdir()
)

# if the file extension is just `.zip`, we return the temp dir alone
# because that works for shapefiles, otherwise we return the file unzipped
    fn <- stringr::str_remove(basename(url), "\\.zip$")
if (tools::file_ext(fn) == "") {
fn <- td
} else {
fn <- file.path(td, fn)
}
} else {
utils::download.file(
url = url,
destfile = fn <- tempfile(fileext = paste0(".", tools::file_ext(url))),
quiet = TRUE
)
}

if (!is.null(layer)) {
ret <- sf::st_read(
fn,
layer = layer,
quiet = TRUE
)
} else {
ret <- sf::st_read(
fn,
quiet = TRUE
)
}

# add in iso3 and boundary source. if NULL, no change will happen
ret$iso3 <- iso3
ret$boundary_source <- boundary_source

ret
}

#' @export
download_fieldmaps_sf <- function(iso3, layer = NULL) {
iso3 <- tolower(iso3)
download_shapefile(
url = glue::glue("https://data.fieldmaps.io/cod/originals/{iso3}.gpkg.zip"),
layer = layer,
iso3 = iso3,
boundary_source = "FieldMaps, OCHA"
)
}
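
# Usage sketch: load_afg_adm1() is an illustrative helper pulling the
# Afghanistan COD boundaries from FieldMaps. The layer name "afg_adm1" is an
# assumption about the GeoPackage contents; check sf::st_layers() on the
# downloaded file to confirm the available layers.
load_afg_adm1 <- function() {
  download_fieldmaps_sf("AFG", layer = "afg_adm1") # "afg_adm1" layer name assumed
}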

proj_paths <- function(){

AA_DIR_NEW <- Sys.getenv("AA_DATA_DIR_NEW")
AA_DIR_OLD <- Sys.getenv("AA_DATA_DIR")

PUB_PROCESSED_AFG <- file.path(
    AA_DIR_NEW,
"public",
"processed",
"afg"
)
PUB_RAW_AFG <- file.path(
AA_DIR_NEW,
"public",
"raw",
"afg"
)

  PUB_RAW_GLB <- file.path(
    AA_DIR_OLD,
    "public",
    "raw",
    "glb"
  )


  list(
    "PUB_RAW_AFG" = PUB_RAW_AFG,
    "PUB_PROCESSED_AFG" = PUB_PROCESSED_AFG,
    "PUB_RAW_GLB" = PUB_RAW_GLB,
    "ADM1_GAUL" = file.path(
      PUB_RAW_GLB,
      "asap",
      "reference_data",
      "gaul1_asap_v04"
    ),
    "ADM2_GAUL" = file.path(
      PUB_RAW_GLB,
      "asap",
      "reference_data",
      "gaul2_asap_v04"
    ),
    "ADM1_CHIRPS_MONTHLY" = file.path(
      PUB_PROCESSED_AFG,
      "chirps_monthly_afg_adm1_historical.csv"
    ),
    "ADM2_AFG_GAUL" = file.path(
      PUB_RAW_AFG,
      "afg_adm2_fao_gaul2015.rds"
    ),
    "ADM_TABULAR_COD" = file.path(
      PUB_RAW_AFG,
      "afg_adminboundaries_tabulardata.xlsx"
    ),
"ADM1_MODIS_SNOW" = file.path(
PUB_PROCESSED_AFG,
"modis_snow_frac_monthly_afg_adm1_historical.csv"
),

"ONI_LINK"= "https://origin.cpc.ncep.noaa.gov/products/analysis_monitoring/ensostuff/detrend.nino34.ascii.txt",

"ASAP_ACTIVE_SEASON_RASTER" = file.path(
Sys.getenv("AA_DATA_DIR"),
"public",
"processed",
"glb",
"asap",
"season",
"month"
),
"ADM2_NDVI_WFP_HDX" = file.path(
PUB_RAW_AFG,
"afg-ndvi-adm2-full.csv"
),
"FS_INDICATORS_FAO" = file.path(
PUB_RAW_AFG,
"suite-of-food-security-indicators_afg.csv"
),
"AMD1_NDVI_CROP_MODIS" = file.path(
PUB_PROCESSED_AFG,
"modis_ndvi_2000_2004.csv"
)
)

}


#' oni_to_enso_class
#'
#' @param x `numeric` vector of ONI anomaly values
#'
#' @return `factor` of ENSO classes ("La Nina", "Neutral", "El Nino")
#' @export
#'
#' @examples \dontrun{
#' df_oni <- read_table()
#' df_oni |>
#'   mutate(
#'     enso_class = oni_to_enso_class(anom)
#'   )
#' }
oni_to_enso_class <- function(x){
  x_class <- dplyr::case_when(
    x <= -0.5 ~ "La Nina",
    x < 0.5 ~ "Neutral",
    x >= 0.5 ~ "El Nino"
  )
  forcats::fct_relevel(x_class, "La Nina", "Neutral", "El Nino")
}

# Months of interest by season (winter wheat: Dec-Apr)
months_of_interest <- function(){
  list(
    winter_wheat = c(12, 1, 2, 3, 4)
  )
}


# Adjust gghdx/ggplot2 theme defaults for the output context (currently RPubs)
update_theme_gghdx <- function(context = "rpubs"){
  if (context == "rpubs") {
    ggplot2::theme_update(
      plot.title = ggplot2::element_text(size = 10),
      plot.subtitle = ggplot2::element_text(size = 10),
      axis.title = ggplot2::element_text(size = 8),
      axis.text = ggplot2::element_text(size = 8),
      plot.caption = ggplot2::element_text(hjust = 0, size = 8),
      strip.text = ggplot2::element_text(size = 8),
      axis.text.x = ggplot2::element_text(angle = 90, hjust = 1),
      legend.title = ggplot2::element_blank()
    )
  }
}

proj_palettes <- function(){
  list(
    "enso_palette" = c(
      "La Nina" = "#FBB4AE",
      "El Nino" = "#B3CDE3",
      "Neutral" = "#CCEBC5"
    )
  )
}
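
# Usage sketch: the ENSO palette above can be mapped onto a ggplot2 fill or
# colour scale, e.g.:
# ggplot2::scale_fill_manual(values = proj_palettes()$enso_palette)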
