Update documentation

George committed Sep 16, 2024
1 parent 034052c commit 252e8c4
Showing 8 changed files with 3,542 additions and 5,337 deletions.
368 changes: 368 additions & 0 deletions _sources/chapters/05-notebook-time-machine/Untitled.ipynb
@@ -0,0 +1,368 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 22,
"id": "05ac7679-3339-4eac-87da-d1b4e97c9602",
"metadata": {},
"outputs": [],
"source": [
"import cartopy.crs as ccrs\n",
"import cartopy.feature as cfeature\n",
"import geopandas as gpd\n",
"import glob\n",
"from matplotlib.colors import ListedColormap\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import os\n",
"import pandas as pd\n",
"from shapely.geometry import Point\n",
"import xarray as xr\n",
"\n",
"import warnings\n",
"warnings.filterwarnings('ignore')"
]
},
{
"cell_type": "code",
"execution_count": 27,
"id": "52a5a896-a208-484b-844c-36d7237160a4",
"metadata": {},
"outputs": [],
"source": [
"def get_spi_dataset(acc_period: int = 1, years: list = [2020]):\n",
"    data_root_folder = '/data1/drought_dataset/spi/'\n",
"    spi_folder = os.path.join(data_root_folder, f'spi{acc_period}')\n",
"    spi_paths = []\n",
"\n",
"    for year in years:\n",
"        spi_paths.extend(sorted(glob.glob(os.path.join(\n",
"            spi_folder, f'SPI{acc_period}_gamma_global_era5_moda_ref1991to2020_{year}*.nc'))))\n",
"\n",
"    return xr.open_mfdataset(spi_paths, chunks={'time': \"auto\"}, concat_dim=\"time\", combine='nested', parallel=False)\n",
"\n",
"\n",
"def get_spei_dataset(acc_period: int = 1, years: list = [2020]):\n",
"    data_root_folder = '/data1/drought_dataset/spei/'\n",
"    spei_folder = os.path.join(data_root_folder, f'spei{acc_period}')\n",
"    spei_paths = []\n",
"\n",
"    for year in years:\n",
"        spei_paths.extend(sorted(glob.glob(os.path.join(\n",
"            spei_folder, f'SPEI{acc_period}_genlogistic_global_era5_moda_ref1991to2020_{year}*.nc'))))\n",
"\n",
"    return xr.open_mfdataset(spei_paths, chunks={'time': \"auto\"}, concat_dim=\"time\", combine='nested', parallel=False)\n",
"\n",
"\n",
"def mask_invalid_values(ds, index, value=-9999):\n",
" ds[index] = ds[index].where(ds[index] != value, np.nan)\n",
" return ds\n",
"\n",
"def get_spei_significance_dataset(index='SPEI1', year=2020):\n",
" data_root_folder='/data1/drought_dataset/spei/'\n",
" quality_paths = []\n",
" for month in range(1, 13):\n",
" month_str = f'{month:02d}'\n",
" quality_paths.append(f'{data_root_folder}{index.lower()}/parameter/{index}_significance_global_era5_moda_{year}{month_str}_ref1991to2020.nc')\n",
" return xr.open_mfdataset(quality_paths, concat_dim=\"time\", combine='nested', parallel=False)\n",
"\n",
"\n",
"def get_spi_significance_dataset(index='SPI1', year=2020):\n",
" data_root_folder='/data1/drought_dataset/spi/'\n",
" quality_paths = []\n",
" for month in range(1, 13):\n",
" month_str = f'{month:02d}'\n",
" quality_paths.append(f'{data_root_folder}{index.lower()}/parameter/{index}_significance_global_era5_moda_{year}{month_str}_ref1991to2020.nc')\n",
" return xr.open_mfdataset(quality_paths, concat_dim=\"time\", combine='nested', parallel=False)\n",
"\n",
"\n",
"def create_drought_dataset(years: list):\n",
" # spi1 = get_spi_dataset(acc_period=1, years=years)\n",
" # spi3 = get_spi_dataset(acc_period=3, years=years)\n",
" # spi6 = get_spi_dataset(acc_period=6, years=years)\n",
" # spi12 = get_spi_dataset(acc_period=12, years=years)\n",
" # spi24 = get_spi_dataset(acc_period=24, years=years)\n",
" # spi48 = get_spi_dataset(acc_period=48, years=years)\n",
" \n",
" # spei1 = get_spei_dataset(acc_period=1, years=years)\n",
" # spei3 = get_spei_dataset(acc_period=3, years=years)\n",
" spei6 = get_spei_dataset(acc_period=6, years=years)\n",
" # spei12 = get_spei_dataset(acc_period=12, years=years)\n",
" # spei24 = get_spei_dataset(acc_period=24, years=years)\n",
" # spei48 = get_spei_dataset(acc_period=48, years=years)\n",
" \n",
" # spei_significance = get_spei_significance_dataset(year=2020)\n",
" # spi_significance = get_spi_significance_dataset(year=2020)\n",
" \n",
" drought_dataset = xr.Dataset()\n",
"\n",
" for key, ds in {\n",
" # 'SPI1': spi1,\n",
" # 'SPI3': spi3,\n",
" # 'SPI6': spi6,\n",
" # 'SPI12': spi12,\n",
" # 'SPI24': spi24,\n",
" # 'SPI48': spi48,\n",
" # 'SPEI1': spei1,\n",
" # 'SPEI3': spei3,\n",
" 'SPEI6': spei6,\n",
" # 'SPEI12': spei12,\n",
" # 'SPEI24': spei24,\n",
" # 'SPEI48': spei48,\n",
" # 'SPEI_significance': spei_significance,\n",
" # 'SPI_significance': spi_significance\n",
" }.items():\n",
" for var in ds.data_vars:\n",
" drought_dataset[f\"{key}\"] = ds[var]\n",
" \n",
" return drought_dataset"
]
},
{
"cell_type": "code",
"execution_count": 28,
"id": "afae37a8-a5eb-4bf0-98eb-003d9677b113",
"metadata": {},
"outputs": [],
"source": [
"drought_dataset = create_drought_dataset(years = [y for y in range(1991, 2024)])"
]
},
{
"cell_type": "code",
"execution_count": 29,
"id": "57b98ffa-bb4a-4623-824c-a3a4a2997e72",
"metadata": {},
"outputs": [],
"source": [
"def classify_drought(spei_value):\n",
"    # Map an SPEI value to an integer drought category; NaN inputs fall through to the final branch.\n",
"    if spei_value < -2:\n",
"        return 5 # Extreme Drought\n",
"    elif -2 <= spei_value < -1.5:\n",
"        return 4 # Severe Drought\n",
"    elif -1.5 <= spei_value < -1:\n",
"        return 3 # Moderate Drought\n",
"    elif -1 <= spei_value < -0.5:\n",
"        return 2 # Mild Drought\n",
"    elif -0.5 <= spei_value <= 0.5:\n",
"        return 1 # Normal\n",
"    else:\n",
"        return 0 # Wet Conditions"
]
},
{
"cell_type": "code",
"execution_count": 30,
"id": "3c86c84f-2a5b-460d-9733-8bb27dc1521d",
"metadata": {},
"outputs": [],
"source": [
"from pathlib import Path\n",
"import shutil\n",
"\n",
"index = 'SPEI6'\n",
"\n",
"dest_folder = Path().resolve() / 'animation' / index\n",
"if os.path.exists(dest_folder):\n",
" shutil.rmtree(dest_folder)\n",
"\n",
"os.makedirs(dest_folder, exist_ok=True)"
]
},
{
"cell_type": "code",
"execution_count": 35,
"id": "191c9cbe-21f3-4339-85ea-bf894610cbd6",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1991\n",
"1992\n",
"1993\n",
"1994\n",
"1995\n",
"1996\n",
"1997\n",
"1998\n",
"1999\n",
"2000\n",
"2001\n",
"2002\n",
"2003\n",
"2004\n",
"2005\n",
"2006\n",
"2007\n",
"2008\n",
"2009\n",
"2010\n",
"2011\n",
"2012\n",
"2013\n",
"2014\n",
"2015\n",
"2016\n",
"2017\n",
"2018\n",
"2019\n",
"2020\n",
"2021\n",
"2022\n",
"2023\n"
]
}
],
"source": [
"months = [\"January\", \"February\", \"March\", \"April\", \"May\", \"June\", \"July\", \"August\", \"September\", \"October\", \"November\", \"December\"]\n",
"years = [y for y in range(1991, 2024)]\n",
"map_paths = []\n",
"for target_year in years:\n",
"    print(target_year)\n",
"    for target_month in range(1, 13):\n",
"        spei_data = drought_dataset[index].sel(time=f'{target_year}-{target_month:02d}', method='nearest').squeeze()\n",
" classified_spei = xr.apply_ufunc(classify_drought, spei_data, vectorize=True, dask='parallelized')\n",
" \n",
" cmap = ListedColormap(['#5AB1A7', '#B6E3DC', '#EFDDAF', '#CEA053', '#995D12', '#543005'])\n",
" levels = [-0.5, 0.5, 1.5, 2.5, 3.5, 4.5, 5.5]\n",
" \n",
" # Plotting the classified SPEI data on a global map\n",
" plt.figure(figsize=(15, 10))\n",
" ax = plt.axes(projection=ccrs.PlateCarree())\n",
" \n",
" # Plot the classified SPEI data using the custom colormap and levels\n",
" im = classified_spei.plot(ax=ax, transform=ccrs.PlateCarree(), cmap=cmap, levels=levels, \n",
" extend='neither',\n",
" cbar_kwargs={'label': 'Drought Category', \n",
" 'ticks': [0, 1, 2, 3, 4, 5],\n",
" # 'tick_labels': ['Wet Conditions', 'Normal', 'Mild Drought', 'Moderate Drought', 'Severe Drought', 'Extreme Drought'],\n",
" 'format': '%d',\n",
" 'shrink': 0.5\n",
" }, \n",
" zorder=1)\n",
" \n",
" # Add coastlines, borders, and ocean masking\n",
" ax.coastlines(zorder=3)\n",
" ax.add_feature(cfeature.BORDERS, zorder=4)\n",
" ax.add_feature(cfeature.OCEAN, color='white', zorder=2)\n",
" \n",
" # Add latitude and longitude grid lines\n",
" gl = ax.gridlines(draw_labels=True, linewidth=0.5, color='gray', alpha=0.7, linestyle='--', zorder=5)\n",
" gl.top_labels = False # Disable top labels\n",
" gl.right_labels = False # Disable right labels\n",
" gl.xlocator = plt.FixedLocator(range(-180, 181, 30)) # Set longitude grid line intervals\n",
" gl.ylocator = plt.FixedLocator(range(-90, 91, 15)) # Set latitude grid line intervals\n",
" gl.xlabel_style = {'size': 10, 'color': 'black'}\n",
" gl.ylabel_style = {'size': 10, 'color': 'black'}\n",
" \n",
" im.colorbar.set_ticklabels(['Wet Conditions', 'Normal', 'Mild Drought', 'Moderate Drought', 'Severe Drought', 'Extreme Drought'])\n",
" # ax.set_title(f'Global Drought Conditions ({index}) for {months[target_month-1]} {target_year}')\n",
" ax.set_title(f'Global Drought Conditions ({index}) {target_year}')\n",
"\n",
" map_path = os.path.join(dest_folder, f'{target_year}-{months[target_month-1]}.png')\n",
" # im.save(dest_path, dpi=300, bbox_inches='tight')\n",
" plt.savefig(map_path, dpi=100, bbox_inches='tight')\n",
" \n",
" # Close the plot to avoid memory overload\n",
" plt.close()\n",
" map_paths.append(map_path)\n",
" # plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c623f411-962d-4db9-88cf-dae0363ecb56",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Images loaded...\n"
]
}
],
"source": [
"from IPython.display import HTML\n",
"import matplotlib.animation as animation\n",
"from PIL import Image\n",
"import matplotlib as mpl\n",
"\n",
"# Raise matplotlib's animation embed limit (in MB) so large inline animations can be displayed\n",
"mpl.rcParams['animation.embed_limit'] = 100 # in MB\n",
"\n",
"\n",
"months = [\"January\", \"February\", \"March\", \"April\", \"May\", \"June\", \"July\", \"August\", \"September\", \"October\", \"November\", \"December\"]\n",
"years = [y for y in range(1991, 2024)]\n",
"index = 'SPEI6'\n",
"\n",
"dest_folder = Path().resolve() / 'animation' / index\n",
"\n",
"map_paths = []\n",
"for target_year in years:\n",
" for target_month in range(1, 13):\n",
" map_path = os.path.join(dest_folder, f'{target_year}-{months[target_month-1]}.png')\n",
" map_paths.append(map_path)\n",
"# Open all images as PIL objects\n",
"\n",
"images = [Image.open(map_) for map_ in map_paths]\n",
"print('Images loaded...')\n",
"\n",
"fig, ax = plt.subplots(figsize=(16, 10))\n",
"ims = []\n",
"for idx, image in enumerate(images):\n",
" im = ax.imshow(image, animated=True)\n",
" \n",
" # Remove ticks and borders\n",
" ax.set_xticks([]) # Remove x-axis ticks\n",
" ax.set_yticks([]) # Remove y-axis ticks\n",
" ax.spines['top'].set_visible(False) # Remove top border\n",
" ax.spines['bottom'].set_visible(False) # Remove bottom border\n",
" ax.spines['left'].set_visible(False) # Remove left border\n",
" ax.spines['right'].set_visible(False) # Remove right border\n",
" \n",
" ims.append([im])\n",
" \n",
"\n",
"# Create the animation\n",
"ani = animation.ArtistAnimation(fig, ims, interval=50, blit=True, repeat_delay=1000)\n",
"ani.save('animation-22.gif', writer='ffmpeg', dpi=100)\n",
"plt.close(fig)\n",
"\n",
"# HTML(ani.to_jshtml())"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "45d1534d-58ca-49ab-8240-f4976bab8db9",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.9"
}
},
"nbformat": 4,
"nbformat_minor": 5
}