{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import pandas as pd\n", "import numpy as np\n", "import xarray as xr" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "path = \"/g/data/zv2/agcd/v2-0-1/precip/total/r005/01month/\"\n", "file = \"agcd_v2-0-1_precip_total_r005_monthly_2018.nc\"" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "ename": "FileNotFoundError", "evalue": "[Errno 2] No such file or directory: '/g/data/zv2/agcd/v2-0-1/precip/total/r005/01month/agcd_v2-0-1_precip_total_r005_monthly_2018.nc'", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", "File \u001b[0;32m~/miniconda3/envs/iceds/lib/python3.11/site-packages/xarray/backends/file_manager.py:211\u001b[0m, in \u001b[0;36mCachingFileManager._acquire_with_cache_info\u001b[0;34m(self, needs_lock)\u001b[0m\n\u001b[1;32m 210\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 211\u001b[0m file \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_cache\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_key\u001b[49m\u001b[43m]\u001b[49m\n\u001b[1;32m 212\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mKeyError\u001b[39;00m:\n", "File \u001b[0;32m~/miniconda3/envs/iceds/lib/python3.11/site-packages/xarray/backends/lru_cache.py:56\u001b[0m, in \u001b[0;36mLRUCache.__getitem__\u001b[0;34m(self, key)\u001b[0m\n\u001b[1;32m 55\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_lock:\n\u001b[0;32m---> 56\u001b[0m value \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_cache\u001b[49m\u001b[43m[\u001b[49m\u001b[43mkey\u001b[49m\u001b[43m]\u001b[49m\n\u001b[1;32m 57\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_cache\u001b[38;5;241m.\u001b[39mmove_to_end(key)\n", "\u001b[0;31mKeyError\u001b[0m: [, ('/g/data/zv2/agcd/v2-0-1/precip/total/r005/01month/agcd_v2-0-1_precip_total_r005_monthly_2018.nc',), 'r', (('clobber', True), ('diskless', False), ('format', 'NETCDF4'), ('persist', False)), 'b0f33c8d-d367-4f9c-91cf-679ecd4b4326']", "\nDuring handling of the above exception, another exception occurred:\n", "\u001b[0;31mFileNotFoundError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[3], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m# Open NetCDF file\u001b[39;00m\n\u001b[0;32m----> 2\u001b[0m ds \u001b[38;5;241m=\u001b[39m \u001b[43mxr\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mopen_dataset\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpath\u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[43mfile\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 4\u001b[0m \u001b[38;5;66;03m# Slice the dataset to only include data for the specific lat/lon grid\u001b[39;00m\n\u001b[1;32m 5\u001b[0m ds_sliced \u001b[38;5;241m=\u001b[39m ds\u001b[38;5;241m.\u001b[39msel(lon\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mslice\u001b[39m(\u001b[38;5;241m142\u001b[39m, \u001b[38;5;241m145\u001b[39m), lat\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mslice\u001b[39m(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m25\u001b[39m, \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m22\u001b[39m))\n", "File \u001b[0;32m~/miniconda3/envs/iceds/lib/python3.11/site-packages/xarray/backends/api.py:573\u001b[0m, in \u001b[0;36mopen_dataset\u001b[0;34m(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, inline_array, chunked_array_type, from_array_kwargs, backend_kwargs, **kwargs)\u001b[0m\n\u001b[1;32m 561\u001b[0m decoders \u001b[38;5;241m=\u001b[39m _resolve_decoders_kwargs(\n\u001b[1;32m 562\u001b[0m decode_cf,\n\u001b[1;32m 563\u001b[0m open_backend_dataset_parameters\u001b[38;5;241m=\u001b[39mbackend\u001b[38;5;241m.\u001b[39mopen_dataset_parameters,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 569\u001b[0m decode_coords\u001b[38;5;241m=\u001b[39mdecode_coords,\n\u001b[1;32m 570\u001b[0m )\n\u001b[1;32m 572\u001b[0m overwrite_encoded_chunks \u001b[38;5;241m=\u001b[39m kwargs\u001b[38;5;241m.\u001b[39mpop(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124moverwrite_encoded_chunks\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28;01mNone\u001b[39;00m)\n\u001b[0;32m--> 573\u001b[0m backend_ds \u001b[38;5;241m=\u001b[39m \u001b[43mbackend\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mopen_dataset\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 574\u001b[0m \u001b[43m \u001b[49m\u001b[43mfilename_or_obj\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 575\u001b[0m \u001b[43m \u001b[49m\u001b[43mdrop_variables\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdrop_variables\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 576\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mdecoders\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 577\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 578\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 579\u001b[0m ds \u001b[38;5;241m=\u001b[39m _dataset_from_backend_dataset(\n\u001b[1;32m 580\u001b[0m backend_ds,\n\u001b[1;32m 581\u001b[0m filename_or_obj,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 591\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs,\n\u001b[1;32m 592\u001b[0m )\n\u001b[1;32m 593\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ds\n", "File \u001b[0;32m~/miniconda3/envs/iceds/lib/python3.11/site-packages/xarray/backends/netCDF4_.py:603\u001b[0m, in \u001b[0;36mNetCDF4BackendEntrypoint.open_dataset\u001b[0;34m(self, filename_or_obj, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta, group, mode, format, clobber, diskless, persist, lock, autoclose)\u001b[0m\n\u001b[1;32m 582\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mopen_dataset\u001b[39m( \u001b[38;5;66;03m# type: ignore[override] # allow LSP violation, not supporting **kwargs\u001b[39;00m\n\u001b[1;32m 583\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 584\u001b[0m filename_or_obj: \u001b[38;5;28mstr\u001b[39m \u001b[38;5;241m|\u001b[39m os\u001b[38;5;241m.\u001b[39mPathLike[Any] \u001b[38;5;241m|\u001b[39m BufferedIOBase \u001b[38;5;241m|\u001b[39m AbstractDataStore,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 600\u001b[0m autoclose\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m,\n\u001b[1;32m 601\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Dataset:\n\u001b[1;32m 602\u001b[0m filename_or_obj \u001b[38;5;241m=\u001b[39m _normalize_path(filename_or_obj)\n\u001b[0;32m--> 603\u001b[0m store \u001b[38;5;241m=\u001b[39m \u001b[43mNetCDF4DataStore\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mopen\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 604\u001b[0m \u001b[43m \u001b[49m\u001b[43mfilename_or_obj\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 605\u001b[0m \u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 606\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mformat\u001b[39;49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mformat\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 607\u001b[0m \u001b[43m \u001b[49m\u001b[43mgroup\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 608\u001b[0m \u001b[43m \u001b[49m\u001b[43mclobber\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mclobber\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 609\u001b[0m \u001b[43m \u001b[49m\u001b[43mdiskless\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdiskless\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 610\u001b[0m \u001b[43m \u001b[49m\u001b[43mpersist\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpersist\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 611\u001b[0m \u001b[43m \u001b[49m\u001b[43mlock\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlock\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 612\u001b[0m \u001b[43m \u001b[49m\u001b[43mautoclose\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mautoclose\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 613\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 615\u001b[0m store_entrypoint \u001b[38;5;241m=\u001b[39m StoreBackendEntrypoint()\n\u001b[1;32m 616\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m close_on_error(store):\n", "File \u001b[0;32m~/miniconda3/envs/iceds/lib/python3.11/site-packages/xarray/backends/netCDF4_.py:400\u001b[0m, in \u001b[0;36mNetCDF4DataStore.open\u001b[0;34m(cls, filename, mode, format, group, clobber, diskless, persist, lock, lock_maker, autoclose)\u001b[0m\n\u001b[1;32m 394\u001b[0m kwargs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mdict\u001b[39m(\n\u001b[1;32m 395\u001b[0m clobber\u001b[38;5;241m=\u001b[39mclobber, diskless\u001b[38;5;241m=\u001b[39mdiskless, persist\u001b[38;5;241m=\u001b[39mpersist, \u001b[38;5;28mformat\u001b[39m\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mformat\u001b[39m\n\u001b[1;32m 396\u001b[0m )\n\u001b[1;32m 397\u001b[0m manager \u001b[38;5;241m=\u001b[39m CachingFileManager(\n\u001b[1;32m 398\u001b[0m netCDF4\u001b[38;5;241m.\u001b[39mDataset, filename, mode\u001b[38;5;241m=\u001b[39mmode, kwargs\u001b[38;5;241m=\u001b[39mkwargs\n\u001b[1;32m 399\u001b[0m )\n\u001b[0;32m--> 400\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mcls\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mmanager\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgroup\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgroup\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmode\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlock\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlock\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mautoclose\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mautoclose\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/miniconda3/envs/iceds/lib/python3.11/site-packages/xarray/backends/netCDF4_.py:347\u001b[0m, in \u001b[0;36mNetCDF4DataStore.__init__\u001b[0;34m(self, manager, group, mode, lock, autoclose)\u001b[0m\n\u001b[1;32m 345\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_group \u001b[38;5;241m=\u001b[39m group\n\u001b[1;32m 346\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_mode \u001b[38;5;241m=\u001b[39m mode\n\u001b[0;32m--> 347\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mformat \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mds\u001b[49m\u001b[38;5;241m.\u001b[39mdata_model\n\u001b[1;32m 348\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_filename \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mds\u001b[38;5;241m.\u001b[39mfilepath()\n\u001b[1;32m 349\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mis_remote \u001b[38;5;241m=\u001b[39m is_remote_uri(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_filename)\n", "File \u001b[0;32m~/miniconda3/envs/iceds/lib/python3.11/site-packages/xarray/backends/netCDF4_.py:409\u001b[0m, in \u001b[0;36mNetCDF4DataStore.ds\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 407\u001b[0m \u001b[38;5;129m@property\u001b[39m\n\u001b[1;32m 408\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mds\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m--> 409\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_acquire\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/miniconda3/envs/iceds/lib/python3.11/site-packages/xarray/backends/netCDF4_.py:403\u001b[0m, in \u001b[0;36mNetCDF4DataStore._acquire\u001b[0;34m(self, needs_lock)\u001b[0m\n\u001b[1;32m 402\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_acquire\u001b[39m(\u001b[38;5;28mself\u001b[39m, needs_lock\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m):\n\u001b[0;32m--> 403\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mwith\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_manager\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43macquire_context\u001b[49m\u001b[43m(\u001b[49m\u001b[43mneeds_lock\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mas\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mroot\u001b[49m\u001b[43m:\u001b[49m\n\u001b[1;32m 404\u001b[0m \u001b[43m \u001b[49m\u001b[43mds\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43m_nc4_require_group\u001b[49m\u001b[43m(\u001b[49m\u001b[43mroot\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_group\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_mode\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 405\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ds\n", "File \u001b[0;32m~/miniconda3/envs/iceds/lib/python3.11/contextlib.py:137\u001b[0m, in \u001b[0;36m_GeneratorContextManager.__enter__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 135\u001b[0m \u001b[38;5;28;01mdel\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39margs, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mkwds, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfunc\n\u001b[1;32m 136\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 137\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mnext\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mgen)\n\u001b[1;32m 138\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mStopIteration\u001b[39;00m:\n\u001b[1;32m 139\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mgenerator didn\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mt yield\u001b[39m\u001b[38;5;124m\"\u001b[39m) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n", "File \u001b[0;32m~/miniconda3/envs/iceds/lib/python3.11/site-packages/xarray/backends/file_manager.py:199\u001b[0m, in \u001b[0;36mCachingFileManager.acquire_context\u001b[0;34m(self, needs_lock)\u001b[0m\n\u001b[1;32m 196\u001b[0m \u001b[38;5;129m@contextlib\u001b[39m\u001b[38;5;241m.\u001b[39mcontextmanager\n\u001b[1;32m 197\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21macquire_context\u001b[39m(\u001b[38;5;28mself\u001b[39m, needs_lock\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m):\n\u001b[1;32m 198\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Context manager for acquiring a file.\"\"\"\u001b[39;00m\n\u001b[0;32m--> 199\u001b[0m file, cached \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_acquire_with_cache_info\u001b[49m\u001b[43m(\u001b[49m\u001b[43mneeds_lock\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 200\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 201\u001b[0m \u001b[38;5;28;01myield\u001b[39;00m file\n", "File \u001b[0;32m~/miniconda3/envs/iceds/lib/python3.11/site-packages/xarray/backends/file_manager.py:217\u001b[0m, in \u001b[0;36mCachingFileManager._acquire_with_cache_info\u001b[0;34m(self, needs_lock)\u001b[0m\n\u001b[1;32m 215\u001b[0m kwargs \u001b[38;5;241m=\u001b[39m kwargs\u001b[38;5;241m.\u001b[39mcopy()\n\u001b[1;32m 216\u001b[0m kwargs[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmode\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_mode\n\u001b[0;32m--> 217\u001b[0m file \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_opener\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_args\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 218\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_mode \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mw\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m 219\u001b[0m \u001b[38;5;66;03m# ensure file doesn't get overridden when opened again\u001b[39;00m\n\u001b[1;32m 220\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_mode \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124ma\u001b[39m\u001b[38;5;124m\"\u001b[39m\n", "File \u001b[0;32msrc/netCDF4/_netCDF4.pyx:2464\u001b[0m, in \u001b[0;36mnetCDF4._netCDF4.Dataset.__init__\u001b[0;34m()\u001b[0m\n", "File \u001b[0;32msrc/netCDF4/_netCDF4.pyx:2027\u001b[0m, in \u001b[0;36mnetCDF4._netCDF4._ensure_nc_success\u001b[0;34m()\u001b[0m\n", "\u001b[0;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: '/g/data/zv2/agcd/v2-0-1/precip/total/r005/01month/agcd_v2-0-1_precip_total_r005_monthly_2018.nc'" ] } ], "source": [ "# Open NetCDF file\n", "ds = xr.open_dataset(path+file)\n", "\n", "# Slice the dataset to only include data for the specific lat/lon grid\n", "ds_sliced = ds.sel(lon=slice(142, 145), lat=slice(-25, -22))\n", "\n", "# Convert to DataFrame\n", "df = ds_sliced['precip'].to_dataframe().reset_index()\n", "\n", "# Filter rows where latitude and longitude are integers\n", "df = df[df['lat'].apply(lambda x: x.is_integer())]\n", "df = df[df['lon'].apply(lambda x: x.is_integer())]\n", "df.reset_index(inplace=True, drop=True)\n", "\n", "# Close the xarray dataset\n", "ds.close()" ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
timelatlonprecip
02018-01-16-25.0142.03.752930
12018-01-16-25.0143.00.613281
22018-01-16-25.0144.03.609375
32018-01-16-25.0145.010.794922
42018-01-16-24.0142.07.103516
\n", "
" ], "text/plain": [ " time lat lon precip\n", "0 2018-01-16 -25.0 142.0 3.752930\n", "1 2018-01-16 -25.0 143.0 0.613281\n", "2 2018-01-16 -25.0 144.0 3.609375\n", "3 2018-01-16 -25.0 145.0 10.794922\n", "4 2018-01-16 -24.0 142.0 7.103516" ] }, "execution_count": 13, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df.head()" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "(44652, 4)" ] }, "execution_count": 8, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df.shape" ] }, { "cell_type": "code", "execution_count": 15, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "
<xarray.Dataset>\n",
       "Dimensions:  (lat: 691, lon: 886, time: 8)\n",
       "Coordinates:\n",
       "  * time     (time) datetime64[ns] 2019-01-16T12:00:00 ... 2019-08-16T12:00:00\n",
       "  * lat      (lat) float32 -44.5 -44.45 -44.4 -44.35 ... -10.1 -10.05 -10.0\n",
       "  * lon      (lon) float32 112.0 112.1 112.1 112.2 ... 156.1 156.1 156.2 156.2\n",
       "Data variables:\n",
       "    mask_70  (lat, lon) float32 ...\n",
       "    pr       (time, lat, lon) float32 ...\n",
       "Attributes: (12/45)\n",
       "    date_calibration:         2021-06-25 09:32:04Z\n",
       "    date_created:             2021-06-25 09:32:04Z\n",
       "    keywords:                 multi-week, seasonal, calibrated\n",
       "    summary:                  Data calibrated using quantile-quantile matching\n",
       "    acknowledgement:          Australian Bureau of Meteorology, Commonwealth ...\n",
       "    author:                   Bureau of Meteorology\n",
       "    ...                       ...\n",
       "    url:                      http://www.bom.gov.au/climate/\n",
       "    history:                  Fri Feb 18 12:27:26 2022: ncap2 -O -s mask_70=s...\n",
       "    latest_script_used:       make_time_av_.py\n",
       "    history_raw_model:        Mon Feb  8 06:03:44 2021: ncks -O -v pr pc19811...\n",
       "    obs_history:              Tue May 25 00:01:58 2021 : /S2PrepareData/bin/o...\n",
       "    history_regrid_model:     Thu Jun 24 22:29:35 2021 : /S2PrepareData/bin/m...
" ], "text/plain": [ "\n", "Dimensions: (lat: 691, lon: 886, time: 8)\n", "Coordinates:\n", " * time (time) datetime64[ns] 2019-01-16T12:00:00 ... 2019-08-16T12:00:00\n", " * lat (lat) float32 -44.5 -44.45 -44.4 -44.35 ... -10.1 -10.05 -10.0\n", " * lon (lon) float32 112.0 112.1 112.1 112.2 ... 156.1 156.1 156.2 156.2\n", "Data variables:\n", " mask_70 (lat, lon) float32 ...\n", " pr (time, lat, lon) float32 ...\n", "Attributes: (12/45)\n", " date_calibration: 2021-06-25 09:32:04Z\n", " date_created: 2021-06-25 09:32:04Z\n", " keywords: multi-week, seasonal, calibrated\n", " summary: Data calibrated using quantile-quantile matching\n", " acknowledgement: Australian Bureau of Meteorology, Commonwealth ...\n", " author: Bureau of Meteorology\n", " ... ...\n", " url: http://www.bom.gov.au/climate/\n", " history: Fri Feb 18 12:27:26 2022: ncap2 -O -s mask_70=s...\n", " latest_script_used: make_time_av_.py\n", " history_raw_model: Mon Feb 8 06:03:44 2021: ncks -O -v pr pc19811...\n", " obs_history: Tue May 25 00:01:58 2021 : /S2PrepareData/bin/o...\n", " history_regrid_model: Thu Jun 24 22:29:35 2021 : /S2PrepareData/bin/m..." ] }, "execution_count": 15, "metadata": {}, "output_type": "execute_result" } ], "source": [ "path = \"/g/data/ux62/access-s2/hindcast/calibrated/atmos/pr/monthly/e03/\"\n", "file = \"maq5_pr_20181231_e03.nc\"\n", "\n", "# Open NetCDF file\n", "ds = xr.open_dataset(path+file)\n", "\n", "ds" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Producing data for 0 files...\n" ] } ], "source": [ "import xarray as xr\n", "import pandas as pd\n", "from glob import glob\n", "import os\n", "\n", "# Initialize an empty DataFrame to hold all the data\n", "master_df = pd.DataFrame()\n", "\n", "# Define the path and file pattern\n", "path = \"/g/data/ux62/access-s2/hindcast/calibrated/atmos/pr/monthly/e03/\"\n", "pattern = \"*pr_2017*.nc\"\n", "\n", "# List all files that match the pattern\n", "files = glob(os.path.join(path, pattern))\n", "print(f\"Producing data for {len(files)} files...\")\n", "\n", "# Loop through the list of files and load each one\n", "for file in files:\n", " # Load the xarray dataset\n", " ds = xr.open_dataset(file)\n", " \n", " # Slice the dataset for specific lat/lon grid\n", " ds_sliced = ds.sel(lon=slice(142, 145), lat=slice(-25, -22))\n", " \n", " # Flatten the sliced data for 'pr' variable\n", " df = ds_sliced['pr'].to_dataframe().reset_index()\n", " \n", " # Filter rows where latitude and longitude are integers\n", " df = df[df['lat'].apply(lambda x: x.is_integer())]\n", " df = df[df['lon'].apply(lambda x: x.is_integer())]\n", " \n", " # Append the DataFrame to the master DataFrame\n", " master_df = pd.concat([master_df, df], ignore_index=True)\n", " \n", " # Close the xarray dataset\n", " ds.close()\n" ] }, { "cell_type": "code", "execution_count": 33, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
timelatlonpr
02017-08-16 12:00:00-25.0142.00.000000
12017-08-16 12:00:00-25.0143.00.020323
22017-08-16 12:00:00-25.0144.00.036452
32017-08-16 12:00:00-25.0145.00.004516
42017-08-16 12:00:00-24.0142.00.000000
\n", "
" ], "text/plain": [ " time lat lon pr\n", "0 2017-08-16 12:00:00 -25.0 142.0 0.000000\n", "1 2017-08-16 12:00:00 -25.0 143.0 0.020323\n", "2 2017-08-16 12:00:00 -25.0 144.0 0.036452\n", "3 2017-08-16 12:00:00 -25.0 145.0 0.004516\n", "4 2017-08-16 12:00:00 -24.0 142.0 0.000000" ] }, "execution_count": 33, "metadata": {}, "output_type": "execute_result" } ], "source": [ "master_df.head()" ] }, { "cell_type": "code", "execution_count": 34, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "(19840, 4)" ] }, "execution_count": 34, "metadata": {}, "output_type": "execute_result" } ], "source": [ "master_df.shape" ] }, { "cell_type": "code", "execution_count": 36, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "0 2017-08-16 12:00:00\n", "1 2017-08-16 12:00:00\n", "2 2017-08-16 12:00:00\n", "3 2017-08-16 12:00:00\n", "4 2017-08-16 12:00:00\n", " ... \n", "19835 2018-04-16 00:00:00\n", "19836 2018-04-16 00:00:00\n", "19837 2018-04-16 00:00:00\n", "19838 2018-04-16 00:00:00\n", "19839 2018-04-16 00:00:00\n", "Name: time, Length: 19840, dtype: datetime64[ns]" ] }, "execution_count": 36, "metadata": {}, "output_type": "execute_result" } ], "source": [ "master_df.time" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Producing data for 0 files...\n" ] }, { "ename": "KeyError", "evalue": "'time'", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[5], line 38\u001b[0m\n\u001b[1;32m 35\u001b[0m \u001b[38;5;66;03m# Close the xarray dataset\u001b[39;00m\n\u001b[1;32m 36\u001b[0m ds\u001b[38;5;241m.\u001b[39mclose()\n\u001b[0;32m---> 38\u001b[0m master_df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtime\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m pd\u001b[38;5;241m.\u001b[39mto_datetime(\u001b[43mmaster_df\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mtime\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m]\u001b[49m)\n", "File \u001b[0;32m~/miniconda3/envs/iceds/lib/python3.11/site-packages/pandas/core/frame.py:3896\u001b[0m, in \u001b[0;36mDataFrame.__getitem__\u001b[0;34m(self, key)\u001b[0m\n\u001b[1;32m 3894\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcolumns\u001b[38;5;241m.\u001b[39mnlevels \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m1\u001b[39m:\n\u001b[1;32m 3895\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_getitem_multilevel(key)\n\u001b[0;32m-> 3896\u001b[0m indexer \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcolumns\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_loc\u001b[49m\u001b[43m(\u001b[49m\u001b[43mkey\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 3897\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m is_integer(indexer):\n\u001b[1;32m 3898\u001b[0m indexer \u001b[38;5;241m=\u001b[39m [indexer]\n", "File \u001b[0;32m~/miniconda3/envs/iceds/lib/python3.11/site-packages/pandas/core/indexes/range.py:418\u001b[0m, in \u001b[0;36mRangeIndex.get_loc\u001b[0;34m(self, key)\u001b[0m\n\u001b[1;32m 416\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mKeyError\u001b[39;00m(key) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01merr\u001b[39;00m\n\u001b[1;32m 417\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(key, Hashable):\n\u001b[0;32m--> 418\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mKeyError\u001b[39;00m(key)\n\u001b[1;32m 419\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_check_indexing_error(key)\n\u001b[1;32m 420\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mKeyError\u001b[39;00m(key)\n", "\u001b[0;31mKeyError\u001b[0m: 'time'" ] } ], "source": [ "import xarray as xr\n", "import pandas as pd\n", "from glob import glob\n", "import os\n", "\n", "# Initialize an empty DataFrame to hold all the data\n", "master_df = pd.DataFrame()\n", "\n", "# Define the path and file pattern\n", "path = \"/g/data/ux62/access-s2/hindcast/calibrated/atmos/pr/monthly/e02/\"\n", "pattern = \"*pr_2017*.nc\"\n", "\n", "# List all files that match the pattern\n", "files = glob(os.path.join(path, pattern))\n", "print(f\"Producing data for {len(files)} files...\")\n", "\n", "# Loop through the list of files and load each one\n", "for file in files:\n", " # Load the xarray dataset\n", " ds = xr.open_dataset(file)\n", " \n", " # Slice the dataset for specific lat/lon grid\n", " ds_sliced = ds.sel(lon=slice(142, 145), lat=slice(-25, -22))\n", " \n", " # Flatten the sliced data for 'pr' variable\n", " df = ds_sliced['pr'].to_dataframe().reset_index()\n", " \n", " # Filter rows where latitude and longitude are integers\n", " df = df[df['lat'].apply(lambda x: x.is_integer())]\n", " df = df[df['lon'].apply(lambda x: x.is_integer())]\n", " \n", " # Append the DataFrame to the master DataFrame\n", " master_df = pd.concat([master_df, df], ignore_index=True)\n", " \n", " # Close the xarray dataset\n", " ds.close()\n", "\n", "master_df['time'] = pd.to_datetime(master_df['time'])" ] }, { "cell_type": "code", "execution_count": 38, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
timelatlonpr
02017-10-16 12:00:00-25.0142.00.054194
12017-10-16 12:00:00-25.0143.00.296452
22017-10-16 12:00:00-25.0144.00.480000
32017-10-16 12:00:00-25.0145.00.640968
42017-10-16 12:00:00-24.0142.00.224839
\n", "
" ], "text/plain": [ " time lat lon pr\n", "0 2017-10-16 12:00:00 -25.0 142.0 0.054194\n", "1 2017-10-16 12:00:00 -25.0 143.0 0.296452\n", "2 2017-10-16 12:00:00 -25.0 144.0 0.480000\n", "3 2017-10-16 12:00:00 -25.0 145.0 0.640968\n", "4 2017-10-16 12:00:00 -24.0 142.0 0.224839" ] }, "execution_count": 38, "metadata": {}, "output_type": "execute_result" } ], "source": [ "master_df.head()" ] }, { "cell_type": "code", "execution_count": 41, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "0 2017-10-16 12:00:00\n", "1 2017-10-16 12:00:00\n", "2 2017-10-16 12:00:00\n", "3 2017-10-16 12:00:00\n", "4 2017-10-16 12:00:00\n", " ... \n", "19835 2018-06-16 00:00:00\n", "19836 2018-06-16 00:00:00\n", "19837 2018-06-16 00:00:00\n", "19838 2018-06-16 00:00:00\n", "19839 2018-06-16 00:00:00\n", "Name: time, Length: 19840, dtype: datetime64[ns]" ] }, "execution_count": 41, "metadata": {}, "output_type": "execute_result" } ], "source": [ "master_df.time" ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "## Getting the ground-truth data" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "import requests\n", "import numpy as np\n", "import pandas as pd\n", "from tqdm import tqdm" ] }, { "cell_type": "code", "execution_count": 54, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "100%|██████████| 1/1 [00:08<00:00, 8.71s/it]\n" ] } ], "source": [ "import requests\n", "from tqdm import tqdm\n", "\n", "# Base URL for NetCDF files on Amazon S3\n", "BASE_URL = \"https://s3-ap-southeast-2.amazonaws.com/silo-open-data/Official/annual/\"\n", "\n", "# Variable of interest (e.g., daily_rain, max_temp, etc.)\n", "VARIABLE = \"daily_rain\"\n", "\n", "# Specify the range of years\n", "years = range(1989, 1990)\n", "\n", "# Loop over each year to download the corresponding NetCDF file\n", "for year in tqdm(years):\n", " url = f\"{BASE_URL}{VARIABLE}/{year}.{VARIABLE}.nc\"\n", " response = requests.get(url)\n", " \n", " # Check if the request was successful\n", " if response.status_code == 200:\n", " # Save the NetCDF file\n", " with open(f\"data/{year}.{VARIABLE}.nc\", \"wb\") as f:\n", " f.write(response.content)\n", " else:\n", " print(f\"Failed to download data for {year}\")" ] }, { "cell_type": "code", "execution_count": 56, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "
<xarray.Dataset>\n",
       "Dimensions:     (lat: 681, lon: 841, time: 365)\n",
       "Coordinates:\n",
       "  * lat         (lat) float64 -44.0 -43.95 -43.9 -43.85 ... -10.1 -10.05 -10.0\n",
       "  * lon         (lon) float64 112.0 112.0 112.1 112.2 ... 153.9 153.9 154.0\n",
       "  * time        (time) datetime64[ns] 1989-01-01 1989-01-02 ... 1989-12-31\n",
       "Data variables:\n",
       "    daily_rain  (time, lat, lon) float32 ...\n",
       "    crs         |S1 ...\n",
       "Attributes:\n",
       "    department:               Department of Environment and Science\n",
       "    department_short:         DES\n",
       "    copyright:                Copyright - the State of Queensland Department ...\n",
       "    site_url:                 http://www.longpaddock.qld.gov.au\n",
       "    institution:              Queensland Government, Department of Environmen...\n",
       "    raster_source:            Gridded surface was created by interpolating ob...\n",
       "    raster_source_additions:  and other suppliers (see the SILO webpage for d...\n",
       "    metadata_url:             http://qldspatial.information.qld.gov.au/catalo...\n",
       "    reference:                Jeffrey, S.J., Carter, J.O., Moodie, K.B.A. and...\n",
       "    disclaimer:               1. The user accepts all responsibility and risk...
" ], "text/plain": [ "\n", "Dimensions: (lat: 681, lon: 841, time: 365)\n", "Coordinates:\n", " * lat (lat) float64 -44.0 -43.95 -43.9 -43.85 ... -10.1 -10.05 -10.0\n", " * lon (lon) float64 112.0 112.0 112.1 112.2 ... 153.9 153.9 154.0\n", " * time (time) datetime64[ns] 1989-01-01 1989-01-02 ... 1989-12-31\n", "Data variables:\n", " daily_rain (time, lat, lon) float32 ...\n", " crs |S1 ...\n", "Attributes:\n", " department: Department of Environment and Science\n", " department_short: DES\n", " copyright: Copyright - the State of Queensland Department ...\n", " site_url: http://www.longpaddock.qld.gov.au\n", " institution: Queensland Government, Department of Environmen...\n", " raster_source: Gridded surface was created by interpolating ob...\n", " raster_source_additions: and other suppliers (see the SILO webpage for d...\n", " metadata_url: http://qldspatial.information.qld.gov.au/catalo...\n", " reference: Jeffrey, S.J., Carter, J.O., Moodie, K.B.A. and...\n", " disclaimer: 1. The user accepts all responsibility and risk..." ] }, "execution_count": 56, "metadata": {}, "output_type": "execute_result" } ], "source": [ "ds = xr.open_dataset(\"/home/156/cn1951/iceds/data/1989.daily_rain.nc\")\n", "ds" ] }, { "cell_type": "code", "execution_count": 65, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ " time lat lon daily_rain\n", "0 1989-01-01 -25.0 142.0 0.000000\n", "1 1989-01-01 -25.0 143.0 0.000000\n", "2 1989-01-01 -25.0 144.0 0.099854\n", "3 1989-01-01 -25.0 145.0 0.000000\n", "4 1989-01-01 -24.0 142.0 0.099854\n" ] } ], "source": [ "ds = xr.open_dataset(\"/home/156/cn1951/iceds/data/1989.daily_rain.nc\")\n", "\n", "# Filter data for specific latitude and longitude\n", "# Latitude between -25 and -22, and Longitude between 142 and 145\n", "filtered_data = ds.sel(lat=slice(-25, -22), lon=slice(142, 145))\n", "\n", "# Slice for latitude between -25 and -22, and longitude between 142 and 145\n", "filtered_data = ds.sel(lat=slice(-25, -22), lon=slice(142, 145))\n", "\n", "# Further filter to only keep integer latitudes and longitudes\n", "filtered_data = filtered_data.where(\n", " (filtered_data['lat'] % 1 == 0) & (filtered_data['lon'] % 1 == 0), drop=True\n", ")\n", "\n", "# Convert to DataFrame\n", "df = filtered_data.to_dataframe().reset_index()\n", "\n", "df.drop(columns=['crs'], inplace=True)\n", "df.reset_index(inplace=True, drop=True)\n", "\n", "# Display the DataFrame\n", "print(df.head())" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
timelatlondaily_rain
01989-01-01-25.0142.00.000000
11989-01-01-25.0143.00.000000
21989-01-01-25.0144.00.099854
31989-01-01-25.0145.00.000000
41989-01-01-24.0142.00.099854
\n", "
" ], "text/plain": [ " time lat lon daily_rain\n", "0 1989-01-01 -25.0 142.0 0.000000\n", "1 1989-01-01 -25.0 143.0 0.000000\n", "2 1989-01-01 -25.0 144.0 0.099854\n", "3 1989-01-01 -25.0 145.0 0.000000\n", "4 1989-01-01 -24.0 142.0 0.099854" ] }, "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "path = \"/home/156/cn1951/iceds/data/1989.daily_rain.nc\"\n", "\n", "def clean_silo(ds):\n", " ds = xr.open_dataset(path)\n", "\n", " # Filter data for specific latitude and longitude\n", " # Latitude between -25 and -22, and Longitude between 142 and 145\n", " filtered_data = ds.sel(lat=slice(-25, -22), lon=slice(142, 145))\n", "\n", " # Slice for latitude between -25 and -22, and longitude between 142 and 145\n", " filtered_data = ds.sel(lat=slice(-25, -22), lon=slice(142, 145))\n", "\n", " # Further filter to only keep integer latitudes and longitudes\n", " filtered_data = filtered_data.where(\n", " (filtered_data['lat'] % 1 == 0) & (filtered_data['lon'] % 1 == 0), drop=True\n", " )\n", "\n", " # Convert to DataFrame\n", " df = filtered_data.to_dataframe().reset_index()\n", "\n", " df.drop(columns=['crs'], inplace=True)\n", " df.reset_index(inplace=True, drop=True)\n", "\n", " return df\n", "\n", "df = clean_silo(path)\n", "df.head()" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "100%|██████████| 30/30 [07:36<00:00, 15.22s/it]\n" ] } ], "source": [ "from tqdm import tqdm\n", "import requests\n", "import os\n", "import xarray as xr\n", "import pandas as pd\n", "\n", "# Define constants\n", "BASE_URL = \"https://s3-ap-southeast-2.amazonaws.com/silo-open-data/Official/annual/\"\n", "VARIABLE = \"daily_rain\" # or whatever variable you are interested in\n", "years = range(1989, 2019) # Replace with your actual range\n", "\n", "def clean_silo(path):\n", " ds = xr.open_dataset(path)\n", " filtered_data = ds.sel(lat=slice(-25, -22), lon=slice(142, 145))\n", " filtered_data = filtered_data.where(\n", " (filtered_data['lat'] % 1 == 0) & (filtered_data['lon'] % 1 == 0), drop=True\n", " )\n", " df = filtered_data.to_dataframe().reset_index()\n", " df.drop(columns=['crs'], inplace=True)\n", " df.reset_index(inplace=True, drop=True)\n", " return df\n", "\n", "# List to store cleaned DataFrames\n", "df_list = []\n", "\n", "# Loop over each year to download the corresponding NetCDF file\n", "for year in tqdm(years):\n", " url = f\"{BASE_URL}{VARIABLE}/{year}.{VARIABLE}.nc\"\n", " response = requests.get(url)\n", " \n", " # Temporary path to save the downloaded NetCDF file\n", " temp_path = f\"{year}.{VARIABLE}.nc\"\n", " \n", " # Check if the request was successful\n", " if response.status_code == 200:\n", " # Save the NetCDF file\n", " with open(temp_path, \"wb\") as f:\n", " f.write(response.content)\n", " \n", " # Clean the data\n", " cleaned_df = clean_silo(temp_path)\n", " df_list.append(cleaned_df)\n", " \n", " # Remove the temporary NetCDF file to save space\n", " os.remove(temp_path)\n", " else:\n", " print(f\"Failed to download data for {year}\")\n", "\n", "# Concatenate all the cleaned DataFrames\n", "final_df = pd.concat(df_list, ignore_index=True)" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
timelatlondaily_rain
01989-01-01-25.0142.00.000000
11989-01-01-25.0143.00.000000
21989-01-01-25.0144.00.099854
31989-01-01-25.0145.00.000000
41989-01-01-24.0142.00.099854
...............
1753072018-12-31-23.0145.00.000000
1753082018-12-31-22.0142.00.000000
1753092018-12-31-22.0143.00.000000
1753102018-12-31-22.0144.00.000000
1753112018-12-31-22.0145.00.000000
\n", "

175312 rows × 4 columns

\n", "
" ], "text/plain": [ " time lat lon daily_rain\n", "0 1989-01-01 -25.0 142.0 0.000000\n", "1 1989-01-01 -25.0 143.0 0.000000\n", "2 1989-01-01 -25.0 144.0 0.099854\n", "3 1989-01-01 -25.0 145.0 0.000000\n", "4 1989-01-01 -24.0 142.0 0.099854\n", "... ... ... ... ...\n", "175307 2018-12-31 -23.0 145.0 0.000000\n", "175308 2018-12-31 -22.0 142.0 0.000000\n", "175309 2018-12-31 -22.0 143.0 0.000000\n", "175310 2018-12-31 -22.0 144.0 0.000000\n", "175311 2018-12-31 -22.0 145.0 0.000000\n", "\n", "[175312 rows x 4 columns]" ] }, "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], "source": [ "final_df" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [], "source": [ "final_df.to_parquet('data/silo.parquet')" ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "## GloSea5" ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "2024-02-06 09:48:29,889 INFO Welcome to the CDS\n", "2024-02-06 09:48:29,891 INFO Sending request to https://cds.climate.copernicus.eu/api/v2/resources/seasonal-monthly-single-levels\n", "2024-02-06 09:48:30,263 INFO Request is queued\n", "2024-02-06 10:39:09,544 INFO Request is running\n", "2024-02-06 11:09:29,317 INFO Request is completed\n", "2024-02-06 11:09:29,318 INFO Downloading https://download-0004-clone.copernicus-climate.eu/cache-compute-0004/cache/data8/adaptor.mars.external-1707176342.1964428-14567-18-9bd4bd56-8eb1-4627-9c42-422594814c6b.grib to download.grib (5.1M)\n", "2024-02-06 11:09:33,462 INFO Download rate 1.2M/s \n" ] }, { "data": { "text/plain": [ "Result(content_length=5374080,content_type=application/x-grib,location=https://download-0004-clone.copernicus-climate.eu/cache-compute-0004/cache/data8/adaptor.mars.external-1707176342.1964428-14567-18-9bd4bd56-8eb1-4627-9c42-422594814c6b.grib)" ] }, "execution_count": 1, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# For evaluation\n", "import cdsapi\n", "\n", "c = cdsapi.Client()\n", "\n", "c.retrieve(\n", " 'seasonal-monthly-single-levels',\n", " {\n", " 'format': 'grib',\n", " 'originating_centre': 'ukmo',\n", " 'system': '15',\n", " 'variable': [\n", " '10m_wind_speed', '2m_temperature', 'evaporation',\n", " 'mean_sea_level_pressure', 'total_cloud_cover', 'total_precipitation',\n", " ],\n", " 'product_type': [\n", " 'ensemble_mean', 'monthly_mean',\n", " ],\n", " 'year': [\n", " '2020', '2021',\n", " ],\n", " 'month': [\n", " '01', '02', '03', \n", " '04', '05', '06', \n", " '07', '08', '09', \n", " '10', '11', '12',\n", " ],\n", " 'leadtime_month': [\n", " '1', '2', '3',\n", " '4', '5', '6',\n", " ],\n", " 'area': [\n", " -17, 143, -20,\n", " 146,\n", " ],\n", " },\n", " 'download.grib')" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "2024-01-30 09:38:54,519 INFO Welcome to the CDS\n", "2024-01-30 09:38:54,521 INFO Sending request to https://cds.climate.copernicus.eu/api/v2/resources/seasonal-monthly-single-levels\n", "2024-01-30 09:38:54,965 INFO Request is queued\n", "2024-01-30 09:41:49,396 INFO Request is running\n", "2024-01-30 10:31:46,811 INFO Request is completed\n", "2024-01-30 10:31:46,812 INFO Downloading https://download-0011-clone.copernicus-climate.eu/cache-compute-0011/cache/data6/adaptor.mars.external-1706568091.827076-31839-1-6aa81b6c-3e92-43d3-a1a6-77d0056f8ea4.grib to download.grib (55.1M)\n", "2024-01-30 10:32:16,473 INFO Download rate 1.9M/s \n" ] }, { "data": { "text/plain": [ "Result(content_length=57818880,content_type=application/x-grib,location=https://download-0011-clone.copernicus-climate.eu/cache-compute-0011/cache/data6/adaptor.mars.external-1706568091.827076-31839-1-6aa81b6c-3e92-43d3-a1a6-77d0056f8ea4.grib)" ] }, "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "import cdsapi\n", "\n", "c = cdsapi.Client()\n", "\n", "c.retrieve(\n", " 'seasonal-monthly-single-levels',\n", " {\n", " 'format': 'grib',\n", " 'originating_centre': 'ukmo',\n", " 'variable': [\n", " '10m_wind_speed', '2m_temperature', 'evaporation',\n", " 'mean_sea_level_pressure', 'total_cloud_cover', 'total_precipitation',\n", " ],\n", " 'product_type': 'monthly_mean',\n", " 'leadtime_month': [\n", " '1', '2', '3',\n", " '4', '5', '6',\n", " ],\n", " 'month': [\n", " '01', '02', '05',\n", " '06', '07', '08',\n", " '09', '10', '11',\n", " '12',\n", " ],\n", " 'area': [\n", " -17, 143, -20,\n", " 146,\n", " ],\n", " 'system': '15',\n", " 'year': [\n", " '1993', '1994', '1995',\n", " '1996', '1997', '1998',\n", " '1999', '2000', '2001',\n", " '2002', '2003', '2004',\n", " '2005', '2006', '2007',\n", " '2008', '2009', '2010',\n", " '2011', '2012', '2013',\n", " '2014', '2015', '2016',\n", " ],\n", " },\n", " 'download.grib')" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Ignoring index file 'data/raw/glosea_1.grib.923a8.idx' incompatible with GRIB file\n", "Ignoring index file 'data/raw/glosea_1.grib.923a8.idx' incompatible with GRIB file\n", "Ignoring index file 'data/raw/glosea_2.grib.923a8.idx' incompatible with GRIB file\n", "Ignoring index file 'data/raw/glosea_2.grib.923a8.idx' incompatible with GRIB file\n", "Ignoring index file 'data/raw/glosea_2.grib.923a8.idx' incompatible with GRIB file\n", "Ignoring index file 'data/raw/glosea_2.grib.923a8.idx' incompatible with GRIB file\n", "Ignoring index file 'data/raw/glosea_2.grib.923a8.idx' incompatible with GRIB file\n", "Ignoring index file 'data/raw/glosea_2.grib.923a8.idx' incompatible with GRIB file\n", "Ignoring index file 'data/raw/glosea_2.grib.923a8.idx' incompatible with GRIB file\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "6896\n" ] } ], "source": [ "import cfgrib\n", "import xarray as xr\n", "import pandas as pd\n", "\n", "path = \"data/raw/glosea.grib\"\n", "\n", "# Open the GRIB file as an xarray dataset\n", "ds_1 = cfgrib.open_datasets(\"data/raw/glosea_1.grib\")\n", "ds_2 = cfgrib.open_datasets(\"data/raw/glosea_2.grib\")\n", "\n", "glosea_xarray_1, glosea_xarray_2 = ds_1[0], ds_2[0]\n", "\n", "# Convert to pandas DataFrame\n", "df_1 = glosea_xarray_1.to_dataframe().reset_index()\n", "df_2 = glosea_xarray_2.to_dataframe().reset_index()\n", "\n", "# Concatenate the two DataFrames\n", "df = pd.concat([df_1, df_2], ignore_index=True)\n", "\n", "print(len(df.valid_time.unique()))" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "array([142., 143., 144., 145., 150., 151., 152., 153.])" ] }, "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df.longitude.unique()" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
timelatlonpr
01993-02-06-29.0150.00.0
11993-02-06-29.0151.00.0
21993-02-06-29.0152.00.0
31993-02-06-29.0153.00.0
41993-02-06-28.0150.00.0
...............
2206672017-06-26-23.0145.00.0
2206682017-06-26-22.0142.00.0
2206692017-06-26-22.0143.00.0
2206702017-06-26-22.0144.00.0
2206712017-06-26-22.0145.00.0
\n", "

220672 rows × 4 columns

\n", "
" ], "text/plain": [ " time lat lon pr\n", "0 1993-02-06 -29.0 150.0 0.0\n", "1 1993-02-06 -29.0 151.0 0.0\n", "2 1993-02-06 -29.0 152.0 0.0\n", "3 1993-02-06 -29.0 153.0 0.0\n", "4 1993-02-06 -28.0 150.0 0.0\n", "... ... ... ... ...\n", "220667 2017-06-26 -23.0 145.0 0.0\n", "220668 2017-06-26 -22.0 142.0 0.0\n", "220669 2017-06-26 -22.0 143.0 0.0\n", "220670 2017-06-26 -22.0 144.0 0.0\n", "220671 2017-06-26 -22.0 145.0 0.0\n", "\n", "[220672 rows x 4 columns]" ] }, "execution_count": 8, "metadata": {}, "output_type": "execute_result" } ], "source": [ "glosea_df = df.copy()\n", "\n", "# Keep only longitude between 142 and 145\n", "# glosea_df = glosea_df[glosea_df['longitude'].between(142, 145)]\n", "\n", "# # Keep only latitude between -25 and -22\n", "# glosea_df = glosea_df[glosea_df['latitude'].between(-25, -22)]\n", "\n", "# Convert tprate NaN to 0.0\n", "glosea_df['tprate'] = glosea_df['tprate'].fillna(0.0)\n", "\n", "# Keep only step = 28, 60, 120, 150, 182 days\n", "#glosea_df = glosea_df[glosea_df['step'].isin(['28 days'])]#, '60 days', '120 days', '150 days', '182 days'])]\n", "# Aggregate across steps\n", "glosea_df = glosea_df.groupby(['valid_time', 'latitude', 'longitude'])['tprate'].mean().reset_index()\n", "\n", "# Keep between 1981 and 2019\n", "glosea_df = glosea_df[glosea_df['valid_time'].between('1981-01-01', '2018-12-31')]\n", "\n", "# Rename columns\n", "glosea_df.rename(columns={'tprate': 'pr', 'latitude': 'lat', 'longitude': 'lon', 'valid_time': 'time'}, inplace=True)\n", "\n", "glosea_df.reset_index(inplace=True, drop=True)\n", "glosea_df = glosea_df.groupby(['time', 'lat', 'lon']).agg({'pr': 'sum'}).reset_index()\n", "glosea_df" ] }, { "cell_type": "code", "execution_count": 64, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "956" ] }, "execution_count": 64, "metadata": {}, "output_type": "execute_result" } ], "source": [ "len(glosea_df.time.unique())" ] }, { "cell_type": "code", "execution_count": 52, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "\n", "['1993-01-09 00:00:00', '1993-01-17 00:00:00', '1993-01-25 00:00:00',\n", " '1993-02-01 00:00:00', '1993-04-09 00:00:00', '1993-04-17 00:00:00',\n", " '1993-04-25 00:00:00', '1993-05-01 00:00:00', '1993-05-09 00:00:00',\n", " '1993-05-17 00:00:00',\n", " ...\n", " '2016-09-25 00:00:00', '2016-10-01 00:00:00', '2016-10-09 00:00:00',\n", " '2016-10-17 00:00:00', '2016-10-25 00:00:00', '2016-11-01 00:00:00',\n", " '2016-11-09 00:00:00', '2016-11-17 00:00:00', '2016-11-25 00:00:00',\n", " '2016-12-01 00:00:00']\n", "Length: 956, dtype: datetime64[ns]" ] }, "execution_count": 52, "metadata": {}, "output_type": "execute_result" } ], "source": [ "glosea_df.time.unique()" ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "## ECMWF" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "2024-02-06 11:16:52,612 INFO Welcome to the CDS\n", "2024-02-06 11:16:52,613 INFO Sending request to https://cds.climate.copernicus.eu/api/v2/resources/seasonal-monthly-single-levels\n", "2024-02-06 11:16:53,160 INFO Request is queued\n", "2024-02-06 11:41:29,373 INFO Request is running\n", "2024-02-06 11:59:41,787 INFO Request is completed\n", "2024-02-06 11:59:41,789 INFO Downloading https://download-0012-clone.copernicus-climate.eu/cache-compute-0012/cache/data1/adaptor.mars.external-1707180039.2851062-16433-12-3ec044af-cb3e-47cb-9f97-cdb3e4d29e04.grib to download.grib (10.3M)\n", "2024-02-06 12:00:05,301 INFO Download rate 447.9K/s \n" ] }, { "data": { "text/plain": [ "Result(content_length=10782720,content_type=application/x-grib,location=https://download-0012-clone.copernicus-climate.eu/cache-compute-0012/cache/data1/adaptor.mars.external-1707180039.2851062-16433-12-3ec044af-cb3e-47cb-9f97-cdb3e4d29e04.grib)" ] }, "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# For evaluation\n", "import cdsapi\n", "\n", "c = cdsapi.Client()\n", "\n", "c.retrieve(\n", " 'seasonal-monthly-single-levels',\n", " {\n", " 'format': 'grib',\n", " 'originating_centre': 'ecmwf',\n", " 'system': '51',\n", " 'variable': [\n", " '10m_wind_speed', '2m_temperature', 'evaporation',\n", " 'mean_sea_level_pressure', 'total_cloud_cover', 'total_precipitation',\n", " ],\n", " 'product_type': [\n", " 'ensemble_mean', 'monthly_mean',\n", " ],\n", " 'year': [\n", " '2020', '2021',\n", " ],\n", " 'month': [\n", " '01', '02', '03',\n", " '04', '05', '06',\n", " '07', '08', '09',\n", " '10', '11', '12',\n", " ],\n", " 'leadtime_month': [\n", " '1', '2', '3',\n", " '4', '5', '6',\n", " ],\n", " 'area': [\n", " -17, 143, -20,\n", " 146,\n", " ],\n", " },\n", " 'download.grib')" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "2024-01-30 10:41:49,922 INFO Welcome to the CDS\n", "2024-01-30 10:41:49,922 INFO Sending request to https://cds.climate.copernicus.eu/api/v2/resources/seasonal-monthly-single-levels\n", "2024-01-30 10:41:50,258 INFO Request is queued\n", "2024-01-30 10:43:08,678 INFO Request is running\n", "2024-01-30 11:10:29,302 INFO Request is completed\n", "2024-01-30 11:10:29,307 INFO Downloading https://download-0007-clone.copernicus-climate.eu/cache-compute-0007/cache/data0/adaptor.mars.external-1706571771.78567-27117-2-cadfcf36-0be1-4376-960f-e5d7a9a23648.grib to download.grib (23.1M)\n", "2024-01-30 11:10:37,956 INFO Download rate 2.7M/s \n" ] }, { "data": { "text/plain": [ "Result(content_length=24261120,content_type=application/x-grib,location=https://download-0007-clone.copernicus-climate.eu/cache-compute-0007/cache/data0/adaptor.mars.external-1706571771.78567-27117-2-cadfcf36-0be1-4376-960f-e5d7a9a23648.grib)" ] }, "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ "import cdsapi\n", "\n", "c = cdsapi.Client()\n", "\n", "c.retrieve(\n", " 'seasonal-monthly-single-levels',\n", " {\n", " 'format': 'grib',\n", " 'originating_centre': 'ecmwf',\n", " 'system': '51',\n", " 'variable': [\n", " '2m_temperature', 'total_precipitation',\n", " ],\n", " 'product_type': [\n", " 'ensemble_mean', 'monthly_mean',\n", " ],\n", " 'year': [\n", " '1991', '1992', '1993',\n", " '1994', '1995', '1996',\n", " '1997', '1998', '1999',\n", " '2000', '2001', '2002',\n", " '2003', '2004', '2005',\n", " '2006', '2007', '2008',\n", " '2009', '2010', '2011',\n", " '2012', '2013', '2014',\n", " '2015', '2016', '2017',\n", " ],\n", " 'month': [\n", " '01', '02', '03',\n", " '04', '05', '06',\n", " '07', '08', '09',\n", " '10', '11', '12',\n", " ],\n", " 'leadtime_month': [\n", " '1', '2', '3',\n", " '4', '5', '6',\n", " ],\n", " 'area': [\n", " -17, 143, -20,\n", " 146,\n", " ],\n", " },\n", " 'download.grib')" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
timesteplatitudelongitudenumbersurfacevalid_timet2mtprate
02017-01-0128 days-17.0143.000.02017-01-29NaN0.0
12017-01-0128 days-17.0144.000.02017-01-29NaN0.0
22017-01-0128 days-17.0145.000.02017-01-29NaN0.0
32017-01-0128 days-17.0146.000.02017-01-29NaN0.0
42017-01-0128 days-18.0143.000.02017-01-29NaN0.0
..............................
40272017-12-01184 days-19.0146.000.02018-06-03NaN0.0
40282017-12-01184 days-20.0143.000.02018-06-03NaN0.0
40292017-12-01184 days-20.0144.000.02018-06-03NaN0.0
40302017-12-01184 days-20.0145.000.02018-06-03NaN0.0
40312017-12-01184 days-20.0146.000.02018-06-03NaN0.0
\n", "

4032 rows × 9 columns

\n", "
" ], "text/plain": [ " time step latitude longitude number surface valid_time \\\n", "0 2017-01-01 28 days -17.0 143.0 0 0.0 2017-01-29 \n", "1 2017-01-01 28 days -17.0 144.0 0 0.0 2017-01-29 \n", "2 2017-01-01 28 days -17.0 145.0 0 0.0 2017-01-29 \n", "3 2017-01-01 28 days -17.0 146.0 0 0.0 2017-01-29 \n", "4 2017-01-01 28 days -18.0 143.0 0 0.0 2017-01-29 \n", "... ... ... ... ... ... ... ... \n", "4027 2017-12-01 184 days -19.0 146.0 0 0.0 2018-06-03 \n", "4028 2017-12-01 184 days -20.0 143.0 0 0.0 2018-06-03 \n", "4029 2017-12-01 184 days -20.0 144.0 0 0.0 2018-06-03 \n", "4030 2017-12-01 184 days -20.0 145.0 0 0.0 2018-06-03 \n", "4031 2017-12-01 184 days -20.0 146.0 0 0.0 2018-06-03 \n", "\n", " t2m tprate \n", "0 NaN 0.0 \n", "1 NaN 0.0 \n", "2 NaN 0.0 \n", "3 NaN 0.0 \n", "4 NaN 0.0 \n", "... ... ... \n", "4027 NaN 0.0 \n", "4028 NaN 0.0 \n", "4029 NaN 0.0 \n", "4030 NaN 0.0 \n", "4031 NaN 0.0 \n", "\n", "[4032 rows x 9 columns]" ] }, "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# Open up the data/raw/ecmwf_3.grib file as a pandas df\n", "import cfgrib\n", "\n", "path = \"data/raw/ecmwf_3.grib\"\n", "\n", "# Open the GRIB file as an xarray dataset\n", "ds_1 = cfgrib.open_datasets(\"data/raw/ecmwf_1.grib\")\n", "ds_2 = cfgrib.open_datasets(\"data/raw/ecmwf_2.grib\")\n", "ds_3 = cfgrib.open_datasets(\"data/raw/ecmwf_3.grib\")\n", "\n", "grib_xarray_1 = ds_1[0]\n", "grib_xarray_2 = ds_2[0]\n", "grib_xarray_3 = ds_3[0]\n", "\n", "# Convert to pandas DataFrame\n", "df_1 = grib_xarray_1.to_dataframe().reset_index()\n", "df_1['tprate'] = df_1['tprate'].fillna(0.0)\n", "df_2 = grib_xarray_2.to_dataframe().reset_index()\n", "df_2['tprate'] = df_2['tprate'].fillna(0.0)\n", "df_3 = grib_xarray_3.to_dataframe().reset_index()\n", "df_3['tprate'] = df_3['tprate'].fillna(0.0)\n", "\n", "df_3" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
numbertimesteplatitudelongitudesurfacevalid_timetprate
001981-01-0128 days-15.0138.00.01981-01-29NaN
101981-01-0128 days-15.0139.00.01981-01-29NaN
201981-01-0128 days-15.0140.00.01981-01-29NaN
301981-01-0128 days-15.0141.00.01981-01-29NaN
401981-01-0128 days-15.0142.00.01981-01-29NaN
...........................
156684235502022-10-01184 days-29.0150.00.02023-04-03NaN
156684236502022-10-01184 days-29.0151.00.02023-04-03NaN
156684237502022-10-01184 days-29.0152.00.02023-04-03NaN
156684238502022-10-01184 days-29.0153.00.02023-04-03NaN
156684239502022-10-01184 days-29.0154.00.02023-04-03NaN
\n", "

156684240 rows × 8 columns

\n", "
" ], "text/plain": [ " number time step latitude longitude surface \\\n", "0 0 1981-01-01 28 days -15.0 138.0 0.0 \n", "1 0 1981-01-01 28 days -15.0 139.0 0.0 \n", "2 0 1981-01-01 28 days -15.0 140.0 0.0 \n", "3 0 1981-01-01 28 days -15.0 141.0 0.0 \n", "4 0 1981-01-01 28 days -15.0 142.0 0.0 \n", "... ... ... ... ... ... ... \n", "156684235 50 2022-10-01 184 days -29.0 150.0 0.0 \n", "156684236 50 2022-10-01 184 days -29.0 151.0 0.0 \n", "156684237 50 2022-10-01 184 days -29.0 152.0 0.0 \n", "156684238 50 2022-10-01 184 days -29.0 153.0 0.0 \n", "156684239 50 2022-10-01 184 days -29.0 154.0 0.0 \n", "\n", " valid_time tprate \n", "0 1981-01-29 NaN \n", "1 1981-01-29 NaN \n", "2 1981-01-29 NaN \n", "3 1981-01-29 NaN \n", "4 1981-01-29 NaN \n", "... ... ... \n", "156684235 2023-04-03 NaN \n", "156684236 2023-04-03 NaN \n", "156684237 2023-04-03 NaN \n", "156684238 2023-04-03 NaN \n", "156684239 2023-04-03 NaN \n", "\n", "[156684240 rows x 8 columns]" ] }, "execution_count": 5, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df_1" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
timesteplatitudelongitudenumbersurfacevalid_timet2mtprate
02017-01-0128 days-26.0150.000.02017-01-29NaNNaN
12017-01-0128 days-26.0151.000.02017-01-29NaNNaN
22017-01-0128 days-26.0152.000.02017-01-29NaNNaN
32017-01-0128 days-26.0153.000.02017-01-29NaNNaN
42017-01-0128 days-27.0150.000.02017-01-29NaNNaN
..............................
40272017-12-01184 days-28.0153.000.02018-06-03NaNNaN
40282017-12-01184 days-29.0150.000.02018-06-03NaNNaN
40292017-12-01184 days-29.0151.000.02018-06-03NaNNaN
40302017-12-01184 days-29.0152.000.02018-06-03NaNNaN
40312017-12-01184 days-29.0153.000.02018-06-03NaNNaN
\n", "

4032 rows × 9 columns

\n", "
" ], "text/plain": [ " time step latitude longitude number surface valid_time \\\n", "0 2017-01-01 28 days -26.0 150.0 0 0.0 2017-01-29 \n", "1 2017-01-01 28 days -26.0 151.0 0 0.0 2017-01-29 \n", "2 2017-01-01 28 days -26.0 152.0 0 0.0 2017-01-29 \n", "3 2017-01-01 28 days -26.0 153.0 0 0.0 2017-01-29 \n", "4 2017-01-01 28 days -27.0 150.0 0 0.0 2017-01-29 \n", "... ... ... ... ... ... ... ... \n", "4027 2017-12-01 184 days -28.0 153.0 0 0.0 2018-06-03 \n", "4028 2017-12-01 184 days -29.0 150.0 0 0.0 2018-06-03 \n", "4029 2017-12-01 184 days -29.0 151.0 0 0.0 2018-06-03 \n", "4030 2017-12-01 184 days -29.0 152.0 0 0.0 2018-06-03 \n", "4031 2017-12-01 184 days -29.0 153.0 0 0.0 2018-06-03 \n", "\n", " t2m tprate \n", "0 NaN NaN \n", "1 NaN NaN \n", "2 NaN NaN \n", "3 NaN NaN \n", "4 NaN NaN \n", "... ... ... \n", "4027 NaN NaN \n", "4028 NaN NaN \n", "4029 NaN NaN \n", "4030 NaN NaN \n", "4031 NaN NaN \n", "\n", "[4032 rows x 9 columns]" ] }, "execution_count": 6, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df_2" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
timelatlonpr
01981-01-29-29.0138.00.0
11981-01-29-29.0139.00.0
21981-01-29-29.0140.00.0
31981-01-29-29.0141.00.0
41981-01-29-29.0142.00.0
...............
6272952018-12-31-15.0150.00.0
6272962018-12-31-15.0151.00.0
6272972018-12-31-15.0152.00.0
6272982018-12-31-15.0153.00.0
6272992018-12-31-15.0154.00.0
\n", "

627300 rows × 4 columns

\n", "
" ], "text/plain": [ " time lat lon pr\n", "0 1981-01-29 -29.0 138.0 0.0\n", "1 1981-01-29 -29.0 139.0 0.0\n", "2 1981-01-29 -29.0 140.0 0.0\n", "3 1981-01-29 -29.0 141.0 0.0\n", "4 1981-01-29 -29.0 142.0 0.0\n", "... ... ... ... ...\n", "627295 2018-12-31 -15.0 150.0 0.0\n", "627296 2018-12-31 -15.0 151.0 0.0\n", "627297 2018-12-31 -15.0 152.0 0.0\n", "627298 2018-12-31 -15.0 153.0 0.0\n", "627299 2018-12-31 -15.0 154.0 0.0\n", "\n", "[627300 rows x 4 columns]" ] }, "execution_count": 9, "metadata": {}, "output_type": "execute_result" } ], "source": [ "import pandas as pd\n", "\n", "# Read in the processed parquet\n", "df = pd.read_parquet(\"data/processed/ecmwf.parquet\")\n", "\n", "df" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Print the number of unique time values" ] } ], "metadata": { "kernelspec": { "display_name": "iceds", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.5" }, "orig_nbformat": 4 }, "nbformat": 4, "nbformat_minor": 2 }