From 53179e4bca0cba6b6b26d843f735943df35e6c83 Mon Sep 17 00:00:00 2001 From: Drew Camron Date: Mon, 17 Aug 2020 12:48:28 -0600 Subject: [PATCH 1/2] Get things back to baseline Main attempt at getting python-training back to building on Travis before making any larger changes. Primarily fix broken TDS URLs and fix syntax for building on MetPy 0.12.2 (preparing changes for 1.0). Took the chance to also update style, cell organizations, and siphon workflows for clarity. Update certain printouts and formatting. Plenty of work to do from here but should get us running on Travis again for now. --- .gitignore | 1 + environment.yml | 5 +- .../gallery/500hPa_Vorticity_Advection.ipynb | 42 +++--- .../850hPa_Temperature_Advection.ipynb | 37 +++-- pages/gallery/Ageostrophic_Wind_Example.ipynb | 72 ++++++--- pages/gallery/HILO_Symbol_Plot.ipynb | 57 ++++--- pages/gallery/MSLP_temp_winds.ipynb | 32 ++-- pages/gallery/Smoothing_Contours.ipynb | 40 +++-- pages/gallery/Upperair_Obs.ipynb | 18 ++- .../gallery/Wind_Shear_Vectors_Example.ipynb | 45 +++--- pages/gallery/declarative_500_hPa.ipynb | 26 ++-- pages/gallery/miller_composite.ipynb | 142 +++++++++--------- pages/gallery/xarray_500hPa_map.ipynb | 39 +++-- .../workshop/MetPy_Advanced/QG Analysis.ipynb | 83 +++++----- .../MetPy_Case_Study/MetPy Case Study.ipynb | 54 ++++--- .../solutions/abs_vort_500.py | 1 - .../MetPy_Case_Study/solutions/markers.py | 1 - .../solutions/temp_adv_map_850.py | 1 - .../MetPy_Case_Study/solutions/winds_300.py | 1 - .../Satellite_Data/Satellite Animations.ipynb | 14 +- run_notebooks.py | 31 ++-- 21 files changed, 419 insertions(+), 323 deletions(-) diff --git a/.gitignore b/.gitignore index 12d46c05..49c0429c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ .DS_Store .doit.db.db *.swp +.vscode/ .idea/* .ipynb_checkpoints/ diff --git a/environment.yml b/environment.yml index 29b73790..8755c12c 100644 --- a/environment.yml +++ b/environment.yml @@ -9,7 +9,7 @@ - jupyter - metpy - siphon - - pandas + - pandas<1.1.0 - pip - xarray - ipywidgets @@ -17,5 +17,4 @@ - boto3 - botocore - jupyterlab - - pip: - - python-awips + - python-awips diff --git a/pages/gallery/500hPa_Vorticity_Advection.ipynb b/pages/gallery/500hPa_Vorticity_Advection.ipynb index c64681ea..4a144180 100644 --- a/pages/gallery/500hPa_Vorticity_Advection.ipynb +++ b/pages/gallery/500hPa_Vorticity_Advection.ipynb @@ -37,13 +37,14 @@ "import cartopy.crs as ccrs\n", "import cartopy.feature as cfeature\n", "import matplotlib.gridspec as gridspec\n", - "import matplotlib.pylab as plt\n", + "import matplotlib.pyplot as plt\n", "import metpy.calc as mpcalc\n", - "from metpy.units import units\n", - "from netCDF4 import num2date\n", "import numpy as np\n", "import scipy.ndimage as ndimage\n", - "from siphon.ncss import NCSS" + "\n", + "from metpy.units import units\n", + "from netCDF4 import num2date\n", + "from siphon.catalog import TDSCatalog" ] }, { @@ -62,25 +63,32 @@ "metadata": {}, "outputs": [], "source": [ - "# Open the example netCDF data\n", - "ncss = NCSS('https://www.ncei.noaa.gov/thredds/ncss/grid/namanl/'\n", - " '201604/20160416/namanl_218_20160416_1800_000.grb')\n", - "now = datetime.utcnow()\n", + "dt = datetime(2016, 4, 16, 18)\n", + "\n", + "# Assemble our URL to the THREDDS Data Server catalog,\n", + "# and access our desired dataset within via NCSS\n", + "base_url = 'https://www.ncei.noaa.gov/thredds/catalog/model-namanl-old/'\n", + "cat = TDSCatalog(f'{base_url}{dt:%Y%m}/{dt:%Y%m%d}/catalog.xml')\n", + "ncss = 
cat.datasets[f'namanl_218_{dt:%Y%m%d}_{dt:%H}00_000.grb'].subset()\n", "\n", "# Query for Latest GFS Run\n", - "hgt = ncss.query().time(datetime(2016, 4, 16, 18)).accept('netcdf')\n", - "hgt.variables('Geopotential_height_isobaric', 'u-component_of_wind_isobaric',\n", - " 'v-component_of_wind_isobaric').add_lonlat()\n", + "query = ncss.query()\n", "\n", - "# Actually getting the data\n", - "ds = ncss.get_data(hgt)\n", + "query.time(dt)\n", + "query.accept('netcdf')\n", + "query.variables('Geopotential_height_isobaric',\n", + " 'u-component_of_wind_isobaric',\n", + " 'v-component_of_wind_isobaric')\n", + "query.add_lonlat()\n", + "\n", + "# Obtain our queried data\n", + "ds = ncss.get_data(query)\n", "\n", "lon = ds.variables['lon'][:]\n", "lat = ds.variables['lat'][:]\n", "\n", "times = ds.variables[ds.variables['Geopotential_height_isobaric'].dimensions[0]]\n", - "vtime = num2date(times[:], units=times.units)\n", - "\n", + "vtime = num2date(times[:].squeeze(), units=times.units)\n", "\n", "lev_500 = np.where(ds.variables['isobaric'][:] == 500)[0][0]\n", "\n", @@ -153,7 +161,7 @@ "# Plot Titles\n", "plt.title(r'500-hPa Heights (m), AVOR$*10^5$ ($s^{-1}$), AVOR Adv$*10^8$ ($s^{-2}$)',\n", " loc='left')\n", - "plt.title('VALID: {}'.format(vtime[0]), loc='right')\n", + "plt.title(f'VALID: {vtime}', loc='right')\n", "\n", "# Plot Background\n", "ax.set_extent([235., 290., 20., 58.], ccrs.PlateCarree())\n", @@ -210,7 +218,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/pages/gallery/850hPa_Temperature_Advection.ipynb b/pages/gallery/850hPa_Temperature_Advection.ipynb index be34ade9..0be3625c 100644 --- a/pages/gallery/850hPa_Temperature_Advection.ipynb +++ b/pages/gallery/850hPa_Temperature_Advection.ipynb @@ -39,11 +39,12 @@ "import matplotlib.gridspec as gridspec\n", "import matplotlib.pyplot as plt\n", "import metpy.calc as mpcalc\n", - "from metpy.units import units\n", - "from netCDF4 import num2date\n", "import numpy as np\n", "import scipy.ndimage as ndimage\n", - "from siphon.ncss import NCSS" + "\n", + "from cftime import num2pydate\n", + "from metpy.units import units\n", + "from siphon.catalog import TDSCatalog" ] }, { @@ -88,19 +89,26 @@ "metadata": {}, "outputs": [], "source": [ - "base_url = 'https://www.ncei.noaa.gov/thredds/ncss/grid/gfs-g4-anl-files/'\n", "dt = datetime(2017, 4, 5, 12)\n", - "ncss = NCSS('{}{dt:%Y%m}/{dt:%Y%m%d}/gfsanl_4_{dt:%Y%m%d}_'\n", - " '{dt:%H}00_000.grb2'.format(base_url, dt=dt))\n", "\n", - "# Create lat/lon box for location you want to get data for\n", - "query = ncss.query().time(dt)\n", + "# Assemble our URL to the THREDDS Data Server catalog,\n", + "# and access our desired dataset within via NCSS\n", + "base_url = 'https://www.ncei.noaa.gov/thredds/model-gfs-g4-anl-files-old/'\n", + "cat = TDSCatalog(f'{base_url}{dt:%Y%m}/{dt:%Y%m%d}/catalog.xml')\n", + "ncss = cat.datasets[f'gfsanl_4_{dt:%Y%m%d}_{dt:%H}00_000.grb2'].subset()\n", + "\n", + "# Create NCSS query for our desired time, region, and data variables\n", + "query = ncss.query()\n", + "\n", + "query.time(dt)\n", "query.lonlat_box(north=65, south=15, east=310, west=220)\n", "query.accept('netcdf')\n", + "query.variables('Geopotential_height_isobaric',\n", + " 'Temperature_isobaric',\n", + " 'u-component_of_wind_isobaric',\n", + " 'v-component_of_wind_isobaric')\n", "\n", - "# Request data for vorticity\n", - "query.variables('Geopotential_height_isobaric', 
'Temperature_isobaric',\n", - " 'u-component_of_wind_isobaric', 'v-component_of_wind_isobaric')\n", + "# Obtain the queried data\n", "data = ncss.get_data(query)\n", "\n", "# Pull out variables you want to use\n", @@ -121,7 +129,7 @@ "v_wind = units.Quantity(v_wind_var[:].squeeze(), v_wind_var.units)\n", "\n", "# Convert number of hours since the reference time into an actual date\n", - "time = num2date(time_var[:].squeeze(), time_var.units)\n", + "time = num2pydate(time_var[:].squeeze(), time_var.units)\n", "\n", "lev_850 = np.where(data.variables['isobaric'][:] == 850*100)[0][0]\n", "hght_850 = hght[lev_850]\n", @@ -131,6 +139,7 @@ "\n", "# Combine 1D latitude and longitudes into a 2D grid of locations\n", "lon_2d, lat_2d = np.meshgrid(lon, lat)\n", + "\n", "# Gridshift for barbs\n", "lon_2d[lon_2d > 180] = lon_2d[lon_2d > 180] - 360" ] @@ -194,7 +203,7 @@ "\n", "# Add the map and set the extent\n", "ax = plt.subplot(gs[0], projection=plotcrs)\n", - "plt.title('850mb Temperature Advection for {0:%d %B %Y %H:%MZ}'.format(time), fontsize=16)\n", + "plt.title(f'850mb Temperature Advection for {time:%d %B %Y %H:%MZ}', fontsize=16)\n", "ax.set_extent([235., 290., 20., 55.])\n", "\n", "# Add state/country boundaries to plot\n", @@ -251,7 +260,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/pages/gallery/Ageostrophic_Wind_Example.ipynb b/pages/gallery/Ageostrophic_Wind_Example.ipynb index f86e49f6..f41035f9 100644 --- a/pages/gallery/Ageostrophic_Wind_Example.ipynb +++ b/pages/gallery/Ageostrophic_Wind_Example.ipynb @@ -37,17 +37,18 @@ "metadata": {}, "outputs": [], "source": [ - "from datetime import datetime\n", + "from datetime import datetime, timedelta\n", "\n", "import cartopy.crs as ccrs\n", "import cartopy.feature as cfeature\n", "import matplotlib.pyplot as plt\n", "import metpy.calc as mpcalc\n", - "from metpy.units import units\n", - "from netCDF4 import num2date\n", "import numpy as np\n", "import scipy.ndimage as ndimage\n", - "from siphon.ncss import NCSS" + "\n", + "from cftime import num2pydate\n", + "from metpy.units import units\n", + "from siphon.catalog import TDSCatalog" ] }, { @@ -65,23 +66,35 @@ "metadata": {}, "outputs": [], "source": [ - "# Create NCSS object to access the NetcdfSubset\n", - "base_url = 'https://www.ncei.noaa.gov/thredds/ncss/grid/gfs-g4-anl-files/'\n", "dt = datetime(2016, 8, 22, 18)\n", - "ncss = NCSS('{}{dt:%Y%m}/{dt:%Y%m%d}/gfsanl_4_{dt:%Y%m%d}_'\n", - " '{dt:%H}00_003.grb2'.format(base_url, dt=dt))\n", + "forecast_hour = 3\n", + "h = timedelta(hours=forecast_hour)\n", + "\n", + "# Assemble our URL to the THREDDS Data Server catalog,\n", + "# and access our desired dataset within via NCSS\n", + "base_url = 'https://www.ncei.noaa.gov/thredds/model-gfs-g4-anl-files-old/'\n", + "cat = TDSCatalog(f'{base_url}{dt:%Y%m}/{dt:%Y%m%d}/catalog.xml')\n", + "ncss = cat.datasets[f'gfsanl_4_{dt:%Y%m%d}_{dt:%H}00_00{forecast_hour}.grb2'].subset()\n", "\n", - "# Create lat/lon box for location you want to get data for\n", + "# Create NCSS query for our desired time, region, and data variables\n", "query = ncss.query()\n", + " \n", "query.lonlat_box(north=50, south=30, east=-80, west=-115)\n", - "query.time(datetime(2016, 8, 22, 21))\n", - "\n", - "# Request data for geopotential height\n", - "query.variables('Geopotential_height_isobaric', 'u-component_of_wind_isobaric',\n", + "query.time(dt + h)\n", + "query.variables('Geopotential_height_isobaric',\n", 
+ " 'u-component_of_wind_isobaric',\n", " 'v-component_of_wind_isobaric')\n", "query.vertical_level(100000)\n", - "data = ncss.get_data(query)\n", - "\n", + " \n", + "data = ncss.get_data(query)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "# Pull out variables you want to use\n", "height_var = data.variables['Geopotential_height_isobaric']\n", "u_wind_var = data.variables['u-component_of_wind_isobaric']\n", @@ -105,8 +118,15 @@ "v_wind = v_wind_var[0, 0, :, :].squeeze() * units('m/s')\n", "\n", "# Convert number of hours since the reference time into an actual date\n", - "time = num2date(time_var[:].squeeze(), time_var.units)\n", - "\n", + "time = num2pydate(time_var[:].squeeze(), time_var.units)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "# Combine 1D latitude and longitudes into a 2D grid of locations\n", "lon_2d, lat_2d = np.meshgrid(lon, lat)\n", "\n", @@ -127,15 +147,22 @@ "\n", "# Calculate ageostrophic wind components\n", "ageo_wind_u = u_wind - geo_wind_u\n", - "ageo_wind_v = v_wind - geo_wind_v\n", - "\n", + "ageo_wind_v = v_wind - geo_wind_v" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "# Create new figure\n", "fig = plt.figure(figsize=(15, 10), facecolor='black')\n", "\n", "# Add the map and set the extent\n", "ax = plt.axes(projection=ccrs.PlateCarree())\n", "ax.set_extent([-105., -93., 35., 43.])\n", - "ax.background_patch.set_fill(False)\n", + "ax.patch.set_fill(False)\n", "\n", "# Add state boundaries to plot\n", "ax.add_feature(cfeature.STATES, edgecolor='white', linewidth=2)\n", @@ -171,8 +198,7 @@ "\n", "# Add a title to the plot\n", "plt.title('1000mb Geopotential Heights(m), Wind(blue), Geostrophic Wind(purple), and \\n'\n", - " 'Ageostrophic Wind(green) for {0:%d %B %Y %H:%MZ}'.format(time),\n", - " color='white', size=14)" + " f'Ageostrophic Wind(green) for {time:%d %B %Y %H:%MZ}', color='white', size=14)" ] } ], @@ -197,7 +223,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/pages/gallery/HILO_Symbol_Plot.ipynb b/pages/gallery/HILO_Symbol_Plot.ipynb index 263c3264..b66623bb 100644 --- a/pages/gallery/HILO_Symbol_Plot.ipynb +++ b/pages/gallery/HILO_Symbol_Plot.ipynb @@ -35,11 +35,12 @@ "import cartopy.crs as ccrs\n", "import cartopy.feature as cfeature\n", "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "\n", "from metpy.units import units\n", "from netCDF4 import num2date\n", - "import numpy as np\n", "from scipy.ndimage import gaussian_filter\n", - "from siphon.ncss import NCSS" + "from siphon.catalog import TDSCatalog" ] }, { @@ -59,9 +60,8 @@ }, "outputs": [], "source": [ - "\n", "def plot_maxmin_points(lon, lat, data, extrema, nsize, symbol, color='k',\n", - " plotValue=True, transform=None):\n", + " plotValue=True, transform=None, ax=None):\n", " \"\"\"\n", " This function will find and plot relative maximum and minimum for a 2D grid. 
The function\n", " can be used to plot an H for maximum values (e.g., High pressure) and an L for minimum\n", @@ -76,11 +76,16 @@ " symbol = String to be placed at location of max/min value\n", " color = String matplotlib colorname to plot the symbol (and numerica value, if plotted)\n", " plot_value = Boolean (True/False) of whether to plot the numeric value of max/min point\n", - " The max/min symbol will be plotted on the current axes within the bounding frame\n", - " (e.g., clip_on=True)\n", + " ax = axes object to plot onto, defaults to current axes\n", + " The max/min symbol will be plotted only within the bounding frame\n", + " (i.e., clip_on=True, clip_box=ax.bbox)\n", " \"\"\"\n", + " import matplotlib.pyplot as plt\n", " from scipy.ndimage.filters import maximum_filter, minimum_filter\n", "\n", + " if ax is None:\n", + " ax = plt.gca()\n", + " \n", " if (extrema == 'max'):\n", " data_ext = maximum_filter(data, nsize, mode='nearest')\n", " elif (extrema == 'min'):\n", @@ -92,12 +97,12 @@ "\n", " for i in range(len(mxy)):\n", " ax.text(lon[mxy[i], mxx[i]], lat[mxy[i], mxx[i]], symbol, color=color, size=24,\n", - " clip_on=True, horizontalalignment='center', verticalalignment='center',\n", - " transform=transform)\n", + " clip_on=True, clip_box=ax.bbox, horizontalalignment='center', verticalalignment='center',\n", + " transform=transform)\n", " ax.text(lon[mxy[i], mxx[i]], lat[mxy[i], mxx[i]],\n", - " '\\n' + str(np.int(data[mxy[i], mxx[i]])),\n", - " color=color, size=12, clip_on=True, fontweight='bold',\n", - " horizontalalignment='center', verticalalignment='top', transform=transform)" + " '\\n' + str(np.int(data[mxy[i], mxx[i]])),\n", + " color=color, size=12, clip_on=True, clip_box=ax.bbox, fontweight='bold',\n", + " horizontalalignment='center', verticalalignment='top', transform=transform)" ] }, { @@ -116,13 +121,25 @@ "metadata": {}, "outputs": [], "source": [ - "dattim = datetime(1999, 1, 3, 0)\n", + "# Specify our date/time of product desired\n", + "dt = datetime(1999, 1, 3, 0)\n", "\n", - "ncss = NCSS('https://www.ncei.noaa.gov/thredds/ncss/grid/narr-a-files/{0:%Y%m}/{0:%Y%m%d}/'\n", - " 'narr-a_221_{0:%Y%m%d}_{0:%H}00_000.grb'.format(dattim))\n", + "# Assemble our URL to the NCEI THREDDS Data Server catalog,\n", + "# and access our desired dataset within via NCSS\n", + "base_url = 'https://www.ncei.noaa.gov/thredds/model-narr-a-files/'\n", + "cat = TDSCatalog(f'{base_url}{dt:%Y%m}/{dt:%Y%m%d}/catalog.xml')\n", + "ncss = cat.datasets[f'narr-a_221_{dt:%Y%m%d}_{dt:%H}00_000.grb'].subset()\n", + "\n", + "# Create a NCSS query to add specifications to\n", "query = ncss.query()\n", - "query.all_times().variables('Pressure_reduced_to_MSL_msl',\n", - " 'Geopotential_height_isobaric').add_lonlat().accept('netcdf')\n", + "\n", + "query.all_times()\n", + "query.add_lonlat()\n", + "query.accept('netcdf')\n", + "query.variables('Pressure_reduced_to_MSL_msl',\n", + " 'Geopotential_height_isobaric')\n", + "\n", + "# Obtain the data we want to query for\n", "data = ncss.get_data(query)" ] }, @@ -151,7 +168,7 @@ "\n", "# Grab valid time and get into datetime format\n", "time = data['time2']\n", - "vtime = num2date(time[:], units=time.units)\n", + "vtime = num2date(time[:].squeeze(), units=time.units)\n", "\n", "# Grab MSLP and smooth, use MetPy Units module for conversion\n", "emsl_var = data.variables['Pressure_reduced_to_MSL_msl']\n", @@ -238,12 +255,12 @@ "plt.clabel(cs2, **kw_clabels)\n", "\n", "# Use definition to plot H/L symbols\n", - "plot_maxmin_points(lons, lats, mslp, 'max', 
50, symbol='H', color='b', transform=dataproj)\n", + "plot_maxmin_points(lons, lats, mslp, 'max', 50, symbol='H', color='b', transform=dataproj)\n", "plot_maxmin_points(lons, lats, mslp, 'min', 25, symbol='L', color='r', transform=dataproj)\n", "\n", "# Put on some titles\n", "plt.title('MSLP (hPa) with Highs and Lows, 1000-500 hPa Thickness (m)', loc='left')\n", - "plt.title('VALID: {}'.format(vtime[0]), loc='right')" + "plt.title(f'VALID: {vtime}', loc='right')" ] } ], @@ -268,7 +285,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/pages/gallery/MSLP_temp_winds.ipynb b/pages/gallery/MSLP_temp_winds.ipynb index f77861d9..3b223e40 100644 --- a/pages/gallery/MSLP_temp_winds.ipynb +++ b/pages/gallery/MSLP_temp_winds.ipynb @@ -34,11 +34,12 @@ "import cartopy.crs as ccrs\n", "import cartopy.feature as cfeature\n", "import matplotlib.pyplot as plt\n", - "from metpy.units import units\n", - "from netCDF4 import num2date\n", "import numpy as np\n", "import scipy.ndimage as ndimage\n", - "from siphon.ncss import NCSS" + "\n", + "from cftime import num2pydate\n", + "from metpy.units import units\n", + "from siphon.catalog import TDSCatalog" ] }, { @@ -83,21 +84,26 @@ "metadata": {}, "outputs": [], "source": [ - "base_url = 'https://www.ncei.noaa.gov/thredds/ncss/grid/gfs-g4-anl-files/'\n", + "# Specify our date/time of product desired\n", "dt = datetime(2018, 1, 4, 12)\n", - "ncss = NCSS('{}{dt:%Y%m}/{dt:%Y%m%d}/gfsanl_4_{dt:%Y%m%d}'\n", - " '_{dt:%H}00_000.grb2'.format(base_url, dt=dt))\n", "\n", - "# Create lat/lon box for location you want to get data for\n", - "query = ncss.query().time(dt)\n", + "# Construct the URL for our THREDDS Data Server Catalog,\n", + "# and access our desired dataset within via NCSS\n", + "base_url = 'https://www.ncei.noaa.gov/thredds/model-gfs-g4-anl-files-old/'\n", + "cat = TDSCatalog(f'{base_url}{dt:%Y%m}/{dt:%Y%m%d}/catalog.xml')\n", + "ncss = cat.datasets[f'gfsanl_4_{dt:%Y%m%d}_{dt:%H}00_000.grb2'].subset()\n", + "\n", + "# Create our NCSS query with desired specifications\n", + "query = ncss.query()\n", + "query.time(dt)\n", "query.lonlat_box(north=65, south=15, east=310, west=220)\n", "query.accept('netcdf')\n", - "\n", - "# Request data for model \"surface\" data\n", "query.variables('Pressure_reduced_to_MSL_msl',\n", " 'Apparent_temperature_height_above_ground',\n", " 'u-component_of_wind_height_above_ground',\n", " 'v-component_of_wind_height_above_ground')\n", + "\n", + "# Obtain the data we've queried for\n", "data = ncss.get_data(query)" ] }, @@ -140,7 +146,7 @@ "v_wind.ito('kt')\n", "\n", "# Convert number of hours since the reference time into an actual date\n", - "time = num2date(time_var[:].squeeze(), time_var.units)\n", + "time = num2pydate(time_var[:].squeeze(), time_var.units)\n", "\n", "lev_10m = np.where(data.variables['height_above_ground3'][:] == 10)[0][0]\n", "u_wind_10m = u_wind[lev_10m]\n", @@ -183,7 +189,7 @@ "# Add the map and set the extent\n", "ax = plt.subplot(111, projection=plotcrs)\n", "plt.title('GFS Analysis MSLP, 2m Temperature (F), Wind Barbs (kt)'\n", - " ' {0:%d %B %Y %H:%MZ}'.format(time), fontsize=16)\n", + " f' {time:%d %B %Y %H:%MZ}', fontsize=16)\n", "ax.set_extent([235., 290., 20., 55.])\n", "\n", "# Add state boundaries to plot\n", @@ -240,7 +246,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git 
a/pages/gallery/Smoothing_Contours.ipynb b/pages/gallery/Smoothing_Contours.ipynb index cc20e0b6..14820f8b 100644 --- a/pages/gallery/Smoothing_Contours.ipynb +++ b/pages/gallery/Smoothing_Contours.ipynb @@ -39,11 +39,12 @@ "import cartopy.feature as cfeature\n", "import matplotlib.pyplot as plt\n", "import metpy.calc as mpcalc\n", - "from metpy.units import units\n", - "from netCDF4 import num2date\n", "import numpy as np\n", "import scipy.ndimage as ndimage\n", - "from siphon.ncss import NCSS" + "\n", + "from metpy.units import units\n", + "from netCDF4 import num2date\n", + "from siphon.catalog import TDSCatalog" ] }, { @@ -62,18 +63,25 @@ "metadata": {}, "outputs": [], "source": [ + "# Specify our date/time of product desired\n", "dt = datetime(2016, 4, 16, 18)\n", - "base_url = 'https://www.ncei.noaa.gov/thredds/ncss/grid/namanl/'\n", - "ncss = NCSS('{}{dt:%Y%m}/{dt:%Y%m%d}/namanl_218_{dt:%Y%m%d}_'\n", - " '{dt:%H}00_000.grb'.format(base_url, dt=dt))\n", - "\n", - "# Data Query\n", - "hgt = ncss.query().time(dt)\n", - "hgt.variables('Geopotential_height_isobaric', 'u-component_of_wind_isobaric',\n", - " 'v-component_of_wind_isobaric').add_lonlat()\n", "\n", - "# Actually getting the data\n", - "data = ncss.get_data(hgt)" + "# Construct the URL for our THREDDS Data Server Catalog,\n", + "# and access our desired dataset within via NCSS\n", + "base_url = 'https://www.ncei.noaa.gov/thredds/model-namanl-old/'\n", + "cat = TDSCatalog(f'{base_url}{dt:%Y%m}/{dt:%Y%m%d}/catalog.xml')\n", + "ncss = cat.datasets[f'namanl_218_{dt:%Y%m%d}_{dt:%H}00_000.grb'].subset()\n", + "\n", + "# Create our NCSS query with desired specifications\n", + "query = ncss.query()\n", + "query.time(dt)\n", + "query.add_lonlat()\n", + "query.variables('Geopotential_height_isobaric',\n", + " 'u-component_of_wind_isobaric',\n", + " 'v-component_of_wind_isobaric')\n", + "\n", + "# Obtain the data we've queried for\n", + "data = ncss.get_data(query)" ] }, { @@ -112,7 +120,7 @@ "\n", "# Create more useable times for output\n", "times = data.variables[dtime]\n", - "vtimes = num2date(times[:], times.units)\n", + "vtimes = num2date(times[:].squeeze(), times.units)\n", "\n", "# Pull out the 500 hPa Heights\n", "hght = units.meter * data.variables['Geopotential_height_isobaric'][:].squeeze()\n", @@ -190,7 +198,7 @@ "# Add some titles to make the plot readable by someone else\n", "plt.title('500-hPa Geo Heights (m; black), Smoothed 500-hPa Geo. 
Heights (m; red)',\n", " loc='left')\n", - "plt.title('VALID: {}'.format(vtimes[0]), loc='right')\n", + "plt.title(f'VALID: {vtimes}', loc='right')\n", "\n", "# Set GAREA and add map features\n", "ax.set_extent([-125., -67., 22., 52.], ccrs.PlateCarree())\n", @@ -235,7 +243,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/pages/gallery/Upperair_Obs.ipynb b/pages/gallery/Upperair_Obs.ipynb index 3b2a5a2d..cb767aa5 100644 --- a/pages/gallery/Upperair_Obs.ipynb +++ b/pages/gallery/Upperair_Obs.ipynb @@ -26,18 +26,20 @@ "metadata": {}, "outputs": [], "source": [ - "from datetime import datetime, timedelta\n", "import urllib.request\n", "\n", + "from datetime import datetime, timedelta\n", + "\n", "import cartopy.crs as ccrs\n", "import cartopy.feature as cfeature\n", "import matplotlib.pyplot as plt\n", "import metpy.calc as mpcalc\n", + "import numpy as np\n", + "import xarray as xr\n", + "\n", "from metpy.plots import StationPlot\n", "from metpy.units import units\n", - "import numpy as np\n", - "from siphon.simplewebservice.iastate import IAStateUpperAir\n", - "import xarray as xr" + "from siphon.simplewebservice.iastate import IAStateUpperAir" ] }, { @@ -435,7 +437,7 @@ "clabels = plt.clabel(cs, fmt='%d', colors='white', inline_spacing=5, use_clabeltext=True)\n", "\n", "# Contour labels with black boxes and white text\n", - "for t in clabels:\n", + "for t in cs.labelTexts:\n", " t.set_bbox({'facecolor': 'black', 'pad': 4})\n", " t.set_fontweight('heavy')\n", "\n", @@ -449,7 +451,7 @@ " c.set_dashes([(0, (5.0, 3.0))])\n", "\n", "# Contour labels with black boxes and white text\n", - "for t in clabels:\n", + "for t in cs.labelTexts:\n", " t.set_bbox({'facecolor': 'black', 'pad': 4})\n", " t.set_fontweight('heavy')\n", "\n", @@ -466,7 +468,7 @@ "# Add titles\n", "plt.title('Upper-air Observations at {}-hPa Analysis Heights/Temperature'.format(level),\n", " loc='left')\n", - "plt.title('Valid: {}'.format(date), loc='right');" + "plt.title(f'Valid: {date}', loc='right');" ] } ], @@ -491,7 +493,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/pages/gallery/Wind_Shear_Vectors_Example.ipynb b/pages/gallery/Wind_Shear_Vectors_Example.ipynb index 0e6be925..848cc2ca 100644 --- a/pages/gallery/Wind_Shear_Vectors_Example.ipynb +++ b/pages/gallery/Wind_Shear_Vectors_Example.ipynb @@ -38,12 +38,12 @@ "import cartopy.crs as ccrs\n", "import cartopy.feature as cfeature\n", "import matplotlib.pyplot as plt\n", - "from metpy.units import units\n", - "from netCDF4 import num2date\n", "import numpy as np\n", "import scipy.ndimage as ndimage\n", - "from siphon.catalog import TDSCatalog\n", - "from siphon.ncss import NCSS" + "\n", + "from cftime import num2pydate\n", + "from metpy.units import units\n", + "from siphon.catalog import TDSCatalog" ] }, { @@ -88,13 +88,9 @@ "best_gfs = TDSCatalog('http://thredds.ucar.edu/thredds/catalog/grib/'\n", " 'NCEP/GFS/Global_0p5deg/catalog.xml')\n", "\n", - "# Pull out the dataset you want to use and look at the access URLs\n", - "best_ds = list(best_gfs.datasets.values())[1]\n", - "print(best_ds.access_urls)\n", - "\n", - "# Create NCSS object to access the NetcdfSubset\n", - "ncss = NCSS(best_ds.access_urls['NetcdfSubset'])\n", - "print(best_ds.access_urls['NetcdfSubset'])" + "# Interface with the Best GFS Half Degree Forecast Time Series dataset\n", + "# 
via NCSS directly\n", + "ncss = best_gfs.datasets['Best GFS Half Degree Forecast Time Series'].subset()" ] }, { @@ -112,13 +108,14 @@ "metadata": {}, "outputs": [], "source": [ - "# Create lat/lon box for location you want to get data for\n", + "# Create our NCSS query with desired specifications\n", "query = ncss.query()\n", - "query.lonlat_box(north=50, south=30, east=-80, west=-115).time(datetime.utcnow())\n", + "query.lonlat_box(north=50, south=30, east=-80, west=-115)\n", + "query.time(datetime.utcnow())\n", "query.accept('netcdf4')\n", - "\n", - "# Request data for MSLP\n", "query.variables('MSLP_Eta_model_reduction_msl')\n", + "\n", + "# Obtain the data we've queried for as a netcdf4-python dataset\n", "data = ncss.get_data(query)\n", "\n", "# Pull out the variables you want to use\n", @@ -146,9 +143,12 @@ "# Request data for 850-hPa winds\n", "# First clear the query's variables from previous query for MSLP\n", "query.var = set()\n", - "query.variables('u-component_of_wind_isobaric', 'v-component_of_wind_isobaric')\n", "query.vertical_level(85000)\n", + "query.variables('u-component_of_wind_isobaric',\n", + " 'v-component_of_wind_isobaric')\n", + "\n", "data = ncss.get_data(query)\n", + "\n", "u_wind_var850 = data.variables['u-component_of_wind_isobaric']\n", "v_wind_var850 = data.variables['v-component_of_wind_isobaric']" ] @@ -171,9 +171,12 @@ "# Request data for 500-hPa winds\n", "# First clear the query's variables from previous query for 850-hPa data\n", "query.var = set()\n", - "query.variables('u-component_of_wind_isobaric', 'v-component_of_wind_isobaric')\n", "query.vertical_level(50000)\n", + "query.variables('u-component_of_wind_isobaric',\n", + " 'v-component_of_wind_isobaric')\n", + "\n", "data = ncss.get_data(query)\n", + "\n", "u_wind_var500 = data.variables['u-component_of_wind_isobaric']\n", "v_wind_var500 = data.variables['v-component_of_wind_isobaric']" ] @@ -203,7 +206,7 @@ "v_wind500 = v_wind_var500[:].squeeze()\n", "\n", "# Convert number of hours since the reference time into an actual date\n", - "time = num2date(time_var[:].squeeze(), time_var.units)\n", + "time = num2pydate(time_var[:].squeeze(), time_var.units)\n", "\n", "# Combine 1D latitude and longitudes into a 2D grid of locations\n", "lon_2d, lat_2d = np.meshgrid(lon, lat)\n", @@ -233,7 +236,7 @@ "# Add the map and set the extent\n", "ax = plt.axes(projection=ccrs.PlateCarree())\n", "ax.set_extent([-108., -91., 33., 45.])\n", - "ax.background_patch.set_fill(False)\n", + "ax.patch.set_fill(False)\n", "\n", "# Add state boundaries to plot\n", "ax.add_feature(cfeature.STATES, edgecolor='white', linewidth=2)\n", @@ -273,7 +276,7 @@ "\n", "# Add a title to the plot\n", "plt.title('MSLP, 850mb Wind, 500mb Wind, and 500-850mb Vertical Wind Shear \\n'\n", - " ' for {0:%d %B %Y %H:%MZ}'.format(time), color='white', size=14);" + " f' for {time:%d %B %Y %H:%MZ}', color='white', size=14);" ] } ], @@ -298,7 +301,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/pages/gallery/declarative_500_hPa.ipynb b/pages/gallery/declarative_500_hPa.ipynb index 6934588a..893fec8f 100644 --- a/pages/gallery/declarative_500_hPa.ipynb +++ b/pages/gallery/declarative_500_hPa.ipynb @@ -19,17 +19,25 @@ "source": [ "from datetime import datetime\n", "\n", - "from metpy.plots import declarative\n", - "from metpy.units import units\n", "import xarray as xr\n", "\n", + "from metpy.plots import declarative\n", + "from metpy.units 
import units" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "# Set date for desired dataset\n", - "date = datetime(2012, 10, 31, 12)\n", + "dt = datetime(2012, 10, 31, 12)\n", "\n", "# Open dataset from NCEI\n", "ds = xr.open_dataset('https://www.ncei.noaa.gov/thredds/dodsC/'\n", - " f'gfs-g4-anl-files/{date:%Y%m}/{date:%Y%m%d}/'\n", - " f'gfsanl_4_{date:%Y%m%d}_{date:%H}00_000.grb2'\n", + " f'model-gfs-g4-anl-files-old/{dt:%Y%m}/{dt:%Y%m%d}/'\n", + " f'gfsanl_4_{dt:%Y%m%d}_{dt:%H}00_000.grb2'\n", " ).metpy.parse_cf()\n", "\n", "# Subset Data to be just over CONUS\n", @@ -92,7 +100,7 @@ "# Set Contour Plot Parameters\n", "contour = declarative.ContourPlot()\n", "contour.data = ds_us\n", - "contour.time = date\n", + "contour.time = dt\n", "contour.field = 'Geopotential_height_isobaric'\n", "contour.level = 500 * units.hPa\n", "contour.linecolor = 'black'\n", @@ -104,7 +112,7 @@ "# Set Color-filled Contour Parameters\n", "cfill = declarative.FilledContourPlot()\n", "cfill.data = ds_us\n", - "cfill.time = date\n", + "cfill.time = dt\n", "cfill.field = 'Absolute_vorticity_isobaric'\n", "cfill.level = 500 * units.hPa\n", "cfill.contours = clevs_500_avor\n", @@ -119,7 +127,7 @@ "panel.projection = 'lcc'\n", "panel.layers = ['coastline', 'borders', 'states']\n", "panel.title = (f'{cfill.level} GFS Geopotential Heights'\n", - " f'and Absolute Vorticity at {date}')\n", + " f'and Absolute Vorticity at {dt}')\n", "panel.plots = [cfill, contour]\n", "\n", "# Bringing it all together\n", @@ -147,7 +155,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/pages/gallery/miller_composite.ipynb b/pages/gallery/miller_composite.ipynb index fbeb8233..7c56c65d 100644 --- a/pages/gallery/miller_composite.ipynb +++ b/pages/gallery/miller_composite.ipynb @@ -19,7 +19,7 @@ "metadata": {}, "outputs": [], "source": [ - "from datetime import datetime\n", + "from datetime import datetime, timedelta\n", "\n", "import cartopy.crs as ccrs\n", "import cartopy.feature as cfeature\n", @@ -27,12 +27,13 @@ "import matplotlib.patches as mpatches\n", "import matplotlib.pyplot as plt\n", "import metpy.calc as mpcalc\n", - "from metpy.units import units\n", - "from netCDF4 import num2date\n", "import numpy as np\n", "import numpy.ma as ma\n", + "\n", + "from metpy.units import units\n", + "from netCDF4 import num2date\n", "from scipy.ndimage import gaussian_filter\n", - "from siphon.ncss import NCSS" + "from siphon.catalog import TDSCatalog" ] }, { @@ -43,8 +44,7 @@ "source": [ "**Get the data**\n", "\n", - "This example will use data from the North American Mesoscale Model Analysis\n", - "(https://nomads.ncdc.gov/) for 12 UTC 27 April 2011." + "This example will use data from the [North American Mesoscale](https://www.ncdc.noaa.gov/data-access/model-data/model-datasets/north-american-mesoscale-forecast-system-nam) Model Analysis for 18 UTC 27 April 2011." 
] }, { @@ -53,54 +53,57 @@ "metadata": {}, "outputs": [], "source": [ - "base_url = 'https://www.ncei.noaa.gov/thredds/ncss/grid/namanl/'\n", - "dt = datetime(2011, 4, 27)\n", - "ncss = NCSS('{}{dt:%Y%m}/{dt:%Y%m%d}/namanl_218_{dt:%Y%m%d}_'\n", - " '1800_000.grb'.format(base_url, dt=dt))\n", - "\n", - "# Query for required variables\n", - "gfsdata = ncss.query().all_times()\n", - "gfsdata.variables('Geopotential_height_isobaric',\n", - " 'u-component_of_wind_isobaric',\n", - " 'v-component_of_wind_isobaric',\n", - " 'Temperature_isobaric',\n", - " 'Relative_humidity_isobaric',\n", - " 'Best_4_layer_lifted_index_layer_between_two_pressure_'\n", - " 'difference_from_ground_layer',\n", - " 'Absolute_vorticity_isobaric',\n", - " 'Pressure_reduced_to_MSL_msl',\n", - " 'Dew_point_temperature_height_above_ground'\n", - " ).add_lonlat()\n", - "\n", - "# Set the lat/lon box for the data to pull in.\n", - "gfsdata.lonlat_box(-135, -60, 15, 65)\n", - "\n", - "# Actually getting the data\n", - "data = ncss.get_data(gfsdata)\n", + "# Specify our date/time of product desired\n", + "dt = datetime(2011, 4, 27, 18)\n", + "\n", + "# Construct the URL for our THREDDS Data Server Catalog,\n", + "# and access our desired dataset within via NCSS\n", + "base_url = 'https://www.ncei.noaa.gov/thredds/model-namanl-old/'\n", + "cat = TDSCatalog(f'{base_url}{dt:%Y%m}/{dt:%Y%m%d}/catalog.xml')\n", + "ncss = cat.datasets[f'namanl_218_{dt:%Y%m%d}_{dt:%H}00_000.grb'].subset()\n", + "\n", + "# Create our NCSS query with desired specifications\n", + "query = ncss.query()\n", + "query.all_times()\n", + "query.add_lonlat()\n", + "query.lonlat_box(-135, -60, 15, 65)\n", + "query.variables('Geopotential_height_isobaric',\n", + " 'u-component_of_wind_isobaric',\n", + " 'v-component_of_wind_isobaric',\n", + " 'Temperature_isobaric',\n", + " 'Relative_humidity_isobaric',\n", + " 'Best_4_layer_lifted_index_layer_between_two_pressure_'\n", + " 'difference_from_ground_layer',\n", + " 'Absolute_vorticity_isobaric',\n", + " 'Pressure_reduced_to_MSL_msl',\n", + " 'Dew_point_temperature_height_above_ground')\n", + "\n", + "# Obtain the data we've queried for\n", + "data_18z = ncss.get_data(query)\n", "\n", "# Assign variable names to collected data\n", - "dtime = data.variables['Geopotential_height_isobaric'].dimensions[0]\n", - "dlev = data.variables['Geopotential_height_isobaric'].dimensions[1]\n", - "lat = data.variables['lat'][:]\n", - "lon = data.variables['lon'][:]\n", - "lev = units.hPa * data.variables[dlev][:]\n", - "times = data.variables[dtime]\n", - "vtimes = num2date(times[:], times.units)\n", - "temps = data.variables['Temperature_isobaric']\n", + "dtime = data_18z.variables['Geopotential_height_isobaric'].dimensions[0]\n", + "dlev = data_18z.variables['Geopotential_height_isobaric'].dimensions[1]\n", + "lat = data_18z.variables['lat'][:]\n", + "lon = data_18z.variables['lon'][:]\n", + "lev = units.hPa * data_18z.variables[dlev][:]\n", + "times = data_18z.variables[dtime]\n", + "vtimes = num2date(times[:].squeeze(), times.units)\n", + "temps = data_18z.variables['Temperature_isobaric']\n", "tmp = units.kelvin * temps[0, :]\n", - "uwnd = (units.meter / units.second) * data.variables['u-component_of_wind_isobaric'][0, :]\n", - "vwnd = (units.meter / units.second) * data.variables['v-component_of_wind_isobaric'][0, :]\n", - "hgt = units.meter * data.variables['Geopotential_height_isobaric'][0, :]\n", - "relh = units.percent * data.variables['Relative_humidity_isobaric'][0, :]\n", - "lifted_index = 
(data.variables['Best_4_layer_lifted_index_layer_between_two_'\n", - " 'pressure_difference_from_ground_layer'][0, 0, :] *\n", - " units(data.variables['Best_4_layer_lifted_index_layer_between_two_'\n", - " 'pressure_difference_from_ground_layer'].units))\n", - "Td_sfc = (units(data.variables['Dew_point_temperature_height_above_ground'].units) *\n", - " data.variables['Dew_point_temperature_height_above_ground'][0, 0, :])\n", - "avor = data.variables['Absolute_vorticity_isobaric'][0, :] * units('1/s')\n", - "pmsl = (units(data.variables['Pressure_reduced_to_MSL_msl'].units) *\n", - " data.variables['Pressure_reduced_to_MSL_msl'][0, :])" + "uwnd = (units.meter / units.second) * data_18z.variables['u-component_of_wind_isobaric'][0, :]\n", + "vwnd = (units.meter / units.second) * data_18z.variables['v-component_of_wind_isobaric'][0, :]\n", + "hgt = units.meter * data_18z.variables['Geopotential_height_isobaric'][0, :]\n", + "relh = units.percent * data_18z.variables['Relative_humidity_isobaric'][0, :]\n", + "lifted_index = (data_18z.variables['Best_4_layer_lifted_index_layer_between_two_'\n", + " 'pressure_difference_from_ground_layer'][0, 0, :] *\n", + " units(data_18z.variables['Best_4_layer_lifted_index_layer_between_two_'\n", + " 'pressure_difference_from_ground_layer'].units))\n", + "Td_sfc = (units(data_18z.variables['Dew_point_temperature_height_above_ground'].units) *\n", + " data_18z.variables['Dew_point_temperature_height_above_ground'][0, 0, :])\n", + "avor = data_18z.variables['Absolute_vorticity_isobaric'][0, :] * units('1/s')\n", + "pmsl = (units(data_18z.variables['Pressure_reduced_to_MSL_msl'].units) *\n", + " data_18z.variables['Pressure_reduced_to_MSL_msl'][0, :])" ] }, { @@ -109,7 +112,7 @@ "cell_marker": "########################" }, "source": [ - "Query for 00 UTC to calculate pressure falls and height change" + "Repeat the above process to query for the analysis from 12 hours earlier (06 UTC) to calculate pressure falls and height change." 
] }, { @@ -118,24 +121,23 @@ "metadata": {}, "outputs": [], "source": [ - "ncss2 = NCSS('{}{dt:%Y%m}/{dt:%Y%m%d}/namanl_218_{dt:%Y%m%d}_'\n", - " '0600_000.grb'.format(base_url, dt=dt))\n", + "td = timedelta(hours=12)\n", "\n", - "# Query for required variables\n", - "gfsdata = ncss.query().all_times()\n", - "gfsdata.variables('Geopotential_height_isobaric',\n", - " 'Pressure_reduced_to_MSL_msl',\n", - " ).add_lonlat()\n", + "ncss_06z = cat.datasets[f'namanl_218_{dt:%Y%m%d}_{dt-td:%H}00_000.grb'].subset()\n", "\n", - "# Set the lat/lon box for the data you want to pull in.\n", - "gfsdata.lonlat_box(-135, -60, 15, 65)\n", + "query = ncss_06z.query()\n", + "query.all_times()\n", + "query.add_lonlat()\n", + "query.lonlat_box(-135, -60, 15, 65)\n", + "query.variables('Geopotential_height_isobaric',\n", + " 'Pressure_reduced_to_MSL_msl')\n", "\n", "# Actually getting the data\n", - "data2 = ncss2.get_data(gfsdata)\n", + "data_06z = ncss_06z.get_data(query)\n", "\n", - "hgt_00z = units.meter * data2.variables['Geopotential_height_isobaric'][0, :]\n", - "pmsl_00z = (units(data2.variables['Pressure_reduced_to_MSL_msl'].units) *\n", - " data2.variables['Pressure_reduced_to_MSL_msl'][0, :])" + "hgt_06z = units.meter * data_06z.variables['Geopotential_height_isobaric'][0, :]\n", + "pmsl_06z = (units(data_06z.variables['Pressure_reduced_to_MSL_msl'].units) *\n", + " data_06z.variables['Pressure_reduced_to_MSL_msl'][0, :])" ] }, { @@ -166,7 +168,7 @@ "u_500 = uwnd[idx_500].to('kt')\n", "v_500 = vwnd[idx_500].to('kt')\n", "hgt_500 = hgt[idx_500]\n", - "hgt_500_00z = hgt_00z[idx_500]\n", + "hgt_500_06z = hgt_06z[idx_500]\n", "\n", "# 700 hPa, index 12\n", "idx_700 = np.where(lev == 700. * units.hPa)[0][0]\n", @@ -274,8 +276,8 @@ "metadata": {}, "outputs": [], "source": [ - "pmsl_change = pmsl - pmsl_00z\n", - "hgt_500_change = hgt_500 - hgt_500_00z" + "pmsl_change = pmsl - pmsl_06z\n", + "hgt_500_change = hgt_500 - hgt_500_06z" ] }, { @@ -426,7 +428,7 @@ " label='12-hr 500-hPa Height Falls (m)')\n", "leg = plt.legend(handles=[jet300, jet500, jet850, dashed_black_line, black_line, red_line,\n", " purple, tan, green, yellow], loc=3,\n", - " title='Composite Analysis Valid: {:s}'.format(str(vtimes[0])),\n", + " title=f'Composite Analysis Valid: {vtimes}',\n", " framealpha=1)\n", "leg.set_zorder(100)" ] @@ -453,7 +455,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/pages/gallery/xarray_500hPa_map.ipynb b/pages/gallery/xarray_500hPa_map.ipynb index 505dc9dd..4bcd2ae7 100644 --- a/pages/gallery/xarray_500hPa_map.ipynb +++ b/pages/gallery/xarray_500hPa_map.ipynb @@ -60,17 +60,14 @@ "metadata": {}, "outputs": [], "source": [ - "# Set year, month, day, and hour values as variables to make it\n", - "# easier to change dates for a case study\n", - "base_url = 'https://www.ncei.noaa.gov/thredds/dodsC/namanl/'\n", + "# Specify our date/time of product desired\n", "dt = datetime(2016, 4, 16, 18)\n", - "data = xr.open_dataset('{}{dt:%Y%m}/{dt:%Y%m%d}/namanl_218_{dt:%Y%m%d}_'\n", - " '{dt:%H}00_000.grb'.format(base_url, dt=dt),\n", - " decode_times=True)\n", "\n", - "# To list all available variables for this data set,\n", - "# uncomment the following line\n", - "# print(sorted(list(data.variables)))" + "# Construct our OPeNDAP access URL\n", + "base_url = 'https://www.ncei.noaa.gov/thredds/dodsC/model-namanl-old/'\n", + "data = xr.open_dataset(f'{base_url}{dt:%Y%m}/{dt:%Y%m%d}/'\n", + " 
f'namanl_218_{dt:%Y%m%d}_{dt:%H}00_000.grb',\n", + " decode_times=True)" ] }, { @@ -89,12 +86,9 @@ "metadata": {}, "outputs": [], "source": [ - "# X, Y values are in units of km, need them in meters for plotting/calculations\n", - "data.x.values = data.x.values * 1000.\n", - "data.y.values = data.y.values * 1000.\n", - "\n", - "# Make them 2D for plotting/calculations\n", - "x, y = np.meshgrid(data.x.values, data.y.values)" + "# Create a 2-d meshgrid of our x, y coordinates\n", + "# manually converted to meters (km * 1000)\n", + "x, y = np.meshgrid(data['x'].values * 1000, data['y'].values * 1000)" ] }, { @@ -139,7 +133,7 @@ "outputs": [], "source": [ "# print(data.Geopotential_height.coords)\n", - "hght_500 = data.Geopotential_height_isobaric.sel(time1=vtimes[0], isobaric=500)\n", + "hght_500 = data['Geopotential_height_isobaric'].sel(time1=vtimes[0], isobaric=500)\n", "uwnd_500 = data['u-component_of_wind_isobaric'].sel(time1=vtimes[0], isobaric=500)\n", "vwnd_500 = data['v-component_of_wind_isobaric'].sel(time1=vtimes[0], isobaric=500)" ] @@ -178,10 +172,13 @@ "clev500 = np.arange(5100, 6000, 60)\n", "cs = ax.contour(x, y, ndimage.gaussian_filter(hght_500, sigma=5), clev500,\n", " colors='k', linewidths=2.5, linestyles='solid', transform=datacrs)\n", - "tl = plt.clabel(cs, fontsize=12, colors='k', inline=1, inline_spacing=8,\n", - " fmt='%i', rightside_up=True, use_clabeltext=True)\n", + "ax.clabel(cs, fontsize=12, colors='k', inline=1, inline_spacing=8,\n", + " fmt='%i', rightside_up=True, use_clabeltext=True)\n", + "\n", "# Here we put boxes around the clabels with a black boarder white facecolor\n", - "for t in tl:\n", + "# `labelTexts` necessary as ~cartopy.mpl.contour.GeoContourSet.clabel\n", + "# does not return list of texts as of 0.18\n", + "for t in cs.labelTexts:\n", " t.set_bbox({'fc': 'w'})\n", "\n", "# Transform Vectors before plotting, then plot wind barbs.\n", @@ -189,7 +186,7 @@ "\n", "# Add some titles to make the plot readable by someone else\n", "plt.title('500-hPa Geopotential Heights (m)', loc='left')\n", - "plt.title('VALID: {}'.format(vtimes[0]), loc='right');" + "plt.title(f'VALID: {vtimes[0]}', loc='right');" ] } ], @@ -214,7 +211,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/pages/workshop/MetPy_Advanced/QG Analysis.ipynb b/pages/workshop/MetPy_Advanced/QG Analysis.ipynb index d96922be..00e653e7 100644 --- a/pages/workshop/MetPy_Advanced/QG Analysis.ipynb +++ b/pages/workshop/MetPy_Advanced/QG Analysis.ipynb @@ -51,14 +51,15 @@ "\n", "import cartopy.crs as ccrs\n", "import cartopy.feature as cfeature\n", - "import numpy as np\n", - "from siphon.catalog import TDSCatalog\n", - "from siphon.ncss import NCSS\n", "import matplotlib.pyplot as plt\n", "import metpy.calc as mpcalc\n", "import metpy.constants as mpconstants\n", + "import numpy as np\n", + "import xarray as xr\n", + "\n", "from metpy.units import units\n", - "import xarray as xr" + "from siphon.catalog import TDSCatalog\n", + "from siphon.ncss import NCSS" ] }, { @@ -116,21 +117,29 @@ "outputs": [], "source": [ "# Read NARR Data from THREDDS server\n", - "base_url = 'https://www.ncei.noaa.gov/thredds/catalog/narr-a-files/'\n", + "base_url = 'https://www.ncei.noaa.gov/thredds/catalog/model-narr-a-files/'\n", "\n", "# Programmatically generate the URL to the day of data we want\n", "cat = TDSCatalog(f'{base_url}{dt:%Y%m}/{dt:%Y%m%d}/catalog.xml')\n", "\n", "# Have Siphon find the appropriate 
dataset\n", - "ds = cat.datasets.filter_time_nearest(dt)\n", - "\n", - "# Download data using the NetCDF Subset Service\n", - "ncss = ds.subset()\n", - "query = ncss.query().lonlat_box(north=60, south=18, east=300, west=225)\n", - "query.time(dt).variables('Geopotential_height_isobaric',\n", - " 'Temperature_isobaric',\n", - " 'u-component_of_wind_isobaric',\n", - " 'v-component_of_wind_isobaric').add_lonlat().accept('netcdf')\n", + "tds_ds = cat.datasets.filter_time_nearest(dt)\n", + "\n", + "# Interface with the data through the NetCDF Subset Service (NCSS) \n", + "ncss = tds_ds.subset()\n", + "\n", + "# Create an NCSS query with our desired specifications\n", + "query = ncss.query()\n", + "query.lonlat_box(north=60, south=18, east=300, west=225)\n", + "query.time(dt)\n", + "query.add_lonlat()\n", + "query.accept('netcdf')\n", + "query.variables('Geopotential_height_isobaric',\n", + " 'Temperature_isobaric',\n", + " 'u-component_of_wind_isobaric',\n", + " 'v-component_of_wind_isobaric')\n", + "\n", + "# Use the query to obtain our NetCDF data\n", "data = ncss.get_data(query)" ] }, @@ -171,11 +180,11 @@ "outputs": [], "source": [ "# This is the time we're using\n", - "vtime = ds.Temperature_isobaric.metpy.time[0]\n", + "vtime = ds['Temperature_isobaric'].metpy.time[0]\n", "\n", "# Grab lat/lon values from file as unit arrays\n", - "lats = ds.lat.metpy.unit_array\n", - "lons = ds.lon.metpy.unit_array\n", + "lats = ds['lat'].metpy.unit_array\n", + "lons = ds['lon'].metpy.unit_array\n", "\n", "# Calculate distance between grid points\n", "# will need for computations later\n", @@ -183,16 +192,12 @@ "\n", "# Grabbing data for specific variable contained in file (as a unit array)\n", "# 700 hPa Geopotential Heights\n", - "hght_700 = ds.Geopotential_height_isobaric.metpy.sel(vertical=700 * units.hPa,\n", - " time=vtime)\n", - "\n", - "# Equivalent form needed if there is a dash in name of variable\n", - "# (e.g., 'u-component_of_wind_isobaric')\n", - "# hght_700 = ds['Geopotential_height_isobaric'].metpy.sel(vertical=700 * units.hPa, time=vtime)\n", + "hght_700 = ds['Geopotential_height_isobaric'].metpy.sel(vertical=700 * units.hPa,\n", + " time=vtime)\n", "\n", "# 700 hPa Temperature\n", - "tmpk_700 = ds.Temperature_isobaric.metpy.sel(vertical=700 * units.hPa,\n", - " time=vtime)\n", + "tmpk_700 = ds['Temperature_isobaric'].metpy.sel(vertical=700 * units.hPa,\n", + " time=vtime)\n", "\n", "# 700 hPa u-component_of_wind\n", "uwnd_700 = ds['u-component_of_wind_isobaric'].metpy.sel(vertical=700 * units.hPa,\n", @@ -311,20 +316,20 @@ "n_reps = 50\n", "\n", "# Apply the 9-point smoother\n", - "hght_700s = mpcalc.smooth_n_point(hght_700, 9, n_reps).metpy.unit_array\n", - "hght_500s = mpcalc.smooth_n_point(hght_500, 9, n_reps).metpy.unit_array\n", + "hght_700s = mpcalc.smooth_n_point(hght_700, 9, n_reps)#.metpy.unit_array\n", + "hght_500s = mpcalc.smooth_n_point(hght_500, 9, n_reps)#.metpy.unit_array\n", "\n", - "tmpk_700s = mpcalc.smooth_n_point(tmpk_700, 9, n_reps).metpy.unit_array\n", + "tmpk_700s = mpcalc.smooth_n_point(tmpk_700, 9, n_reps)#.metpy.unit_array\n", "tmpc_700s = tmpk_700s.to('degC')\n", "\n", - "uwnd_700s = mpcalc.smooth_n_point(uwnd_700, 9, n_reps).metpy.unit_array\n", - "vwnd_700s = mpcalc.smooth_n_point(vwnd_700, 9, n_reps).metpy.unit_array\n", + "uwnd_700s = mpcalc.smooth_n_point(uwnd_700, 9, n_reps)#.metpy.unit_array\n", + "vwnd_700s = mpcalc.smooth_n_point(vwnd_700, 9, n_reps)#.metpy.unit_array\n", "\n", - "uwnd_500s = mpcalc.smooth_n_point(uwnd_500, 9, 
n_reps).metpy.unit_array\n", - "vwnd_500s = mpcalc.smooth_n_point(vwnd_500, 9, n_reps).metpy.unit_array\n", + "uwnd_500s = mpcalc.smooth_n_point(uwnd_500, 9, n_reps)#.metpy.unit_array\n", + "vwnd_500s = mpcalc.smooth_n_point(vwnd_500, 9, n_reps)#.metpy.unit_array\n", "\n", - "uwnd_900s = mpcalc.smooth_n_point(uwnd_900, 9, n_reps).metpy.unit_array\n", - "vwnd_900s = mpcalc.smooth_n_point(vwnd_900, 9, n_reps).metpy.unit_array" + "uwnd_900s = mpcalc.smooth_n_point(uwnd_900, 9, n_reps)#.metpy.unit_array\n", + "vwnd_900s = mpcalc.smooth_n_point(vwnd_900, 9, n_reps)#.metpy.unit_array" ] }, { @@ -520,7 +525,7 @@ "# Titles\n", "plt.title('700-hPa Geopotential Heights (m), Temperature (C),\\n'\n", " 'Winds (kts), and Temp Adv. ($*10^4$ C/s)',loc='left')\n", - "plt.title('VALID: ' + vtime_str, loc='right')\n", + "plt.title(f'VALID: {vtime_str}', loc='right')\n", "\n", "\n", "\n", @@ -555,7 +560,7 @@ "# Titles\n", "plt.title('500-hPa Geopotential Heights (m), Winds (kt), and\\n'\n", " 'Absolute Vorticity Advection ($*10^{8}$ 1/s^2)',loc='left')\n", - "plt.title('VALID: ' + vtime_str, loc='right')\n", + "plt.title(f'VALID: {vtime_str}', loc='right')\n", "\n", "\n", "\n", @@ -589,7 +594,7 @@ "# Titles\n", "plt.title('700-hPa Geopotential Heights (m), Winds (kt), and\\n'\n", " 'Term B QG Omega ($*10^{12}$ kg m$^{-3}$ s$^{-3}$)',loc='left')\n", - "plt.title('VALID: ' + vtime_str, loc='right')\n", + "plt.title(f'VALID: {vtime_str}', loc='right')\n", "\n", "\n", "\n", @@ -623,7 +628,7 @@ "# Titles\n", "plt.title('500-hPa Geopotential Heights (m), Winds (kt), and\\n'\n", " 'Term A QG Omega ($*10^{12}$ kg m$^{-3}$ s$^{-3}$)',loc='left')\n", - "plt.title('VALID: ' + vtime_str, loc='right')\n", + "plt.title(f'VALID: {vtime_str}', loc='right')\n", "\n", "plt.show()" ] @@ -685,7 +690,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/pages/workshop/MetPy_Case_Study/MetPy Case Study.ipynb b/pages/workshop/MetPy_Case_Study/MetPy Case Study.ipynb index 7f3d987d..e3f0b35c 100644 --- a/pages/workshop/MetPy_Case_Study/MetPy Case Study.ipynb +++ b/pages/workshop/MetPy_Case_Study/MetPy Case Study.ipynb @@ -47,15 +47,15 @@ "\n", "import cartopy.crs as ccrs\n", "import cartopy.feature as cfeature\n", - "from netCDF4 import Dataset, num2date\n", - "import numpy as np\n", - "from scipy.ndimage import gaussian_filter\n", - "from siphon.catalog import TDSCatalog\n", - "from siphon.ncss import NCSS\n", "import matplotlib.pyplot as plt\n", "import metpy.calc as mpcalc\n", + "import numpy as np\n", + "\n", "from metpy.plots import StationPlot\n", - "from metpy.units import units" + "from metpy.units import units\n", + "from netCDF4 import Dataset, num2date\n", + "from scipy.ndimage import gaussian_filter\n", + "from siphon.catalog import TDSCatalog" ] }, { @@ -142,11 +142,11 @@ "**Choosing our data source**\n", "Let's go ahead and use the NARR Analysis data to investigate the past case we identified (The Storm of the Century).\n", "\n", - "https://www.ncei.noaa.gov/thredds/catalog/narr-a-files/199303/19930313/catalog.html?dataset=narr-a-files/199303/19930313/narr-a_221_19930313_0000_000.grb\n", + "https://www.ncei.noaa.gov/thredds/catalog/model-narr-a-files/199303/19930313/catalog.html?dataset=model-narr-a-files/199303/19930313/narr-a_221_19930313_0000_000.grb\n", "\n", - "And we will use a python package called Siphon to read this data through the NetCDFSubset (NetCDFServer) link.\n", + "And we will use a Unidata python 
package called [Siphon](https://unidata.github.io/siphon/latest/) to read this data through the NetCDFSubset (NetCDFServer) link.\n", "\n", - "https://www.ncei.noaa.gov/thredds/ncss/grid/narr-a-files/199303/19930313/narr-a_221_19930313_0000_000.grb/dataset.html" + "https://www.ncei.noaa.gov/thredds/ncss/model-narr-a-files/199303/19930313/narr-a_221_19930313_0000_000.grb/dataset.html" ] }, { @@ -171,7 +171,7 @@ "outputs": [], "source": [ "# Read NARR Data from THREDDS server\n", - "base_url = 'https://www.ncei.noaa.gov/thredds/catalog/narr-a-files/'\n", + "base_url = 'https://www.ncei.noaa.gov/thredds/catalog/model-narr-a-files/'\n", "\n", "# Programmatically generate the URL to the day of data we want\n", "cat = TDSCatalog(f'{base_url}{dt:%Y%m}/{dt:%Y%m%d}/catalog.xml')\n", @@ -179,12 +179,21 @@ "# Have Siphon find the appropriate dataset\n", "ds = cat.datasets.filter_time_nearest(dt)\n", "\n", - "# Download data using the NetCDF Subset Service\n", + "# Interface with the data through the NetCDF Subset Service (NCSS)\n", "ncss = ds.subset()\n", - "query = ncss.query().lonlat_box(north=60, south=18, east=300, west=225)\n", - "query.all_times().variables('Geopotential_height_isobaric', 'Temperature_isobaric',\n", - " 'u-component_of_wind_isobaric',\n", - " 'v-component_of_wind_isobaric').add_lonlat().accept('netcdf')\n", + "\n", + "# Create an NCSS query with our desired specifications\n", + "query = ncss.query()\n", + "query.lonlat_box(north=60, south=18, east=300, west=225)\n", + "query.all_times()\n", + "query.add_lonlat()\n", + "query.accept('netcdf')\n", + "query.variables('Geopotential_height_isobaric',\n", + " 'Temperature_isobaric',\n", + " 'u-component_of_wind_isobaric',\n", + " 'v-component_of_wind_isobaric')\n", + "\n", + "# Use the query to obtain our NetCDF data\n", "data = ncss.get_data(query)" ] }, @@ -709,7 +718,6 @@ " Temp Adv (C/h), and Wind Barbs (kts)', loc='left')\n", "plt.title(f'VALID: {vtime}', loc='right')\n", "\n", - "plt.tight_layout()\n", "plt.show()" ] }, @@ -787,7 +795,6 @@ " (1/s), and Wind Barbs (kts)', loc='left')\n", "plt.title(f'VALID: {vtime}', loc='right')\n", "\n", - "plt.tight_layout()\n", "plt.show()" ] }, @@ -859,7 +866,6 @@ " loc='left')\n", "plt.title(f'VALID: {vtime}', loc='right')\n", "\n", - "plt.tight_layout()\n", "plt.show()" ] }, @@ -933,7 +939,7 @@ "\n", "# Titles\n", "plt.title(r'500-hPa Geopotential Heights, Planetary Vorticity Advection ($*10^{10}$ 1/s^2)',loc='left')\n", - "plt.title('VALID: %s' %(vtime),loc='right')\n", + "plt.title(f'VALID: {vtime}',loc='right')\n", "\n", "\n", "\n", @@ -964,7 +970,7 @@ "\n", "# Titles\n", "plt.title(r'500-hPa Geopotential Heights, Relative Vorticity Advection ($*10^{8}$ 1/s^2)',loc='left')\n", - "plt.title('VALID: %s' %(vtime),loc='right')\n", + "plt.title(f'VALID: {vtime}',loc='right')\n", "\n", "\n", "\n", @@ -995,7 +1001,7 @@ "\n", "# Titles\n", "plt.title(r'500-hPa Geopotential Heights, Absolute Vorticity Advection ($*10^{8}$ 1/s^2)',loc='left')\n", - "plt.title('VALID: %s' %(vtime),loc='right')\n", + "plt.title(f'VALID: {vtime}',loc='right')\n", "\n", "\n", "\n", @@ -1026,9 +1032,8 @@ "\n", "# Titles\n", "plt.title(r'500-hPa Geopotential Heights, Stretching Vorticity ($*10^{9}$ 1/s^2)',loc='left')\n", - "plt.title('VALID: %s' %(vtime),loc='right')\n", + "plt.title(f'VALID: {vtime}',loc='right')\n", "\n", - "plt.tight_layout()\n", "plt.show()" ] }, @@ -1108,7 +1113,6 @@ "# Title\n", "plt.title('Geopotential (m; top), U-wind (m/s; Lower Left), V-wind (m/s; Lower Right)')\n", "\n", - 
"plt.tight_layout()\n", "plt.show()" ] }, @@ -1148,7 +1152,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/pages/workshop/MetPy_Case_Study/solutions/abs_vort_500.py b/pages/workshop/MetPy_Case_Study/solutions/abs_vort_500.py index 06049463..ffd412b0 100644 --- a/pages/workshop/MetPy_Case_Study/solutions/abs_vort_500.py +++ b/pages/workshop/MetPy_Case_Study/solutions/abs_vort_500.py @@ -34,5 +34,4 @@ (1/s), and Wind Barbs (kts)', loc='left') plt.title(f'VALID: {vtime}', loc='right') -plt.tight_layout() plt.show() diff --git a/pages/workshop/MetPy_Case_Study/solutions/markers.py b/pages/workshop/MetPy_Case_Study/solutions/markers.py index a64c58ee..abeee27f 100644 --- a/pages/workshop/MetPy_Case_Study/solutions/markers.py +++ b/pages/workshop/MetPy_Case_Study/solutions/markers.py @@ -24,5 +24,4 @@ plt.title('Geopotential (m; top), U-wind (m/s; Lower Left), \ V-wind (m/s; Lower Right)') -plt.tight_layout() plt.show() diff --git a/pages/workshop/MetPy_Case_Study/solutions/temp_adv_map_850.py b/pages/workshop/MetPy_Case_Study/solutions/temp_adv_map_850.py index cfe81959..567c2773 100644 --- a/pages/workshop/MetPy_Case_Study/solutions/temp_adv_map_850.py +++ b/pages/workshop/MetPy_Case_Study/solutions/temp_adv_map_850.py @@ -31,5 +31,4 @@ Temp Adv (C/h), and Wind Barbs (kts)', loc='left') plt.title(f'VALID: {vtime}', loc='right') -plt.tight_layout() plt.show() diff --git a/pages/workshop/MetPy_Case_Study/solutions/winds_300.py b/pages/workshop/MetPy_Case_Study/solutions/winds_300.py index 848093d5..d5ab7045 100644 --- a/pages/workshop/MetPy_Case_Study/solutions/winds_300.py +++ b/pages/workshop/MetPy_Case_Study/solutions/winds_300.py @@ -29,5 +29,4 @@ loc='left') plt.title(f'VALID: {vtime}', loc='right') -plt.tight_layout() plt.show() diff --git a/pages/workshop/Satellite_Data/Satellite Animations.ipynb b/pages/workshop/Satellite_Data/Satellite Animations.ipynb index b1a18633..b32cb6f1 100644 --- a/pages/workshop/Satellite_Data/Satellite Animations.ipynb +++ b/pages/workshop/Satellite_Data/Satellite Animations.ipynb @@ -54,16 +54,18 @@ "source": [ "import os.path\n", "import sys\n", - "from IPython.display import HTML\n", + "\n", "from datetime import datetime\n", + "\n", "import cartopy.feature as cfeature\n", "import matplotlib as mpl\n", "import matplotlib.pyplot as plt\n", "import metpy\n", - "from metpy.plots import colortables\n", - "from metpy.plots import add_timestamp\n", - "from siphon.catalog import TDSCatalog\n", - "from matplotlib.animation import ArtistAnimation" + "\n", + "from IPython.display import HTML\n", + "from matplotlib.animation import ArtistAnimation\n", + "from metpy.plots import add_timestamp, colortables\n", + "from siphon.catalog import TDSCatalog" ] }, { @@ -167,7 +169,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.8.5" } }, "nbformat": 4, diff --git a/run_notebooks.py b/run_notebooks.py index 2bea3b58..d90f17d9 100644 --- a/run_notebooks.py +++ b/run_notebooks.py @@ -4,20 +4,23 @@ import os.path NOTEBOOKS_DIR = 'pages/' -SKIP_NOTEBOOKS = [os.path.join('workshop/Bonus', 'What to do when things go wrong.ipynb'), - os.path.join('workshop/Bonus', 'netCDF-Writing.ipynb'), - os.path.join('workshop/AWIPS', 'AWIPS_Grids_and_Cartopy.ipynb'), - os.path.join('workshop/AWIPS', 'Grid_Levels_and_Parameters.ipynb'), - os.path.join('workshop/AWIPS', 'Map_Resources_and_Topography.ipynb'), - 
os.path.join('workshop/AWIPS', 'Model_Sounding_Data.ipynb'), - os.path.join('workshop/AWIPS', 'NEXRAD_Level_3_Plot_with_Matplotlib.ipynb'), - os.path.join('workshop/AWIPS', 'Satellite_Imagery.ipynb'), - os.path.join('workshop/AWIPS', 'Upper_Air_BUFR_Soundings.ipynb'), - os.path.join('workshop/AWIPS', 'Watch_and_Warning_Polygons.ipynb'), - os.path.join('gallery', 'Sounding_Plotter.ipynb'), - os.path.join('gallery', 'Precipitation_Map.ipynb'), - os.path.join('workshop/MetPy_Advanced', 'QG Analysis.ipynb'), - os.path.join('workshop/MetPy_Case_Study', 'MetPy_Case_Study.ipynb')] +SKIP_NOTEBOOKS = [ + os.path.join('workshop/Bonus', 'What to do when things go wrong.ipynb'), + os.path.join('workshop/Bonus', 'netCDF-Writing.ipynb'), + os.path.join('workshop/AWIPS', 'AWIPS_Grids_and_Cartopy.ipynb'), + os.path.join('workshop/AWIPS', 'Grid_Levels_and_Parameters.ipynb'), + os.path.join('workshop/AWIPS', 'Map_Resources_and_Topography.ipynb'), + os.path.join('workshop/AWIPS', 'Model_Sounding_Data.ipynb'), + os.path.join('workshop/AWIPS', 'NEXRAD_Level_3_Plot_with_Matplotlib.ipynb'), + os.path.join('workshop/AWIPS', 'Satellite_Imagery.ipynb'), + os.path.join('workshop/AWIPS', 'Upper_Air_BUFR_Soundings.ipynb'), + os.path.join('workshop/AWIPS', 'Watch_and_Warning_Polygons.ipynb'), + os.path.join('gallery', 'Sounding_Plotter.ipynb'), + os.path.join('gallery', 'Precipitation_Map.ipynb'), + os.path.join('gallery', 'upperair_500_obs_contours.ipynb') + #os.path.join('workshop/MetPy_Advanced', 'QG Analysis.ipynb'), + #os.path.join('workshop/MetPy_Case_Study', 'MetPy_Case_Study.ipynb') + ] def run_notebook(notebook): From 9a05cace4e2d0579868e04fc5516c6f9fd7d71b9 Mon Sep 17 00:00:00 2001 From: Drew Camron Date: Fri, 21 Aug 2020 13:23:45 -0600 Subject: [PATCH 2/2] Move data outside of pages Saved METAR txt files in the pages directory throw off Nikola. I'm sure there's a prettier way to re-organize this, but let's just get python-training building again first. 
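
For reference, the notebooks now download remote files into the repo-root
data/ directory and read them back from there, rather than writing into
pages/. A minimal sketch of that pattern follows; the catalog URL, valid
time, and file name are illustrative only (not copied from any one
notebook), and the relative path depends on where a given notebook lives:

    from datetime import datetime

    from metpy.io import metar
    from siphon.catalog import TDSCatalog

    # Illustrative THREDDS text/METAR catalog and valid time
    dt = datetime(2020, 8, 21, 12)
    cat = TDSCatalog('https://thredds-test.unidata.ucar.edu/thredds/catalog/'
                     'noaaport/text/metar/catalog.xml')
    ds = cat.datasets[f'metar_{dt:%Y%m%d}_{dt:%H}00.txt']

    # Save outside pages/ so Nikola never sees the downloaded .txt file
    local_path = f'../../data/{ds.name}'
    ds.download(local_path)

    # Parse the downloaded METARs into a DataFrame with MetPy
    df = metar.parse_metar_file(local_path)
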
--- {pages/data => data}/NARR_19930313_0000.nc | Bin pages/gallery/satellite_sfc_obs.ipynb | 6 +- pages/workshop/Bonus/netCDF Writing.ipynb | 78 ++++++++++++------ .../workshop/MetPy_Advanced/QG Analysis.ipynb | 2 +- .../MetPy_Case_Study/MetPy Case Study.ipynb | 2 +- ...Scientific Python Ecosystem Overview.ipynb | 2 +- .../Declarative Surface Observations.ipynb | 6 +- .../workshop/XArray/XArray Introduction.ipynb | 2 +- pages/workshop/XArray/XArray and CF.ipynb | 2 +- 9 files changed, 62 insertions(+), 38 deletions(-) rename {pages/data => data}/NARR_19930313_0000.nc (100%) diff --git a/pages/data/NARR_19930313_0000.nc b/data/NARR_19930313_0000.nc similarity index 100% rename from pages/data/NARR_19930313_0000.nc rename to data/NARR_19930313_0000.nc diff --git a/pages/gallery/satellite_sfc_obs.ipynb b/pages/gallery/satellite_sfc_obs.ipynb index efaf209c..58fbd8d9 100644 --- a/pages/gallery/satellite_sfc_obs.ipynb +++ b/pages/gallery/satellite_sfc_obs.ipynb @@ -108,10 +108,10 @@ "time_idx = list(cat.datasets).index(f'metar_{vtime:%Y%m%d}_{vtime:%H}00.txt')\n", "\n", "metar_df = cat.datasets[time_idx]\n", - "metar_df.download()\n", + "metar_df.download(f'../../data/{metar_df.name}')\n", "\n", "# Parse METAR data with MetPy\n", - "df = metar.parse_metar_file(metar_df.name)" + "df = metar.parse_metar_file(f'../../data/{metar_df.name}')" ] }, { @@ -223,4 +223,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} +} \ No newline at end of file diff --git a/pages/workshop/Bonus/netCDF Writing.ipynb b/pages/workshop/Bonus/netCDF Writing.ipynb index ebd9f769..c0b383c3 100644 --- a/pages/workshop/Bonus/netCDF Writing.ipynb +++ b/pages/workshop/Bonus/netCDF Writing.ipynb @@ -64,7 +64,7 @@ "source": [ "## Opening a file, creating a new Dataset\n", "\n", - "Let's create a new, empty netCDF file named '../data/new.nc', opened for writing.\n", + "Let's create a new, empty netCDF file named 'new.nc' in our project root `data` directory, opened for writing.\n", "\n", "Be careful, opening a file with 'w' will clobber any existing data (unless `clobber=False` is used, in which case an exception is raised if the file already exists).\n", "\n", @@ -85,13 +85,14 @@ "slide_helper": "slide_end", "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ "try: ncfile.close() # just to be safe, make sure dataset is not already open.\n", "except: pass\n", - "ncfile = Dataset('../../data/new.nc',mode='w',format='NETCDF4_CLASSIC') \n", + "ncfile = Dataset('../../../data/new.nc',mode='w',format='NETCDF4_CLASSIC') \n", "print(ncfile)" ] }, @@ -133,7 +134,8 @@ "slide_helper": "slide_end", "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ @@ -175,7 +177,8 @@ }, "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ @@ -186,7 +189,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "ncfile.subtitle=\"My model data subtitle\"\n", @@ -252,7 +257,8 @@ "slide_helper": "slide_end", "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ @@ -306,7 +312,8 @@ "slide_helper": "slide_end", "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ @@ -345,7 +352,8 @@ }, "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ @@ -395,7 +403,8 @@ }, "slideshow": { "slide_type": "slide" - } + }, + "tags": [] }, "outputs": [], "source": [ @@ -432,7 
+441,8 @@ "slide_helper": "slide_end", "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ @@ -462,7 +472,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "import datetime as dt\n", @@ -475,7 +487,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "times = date2num(dates, time.units)\n", @@ -494,7 +508,8 @@ "slide_helper": "slide_end", "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ @@ -538,7 +553,8 @@ "slide_helper": "slide_end", "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ @@ -551,10 +567,12 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "!ncdump -h ../data/new.nc" + "!ncdump -h ../../../data/new.nc" ] }, { @@ -589,11 +607,12 @@ "slide_helper": "slide_end", "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ - "ncfile = Dataset('../../data/new2.nc','w',format='NETCDF4')\n", + "ncfile = Dataset('../../../data/new2.nc','w',format='NETCDF4')\n", "print(ncfile)" ] }, @@ -636,7 +655,8 @@ "slide_helper": "slide_end", "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ @@ -712,7 +732,8 @@ "slide_helper": "slide_end", "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ @@ -759,7 +780,8 @@ "slide_helper": "slide_end", "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ @@ -887,7 +909,8 @@ "slide_helper": "slide_end", "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ @@ -929,12 +952,13 @@ "slide_helper": "slide_end", "slideshow": { "slide_type": "fragment" - } + }, + "tags": [] }, "outputs": [], "source": [ "ncfile.close()\n", - "!ncdump -h ../data/new2.nc" + "!ncdump -h ../../../data/new2.nc" ] }, { @@ -977,9 +1001,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.3" + "version": "3.8.5-final" } }, "nbformat": 4, "nbformat_minor": 1 -} +} \ No newline at end of file diff --git a/pages/workshop/MetPy_Advanced/QG Analysis.ipynb b/pages/workshop/MetPy_Advanced/QG Analysis.ipynb index 00e653e7..7c1c9609 100644 --- a/pages/workshop/MetPy_Advanced/QG Analysis.ipynb +++ b/pages/workshop/MetPy_Advanced/QG Analysis.ipynb @@ -162,7 +162,7 @@ "source": [ "# Back up in case of bad internet connection.\n", "# Uncomment the following line to read local netCDF file of NARR data\n", - "# ds = xr.open_dataset('../../data/NARR_19930313_0000.nc').metpy.parse_cf()" + "# ds = xr.open_dataset('../../../data/NARR_19930313_0000.nc').metpy.parse_cf()" ] }, { diff --git a/pages/workshop/MetPy_Case_Study/MetPy Case Study.ipynb b/pages/workshop/MetPy_Case_Study/MetPy Case Study.ipynb index e3f0b35c..220dec05 100644 --- a/pages/workshop/MetPy_Case_Study/MetPy Case Study.ipynb +++ b/pages/workshop/MetPy_Case_Study/MetPy Case Study.ipynb @@ -205,7 +205,7 @@ "source": [ "# Back up in case of bad internet connection.\n", "# Uncomment the following line to read local netCDF file of NARR data\n", - "# data = Dataset('../../data/NARR_19930313_0000.nc','r')" + "# data = Dataset('../../../data/NARR_19930313_0000.nc','r')" ] }, { diff --git a/pages/workshop/Python_Ecosystem/Scientific Python Ecosystem 
Overview.ipynb b/pages/workshop/Python_Ecosystem/Scientific Python Ecosystem Overview.ipynb index b8439340..cd5cd4a8 100644 --- a/pages/workshop/Python_Ecosystem/Scientific Python Ecosystem Overview.ipynb +++ b/pages/workshop/Python_Ecosystem/Scientific Python Ecosystem Overview.ipynb @@ -121,7 +121,7 @@ "outputs": [], "source": [ "import xarray as xr\n", - "ds = xr.open_dataset('../../data/NARR_19930313_0000.nc')\n", + "ds = xr.open_dataset('../../../data/NARR_19930313_0000.nc')\n", "ds" ] }, diff --git a/pages/workshop/Surface_Data/Declarative Surface Observations.ipynb b/pages/workshop/Surface_Data/Declarative Surface Observations.ipynb index 52f5ed18..cc8b4aa3 100644 --- a/pages/workshop/Surface_Data/Declarative Surface Observations.ipynb +++ b/pages/workshop/Surface_Data/Declarative Surface Observations.ipynb @@ -87,7 +87,7 @@ "metadata": {}, "outputs": [], "source": [ - "ds.download()" + "ds.download(f'../../../data/{ds.name}')" ] }, { @@ -129,7 +129,7 @@ "metadata": {}, "outputs": [], "source": [ - "df = parse_metar_file(ds.name)" + "df = parse_metar_file(f'../../../data/{ds.name}')" ] }, { @@ -512,4 +512,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} +} \ No newline at end of file diff --git a/pages/workshop/XArray/XArray Introduction.ipynb b/pages/workshop/XArray/XArray Introduction.ipynb index 48124846..0ec75d86 100644 --- a/pages/workshop/XArray/XArray Introduction.ipynb +++ b/pages/workshop/XArray/XArray Introduction.ipynb @@ -343,7 +343,7 @@ "outputs": [], "source": [ "# Open sample North American Reanalysis data in netCDF format\n", - "ds = xr.open_dataset('../../data/NARR_19930313_0000.nc')\n", + "ds = xr.open_dataset('../../../data/NARR_19930313_0000.nc')\n", "ds" ] }, diff --git a/pages/workshop/XArray/XArray and CF.ipynb b/pages/workshop/XArray/XArray and CF.ipynb index 515a7094..72c09272 100644 --- a/pages/workshop/XArray/XArray and CF.ipynb +++ b/pages/workshop/XArray/XArray and CF.ipynb @@ -344,7 +344,7 @@ "outputs": [], "source": [ "# Open sample North American Reanalysis data in netCDF format\n", - "ds = xr.open_dataset('../../data/NARR_19930313_0000.nc')\n", + "ds = xr.open_dataset('../../../data/NARR_19930313_0000.nc')\n", "ds" ] },