By: Kevin Goebbert
Northern Hemispheric v-wind component over the mid-latitudes in a Hovmoller diagram. This diagram can be used to illustrate upper-level wave and energy propagation (e.g., downstream baroclinic development).
import cartopy.crs as ccrs
import cartopy.feature as cfeature
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import metpy.calc as mpcalc
import numpy as np
import xarray as xr
Get the data¶
Using NCEP/NCAR reanalysis data via xarray remote access using the OPeNDAP protocol.
Set the time range, parameter, and level to desired values
# Create time slice from dates
start_time = '2011-01-20'
end_time = '2011-02-06'

# Select NCEP/NCAR parameter and level
param = 'vwnd'
level = 250

# Remote get dataset using OPeNDAP method via xarray.
# NOTE: NOAA ESRL/PSD was renamed PSL; the old www.esrl.noaa.gov/psd host
# no longer serves these requests reliably (the original endpoint returned
# "429 Too Many Requests" / NetCDF access failures), so use the current
# psl.noaa.gov THREDDS server instead.
ds = xr.open_dataset('https://psl.noaa.gov/thredds/dodsC/Datasets/'
                     f'ncep.reanalysis/pressure/{param}.{start_time[:4]}.nc')

# Create slice variables subset domain
time_slice = slice(start_time, end_time)
lat_slice = slice(60, 40)  # latitudes are stored north-to-south in this dataset
lon_slice = slice(0, 360)

# Get data, selecting time, level, lat/lon slice
data = ds[param].sel(time=time_slice,
                     level=level,
                     lat=lat_slice,
                     lon=lon_slice)

# Compute cos(lat) area weights and take the weighted average over the
# latitude dimension, collapsing (time, lat, lon) -> (time, lon)
weights = np.cos(np.deg2rad(data.lat.values))
avg_data = (data * weights[None, :, None]).sum(dim='lat') / np.sum(weights)

# Get times and make array of datetime objects
vtimes = data.time.values.astype('datetime64[ms]').astype('O')

# Specify longitude values for chosen domain
lons = data.lon.values
syntax error, unexpected WORD_WORD, expecting SCAN_ATTR or SCAN_DATASET or SCAN_ERROR
context: <html^><head><title>429 Too Many Requests</title></head><body><center><h1>429 Too Many Requests</h1></center><hr><center>nginx/1.29.1</center></body></html>
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
File ~/micromamba/envs/metpy-cookbook/lib/python3.13/site-packages/xarray/backends/file_manager.py:211, in CachingFileManager._acquire_with_cache_info(self, needs_lock)
210 try:
--> 211 file = self._cache[self._key]
212 except KeyError:
File ~/micromamba/envs/metpy-cookbook/lib/python3.13/site-packages/xarray/backends/lru_cache.py:56, in LRUCache.__getitem__(self, key)
55 with self._lock:
---> 56 value = self._cache[key]
57 self._cache.move_to_end(key)
KeyError: [<class 'netCDF4._netCDF4.Dataset'>, ('http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/ncep.reanalysis/pressure/vwnd.2011.nc',), 'r', (('clobber', True), ('diskless', False), ('format', 'NETCDF4'), ('persist', False)), 'f5028eb8-4987-4f91-a813-090448845eb7']
During handling of the above exception, another exception occurred:
OSError Traceback (most recent call last)
Cell In[2], line 10
7 level = 250
9 # Remote get dataset using OPeNDAP method via xarray
---> 10 ds = xr.open_dataset('http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/'
11 f'ncep.reanalysis/pressure/{param}.{start_time[:4]}.nc')
13 # Create slice variables subset domain
14 time_slice = slice(start_time, end_time)
File ~/micromamba/envs/metpy-cookbook/lib/python3.13/site-packages/xarray/backends/api.py:760, in open_dataset(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, create_default_indexes, inline_array, chunked_array_type, from_array_kwargs, backend_kwargs, **kwargs)
748 decoders = _resolve_decoders_kwargs(
749 decode_cf,
750 open_backend_dataset_parameters=backend.open_dataset_parameters,
(...) 756 decode_coords=decode_coords,
757 )
759 overwrite_encoded_chunks = kwargs.pop("overwrite_encoded_chunks", None)
--> 760 backend_ds = backend.open_dataset(
761 filename_or_obj,
762 drop_variables=drop_variables,
763 **decoders,
764 **kwargs,
765 )
766 ds = _dataset_from_backend_dataset(
767 backend_ds,
768 filename_or_obj,
(...) 779 **kwargs,
780 )
781 return ds
File ~/micromamba/envs/metpy-cookbook/lib/python3.13/site-packages/xarray/backends/netCDF4_.py:682, in NetCDF4BackendEntrypoint.open_dataset(self, filename_or_obj, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta, group, mode, format, clobber, diskless, persist, auto_complex, lock, autoclose)
660 def open_dataset(
661 self,
662 filename_or_obj: T_PathFileOrDataStore,
(...) 679 autoclose=False,
680 ) -> Dataset:
681 filename_or_obj = _normalize_path(filename_or_obj)
--> 682 store = NetCDF4DataStore.open(
683 filename_or_obj,
684 mode=mode,
685 format=format,
686 group=group,
687 clobber=clobber,
688 diskless=diskless,
689 persist=persist,
690 auto_complex=auto_complex,
691 lock=lock,
692 autoclose=autoclose,
693 )
695 store_entrypoint = StoreBackendEntrypoint()
696 with close_on_error(store):
File ~/micromamba/envs/metpy-cookbook/lib/python3.13/site-packages/xarray/backends/netCDF4_.py:468, in NetCDF4DataStore.open(cls, filename, mode, format, group, clobber, diskless, persist, auto_complex, lock, lock_maker, autoclose)
464 kwargs["auto_complex"] = auto_complex
465 manager = CachingFileManager(
466 netCDF4.Dataset, filename, mode=mode, kwargs=kwargs
467 )
--> 468 return cls(manager, group=group, mode=mode, lock=lock, autoclose=autoclose)
File ~/micromamba/envs/metpy-cookbook/lib/python3.13/site-packages/xarray/backends/netCDF4_.py:398, in NetCDF4DataStore.__init__(self, manager, group, mode, lock, autoclose)
396 self._group = group
397 self._mode = mode
--> 398 self.format = self.ds.data_model
399 self._filename = self.ds.filepath()
400 self.is_remote = is_remote_uri(self._filename)
File ~/micromamba/envs/metpy-cookbook/lib/python3.13/site-packages/xarray/backends/netCDF4_.py:477, in NetCDF4DataStore.ds(self)
475 @property
476 def ds(self):
--> 477 return self._acquire()
File ~/micromamba/envs/metpy-cookbook/lib/python3.13/site-packages/xarray/backends/netCDF4_.py:471, in NetCDF4DataStore._acquire(self, needs_lock)
470 def _acquire(self, needs_lock=True):
--> 471 with self._manager.acquire_context(needs_lock) as root:
472 ds = _nc4_require_group(root, self._group, self._mode)
473 return ds
File ~/micromamba/envs/metpy-cookbook/lib/python3.13/contextlib.py:141, in _GeneratorContextManager.__enter__(self)
139 del self.args, self.kwds, self.func
140 try:
--> 141 return next(self.gen)
142 except StopIteration:
143 raise RuntimeError("generator didn't yield") from None
File ~/micromamba/envs/metpy-cookbook/lib/python3.13/site-packages/xarray/backends/file_manager.py:199, in CachingFileManager.acquire_context(self, needs_lock)
196 @contextlib.contextmanager
197 def acquire_context(self, needs_lock=True):
198 """Context manager for acquiring a file."""
--> 199 file, cached = self._acquire_with_cache_info(needs_lock)
200 try:
201 yield file
File ~/micromamba/envs/metpy-cookbook/lib/python3.13/site-packages/xarray/backends/file_manager.py:217, in CachingFileManager._acquire_with_cache_info(self, needs_lock)
215 kwargs = kwargs.copy()
216 kwargs["mode"] = self._mode
--> 217 file = self._opener(*self._args, **kwargs)
218 if self._mode == "w":
219 # ensure file doesn't get overridden when opened again
220 self._mode = "a"
File src/netCDF4/_netCDF4.pyx:2521, in netCDF4._netCDF4.Dataset.__init__()
File src/netCDF4/_netCDF4.pyx:2158, in netCDF4._netCDF4._ensure_nc_success()
OSError: [Errno -77] NetCDF: Access failure: 'http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/ncep.reanalysis/pressure/vwnd.2011.nc'
Make the Hovmoller Plot¶
Pretty simple to use common matplotlib/cartopy to create the diagram. Cartopy is used to create a geographic reference map to highlight the area being averaged as well as the visual reference for longitude.
# Build the two-panel figure: a small geographic reference map on top
# and the large Hovmoller diagram beneath it.
fig = plt.figure(figsize=(10, 13))

# gridspec sizes the panels: 1 part map, 6 parts diagram
gs = gridspec.GridSpec(nrows=2, ncols=1, height_ratios=[1, 6], hspace=0.03)

# Longitude tick labels shared by both panels
x_tick_labels = [f'{deg}\N{DEGREE SIGN}{hemi}'
                 for deg, hemi in [(0, 'E'), (90, 'E'), (180, 'E'),
                                   (90, 'W'), (0, 'E')]]

# Top panel: small map highlighting the latitude band that was averaged
ax1 = fig.add_subplot(gs[0, 0],
                      projection=ccrs.PlateCarree(central_longitude=180))
ax1.set_extent([0, 357.5, 35, 65], ccrs.PlateCarree(central_longitude=180))
ax1.set_yticks([40, 60])
ax1.set_yticklabels(['40\N{DEGREE SIGN}N', '60\N{DEGREE SIGN}N'])
ax1.set_xticks([-180, -90, 0, 90, 180])
ax1.set_xticklabels(x_tick_labels)
ax1.grid(linestyle='dotted', linewidth=2)

# Geopolitical boundaries for visual reference
ax1.add_feature(cfeature.COASTLINE.with_scale('50m'))
ax1.add_feature(cfeature.LAKES.with_scale('50m'), color='black',
                linewidths=0.5)

# Titles for the map panel
plt.title('Hovmoller Diagram', loc='left')
plt.title('NCEP/NCAR Reanalysis', loc='right')

# Bottom panel: the Hovmoller diagram itself
ax2 = fig.add_subplot(gs[1, 0])
ax2.invert_yaxis()  # oldest time at the top

# Smooth the latitude-averaged field once and reuse it for both the
# filled and the line contours (same result as smoothing twice)
smoothed = mpcalc.smooth_n_point(avg_data, 9, 2)
contour_levels = np.arange(-50, 51, 5)
fill = ax2.contourf(lons, vtimes, smoothed, contour_levels,
                    cmap=plt.cm.bwr, extend='both')
ax2.contour(lons, vtimes, smoothed, contour_levels,
            colors='k', linewidths=1)
cb = plt.colorbar(fill, orientation='horizontal', pad=0.04, aspect=50,
                  extendrect=True)
cb.set_label('m $s^{-1}$')

# Ticks and tick labels for the diagram
ax2.set_xticks([0, 90, 180, 270, 357.5])
ax2.set_xticklabels(x_tick_labels)
ax2.set_yticks(vtimes[4::8])
ax2.set_yticklabels(vtimes[4::8])

# Titles for the diagram panel
plt.title('250-hPa V-wind', loc='left', fontsize=10)
plt.title(f'Time Range: {vtimes[0]:%Y%m%d %HZ} - {vtimes[-1]:%Y%m%d %HZ}',
          loc='right', fontsize=10)