
https://pyresample.readthedocs.io/en/latest/

This package is a bit more specialized and does not have as tight an integration with xarray as xESMF and Verde do. If you work with satellite or swath data, though, this is not one to miss! This package integrates with Satpy https://satpy.readthedocs.io/en/stable/ .

(More) integration with xarray

Prerequisites

Knowing your way around xarray and NumPy is beneficial. This notebook is not designed to be an introduction to either of those packages. We would recommend working through the xESMF notebook before this one!

Imports

import os

import appdirs
import dask.array as da
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import xarray as xr
from xarray import DataArray

import pyresample
from pyresample import geometry, image
from pyresample.bilinear import NumpyBilinearResampler, XArrayBilinearResampler

%load_ext watermark
%watermark --iversions
pandas    : 2.3.2
numpy     : 2.2.6
dask      : 2025.7.0
pyresample: 1.34.2
appdirs   : 1.4.4
matplotlib: 3.10.6
sys       : 3.13.7 | packaged by conda-forge | (main, Sep  3 2025, 14:30:35) [GCC 14.3.0]
xarray    : 2025.9.0

Loading in one netCDF

file = '../data/onestorm.nc'

Let’s open this file with xarray:

ds = xr.open_dataset(file)
ds

Trying to do the same thing with pyresample:

from pyresample.utils import load_cf_area
area_def, cf_info = load_cf_area('data/onestorm.nc', variable='visible', x='x', y='y')
---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
File ~/micromamba/envs/gridding-cookbook-dev/lib/python3.13/site-packages/xarray/backends/file_manager.py:211, in CachingFileManager._acquire_with_cache_info(self, needs_lock)
    210 try:
--> 211     file = self._cache[self._key]
    212 except KeyError:

File ~/micromamba/envs/gridding-cookbook-dev/lib/python3.13/site-packages/xarray/backends/lru_cache.py:56, in LRUCache.__getitem__(self, key)
     55 with self._lock:
---> 56     value = self._cache[key]
     57     self._cache.move_to_end(key)

KeyError: [<class 'netCDF4._netCDF4.Dataset'>, ('/home/runner/work/gridding-cookbook/gridding-cookbook/notebooks/data/onestorm.nc',), 'r', (('clobber', True), ('diskless', False), ('format', 'NETCDF4'), ('persist', False)), '64f9b9cc-f27e-489f-8831-cbb515059ace']

During handling of the above exception, another exception occurred:

FileNotFoundError                         Traceback (most recent call last)
Cell In[6], line 1
----> 1 area_def, cf_info = load_cf_area('data/onestorm.nc', variable='visible', x='x', y='y')

File ~/micromamba/envs/gridding-cookbook-dev/lib/python3.13/site-packages/pyresample/utils/cf.py:444, in load_cf_area(nc_file, variable, y, x)
    441 if (x is not None and y is None) or (x is None and y is not None):
    442     raise ValueError("You must specify both or neither of x= and y=")
--> 444 nc_handle = _open_nc_file(nc_file)
    445 if variable is None:
    446     # if the variable=None, we search through all variables
    447     area_def, cf_info = _load_cf_area_several_variables(nc_handle)

File ~/micromamba/envs/gridding-cookbook-dev/lib/python3.13/site-packages/pyresample/utils/cf.py:478, in _open_nc_file(nc_file)
    475 if isinstance(nc_file, xr.Dataset):
    476     return nc_file
--> 478 return xr.open_dataset(nc_file)

File ~/micromamba/envs/gridding-cookbook-dev/lib/python3.13/site-packages/xarray/backends/api.py:760, in open_dataset(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, create_default_indexes, inline_array, chunked_array_type, from_array_kwargs, backend_kwargs, **kwargs)
    748 decoders = _resolve_decoders_kwargs(
    749     decode_cf,
    750     open_backend_dataset_parameters=backend.open_dataset_parameters,
   (...)    756     decode_coords=decode_coords,
    757 )
    759 overwrite_encoded_chunks = kwargs.pop("overwrite_encoded_chunks", None)
--> 760 backend_ds = backend.open_dataset(
    761     filename_or_obj,
    762     drop_variables=drop_variables,
    763     **decoders,
    764     **kwargs,
    765 )
    766 ds = _dataset_from_backend_dataset(
    767     backend_ds,
    768     filename_or_obj,
   (...)    779     **kwargs,
    780 )
    781 return ds

File ~/micromamba/envs/gridding-cookbook-dev/lib/python3.13/site-packages/xarray/backends/netCDF4_.py:682, in NetCDF4BackendEntrypoint.open_dataset(self, filename_or_obj, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta, group, mode, format, clobber, diskless, persist, auto_complex, lock, autoclose)
    660 def open_dataset(
    661     self,
    662     filename_or_obj: T_PathFileOrDataStore,
   (...)    679     autoclose=False,
    680 ) -> Dataset:
    681     filename_or_obj = _normalize_path(filename_or_obj)
--> 682     store = NetCDF4DataStore.open(
    683         filename_or_obj,
    684         mode=mode,
    685         format=format,
    686         group=group,
    687         clobber=clobber,
    688         diskless=diskless,
    689         persist=persist,
    690         auto_complex=auto_complex,
    691         lock=lock,
    692         autoclose=autoclose,
    693     )
    695     store_entrypoint = StoreBackendEntrypoint()
    696     with close_on_error(store):

File ~/micromamba/envs/gridding-cookbook-dev/lib/python3.13/site-packages/xarray/backends/netCDF4_.py:468, in NetCDF4DataStore.open(cls, filename, mode, format, group, clobber, diskless, persist, auto_complex, lock, lock_maker, autoclose)
    464     kwargs["auto_complex"] = auto_complex
    465 manager = CachingFileManager(
    466     netCDF4.Dataset, filename, mode=mode, kwargs=kwargs
    467 )
--> 468 return cls(manager, group=group, mode=mode, lock=lock, autoclose=autoclose)

File ~/micromamba/envs/gridding-cookbook-dev/lib/python3.13/site-packages/xarray/backends/netCDF4_.py:398, in NetCDF4DataStore.__init__(self, manager, group, mode, lock, autoclose)
    396 self._group = group
    397 self._mode = mode
--> 398 self.format = self.ds.data_model
    399 self._filename = self.ds.filepath()
    400 self.is_remote = is_remote_uri(self._filename)

File ~/micromamba/envs/gridding-cookbook-dev/lib/python3.13/site-packages/xarray/backends/netCDF4_.py:477, in NetCDF4DataStore.ds(self)
    475 @property
    476 def ds(self):
--> 477     return self._acquire()

File ~/micromamba/envs/gridding-cookbook-dev/lib/python3.13/site-packages/xarray/backends/netCDF4_.py:471, in NetCDF4DataStore._acquire(self, needs_lock)
    470 def _acquire(self, needs_lock=True):
--> 471     with self._manager.acquire_context(needs_lock) as root:
    472         ds = _nc4_require_group(root, self._group, self._mode)
    473     return ds

File ~/micromamba/envs/gridding-cookbook-dev/lib/python3.13/contextlib.py:141, in _GeneratorContextManager.__enter__(self)
    139 del self.args, self.kwds, self.func
    140 try:
--> 141     return next(self.gen)
    142 except StopIteration:
    143     raise RuntimeError("generator didn't yield") from None

File ~/micromamba/envs/gridding-cookbook-dev/lib/python3.13/site-packages/xarray/backends/file_manager.py:199, in CachingFileManager.acquire_context(self, needs_lock)
    196 @contextlib.contextmanager
    197 def acquire_context(self, needs_lock=True):
    198     """Context manager for acquiring a file."""
--> 199     file, cached = self._acquire_with_cache_info(needs_lock)
    200     try:
    201         yield file

File ~/micromamba/envs/gridding-cookbook-dev/lib/python3.13/site-packages/xarray/backends/file_manager.py:217, in CachingFileManager._acquire_with_cache_info(self, needs_lock)
    215     kwargs = kwargs.copy()
    216     kwargs["mode"] = self._mode
--> 217 file = self._opener(*self._args, **kwargs)
    218 if self._mode == "w":
    219     # ensure file doesn't get overridden when opened again
    220     self._mode = "a"

File src/netCDF4/_netCDF4.pyx:2521, in netCDF4._netCDF4.Dataset.__init__()

File src/netCDF4/_netCDF4.pyx:2158, in netCDF4._netCDF4._ensure_nc_success()

FileNotFoundError: [Errno 2] No such file or directory: '/home/runner/work/gridding-cookbook/gridding-cookbook/notebooks/data/onestorm.nc'

This call is expected to fail. We will discuss the pros and cons of this behavior in the summary.
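When load_cf_area cannot be used (for example, because a file lacks CF grid-mapping metadata), a common workaround is to build a SwathDefinition directly from the latitude/longitude coordinates of the dataset we already opened with xarray. A minimal sketch, assuming the dataset has two-dimensional latitude and longitude variables named 'lat' and 'lon' (those names are assumptions, not taken from this file):

# Hypothetical workaround: build a SwathDefinition from lat/lon arrays.
# 'lat' and 'lon' are assumed variable names; adjust them to match the file.
swath_def = geometry.SwathDefinition(lons=ds['lon'].values, lats=ds['lat'].values)
print(swath_def.shape)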

Resampling of gridded data using pyresample

The tutorial we follow is here: https://pyresample.readthedocs.io/en/latest/swath.html#pyresample-bilinear

We will be deconstructing it a bit to get into the details, but all of the code is from the above link.

target_def = geometry.AreaDefinition('areaD',
                                     'Europe (3km, HRV, VTC)',
                                     'areaD',
                                     {'a': '6378144.0', 'b': '6356759.0',
                                      'lat_0': '50.00', 'lat_ts': '50.00',
                                      'lon_0': '8.00', 'proj': 'stere'},
                                     800, 800,
                                     [-1370912.72, -909968.64,
                                      1029087.28, 1490031.36])
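
An AreaDefinition carries its own grid metadata. As a quick sketch of what is available through pyresample's geometry API, we can inspect the pixel size and generate the longitude/latitude arrays of the target grid:

# Inspect the target grid: pixel size in projection units and the lon/lat arrays it implies.
print(target_def.pixel_size_x, target_def.pixel_size_y)
target_lons, target_lats = target_def.get_lonlats()
print(target_lons.shape, target_lats.shape)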

Unlike xESMF, this geometry definition does not depend on or work with xarray:

print('target def type', type(target_def))
target def type <class 'pyresample.geometry.AreaDefinition'>
data = DataArray(da.from_array(np.fromfunction(lambda y, x: y*x, (500, 100))), dims=('y', 'x'))
type(data)
xarray.core.dataarray.DataArray
lons = da.from_array(np.fromfunction(lambda y, x: 3 + x * 0.1, (500, 100)))
lats = da.from_array(np.fromfunction(lambda y, x: 75 - y * 0.1, (500, 100)))
source_def = geometry.SwathDefinition(lons=lons, lats=lats)
resampler = XArrayBilinearResampler(source_def, target_def, 30e3)
result = resampler.resample(data)
type(result)
/home/runner/micromamba/envs/gridding-cookbook-dev/lib/python3.13/site-packages/pyproj/crs/crs.py:1295: UserWarning: You will likely lose important projection information when converting to a PROJ string from another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems
  proj = self._crs.to_proj4(version=version)
xarray.core.dataarray.DataArray
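
Since the resampled result is a DataArray on the target grid, a quick sanity check is to plot it with matplotlib (this plotting sketch is ours, not part of the pyresample tutorial):

# Quick-look plot of the bilinearly resampled field on the target grid.
fig, ax = plt.subplots(figsize=(6, 5))
result.plot(ax=ax)
ax.set_title('Bilinear resampling onto the target stereographic grid')
plt.show()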

Because the result is an xarray DataArray, we can export it to an xarray Dataset:

result.to_dataset()
data.to_dataset()
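For comparison, NumpyBilinearResampler (imported above) performs the same resampling without xarray or dask. A minimal sketch following the same tutorial pattern:

# NumPy-only variant: same source/target geometry, plain ndarray in and out.
numpy_data = np.fromfunction(lambda y, x: y * x, (500, 100))
numpy_resampler = NumpyBilinearResampler(source_def, target_def, 30e3)
numpy_result = numpy_resampler.resample(numpy_data)
print(type(numpy_result), numpy_result.shape)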

Summary

Pyresample is a specialist package with strong ties to Satpy. We would recommend it if swath or satellite-image data is part of your normal workflow. For other users, the requirement that data be CF compliant and the lower-level API can be a hurdle.