
Clouds over SGP for April 4, 2019

Looking at LASSO data for April 4, 2019 to examine meteorological fields and calculate cloud base and cloud top.


Imports

from datetime import datetime
import numpy as np
import xarray as xr
import fsspec
import xwrf

import matplotlib.pyplot as plt

Bring in the data

Here is the raw model output from LASSO.

# Set the URL and path for the cloud
URL = 'https://js2.jetstream-cloud.org:8001/'
path = 'pythia/lasso-sgp'

# Configure the s3-like storage endpoint on jetstream
fs = fsspec.filesystem("s3", anon=True, client_kwargs=dict(endpoint_url=URL))

# Set the analysis date and simulation number
case_date = datetime(2019, 4, 4)
sim_id = 7

# Read the wrfstat files
wrfstat_pattern = f's3://{path}/sim000{sim_id}/raw_model/wrfstat*'
wrfstat_files = sorted(fs.glob(wrfstat_pattern))

# Open remote file-like objects for each matching file
wrfstat_file_list = [fs.open(file) for file in wrfstat_files]
wrfstat_file_list
[<File-like object S3FileSystem, pythia/lasso-sgp/sim0007/raw_model/wrfstat_d01_2019-04-04_12:00:00.nc>]
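If you want to see what else is stored alongside these files, the same filesystem object can browse the bucket; a quick sketch (the directory layout is assumed from the path above):

# Sketch: list the contents of this simulation's prefix on the object store
fs.ls(f'{path}/sim000{sim_id}')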

Load into an xarray.Dataset

ds_stat = xr.open_mfdataset(wrfstat_file_list, engine='h5netcdf')

# Assign XTIME as the Time coordinate - in this case, we are not using xwrf to clean the dataset
ds_stat["Time"] = ds_stat["XTIME"]
ds_stat
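Since xwrf is imported above but not used here, it is worth noting the alternative: xwrf's postprocess() accessor attaches CF-style metadata and coordinates to WRF output. This is only a sketch; how well it handles wrfstat statistics files (as opposed to standard wrfout files) has not been verified here.

# Sketch of the xwrf-based alternative (not used in the rest of this notebook)
ds_xwrf = xr.open_mfdataset(wrfstat_file_list, engine='h5netcdf').xwrf.postprocess()
ds_xwrf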

Find the vertical index associated with the boundary layer depth, in case we happen to care about that later

ds_stat["bottom_top"] = ds_stat.bottom_top
ds_stat
ki = ds_stat['CSP_THL'].idxmin(dim='bottom_top')
ki.load()
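If we want the boundary-layer depth as a height rather than a level index, one option is to look the index up in the level heights at an early time step. This is only a sketch, not part of the original workflow, and zi_height is an illustrative name:

# Sketch: convert the level index ki into an approximate height (m) using the
# level heights at an early time step (drop=True avoids a conflicting scalar Time coord)
zi_height = ds_stat['CSP_Z'].isel(Time=1, drop=True).isel(bottom_top=ki.astype(int))
zi_height.load()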

Let’s look at some meteorological info for this date

# Plot time-height profiles of CSP_QL, CSP_LWC, and CSP_TH, skipping the first 6 output times
plot_ql = ds_stat['CSP_QL'].assign_coords(height=ds_stat['CSP_Z'])
plot_ql.isel(Time=slice(6, None)).plot(x='Time', y='height', ylim=[0, 7000])
plt.show()

plot_lwc = ds_stat['CSP_LWC'].assign_coords(height=ds_stat['CSP_Z'])
plot_lwc.isel(Time=slice(6, None)).plot(x='Time', y='height', ylim=[0, 7000])
plt.show()

plot_th = ds_stat['CSP_TH'].assign_coords(height=ds_stat['CSP_Z'])
plot_th.isel(Time=slice(6, None)).plot(x='Time', y='height', ylim=[0, 7000], vmin=298, vmax=320)
plt.show()
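The same three profiles can also be stacked on a single figure; a minimal sketch reusing the variables above (panel layout and figure size are arbitrary choices):

# Sketch: the three profiles as stacked panels on one figure
fig, axes = plt.subplots(3, 1, figsize=(8, 10), sharex=True)
for ax, var in zip(axes, ['CSP_QL', 'CSP_LWC', 'CSP_TH']):
    profile = ds_stat[var].assign_coords(height=ds_stat['CSP_Z'])
    profile.isel(Time=slice(6, None)).plot(x='Time', y='height', ylim=[0, 7000], ax=ax)
plt.tight_layout()
plt.show()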

Fix the height coordinate so that we can plot

The model level heights (CSP_Z) are time dependent, so we approximate them by assuming a single early time step is close enough

We also assign those heights as the bottom_top coordinate values so that the idxmax/idxmin calls below return heights rather than level indices

ds_stat["bottom_top"] = ds_stat.bottom_top
ds_stat['bottom_top'] = ds_stat['CSP_Z'].isel(Time = 1).values
ds_stat['bottom_top'].values # make sure that these are heights and not indicies
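To check how much error the fixed-height assumption introduces, we can look at how far the level heights drift from the chosen time step over the rest of the simulation; a quick sketch:

# Sketch: maximum drift (m) of each level height relative to the time step used above;
# small values support treating the heights as constant in time
height_drift = abs(ds_stat['CSP_Z'] - ds_stat['CSP_Z'].isel(Time=1, drop=True)).max(dim='Time')
height_drift.load()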

Calculate cloud base and top from the liquid water content and the liquid water mixing ratio
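Both calculations below lean on a small trick: calling idxmax on a boolean array returns the bottom_top coordinate label of the first True value along that dimension. A minimal, self-contained illustration with made-up heights and values:

# Toy example of the idxmax-on-boolean trick (heights and values are made up)
demo = xr.DataArray(
    [0.0, 0.0, 0.2, 0.5, 0.0],
    dims='bottom_top',
    coords={'bottom_top': [100.0, 400.0, 800.0, 1200.0, 1600.0]},  # heights in m
)
(demo > 0).idxmax(dim='bottom_top')  # returns 800.0, the lowest "cloudy" level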

# Cloud base from LWC: height of the lowest level with LWC > 0;
# mask times where idxmax falls back to the lowest level (no cloud found)
ds_stat['cb_lwc'] = (ds_stat['CSP_LWC'] > 0).idxmax(dim='bottom_top')
ds_stat['cb_lwc'] = ds_stat['cb_lwc'].where(ds_stat['cb_lwc'] > ds_stat['bottom_top'][0])
print(ds_stat['cb_lwc'])

# Cloud top from LWC: same trick on the profile flipped top-to-bottom,
# masking times where idxmax falls back to the highest level
ds_stat['ct_lwc'] = ((ds_stat['CSP_LWC'].isel(bottom_top=slice(None, None, -1))) > 0).idxmax(dim='bottom_top')
ds_stat['ct_lwc'] = ds_stat['ct_lwc'].where(ds_stat['ct_lwc'] < ds_stat['bottom_top'][-1])
print(ds_stat['ct_lwc'])

ds_stat['cb_lwc'].plot(label='base', ylim=(0, 7000), xlim=(ds_stat['CSP_Z'].Time[6], ds_stat['CSP_Z'].Time[-1]))
ds_stat['ct_lwc'].plot(label='top', ylim=(0, 7000), xlim=(ds_stat['CSP_Z'].Time[6], ds_stat['CSP_Z'].Time[-1]))
plt.legend()
plt.ylabel('Height (m)')
plt.xlabel('Time (UTC)')
plt.show()
# Repeat using the liquid water mixing ratio (CSP_QL) instead of CSP_LWC
ds_stat['cb_ql'] = (ds_stat['CSP_QL'] > 0).idxmax(dim='bottom_top')
ds_stat['cb_ql'] = ds_stat['cb_ql'].where(ds_stat['cb_ql'] > ds_stat['bottom_top'][0])
print(ds_stat['cb_ql'].load())

ds_stat['ct_ql'] = ((ds_stat['CSP_QL'].isel(bottom_top=slice(None, None, -1))) > 0).idxmax(dim='bottom_top')
ds_stat['ct_ql'] = ds_stat['ct_ql'].where(ds_stat['ct_ql'] < ds_stat['bottom_top'][-1])
print(ds_stat['ct_ql'].load())
ds_stat['cb_ql'].plot(label='base', ylim=(0, 7000), xlim=(ds_stat['CSP_Z'].Time[6], ds_stat['CSP_Z'].Time[-1]))
ds_stat['ct_ql'].plot(label='top', ylim=(0, 7000), xlim=(ds_stat['CSP_Z'].Time[6], ds_stat['CSP_Z'].Time[-1]))
plt.legend()
plt.ylabel('Height (m)')
plt.xlabel('Time (UTC)')
plt.show()
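To make the comparison between the two sets of estimates explicit, they can be overlaid on one set of axes; a short sketch reusing the variables defined above:

# Sketch: overlay the LWC- and QL-based estimates to compare them directly
ds_stat['cb_lwc'].plot(label='base (LWC)')
ds_stat['cb_ql'].plot(label='base (QL)', linestyle='--')
ds_stat['ct_lwc'].plot(label='top (LWC)')
ds_stat['ct_ql'].plot(label='top (QL)', linestyle='--')
plt.legend()
plt.ylabel('Height (m)')
plt.xlabel('Time (UTC)')
plt.show()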

Conclusions

Notice how similar the cloud base and top are at their beginning and end times! This framework enables a streamlined way to analyze clouds in the simulation data, including derived quantities such as cloud base and cloud top.