Analysis of Surface Fields
mom6_tools.MOM6grid returns an object with MOM6 grid data.
mom6_tools.latlon_analysis has a collection of tools used to perform spatial analysis (e.g., time averages and spatial means).
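A minimal sketch of how MOM6grid is typically called is shown below; the file name and the variable names listed (e.g., geolon, geolat, areacello) are assumptions about a typical MOM6 static file rather than something this notebook guarantees.
# Minimal usage sketch (illustrative; 'ocean_static.nc' is a placeholder path)
from mom6_tools.MOM6grid import MOM6grid
grd = MOM6grid('ocean_static.nc')                     # object with numpy-array attributes
grd_xr = MOM6grid('ocean_static.nc', xrformat=True)   # same grid as an xarray.Dataset
print(list(grd_xr.data_vars))                         # e.g., geolon, geolat, areacello, wet (names may vary)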
The goals of this notebook are the following:
serve as an example of how to post-process CESM/MOM6 output;
create time averages of surface fields;
create time-series of globally-averaged surface fields (a sketch of these last two steps appears after the data selection below).
%load_ext autoreload
%autoreload 2
import xarray as xr
import numpy as np
import matplotlib.pyplot as plt
import warnings, os, yaml, argparse
import pandas as pd
import dask, intake
from datetime import datetime, date
from ncar_jobqueue import NCARCluster
from dask.distributed import Client
from mom6_tools.DiagsCase import DiagsCase
from mom6_tools.m6toolbox import add_global_attrs
from mom6_tools.m6plot import xycompare, xyplot
from mom6_tools.MOM6grid import MOM6grid
from mom6_tools.surface import get_SSH, get_MLD, get_BLD
warnings.filterwarnings("ignore")
Basemap module not found. Some regional plots may not function properly
# Read in the yaml file
diag_config_yml_path = "diag_config.yml"
diag_config_yml = yaml.load(open(diag_config_yml_path,'r'), Loader=yaml.Loader)
# load avg dates
avg = diag_config_yml['Avg']
# Create the case instance
dcase = DiagsCase(diag_config_yml['Case'])
DOUT_S = dcase.get_value('DOUT_S')
if DOUT_S:
    OUTDIR = dcase.get_value('DOUT_S_ROOT')+'/ocn/hist/'
else:
    OUTDIR = dcase.get_value('RUNDIR')+'/'
print('Output directory is:', OUTDIR)
print('Casename is:', dcase.casename)
Output directory is: /glade/derecho/scratch/hannay/archive/b.e23_alpha16b.BLT1850.ne30_t232.054/ocn/hist/
Casename is: b.e23_alpha16b.BLT1850.ne30_t232.054
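For reference, the sections this notebook reads from diag_config.yml are Case (passed to DiagsCase), Avg (start_date and end_date), Fnames (native and static file suffixes), and oce_cat (an intake catalog). A quick sanity check, sketched here under those assumptions:
# Sanity-check sketch: verify the config sections/keys used later in this notebook
required = {'Case', 'Avg', 'Fnames', 'oce_cat'}
missing = required - diag_config_yml.keys()
if missing:
    raise KeyError('diag_config.yml is missing section(s): {}'.format(missing))
for key in ('start_date', 'end_date'):
    assert key in diag_config_yml['Avg'], "Avg section needs '{}'".format(key)
for key in ('native', 'static'):
    assert key in diag_config_yml['Fnames'], "Fnames section needs '{}'".format(key)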
# The following parameters must be set accordingly
######################################################
# create an empty class object
class args:
    pass
args.start_date = avg['start_date']
args.end_date = avg['end_date']
args.casename = dcase.casename
args.native = dcase.casename+diag_config_yml['Fnames']['native']
args.static = dcase.casename+diag_config_yml['Fnames']['static']
args.mld_obs = "mld-deboyer-tx2_3v2"
args.savefigs = False
args.nw = 6 # requesting 6 workers
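The empty class above is simply a mutable attribute container; an equivalent pattern using argparse.Namespace (already imported) is sketched below for reference.
# Illustrative alternative: argparse.Namespace builds the same attribute container
# in one call (values mirror the cell above); uncomment to use it instead.
# args = argparse.Namespace(
#     start_date=avg['start_date'], end_date=avg['end_date'],
#     casename=dcase.casename,
#     native=dcase.casename + diag_config_yml['Fnames']['native'],
#     static=dcase.casename + diag_config_yml['Fnames']['static'],
#     mld_obs='mld-deboyer-tx2_3v2', savefigs=False, nw=6,
# )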
# Parameters
test_global_param = "hello"
sname = "adf-quick-run"
subset_kwargs = {}
product = "/glade/u/home/hannay/CUPiD/examples/adf-mom6/computed_notebooks/adf-quick-run/surface.ipynb"
if not os.path.isdir('PNG/BLD'):
    print('Creating a directory to place figures (PNG/BLD)... \n')
    os.system('mkdir -p PNG/BLD')
if not os.path.isdir('PNG/MLD'):
    print('Creating a directory to place figures (PNG/MLD)... \n')
    os.system('mkdir -p PNG/MLD')
if not os.path.isdir('ncfiles'):
    print('Creating a directory to place netcdf files (ncfiles)... \n')
    os.system('mkdir ncfiles')
parallel = False
if args.nw > 1:
    parallel = True
    cluster = NCARCluster(interface='ext')
    cluster.scale(args.nw)
    client = Client(cluster)
client
[Dask Client summary: connected to a dask_jobqueue.PBSCluster (job 04975794); dashboard at /proxy/8787/status; 0 workers at startup]
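If NCARCluster is not available (e.g., when running outside the NCAR machines), a local Dask cluster is a drop-in substitute; the snippet below is a sketch under that assumption, not part of the workflow above.
# Hypothetical fallback for machines without PBS/NCARCluster: a local Dask cluster.
# from dask.distributed import LocalCluster
# cluster = LocalCluster(n_workers=args.nw, threads_per_worker=1)
# client = Client(cluster)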
# load mom6 grid
grd = MOM6grid(OUTDIR+args.static)
grd_xr = MOM6grid(OUTDIR+args.static, xrformat=True)
MOM6 grid successfully loaded...
MOM6 grid successfully loaded...
print('Reading native dataset...')
startTime = datetime.now()
def preprocess(ds):
    '''Return the dataset subset to the variables of interest, filling any missing ones with zeros.'''
    variables = ['oml', 'mlotst', 'tos', 'SSH', 'SSU', 'SSV', 'speed', 'time_bnds']
    for v in variables:
        if v not in ds.variables:
            ds[v] = xr.zeros_like(ds.SSH)
    return ds[variables]

ds1 = xr.open_mfdataset(OUTDIR+args.native, parallel=parallel)
ds = preprocess(ds1)
print('Time elapsed: ', datetime.now() - startTime)
Reading native dataset...
Time elapsed: 0:02:41.643157
print('Selecting data between {} and {}...'.format(args.start_date, args.end_date))
ds_sel = ds.sel(time=slice(args.start_date, args.end_date))
Selecting data between 0091-01-01 and 0101-01-01...
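The remaining goals listed at the top (time averages and globally-averaged time series) can be sketched directly with xarray. The snippet below assumes ds_sel and grd_xr from the cells above, a cell-area variable named areacello in the static dataset, and horizontal dimensions named yh/xh; those names are assumptions about typical MOM6 output, so adjust them to your case.
# Sketch: time-mean SST map and an area-weighted global-mean SST time series.
# Assumes ds_sel and grd_xr from above; 'areacello' and the ('yh', 'xh') dims are
# assumed names for the cell area and horizontal axes.
tos_mean = ds_sel['tos'].mean(dim='time')            # time average of SST
tos_mean.plot(figsize=(10, 4))
plt.title('Time-mean SST, {} to {}'.format(args.start_date, args.end_date))

area = grd_xr['areacello'].fillna(0)                 # tracer-cell area [m2]
tos_global = ds_sel['tos'].weighted(area).mean(dim=('yh', 'xh'))
tos_global.plot()                                    # globally averaged time series
plt.title('Global-mean SST')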
catalog = intake.open_catalog(diag_config_yml['oce_cat'])
mld_obs = catalog[args.mld_obs].to_dask()
# uncomment to list all datasets available
#list(catalog)
Mixed layer depth
%matplotlib inline
# MLD
get_MLD(ds,'mlotst', mld_obs, grd, args)
Computing monthly MLD climatology...
Time elapsed: 0:00:32.460716
Plotting...
Boundary layer depth
get_BLD(ds, 'oml', grd, args)
Computing monthly BLD climatology...
Time elapsed: 0:00:20.588897
Plotting...
# SSH (not working)
#get_SSH(ds, 'SSH', grd, args)
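While get_SSH is not working, a manual stop-gap is to look at the time-mean SSH directly; the sketch below assumes ds_sel from the selection cell above.
# Stop-gap sketch while get_SSH is disabled: time-mean SSH plotted with xarray.
ssh_mean = ds_sel['SSH'].mean(dim='time')
ssh_mean.plot(robust=True, figsize=(10, 4))
plt.title('Time-mean SSH, {} to {}'.format(args.start_date, args.end_date))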
if parallel:
    print('\n Releasing workers...')
    client.close(); cluster.close()
Releasing workers...