Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion gfdlvitals/diags/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""Supplemental diagnostic routines"""

from . import acc
from . import amoc
from . import fms

__all__ = ["amoc", "fms"]
__all__ = ["acc", "amoc", "fms"]
115 changes: 115 additions & 0 deletions gfdlvitals/diags/acc.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
""" Routine for calculating ACC """

import warnings
import sectionate
import xgcm

import numpy as np
import xarray as xr
from gfdlvitals.util import gmeantools
from gfdlvitals.util.netcdf import tar_member_exists
from gfdlvitals.util.netcdf import extract_from_tar
from gfdlvitals.util.netcdf import in_mem_xr

__all__ = ["mom6_acc"]

def mom6_acc(fyear, tar, label="Ocean", outdir="./"):
    """Driver for ACC (Antarctic Circumpolar Current) calculation
    in MOM6-class models.

    Computes the net convergent mass transport across a section spanning
    Drake Passage and writes the scalar result to the annual-mean SQLite
    database as ``acc_drake``.

    Parameters
    ----------
    fyear : str
        Year label (YYYY)
    tar : tarfile
        In-memory history tarfile object
    label : str
        SQLite output stream name
    outdir : str, path-like
        Path to output SQLite file
    """

    member = f"{fyear}.ocean_annual_z.nc"
    static = f"{fyear}.ocean_static.nc"

    annual_file = (
        extract_from_tar(tar, member, ncfile=True)
        if tar_member_exists(tar, member)
        else None
    )
    static_file = (
        extract_from_tar(tar, static, ncfile=True)
        if tar_member_exists(tar, static)
        else None
    )

    # Guard clause: both the transport file and the static grid file
    # must be present in the history tar to proceed
    if annual_file is None or static_file is None:
        warnings.warn("ACC calculation requires ocean_static and ocean_annual_z")
        return

    # open the Dataset with the transports
    ds = in_mem_xr(annual_file)

    # open the static (grid) dataset with geolon/geolat coordinates
    # NOTE(review): the static file may carry a spurious time dimension;
    # the merge with compat="override" below side-steps any conflict
    dss = in_mem_xr(static_file)

    # keep only the transport fields, then merge in the grid information
    ds = ds[["umo", "vmo", "z_i"]]
    ds = xr.merge([ds, dss], compat="override", join="outer", combine_attrs="override")

    required_vars = ["umo", "vmo", "z_l", "z_i"]

    if not set(required_vars).issubset(ds.variables):
        warnings.warn(f"{required_vars} are required to calculate ACC")
        return

    # promote geographic coordinates so xgcm/sectionate can locate them
    ds = ds.assign_coords({
        "geolon_c": ds.geolon_c,
        "geolat_c": ds.geolat_c,
        "geolon_v": ds.geolon_v,
        "geolat_v": ds.geolat_v,
        "geolon_u": ds.geolon_u,
        "geolat_u": ds.geolat_u,
    })

    # A symmetric-memory grid carries one extra corner point along each
    # axis (len(yq) > len(yh)); choose the xgcm axis positions accordingly
    if len(ds["yq"]) > len(ds["yh"]):
        coords = {
            "X": {"center": "xh", "outer": "xq"},
            "Y": {"center": "yh", "outer": "yq"},
        }
    else:
        coords = {
            "X": {"center": "xh", "right": "xq"},
            "Y": {"center": "yh", "right": "yq"},
        }

    grid = xgcm.Grid(
        ds,
        coords=coords,
        boundary={"X": "periodic", "Y": "extend"},
        autoparse_metadata=False,
    )

    # Endpoints of a section spanning Drake Passage (lon, lat in degrees)
    drake_section_lats = [-67.46, -54.76]
    drake_section_lons = [-68.29, -66.32]

    i_c, j_c, lons_c, lats_c = sectionate.grid_section(
        grid,
        drake_section_lons,
        drake_section_lats,
        topology="MOM-tripolar",
    )

    transport = sectionate.convergent_transport(grid, i_c, j_c)

    # Sum the convergent mass transport across the section; the 1e-9
    # factor converts kg s-1 to Sv (presumably assuming 1000 kg m-3 —
    # TODO confirm against sectionate's output units)
    acc = float(transport.conv_mass_transport.sum() * 1.0e-9)

    # -- Write to sqlite
    gmeantools.write_sqlite_data(
        outdir + "/" + fyear + ".globalAve" + label + ".db",
        "acc_drake",
        fyear[:4],
        acc,
    )

    print(f"ACC: {acc}")
9 changes: 9 additions & 0 deletions gfdlvitals/models/CM4.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ def routines(args, infile):

# -- Get list of components to process
comps = args.component
print(f"Requested Components: {comps}")

# -- Atmospheric Fields
modules = {
Expand Down Expand Up @@ -123,6 +124,14 @@ def routines(args, infile):
print("\n\n# -----\n# AMOC vitals failed\n# -----\n\n")
print(exc)

# -- ACC
if any(comp in comps for comp in ["acc", "all"]):
try:
diags.acc.mom6_acc(fyear, tar)
except Exception as exc:
print("\n\n# -----\n# ACC vitals failed\n# -----\n\n")
print(exc)

# -- Close out the tarfile handle
tar.close()

Expand Down
7 changes: 6 additions & 1 deletion gfdlvitals/util/merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,18 +20,23 @@ def merge(source, destination):
sql = "ATTACH '" + source + "' as src"
cur.execute(sql)
cur.close()

cur = con.cursor()
sql = "SELECT * FROM main.sqlite_master WHERE type='table'"
cur.execute(sql)
dst_tables = cur.fetchall()
cur.close()

cur = con.cursor()
sql = "SELECT * FROM src.sqlite_master WHERE type='table'"
cur.execute(sql)
src_tables = cur.fetchall()
cur.close()


for var in src_tables:
varname = var[1]
if varname not in [x[1] for x in src_tables]:
if varname not in [x[1] for x in dst_tables]:
cur = con.cursor()
cur.execute(var[-1])
cur.close()
Expand Down
Loading