diff --git a/.zenodo.json b/.zenodo.json
index f5b00f1f8..fa46def55 100644
--- a/.zenodo.json
+++ b/.zenodo.json
@@ -1,22 +1,22 @@
 {
   "license": "BSD-3-Clause",
-  "copyright": "Copyright (c) 1998, 2017 Triad National Security, LLC",
+  "copyright": "Copyright 1998-2026 Triad National Security, LLC",
   "description": "View detailed release notes at https://github.com/CICE-Consortium/CICE/releases",
   "language": "eng",
-  "title": "CICE-Consortium/CICE: CICE Version 6.6.1",
+  "title": "CICE-Consortium/CICE: CICE Version 6.6.3",
   "keywords": [
     "sea ice model",
     "CICE",
     "Icepack"
   ],
-  "version": "6.6.1",
+  "version": "6.6.3",
   "upload_type": "software",
   "communities": [
     {
       "identifier": "cice-consortium"
     }
   ],
-  "publication_date": "2025-07-21",
+  "publication_date": "2026-01-22",
   "creators": [
     {
       "affiliation": "Los Alamos National Laboratory",
diff --git a/COPYRIGHT.pdf b/COPYRIGHT.pdf
index 55132ca5e..6a8923c95 100644
Binary files a/COPYRIGHT.pdf and b/COPYRIGHT.pdf differ
diff --git a/cicecore/cicedyn/analysis/ice_history.F90 b/cicecore/cicedyn/analysis/ice_history.F90
index a6eabe2db..eb1a66194 100644
--- a/cicecore/cicedyn/analysis/ice_history.F90
+++ b/cicecore/cicedyn/analysis/ice_history.F90
@@ -273,7 +273,7 @@ subroutine init_hist (dt)
       do ns = 1, max_nstrm
          if (histfreq(ns) == '1' .or. histfreq(ns) == 'h' .or. &
              histfreq(ns) == 'd' .or. histfreq(ns) == 'm' .or. &
-             histfreq(ns) == 'y') then
+             histfreq(ns) == 'y' .or. histfreq(ns) == 'n') then
             nstreams = nstreams + 1
             if (ns >= 2) then
                if (histfreq(ns-1) == 'x') then
@@ -337,90 +337,6 @@ subroutine init_hist (dt)
       ! to prevent array-out-of-bounds when aggregating
       if (f_fmeltt_ai(1:1) /= 'x') f_fmelttn_ai = f_fmeltt_ai

-      ! Turn on all CMIP fields in one go.
-
-      if (f_CMIP(1:1) /= 'x') then
-         f_sithick = 'mxxxx'
-         f_sisnthick = 'mxxxx'
-         f_siage = 'mxxxx'
-         f_sitemptop = 'mxxxx'
-         f_sitempsnic = 'mxxxx'
-         f_sitempbot = 'mxxxx'
-         f_sispeed = 'mxxxx'
-         f_siu = 'mxxxx'
-         f_siv = 'mxxxx'
-         f_sidmasstranx = 'mxxxx'
-         f_sidmasstrany = 'mxxxx'
-         f_sistrxdtop = 'mxxxx'
-         f_sistrydtop = 'mxxxx'
-         f_sistrxubot = 'mxxxx'
-         f_sistryubot = 'mxxxx'
-         f_sicompstren = 'mxxxx'
-         f_sialb = 'mxxxx'
-         f_sihc = 'mxxxx'
-         f_sisnhc = 'mxxxx'
-         f_sidconcth = 'mxxxx'
-         f_sidconcdyn = 'mxxxx'
-         f_sidmassth = 'mxxxx'
-         f_sidmassdyn = 'mxxxx'
-         f_sidmassgrowthwat = 'mxxxx'
-         f_sidmassgrowthbot = 'mxxxx'
-         f_sidmasssi = 'mxxxx'
-         f_sidmassevapsubl = 'mxxxx'
-         f_sndmasssubl = 'mxxxx'
-         f_sidmassmelttop = 'mxxxx'
-         f_sidmassmeltbot = 'mxxxx'
-         f_sidmasslat = 'mxxxx'
-         f_sndmasssnf = 'mxxxx'
-         f_sndmassmelt = 'mxxxx'
-         f_sndmassdyn = 'mxxxx'
-         f_siflswdtop = 'mxxxx'
-         f_siflswutop = 'mxxxx'
-         f_siflswdbot = 'mxxxx'
-         f_sifllwdtop = 'mxxxx'
-         f_sifllwutop = 'mxxxx'
-         f_siflsenstop = 'mxxxx'
-         f_siflsensupbot = 'mxxxx'
-         f_sifllatstop = 'mxxxx'
-         f_siflcondtop = 'mxxxx'
-         f_siflcondbot = 'mxxxx'
-         f_sipr = 'mxxxx'
-         f_sifb = 'mxxxx'
-         f_siflsaltbot = 'mxxxx'
-         f_siflfwbot = 'mxxxx'
-         f_siflfwdrain = 'mxxxx'
-         f_siforcetiltx = 'mxxxx'
-         f_siforcetilty = 'mxxxx'
-         f_siforcecoriolx = 'mxxxx'
-         f_siforcecorioly = 'mxxxx'
-         f_siforceintstrx = 'mxxxx'
-         f_siforceintstry = 'mxxxx'
-         f_sidragtop = 'mxxxx'
-         f_sistreave = 'mxxxx'
-         f_sistremax = 'mxxxx'
-         f_sirdgthick = 'mxxxx'
-         f_siitdconc = 'mxxxx'
-         f_siitdthick = 'mxxxx'
-         f_siitdsnthick = 'mxxxx'
-         f_aicen = 'mxxxx'
-      endif
-
-      if (f_CMIP(2:2) == 'd') then
-         f_icepresent = f_CMIP
-         f_aice = f_CMIP
-         f_sithick = f_CMIP
-         f_sisnthick = f_CMIP
-         f_sitemptop = f_CMIP
-         f_siu = f_CMIP
-         f_siv = f_CMIP
-         f_sispeed = f_CMIP
-         f_sndmasssubl = f_CMIP
-         f_sndmasssnf = f_CMIP
-         f_sndmassmelt = f_CMIP
-         f_sndmassdyn = f_CMIP
-         f_sidmasssi = f_CMIP
-      endif
-
       if (grid_ice == 'CD' .or. grid_ice == 'C') then
          f_uvelE = f_uvel
          f_vvelE = f_vvel
@@ -494,12 +410,14 @@ subroutine init_hist (dt)
       call broadcast_scalar (f_NFSD, master_task)

 !      call broadcast_scalar (f_example, master_task)
+      call broadcast_scalar (f_CMIP, master_task)
       call broadcast_scalar (f_hi, master_task)
       call broadcast_scalar (f_hs, master_task)
       call broadcast_scalar (f_snowfrac, master_task)
       call broadcast_scalar (f_snowfracn, master_task)
       call broadcast_scalar (f_Tsfc, master_task)
       call broadcast_scalar (f_aice, master_task)
+      call broadcast_scalar (f_aice_init, master_task)
       call broadcast_scalar (f_uvel, master_task)
       call broadcast_scalar (f_vvel, master_task)
       call broadcast_scalar (f_icespd, master_task)
@@ -629,8 +547,15 @@ subroutine init_hist (dt)
       call broadcast_scalar (f_frz_onset, master_task)
       call broadcast_scalar (f_aisnap, master_task)
       call broadcast_scalar (f_hisnap, master_task)
+      call broadcast_scalar (f_sitimefrac, master_task)
       call broadcast_scalar (f_sithick, master_task)
       call broadcast_scalar (f_siage, master_task)
+      call broadcast_scalar (f_siconc, master_task)
+      call broadcast_scalar (f_sisnconc, master_task)
+      call broadcast_scalar (f_sisnmass, master_task)
+      call broadcast_scalar (f_sivol, master_task)
+      call broadcast_scalar (f_simass, master_task)
+      call broadcast_scalar (f_sisaltmass, master_task)
       call broadcast_scalar (f_sisnthick, master_task)
       call broadcast_scalar (f_sitemptop, master_task)
       call broadcast_scalar (f_sitempsnic, master_task)
@@ -645,8 +570,8 @@
       call broadcast_scalar (f_sistryubot, master_task)
       call broadcast_scalar (f_sicompstren, master_task)
       call broadcast_scalar (f_sispeed, master_task)
-      call broadcast_scalar (f_sidir, master_task)
-      call broadcast_scalar (f_sialb, master_task)
+!      call broadcast_scalar (f_sidir, master_task)
+      call broadcast_scalar (f_sisali, master_task)
       call broadcast_scalar (f_sihc, master_task)
       call broadcast_scalar (f_sisnhc, master_task)
       call broadcast_scalar (f_sidconcth, master_task)
@@ -655,23 +580,24 @@
       call broadcast_scalar (f_sidmassdyn, master_task)
       call broadcast_scalar (f_sidmassgrowthwat, master_task)
       call broadcast_scalar (f_sidmassgrowthbot, master_task)
-      call broadcast_scalar (f_sidmasssi, master_task)
+      call broadcast_scalar (f_sidmassgrowthsi, master_task)
       call broadcast_scalar (f_sidmassevapsubl, master_task)
-      call broadcast_scalar (f_sndmasssubl, master_task)
+      call broadcast_scalar (f_sisndmasssubl, master_task)
       call broadcast_scalar (f_sidmassmelttop, master_task)
       call broadcast_scalar (f_sidmassmeltbot, master_task)
-      call broadcast_scalar (f_sidmasslat, master_task)
-      call broadcast_scalar (f_sndmasssnf, master_task)
-      call broadcast_scalar (f_sndmassmelt, master_task)
-      call broadcast_scalar (f_sndmassdyn, master_task)
+      call broadcast_scalar (f_sidmassmeltlat, master_task)
+      call broadcast_scalar (f_sisndmasssnf, master_task)
+      call broadcast_scalar (f_sisndmassmelt, master_task)
+      call broadcast_scalar (f_sisndmassdyn, master_task)
+      call broadcast_scalar (f_sisndmasssi, master_task)
       call broadcast_scalar (f_siflswdtop, master_task)
       call broadcast_scalar (f_siflswutop, master_task)
       call broadcast_scalar (f_siflswdbot, master_task)
       call broadcast_scalar (f_sifllwdtop, master_task)
       call broadcast_scalar (f_sifllwutop, master_task)
       call broadcast_scalar (f_siflsenstop, master_task)
-      call broadcast_scalar (f_siflsensupbot, master_task)
-      call broadcast_scalar (f_sifllatstop, master_task)
+      call broadcast_scalar (f_siflsensbot, master_task)
+      call broadcast_scalar (f_sifllattop, master_task)
       call broadcast_scalar (f_siflcondtop, master_task)
       call broadcast_scalar (f_siflcondbot, master_task)
       call broadcast_scalar (f_sipr, master_task)
@@ -686,12 +612,15 @@
       call broadcast_scalar (f_siforceintstrx, master_task)
       call broadcast_scalar (f_siforceintstry, master_task)
       call broadcast_scalar (f_siitdconc, master_task)
+      call broadcast_scalar (f_siitdsnconc, master_task)
       call broadcast_scalar (f_siitdthick, master_task)
       call broadcast_scalar (f_siitdsnthick, master_task)
       call broadcast_scalar (f_sidragtop, master_task)
-      call broadcast_scalar (f_sistreave, master_task)
-      call broadcast_scalar (f_sistremax, master_task)
-      call broadcast_scalar (f_sirdgthick, master_task)
+      call broadcast_scalar (f_sidragbot, master_task)
+      call broadcast_scalar (f_sistressave, master_task)
+      call broadcast_scalar (f_sistressmax, master_task)
+      call broadcast_scalar (f_sidivvel, master_task)
+      call broadcast_scalar (f_sishearvel, master_task)

       call broadcast_scalar (f_aicen, master_task)
       call broadcast_scalar (f_vicen, master_task)
@@ -728,6 +657,11 @@ subroutine init_hist (dt)
       call broadcast_scalar (f_yieldstress12, master_task)
       call broadcast_scalar (f_yieldstress22, master_task)

+      if (f_CMIP(1:1) /= 'x') then
+         if (my_task == master_task) write(nu_diag,*) subname, &
+            'WARNING: f_CMIP has been deprecated. Please use the set_nml.cmip namelist option'
+      endif
+
       ! 2D variables
       do ns1 = 1, nstreams
          if (histfreq(ns1) /= 'x') then
@@ -739,189 +673,194 @@ subroutine init_hist (dt)
 !            ns1, f_example)
 !!!!!
end example - call define_hist_field(n_hi,"hi","m",tstr2D, tcstr, & - "grid cell mean ice thickness", & - "ice volume per unit grid cell area", c1, c0, & + call define_hist_field(n_hi,"hi","m",tstr2D, tcstr, & + "grid cell mean ice thickness", & + "ice volume per unit grid cell area", c1, c0, & ns1, f_hi) - call define_hist_field(n_hs,"hs","m",tstr2D, tcstr, & - "grid cell mean snow thickness", & - "snow volume per unit grid cell area", c1, c0, & + call define_hist_field(n_hs,"hs","m",tstr2D, tcstr, & + "grid cell mean snow thickness", & + "snow volume per unit grid cell area", c1, c0, & ns1, f_hs) call define_hist_field(n_snowfrac,"snowfrac","1",tstr2D, tcstr, & - "grid cell mean snow fraction", & - "snow fraction per unit grid cell area", c1, c0, & + "snow fraction of sea ice", & + "none", c1, c0, & ns1, f_snowfrac) - call define_hist_field(n_Tsfc,"Tsfc","C",tstr2D, tcstr, & - "snow/ice surface temperature", & - "averaged with Tf if no ice is present", c1, c0, & + call define_hist_field(n_Tsfc,"Tsfc","C",tstr2D, tcstr, & + "snow/ice surface temperature", & + "averaged with Tf if no ice is present", c1, c0, & ns1, f_Tsfc) - call define_hist_field(n_aice,"aice","1",tstr2D, tcstr, & - "ice area (aggregate)", & - "none", c1, c0, & + call define_hist_field(n_aice,"aice","1",tstr2D, tcstr, & + "ice area (aggregate)", & + "none", c1, c0, & ns1, f_aice) - call define_hist_field(n_uvelE,"uvelE","m/s",estr2D, ecstr, & - "ice velocity (x)", & - "positive is x direction on E grid", c1, c0, & + call define_hist_field(n_aice_init,"aice_init","1",tstr2D, tcstr, & + "ice area (aggregate) initial", & + "none", c1, c0, & + ns1, f_aice_init) + + call define_hist_field(n_uvelE,"uvelE","m/s",estr2D, ecstr, & + "ice velocity (x)", & + "positive is x direction on E grid", c1, c0, & ns1, f_uvelE) - call define_hist_field(n_vvelE,"vvelE","m/s",estr2D, ecstr, & - "ice velocity (y)", & - "positive is y direction on E grid", c1, c0, & + call define_hist_field(n_vvelE,"vvelE","m/s",estr2D, ecstr, & + "ice velocity (y)", & + "positive is y direction on E grid", c1, c0, & ns1, f_vvelE) call define_hist_field(n_icespdE,"icespdE","m/s",estr2D, ecstr, & - "sea ice speed", & + "sea ice speed", & "vector magnitude on E grid", c1, c0, & ns1, f_icespdE) call define_hist_field(n_icedirE,"icedirE","deg",estr2D, ecstr, & - "sea ice direction", & + "sea ice direction", & "vector direction - coming from on E grid", c1, c0, & ns1, f_icedirE) - call define_hist_field(n_uvelN,"uvelN","m/s",nstr2D, ncstr, & - "ice velocity (x)", & - "positive is x direction on N grid", c1, c0, & + call define_hist_field(n_uvelN,"uvelN","m/s",nstr2D, ncstr, & + "ice velocity (x)", & + "positive is x direction on N grid", c1, c0, & ns1, f_uvelN) - call define_hist_field(n_vvelN,"vvelN","m/s",nstr2D, ncstr, & - "ice velocity (y)", & - "positive is y direction on N grid", c1, c0, & + call define_hist_field(n_vvelN,"vvelN","m/s",nstr2D, ncstr, & + "ice velocity (y)", & + "positive is y direction on N grid", c1, c0, & ns1, f_vvelN) call define_hist_field(n_icespdN,"icespdN","m/s",nstr2D, ncstr, & - "sea ice speed", & + "sea ice speed", & "vector magnitude on N grid", c1, c0, & ns1, f_icespdN) call define_hist_field(n_icedirN,"icedirN","deg",nstr2D, ncstr, & - "sea ice direction", & + "sea ice direction", & "vector direction - coming from on N grid", c1, c0, & ns1, f_icedirN) - call define_hist_field(n_uvel,"uvel","m/s",ustr2D, ucstr, & - "ice velocity (x)", & - "positive is x direction on U grid", c1, c0, & + call 
define_hist_field(n_uvel,"uvel","m/s",ustr2D, ucstr, & + "ice velocity (x)", & + "positive is x direction on U grid", c1, c0, & ns1, f_uvel) - call define_hist_field(n_vvel,"vvel","m/s",ustr2D, ucstr, & - "ice velocity (y)", & - "positive is y direction on U grid", c1, c0, & + call define_hist_field(n_vvel,"vvel","m/s",ustr2D, ucstr, & + "ice velocity (y)", & + "positive is y direction on U grid", c1, c0, & ns1, f_vvel) call define_hist_field(n_icespd,"icespd","m/s",ustr2D, ucstr, & - "sea ice speed", & + "sea ice speed", & "vector magnitude", c1, c0, & ns1, f_icespd) call define_hist_field(n_icedir,"icedir","deg",ustr2D, ucstr, & - "sea ice direction", & + "sea ice direction", & "vector direction - coming from", c1, c0, & ns1, f_icedir) - call define_hist_field(n_uatm,"uatm","m/s",str2D_gau, cstr_gau, & - "atm velocity (x)", & - "positive is x direction on U grid", c1, c0, & + call define_hist_field(n_uatm,"uatm","m/s",str2D_gau, cstr_gau, & + "atm velocity (x)", & + "positive is x direction on U grid", c1, c0, & ns1, f_uatm) - call define_hist_field(n_vatm,"vatm","m/s",str2D_gav, cstr_gav, & - "atm velocity (y)", & - "positive is y direction on U grid", c1, c0, & + call define_hist_field(n_vatm,"vatm","m/s",str2D_gav, cstr_gav, & + "atm velocity (y)", & + "positive is y direction on U grid", c1, c0, & ns1, f_vatm) call define_hist_field(n_atmspd,"atmspd","m/s",str2D_gau, cstr_gau, & - "atmosphere wind speed", & - "vector magnitude", c1, c0, & + "atmosphere wind speed", & + "vector magnitude", c1, c0, & ns1, f_atmspd) call define_hist_field(n_atmdir,"atmdir","deg",str2D_gau, cstr_gau, & - "atmosphere wind direction", & - "vector direction - coming from", c1, c0, & + "atmosphere wind direction", & + "vector direction - coming from", c1, c0, & ns1, f_atmdir) - call define_hist_field(n_sice,"sice","ppt",tstr2D, tcstr, & - "bulk ice salinity", & - "none", c1, c0, & + call define_hist_field(n_sice,"sice","ppt",tstr2D, tcstr, & + "bulk ice salinity", & + "none", c1, c0, & ns1, f_sice) call define_hist_field(n_fswup,"fswup","W/m^2",tstr2D, tcstr, & "upward solar flux", & - "positive upward", c1, c0, & + "positive upward", c1, c0, & ns1, f_fswup) call define_hist_field(n_fswdn,"fswdn","W/m^2",tstr2D, tcstr, & - "down solar flux", & - "positive downward", c1, c0, & + "down solar flux", & + "positive downward", c1, c0, & ns1, f_fswdn) call define_hist_field(n_flwdn,"flwdn","W/m^2",tstr2D, tcstr, & - "down longwave flux", & - "positive downward", c1, c0, & + "down longwave flux", & + "positive downward", c1, c0, & ns1, f_flwdn) call define_hist_field(n_snow,"snow","cm/day",tstr2D, tcstr, & - "snowfall rate (cpl)", & - "none", mps_to_cmpdy/rhofresh, c0, & + "snowfall rate (cpl)", & + "none", mps_to_cmpdy/rhofresh, c0, & ns1, f_snow) call define_hist_field(n_snow_ai,"snow_ai","cm/day",tstr2D, tcstr, & - "snowfall rate", & - "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & + "snowfall rate", & + "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & ns1, f_snow_ai) call define_hist_field(n_rain,"rain","cm/day",tstr2D, tcstr, & - "rainfall rate (cpl)", & - "none", mps_to_cmpdy/rhofresh, c0, & + "rainfall rate (cpl)", & + "none", mps_to_cmpdy/rhofresh, c0, & ns1, f_rain) call define_hist_field(n_rain_ai,"rain_ai","cm/day",tstr2D, tcstr, & - "rainfall rate", & - "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & + "rainfall rate", & + "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & ns1, f_rain_ai) call define_hist_field(n_sst,"sst","C",tstr2D, tcstr, & - "sea surface temperature", & - "none", c1, 
c0, & + "sea surface temperature", & + "none", c1, c0, & ns1, f_sst) call define_hist_field(n_sss,"sss","ppt",tstr2D, tcstr, & - "sea surface salinity", & - "none", c1, c0, & + "sea surface salinity", & + "none", c1, c0, & ns1, f_sss) call define_hist_field(n_uocn,"uocn","m/s",str2D_gou, cstr_gou, & - "ocean current (x)", & - "positive is x direction on U grid", c1, c0, & + "ocean current (x)", & + "positive is x direction on U grid", c1, c0, & ns1, f_uocn) call define_hist_field(n_vocn,"vocn","m/s",str2D_gov, cstr_gov, & - "ocean current (y)", & - "positive is y direction on U grid", c1, c0, & + "ocean current (y)", & + "positive is y direction on U grid", c1, c0, & ns1, f_vocn) call define_hist_field(n_ocnspd,"ocnspd","m/s",str2D_gou, cstr_gou, & - "ocean current speed", & - "vector magnitude", c1, c0, & + "ocean current speed", & + "vector magnitude", c1, c0, & ns1, f_ocnspd) call define_hist_field(n_ocndir,"ocndir","deg",str2D_gou, cstr_gou, & - "ocean current direction", & - "vector direction - going to", c1, c0, & + "ocean current direction", & + "vector direction - going to", c1, c0, & ns1, f_ocndir) call define_hist_field(n_frzmlt,"frzmlt","W/m^2",tstr2D, tcstr, & - "freeze/melt potential", & - "if >0, new ice forms; if <0, ice melts", c1, c0, & + "freeze/melt potential", & + "if >0, new ice forms; if <0, ice melts", c1, c0, & ns1, f_frzmlt) call define_hist_field(n_fswfac,"scale_factor","1",tstr2D, tcstr, & - "shortwave scaling factor", & - "ratio of netsw new:old", c1, c0, & + "shortwave scaling factor", & + "ratio of netsw new:old", c1, c0, & ns1, f_fswfac) call define_hist_field(n_fswint_ai,"fswint_ai","W/m^2",tstr2D, tcstr, & @@ -930,194 +869,194 @@ subroutine init_hist (dt) ns1, f_fswint_ai) call define_hist_field(n_fswabs,"fswabs","W/m^2",tstr2D, tcstr, & - "snow/ice/ocn absorbed solar flux (cpl)", & - "positive downward", c1, c0, & + "snow/ice/ocn absorbed solar flux (cpl)", & + "positive downward", c1, c0, & ns1, f_fswabs) call define_hist_field(n_fswabs_ai,"fswabs_ai","W/m^2",tstr2D, tcstr, & - "snow/ice/ocn absorbed solar flux", & - "weighted by ice area", c1, c0, & + "snow/ice/ocn absorbed solar flux", & + "weighted by ice area", c1, c0, & ns1, f_fswabs_ai) call define_hist_field(n_albsni,"albsni","%",tstr2D, tcstr, & - "snow/ice broad band albedo", & - "averaged for coszen>0, weighted by aice", c100, c0, & + "snow/ice broad band albedo", & + "averaged for coszen>0, weighted by aice", c100, c0, & ns1, f_albsni) call define_hist_field(n_alvdr,"alvdr","%",tstr2D, tcstr, & - "visible direct albedo", & - "scaled (divided) by aice", c100, c0, & + "visible direct albedo", & + "scaled (divided) by aice", c100, c0, & ns1, f_alvdr) call define_hist_field(n_alidr,"alidr","%",tstr2D, tcstr, & - "near IR direct albedo", & - "scaled (divided) by aice", c100, c0, & + "near IR direct albedo", & + "scaled (divided) by aice", c100, c0, & ns1, f_alidr) call define_hist_field(n_alvdf,"alvdf","%",tstr2D, tcstr, & - "visible diffuse albedo", & - "scaled (divided) by aice", c100, c0, & + "visible diffuse albedo", & + "scaled (divided) by aice", c100, c0, & ns1, f_alvdf) call define_hist_field(n_alidf,"alidf","%",tstr2D, tcstr, & - "near IR diffuse albedo", & - "scaled (divided) by aice", c100, c0, & + "near IR diffuse albedo", & + "scaled (divided) by aice", c100, c0, & ns1, f_alidf) call define_hist_field(n_alvdr_ai,"alvdr_ai","%",tstr2D, tcstr, & - "visible direct albedo", & - " ", c100, c0, & + "visible direct albedo", & + " ", c100, c0, & ns1, f_alvdr_ai) call 
define_hist_field(n_alidr_ai,"alidr_ai","%",tstr2D, tcstr, & - "near IR direct albedo", & - " ", c100, c0, & + "near IR direct albedo", & + " ", c100, c0, & ns1, f_alidr_ai) call define_hist_field(n_alvdf_ai,"alvdf_ai","%",tstr2D, tcstr, & - "visible diffuse albedo", & - " ", c100, c0, & + "visible diffuse albedo", & + " ", c100, c0, & ns1, f_alvdf_ai) call define_hist_field(n_alidf_ai,"alidf_ai","%",tstr2D, tcstr, & - "near IR diffuse albedo", & - " ", c100, c0, & + "near IR diffuse albedo", & + " ", c100, c0, & ns1, f_alidf_ai) call define_hist_field(n_albice,"albice","%",tstr2D, tcstr, & - "bare ice albedo", & - "averaged for coszen>0, weighted by aice", c100, c0, & + "bare ice albedo", & + "averaged for coszen>0, weighted by aice", c100, c0, & ns1, f_albice) call define_hist_field(n_albsno,"albsno","%",tstr2D, tcstr, & - "snow albedo", & - "averaged for coszen>0, weighted by aice", c100, c0, & + "snow albedo", & + "averaged for coszen>0, weighted by aice", c100, c0, & ns1, f_albsno) call define_hist_field(n_albpnd,"albpnd","%",tstr2D, tcstr, & - "melt pond albedo", & - "averaged for coszen>0, weighted by aice", c100, c0, & + "melt pond albedo", & + "averaged for coszen>0, weighted by aice", c100, c0, & ns1, f_albpnd) call define_hist_field(n_coszen,"coszen","radian",tstr2D, tcstr, & - "cosine of the zenith angle", & - "negative below horizon", c1, c0, & + "cosine of the zenith angle", & + "negative below horizon", c1, c0, & ns1, f_coszen) call define_hist_field(n_flat,"flat","W/m^2",tstr2D, tcstr, & - "latent heat flux (cpl)", & - "positive downward", c1, c0, & + "latent heat flux (cpl)", & + "positive downward", c1, c0, & ns1, f_flat) call define_hist_field(n_flat_ai,"flat_ai","W/m^2",tstr2D, tcstr, & - "latent heat flux", & - "weighted by ice area", c1, c0, & + "latent heat flux", & + "weighted by ice area", c1, c0, & ns1, f_flat_ai) call define_hist_field(n_fsens,"fsens","W/m^2",tstr2D, tcstr, & - "sensible heat flux (cpl)", & - "positive downward", c1, c0, & + "sensible heat flux (cpl)", & + "positive downward", c1, c0, & ns1, f_fsens) call define_hist_field(n_fsens_ai,"fsens_ai","W/m^2",tstr2D, tcstr, & - "sensible heat flux", & - "weighted by ice area", c1, c0, & + "sensible heat flux", & + "weighted by ice area", c1, c0, & ns1, f_fsens_ai) call define_hist_field(n_flwup,"flwup","W/m^2",tstr2D, tcstr, & - "upward longwave flux (cpl)", & - "positive downward", c1, c0, & + "upward longwave flux (cpl)", & + "positive downward", c1, c0, & ns1, f_flwup) call define_hist_field(n_flwup_ai,"flwup_ai","W/m^2",tstr2D, tcstr, & - "upward longwave flux", & - "weighted by ice area", c1, c0, & + "upward longwave flux", & + "weighted by ice area", c1, c0, & ns1, f_flwup_ai) call define_hist_field(n_evap,"evap","cm/day",tstr2D, tcstr, & - "evaporative water flux (cpl)", & - "none", mps_to_cmpdy/rhofresh, c0, & + "evaporative water flux (cpl)", & + "none", mps_to_cmpdy/rhofresh, c0, & ns1, f_evap) call define_hist_field(n_evap_ai,"evap_ai","cm/day",tstr2D, tcstr, & - "evaporative water flux", & - "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & + "evaporative water flux", & + "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & ns1, f_evap_ai) call define_hist_field(n_Tair,"Tair","C",tstr2D, tcstr, & - "air temperature", & - "none", c1, -Tffresh, & + "air temperature", & + "none", c1, -Tffresh, & ns1, f_Tair) call define_hist_field(n_Tref,"Tref","C",tstr2D, tcstr, & - "2m reference temperature", & - "none", c1, -Tffresh, & + "2m reference temperature", & + "none", c1, -Tffresh, & ns1, f_Tref) 
call define_hist_field(n_Qref,"Qref","g/kg",tstr2D, tcstr, & - "2m reference specific humidity", & - "none", kg_to_g, c0, & + "2m reference specific humidity", & + "none", kg_to_g, c0, & ns1, f_Qref) call define_hist_field(n_congel,"congel","cm/day",tstr2D, tcstr, & - "congelation ice growth", & - "none", mps_to_cmpdy/dt, c0, & + "congelation ice growth", & + "none", mps_to_cmpdy/dt, c0, & ns1, f_congel) call define_hist_field(n_frazil,"frazil","cm/day",tstr2D, tcstr, & - "frazil ice growth", & - "none", mps_to_cmpdy/dt, c0, & + "frazil ice growth", & + "none", mps_to_cmpdy/dt, c0, & ns1, f_frazil) call define_hist_field(n_snoice,"snoice","cm/day",tstr2D, tcstr, & - "snow-ice formation", & - "none", mps_to_cmpdy/dt, c0, & + "snow-ice formation", & + "none", mps_to_cmpdy/dt, c0, & ns1, f_snoice) call define_hist_field(n_dsnow,"dsnow","cm/day",tstr2D, tcstr, & - "snow formation", & + "snow formation", & "none", mps_to_cmpdy/dt, c0, & ns1, f_dsnow) call define_hist_field(n_meltt,"meltt","cm/day",tstr2D, tcstr, & - "top ice melt", & - "none", mps_to_cmpdy/dt, c0, & + "top ice melt", & + "none", mps_to_cmpdy/dt, c0, & ns1, f_meltt) call define_hist_field(n_melts,"melts","cm/day",tstr2D, tcstr, & - "top snow melt", & - "none", mps_to_cmpdy/dt, c0, & + "top snow melt", & + "none", mps_to_cmpdy/dt, c0, & ns1, f_melts) call define_hist_field(n_meltb,"meltb","cm/day",tstr2D, tcstr, & - "basal ice melt", & - "none", mps_to_cmpdy/dt, c0, & + "basal ice melt", & + "none", mps_to_cmpdy/dt, c0, & ns1, f_meltb) call define_hist_field(n_meltl,"meltl","cm/day",tstr2D, tcstr, & - "lateral ice melt", & - "none", mps_to_cmpdy/dt, c0, & + "lateral ice melt", & + "none", mps_to_cmpdy/dt, c0, & ns1, f_meltl) call define_hist_field(n_fresh,"fresh","cm/day",tstr2D, tcstr, & - "freshwtr flx ice to ocn (cpl)", & - "if positive, ocean gains fresh water", & - mps_to_cmpdy/rhofresh, c0, & + "freshwtr flx ice to ocn (cpl)", & + "if positive, ocean gains fresh water", & + mps_to_cmpdy/rhofresh, c0, & ns1, f_fresh) call define_hist_field(n_fresh_ai,"fresh_ai","cm/day",tstr2D, tcstr, & - "freshwtr flx ice to ocn", & - "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & + "freshwtr flx ice to ocn", & + "weighted by ice area", mps_to_cmpdy/rhofresh, c0, & ns1, f_fresh_ai) call define_hist_field(n_fsalt,"fsalt","kg/m^2/s",tstr2D, tcstr, & - "salt flux ice to ocn (cpl)", & - "if positive, ocean gains salt", c1, c0, & + "salt flux ice to ocn (cpl)", & + "if positive, ocean gains salt", c1, c0, & ns1, f_fsalt) call define_hist_field(n_fsalt_ai,"fsalt_ai","kg/m^2/s",tstr2D, tcstr, & - "salt flux ice to ocean", & - "weighted by ice area", c1, c0, & + "salt flux ice to ocean", & + "weighted by ice area", c1, c0, & ns1, f_fsalt_ai) call define_hist_field(n_fbot,"fbot","W/m^2",tstr2D, tcstr, & @@ -1126,223 +1065,223 @@ subroutine init_hist (dt) ns1, f_fbot) call define_hist_field(n_fhocn,"fhocn","W/m^2",tstr2D, tcstr, & - "heat flux ice to ocn (cpl)", & - "if positive, ocean gains heat", c1, c0, & + "heat flux ice to ocn (cpl)", & + "if positive, ocean gains heat", c1, c0, & ns1, f_fhocn) call define_hist_field(n_fhocn_ai,"fhocn_ai","W/m^2",tstr2D, tcstr, & - "heat flux ice to ocean (fhocn_ai)", & - "weighted by ice area", c1, c0, & + "heat flux ice to ocean (fhocn_ai)", & + "weighted by ice area", c1, c0, & ns1, f_fhocn_ai) call define_hist_field(n_fswthru,"fswthru","W/m^2",tstr2D, tcstr, & - "SW thru ice to ocean (cpl)", & - "if positive, ocean gains heat", c1, c0, & + "SW thru ice to ocean (cpl)", & + "if positive, ocean gains 
heat", c1, c0, & ns1, f_fswthru) call define_hist_field(n_fswthru_ai,"fswthru_ai","W/m^2",tstr2D, tcstr,& - "SW flux thru ice to ocean", & - "weighted by ice area", c1, c0, & + "SW flux thru ice to ocean", & + "weighted by ice area", c1, c0, & ns1, f_fswthru_ai) call define_hist_field(n_strairx,"strairx","N/m^2",ustr2D, ucstr, & - "atm/ice stress (x)", & - "positive is x direction on U grid", c1, c0, & + "atm/ice stress (x)", & + "positive is x direction on U grid", c1, c0, & ns1, f_strairx) call define_hist_field(n_strairy,"strairy","N/m^2",ustr2D, ucstr, & - "atm/ice stress (y)", & - "positive is y direction on U grid", c1, c0, & + "atm/ice stress (y)", & + "positive is y direction on U grid", c1, c0, & ns1, f_strairy) call define_hist_field(n_strtltx,"strtltx","N/m^2",ustr2D, ucstr, & - "sea sfc tilt stress (x)", & - "none", c1, c0, & + "sea sfc tilt stress (x)", & + "none", c1, c0, & ns1, f_strtltx) call define_hist_field(n_strtlty,"strtlty","N/m^2",ustr2D, ucstr, & - "sea sfc tilt stress (y)", & - "none", c1, c0, & + "sea sfc tilt stress (y)", & + "none", c1, c0, & ns1, f_strtlty) call define_hist_field(n_strcorx,"strcorx","N/m^2",ustr2D, ucstr, & - "coriolis stress (x)", & - "positive is x direction on U grid", c1, c0, & + "coriolis stress (x)", & + "positive is x direction on U grid", c1, c0, & ns1, f_strcorx) call define_hist_field(n_strcory,"strcory","N/m^2",ustr2D, ucstr, & - "coriolis stress (y)", & - "positive is y direction on U grid", c1, c0, & + "coriolis stress (y)", & + "positive is y direction on U grid", c1, c0, & ns1, f_strcory) call define_hist_field(n_strocnx,"strocnx","N/m^2",ustr2D, ucstr, & - "ocean/ice stress (x)", & - "positive is x direction on U grid", c1, c0, & + "ocean/ice stress (x)", & + "positive is x direction on U grid", c1, c0, & ns1, f_strocnx) call define_hist_field(n_strocny,"strocny","N/m^2",ustr2D, ucstr, & - "ocean/ice stress (y)", & - "positive is y direction on U grid", c1, c0, & + "ocean/ice stress (y)", & + "positive is y direction on U grid", c1, c0, & ns1, f_strocny) call define_hist_field(n_strintx,"strintx","N/m^2",ustr2D, ucstr, & - "internal ice stress (x)", & - "positive is x direction on U grid", c1, c0, & + "internal ice stress (x)", & + "positive is x direction on U grid", c1, c0, & ns1, f_strintx) call define_hist_field(n_strinty,"strinty","N/m^2",ustr2D, ucstr, & - "internal ice stress (y)", & - "positive is y direction on U grid", c1, c0, & + "internal ice stress (y)", & + "positive is y direction on U grid", c1, c0, & ns1, f_strinty) - call define_hist_field(n_taubx,"taubx","N/m^2",ustr2D, ucstr, & - "seabed (basal) stress (x)", & - "positive is x direction on U grid", c1, c0, & + call define_hist_field(n_taubx,"taubx","N/m^2",ustr2D, ucstr, & + "seabed (basal) stress (x)", & + "positive is x direction on U grid", c1, c0, & ns1, f_taubx) - call define_hist_field(n_tauby,"tauby","N/m^2",ustr2D, ucstr, & - "seabed (basal) stress (y)", & - "positive is y direction on U grid", c1, c0, & + call define_hist_field(n_tauby,"tauby","N/m^2",ustr2D, ucstr, & + "seabed (basal) stress (y)", & + "positive is y direction on U grid", c1, c0, & ns1, f_tauby) call define_hist_field(n_strairxN,"strairxN","N/m^2",nstr2D, ncstr, & - "atm/ice stress (x)", & - "positive is x direction on N grid", c1, c0, & + "atm/ice stress (x)", & + "positive is x direction on N grid", c1, c0, & ns1, f_strairxN) call define_hist_field(n_strairyN,"strairyN","N/m^2",nstr2D, ncstr, & - "atm/ice stress (y)", & - "positive is y direction on N grid", c1, c0, & + "atm/ice 
stress (y)", & + "positive is y direction on N grid", c1, c0, & ns1, f_strairyN) call define_hist_field(n_strairxE,"strairxE","N/m^2",estr2D, ecstr, & - "atm/ice stress (x)", & - "positive is x direction on E grid", c1, c0, & + "atm/ice stress (x)", & + "positive is x direction on E grid", c1, c0, & ns1, f_strairxE) call define_hist_field(n_strairyE,"strairyE","N/m^2",estr2D, ecstr, & - "atm/ice stress (y)", & - "positive is y direction on E grid", c1, c0, & + "atm/ice stress (y)", & + "positive is y direction on E grid", c1, c0, & ns1, f_strairyE) call define_hist_field(n_strtltxN,"strtltxN","N/m^2",nstr2D, ncstr, & - "sea sfc tilt stress (x)", & - "positive is x direction on N grid", c1, c0, & + "sea sfc tilt stress (x)", & + "positive is x direction on N grid", c1, c0, & ns1, f_strtltxN) call define_hist_field(n_strtltyN,"strtltyN","N/m^2",nstr2D, ncstr, & - "sea sfc tilt stress (y)", & - "positive is y direction on N grid", c1, c0, & + "sea sfc tilt stress (y)", & + "positive is y direction on N grid", c1, c0, & ns1, f_strtltyN) call define_hist_field(n_strtltxE,"strtltxE","N/m^2",estr2D, ecstr, & - "sea sfc tilt stress (x)", & - "positive is x direction on E grid", c1, c0, & + "sea sfc tilt stress (x)", & + "positive is x direction on E grid", c1, c0, & ns1, f_strtltxE) call define_hist_field(n_strtltyE,"strtltyE","N/m^2",estr2D, ecstr, & - "sea sfc tilt stress (y)", & - "positive is y direction on E grid", c1, c0, & + "sea sfc tilt stress (y)", & + "positive is y direction on E grid", c1, c0, & ns1, f_strtltyE) call define_hist_field(n_strcorxN,"strcorxN","N/m^2",nstr2D, ncstr, & - "coriolis stress (x)", & - "positive is x direction on N grid", c1, c0, & + "coriolis stress (x)", & + "positive is x direction on N grid", c1, c0, & ns1, f_strcorxN) call define_hist_field(n_strcoryN,"strcoryN","N/m^2",nstr2D, ncstr, & - "coriolis stress (y)", & - "positive is y direction on N grid", c1, c0, & + "coriolis stress (y)", & + "positive is y direction on N grid", c1, c0, & ns1, f_strcoryN) call define_hist_field(n_strcorxE,"strcorxE","N/m^2",estr2D, ecstr, & - "coriolis stress (x)", & - "positive is x direction on E grid", c1, c0, & + "coriolis stress (x)", & + "positive is x direction on E grid", c1, c0, & ns1, f_strcorxE) call define_hist_field(n_strcoryE,"strcoryE","N/m^2",estr2D, ecstr, & - "coriolis stress (y)", & - "positive is y direction on E grid", c1, c0, & + "coriolis stress (y)", & + "positive is y direction on E grid", c1, c0, & ns1, f_strcoryE) call define_hist_field(n_strocnxN,"strocnxN","N/m^2",nstr2D, ncstr, & - "ocean/ice stress (x)", & - "positive is x direction on N grid", c1, c0, & + "ocean/ice stress (x)", & + "positive is x direction on N grid", c1, c0, & ns1, f_strocnxN) call define_hist_field(n_strocnyN,"strocnyN","N/m^2",nstr2D, ncstr, & - "ocean/ice stress (y)", & - "positive is y direction on N grid", c1, c0, & + "ocean/ice stress (y)", & + "positive is y direction on N grid", c1, c0, & ns1, f_strocnyN) call define_hist_field(n_strocnxE,"strocnxE","N/m^2",estr2D, ecstr, & - "ocean/ice stress (x)", & - "positive is x direction on E grid", c1, c0, & + "ocean/ice stress (x)", & + "positive is x direction on E grid", c1, c0, & ns1, f_strocnxE) call define_hist_field(n_strocnyE,"strocnyE","N/m^2",estr2D, ecstr, & - "ocean/ice stress (y)", & - "positive is y direction on E grid", c1, c0, & + "ocean/ice stress (y)", & + "positive is y direction on E grid", c1, c0, & ns1, f_strocnyE) call define_hist_field(n_strintxN,"strintxN","N/m^2",nstr2D, ncstr, & - "internal ice 
stress (x)", & - "positive is x direction on N grid", c1, c0, & + "internal ice stress (x)", & + "positive is x direction on N grid", c1, c0, & ns1, f_strintxN) call define_hist_field(n_strintyN,"strintyN","N/m^2",nstr2D, ncstr, & - "internal ice stress (y)", & - "positive is y direction on N grid", c1, c0, & + "internal ice stress (y)", & + "positive is y direction on N grid", c1, c0, & ns1, f_strintyN) call define_hist_field(n_strintxE,"strintxE","N/m^2",estr2D, ecstr, & - "internal ice stress (x)", & - "positive is x direction on E grid", c1, c0, & + "internal ice stress (x)", & + "positive is x direction on E grid", c1, c0, & ns1, f_strintxE) call define_hist_field(n_strintyE,"strintyE","N/m^2",estr2D, ecstr, & - "internal ice stress (y)", & - "positive is y direction on E grid", c1, c0, & + "internal ice stress (y)", & + "positive is y direction on E grid", c1, c0, & ns1, f_strintyE) - call define_hist_field(n_taubxN,"taubxN","N/m^2",nstr2D, ncstr, & + call define_hist_field(n_taubxN,"taubxN","N/m^2",nstr2D, ncstr, & "seabed (basal) stress (x)", & "positive is x direction on N grid", c1, c0, & ns1, f_taubxN) - call define_hist_field(n_taubyN,"taubyN","N/m^2",nstr2D, ncstr, & + call define_hist_field(n_taubyN,"taubyN","N/m^2",nstr2D, ncstr, & "seabed (basal) stress (y)", & "positive is y direction on N grid", c1, c0, & ns1, f_taubyN) - call define_hist_field(n_taubxE,"taubxE","N/m^2",estr2D, ecstr, & + call define_hist_field(n_taubxE,"taubxE","N/m^2",estr2D, ecstr, & "seabed (basal) stress (x)", & "positive is x direction on E grid", c1, c0, & ns1, f_taubxE) - call define_hist_field(n_taubyE,"taubyE","N/m^2",estr2D, ecstr, & + call define_hist_field(n_taubyE,"taubyE","N/m^2",estr2D, ecstr, & "seabed (basal) stress (y)", & "positive is y direction on E grid", c1, c0, & ns1, f_taubyE) call define_hist_field(n_strength,"strength","N/m",tstr2D, tcstr, & - "compressive ice strength", & - "none", c1, c0, & + "compressive ice strength", & + "none", c1, c0, & ns1, f_strength) call define_hist_field(n_divu,"divu","%/day",tstr2D, tcstr, & - "strain rate (divergence)", & - "divu is instantaneous, on T grid", secday*c100, c0, & + "strain rate (divergence)", & + "divu is instantaneous, on T grid", secday*c100, c0, & ns1, f_divu) call define_hist_field(n_shear,"shear","%/day",tstr2D, tcstr, & - "strain rate (shear)", & - "shear is instantaneous, on T grid", secday*c100, c0, & + "strain rate (shear)", & + "shear is instantaneous, on T grid", secday*c100, c0, & ns1, f_shear) call define_hist_field(n_vort,"vort","%/day",tstr2D, tcstr, & - "strain rate (vorticity)", & - "vort is instantaneous, on T grid", secday*c100, c0, & + "strain rate (vorticity)", & + "vort is instantaneous, on T grid", secday*c100, c0, & ns1, f_vort) select case (grid_ice) @@ -1357,68 +1296,68 @@ subroutine init_hist (dt) end select call define_hist_field(n_sig1,"sig1","1",gridstr2d, gridstr, & - "norm. principal stress 1", & - "sig1 is instantaneous" // trim(description), c1, c0, & + "norm. principal stress 1", & + "sig1 is instantaneous" // trim(description), c1, c0, & ns1, f_sig1) call define_hist_field(n_sig2,"sig2","1",gridstr2d, gridstr, & - "norm. principal stress 2", & - "sig2 is instantaneous" // trim(description), c1, c0, & + "norm. 
principal stress 2", & + "sig2 is instantaneous" // trim(description), c1, c0, & ns1, f_sig2) call define_hist_field(n_sigP,"sigP","N/m",gridstr2d, gridstr, & - "ice pressure", & - "sigP is instantaneous" // trim(description), c1, c0, & + "ice pressure", & + "sigP is instantaneous" // trim(description), c1, c0, & ns1, f_sigP) call define_hist_field(n_dvidtt,"dvidtt","cm/day",tstr2D, tcstr, & - "volume tendency thermo", & - "none", mps_to_cmpdy, c0, & + "volume tendency thermo", & + "none", mps_to_cmpdy, c0, & ns1, f_dvidtt) call define_hist_field(n_dvidtd,"dvidtd","cm/day",tstr2D, tcstr, & - "volume tendency dynamics", & - "none", mps_to_cmpdy, c0, & + "volume tendency dynamics", & + "none", mps_to_cmpdy, c0, & ns1, f_dvidtd) call define_hist_field(n_daidtt,"daidtt","%/day",tstr2D, tcstr, & - "area tendency thermo", & - "none", secday*c100, c0, & + "area tendency thermo", & + "none", secday*c100, c0, & ns1, f_daidtt) call define_hist_field(n_daidtd,"daidtd","%/day",tstr2D, tcstr, & - "area tendency dynamics", & - "none", secday*c100, c0, & + "area tendency dynamics", & + "none", secday*c100, c0, & ns1, f_daidtd) call define_hist_field(n_dagedtt,"dagedtt","day/day",tstr2D, tcstr, & - "age tendency thermo", & - "excludes time step increment", c1, c0, & + "age tendency thermo", & + "excludes time step increment", c1, c0, & ns1, f_dagedtt) call define_hist_field(n_dagedtd,"dagedtd","day/day",tstr2D, tcstr, & - "age tendency dynamics", & - "excludes time step increment", c1, c0, & + "age tendency dynamics", & + "excludes time step increment", c1, c0, & ns1, f_dagedtd) call define_hist_field(n_mlt_onset,"mlt_onset","day of year", & - tstr2D, tcstr,"melt onset date", & + tstr2D, tcstr,"melt onset date", & "midyear restart gives erroneous dates", c1, c0, & ns1, f_mlt_onset) call define_hist_field(n_frz_onset,"frz_onset","day of year", & - tstr2D, tcstr,"freeze onset date", & + tstr2D, tcstr,"freeze onset date", & "midyear restart gives erroneous dates", c1, c0, & ns1, f_frz_onset) call define_hist_field(n_hisnap,"hisnap","m",tstr2D, tcstr, & - "ice volume snapshot", & - "none", c1, c0, & + "ice volume snapshot", & + "none", c1, c0, & ns1, f_hisnap) call define_hist_field(n_aisnap,"aisnap","1",tstr2D, tcstr, & - "ice area snapshot", & - "none", c1, c0, & + "ice area snapshot", & + "none", c1, c0, & ns1, f_aisnap) call define_hist_field(n_trsig,"trsig","N/m",tstr2D, tcstr, & @@ -1427,396 +1366,492 @@ subroutine init_hist (dt) ns1, f_trsig) call define_hist_field(n_icepresent,"ice_present","1",tstr2D, tcstr, & - "fraction of time-avg interval that ice is present", & - "ice extent flag", c1, c0, & + "fraction of time-avg interval that ice is present", & + "ice extent flag", c1, c0, & ns1, f_icepresent) - call define_hist_field(n_fsurf_ai,"fsurf_ai","W/m^2",tstr2D, tcstr, & - "net surface heat flux", & + call define_hist_field(n_fsurf_ai,"fsurf_ai","W/m^2",tstr2D, tcstr, & + "net surface heat flux", & "positive downward, excludes conductive flux, weighted by ice area", & c1, c0, & ns1, f_fsurf_ai) call define_hist_field(n_fcondtop_ai,"fcondtop_ai","W/m^2", & - tstr2D, tcstr,"top surface conductive heat flux", & + tstr2D, tcstr,"top surface conductive heat flux", & "positive downward, weighted by ice area", c1, c0, & ns1, f_fcondtop_ai) call define_hist_field(n_fmeltt_ai,"fmeltt_ai","W/m^2",tstr2D, tcstr, & - "net surface heat flux causing melt", & - "always >= 0, weighted by ice area", c1, c0, & + "net surface heat flux causing melt", & + "always >= 0, weighted by ice area", c1, c0, & ns1, f_fmeltt_ai) 
call define_hist_field(n_a11,"a11"," ",tstr2D, tcstr, & "a11: component a11 of the structure tensor", & - "none", c1, c0, & + "none", c1, c0, & ns1, f_a11) call define_hist_field(n_a12,"a12"," ",tstr2D, tcstr, & "a12: component a12 of the structure tensor", & - "none", c1, c0, & + "none", c1, c0, & ns1, f_a12) call define_hist_field(n_e11,"e11","1/s",tstr2D, tcstr, & "e11: component e11 of the strain rate tensor", & - "none", c1, c0, & + "none", c1, c0, & ns1, f_e11) call define_hist_field(n_e12,"e12","1/s",tstr2D, tcstr, & "e12: component e12 of the strain rate tensor", & - "none", c1, c0, & + "none", c1, c0, & ns1, f_e12) call define_hist_field(n_e22,"e22","1/s",tstr2D, tcstr, & "e22: component e22 of the strain rate tensor", & - "none", c1, c0, & + "none", c1, c0, & ns1, f_e22) call define_hist_field(n_s11,"s11","kg/s^2",tstr2D, tcstr, & "s11: component s11 of the stress tensor", & - "none", c1, c0, & + "none", c1, c0, & ns1, f_s11) call define_hist_field(n_s12,"s12","kg/s^2",tstr2D, tcstr, & "s12: component s12 of the stress tensor", & - "none", c1, c0, & + "none", c1, c0, & ns1, f_s12) call define_hist_field(n_s22,"s22","kg/s^2",tstr2D, tcstr, & "s22: component s12 of the stress tensor", & - "none", c1, c0, & + "none", c1, c0, & ns1, f_s22) call define_hist_field(n_yieldstress11,"yieldstress11","kg/s^2",tstr2D, tcstr, & "yieldstress11: component 11 of the yieldstress tensor", & - "none", c1, c0, & + "none", c1, c0, & ns1, f_yieldstress11) call define_hist_field(n_yieldstress12,"yieldstress12","kg/s^2",tstr2D, tcstr, & "yieldstress12: component 12 of the yieldstress tensor", & - "none", c1, c0, & + "none", c1, c0, & ns1, f_yieldstress12) call define_hist_field(n_yieldstress22,"yieldstress22","kg/s^2",tstr2D, tcstr, & "yieldstress22: component 12 of the yieldstress tensor", & - "none", c1, c0, & + "none", c1, c0, & ns1, f_yieldstress22) ! Tracers ! Ice Age call define_hist_field(n_iage,"iage","years",tstr2D, tcstr, & - "sea ice age", & - "none", c1/(secday*days_per_year), c0, & + "sea ice age", & + "none", c1/(secday*days_per_year), c0, & ns1, f_iage) ! First Year Ice Area call define_hist_field(n_FY,"FYarea"," ",tstr2D, tcstr, & - "first-year ice area", & - "weighted by ice area", c1, c0, & + "first-year ice area", & + "weighted by ice area", c1, c0, & ns1, f_FY) - ! CMIP 2D variables + ! CMIP 2D variables (for "intensive" variables per Notz et al 2016 definition, + ! that is a weighted time average when ice is present) + ! Use avg_ice_present = 'init', 'final', 'pond', or 'ridge' to divide by + ! sum(aice), sum(apond), or sum(ardg) over time + ! aice is at the start of the timestep ('init') or the end of the timestep ('final') + ! avg_ice_present = 'none' produces a time average including zeroes when ice is not present + + call define_hist_field(n_siage,"siage","s",tstr2D, tcstr, & + "age of sea ice", & + "age of sea ice since its formation in open water", c1, c0, & + ns1, f_siage, avg_ice_present='final', mask_ice_free_points=.true.) + + call define_hist_field(n_sicompstren,"sicompstren","N m-1",tstr2D, tcstr, & + "compressive sea-ice strength", & + "computed strength of the ice pack", c1, c0, & + ns1, f_sicompstren, avg_ice_present='final', mask_ice_free_points=.true.) + + call define_hist_field(n_sidragbot,"sidragbot","1",tstr2D, tcstr, & + "ocean drag coefficient", & + "drag coefficient that is used to calculate the oceanic momentum drag on sea ice", & + c1, c0, & + ns1, f_sidragbot, avg_ice_present='init', mask_ice_free_points=.true.) 
+ + call define_hist_field(n_sidragtop,"sidragtop","1",tstr2D, tcstr, & + "atmospheric drag coefficient", & + "drag coefficient that is used to calculate the atmospheric momentum drag on sea ice", & + c1, c0, & + ns1, f_sidragtop, avg_ice_present='init', mask_ice_free_points=.true.) - call define_hist_field(n_sithick,"sithick","m",tstr2D, tcstr, & - "sea ice thickness", & - "volume divided by area", c1, c0, & - ns1, f_sithick, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_siage,"siage","s",tstr2D, tcstr, & - "sea ice age", & - "none", c1, c0, & - ns1, f_siage, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sisnthick,"sisnthick","m",tstr2D, tcstr, & - "sea ice snow thickness", & - "snow volume divided by area", c1, c0, & - ns1, f_sisnthick, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sitemptop,"sitemptop","K",tstr2D, tcstr, & - "sea ice surface temperature", & - "none", c1, c0, & - ns1, f_sitemptop, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sitempsnic,"sitempsnic","K",tstr2D, tcstr, & - "snow ice interface temperature", & - "surface temperature when no snow present", c1, c0, & - ns1, f_sitempsnic, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sitempbot,"sitempbot","K",tstr2D, tcstr, & - "sea ice bottom temperature", & - "none", c1, c0, & - ns1, f_sitempbot, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_siu,"siu","m/s",ustr2D, ucstr, & - "ice x velocity component", & - "none", c1, c0, & - ns1, f_siu, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_siv,"siv","m/s",ustr2D, ucstr, & - "ice y velocity component", & - "none", c1, c0, & - ns1, f_siv, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sidmasstranx,"sidmasstranx","kg/s",ustr2D, ucstr, & - "x component of snow and sea ice mass transport", & - "none", c1, c0, & - ns1, f_sidmasstranx) - - call define_hist_field(n_sidmasstrany,"sidmasstrany","kg/s",ustr2D, ucstr, & - "y component of snow and sea ice mass transport", & - "none", c1, c0, & - ns1, f_sidmasstrany) - - call define_hist_field(n_sistrxdtop,"sistrxdtop","N m-2",ustr2D, ucstr, & - "x component of atmospheric stress on sea ice", & - "none", c1, c0, & - ns1, f_sistrxdtop, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sistrydtop,"sistrydtop","N m-2",ustr2D, ucstr, & - "y component of atmospheric stress on sea ice", & - "none", c1, c0, & - ns1, f_sistrydtop, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sistrxubot,"sistrxubot","N m-2",ustr2D, ucstr, & - "x component of ocean stress on sea ice", & - "none", c1, c0, & - ns1, f_sistrxubot, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sistryubot,"sistryubot","N m-2",ustr2D, ucstr, & - "y component of ocean stress on sea ice", & - "none", c1, c0, & - ns1, f_sistryubot, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sicompstren,"sicompstren","N m-1",tstr2D, tcstr, & - "compressive sea ice strength", & - "none", c1, c0, & - ns1, f_sicompstren, avg_ice_present=.true., mask_ice_free_points=.true.) + call define_hist_field(n_sifb,"sifb","m",tstr2D, tcstr, & + "sea-ice freeboard", & + "mean height of sea-ice surface above sea level", & + c1, c0, & + ns1, f_sifb, avg_ice_present='final', mask_ice_free_points=.true.) 
+ + call define_hist_field(n_siflcondbot,"siflcondbot","W/m2",tstr2D, tcstr, & + "net conductive heat flux in sea-ice at the base", & + "conductive heat flux from the centre of the lowermost vertical sea-ice grid box", & + c1, c0, & + ns1, f_siflcondbot, avg_ice_present='init', mask_ice_free_points=.true.) + + call define_hist_field(n_siflcondtop,"siflcondtop","W/m2",tstr2D, tcstr, & + "net conductive heat flux in sea-ice at the surface", & + "conductive heat flux from the centre of the uppermost vertical sea-ice grid box", & + c1, c0, & + ns1, f_siflcondtop, avg_ice_present='init', mask_ice_free_points=.true.) - call define_hist_field(n_sispeed,"sispeed","m/s",ustr2D, ucstr, & - "ice speed", & - "none", c1, c0, & - ns1, f_sispeed, avg_ice_present=.true., mask_ice_free_points=.true.) + call define_hist_field(n_siflfwbot,"siflfwbot","kg m-2 s-1",tstr2D, tcstr, & + "freshwater flux from sea-ice", & + "total flux of fresh water between ocean and sea ice", c1, c0, & + ns1, f_siflfwbot, avg_ice_present='final', mask_ice_free_points=.true.) - call define_hist_field(n_sidir,"sidir","deg",ustr2D, ucstr, & - "ice direction", & - "vector direction - going to", c1, c0, & - ns1, f_sidir) + call define_hist_field(n_siflfwdrain,"siflfwdrain","kg m-2 s-1",tstr2D, tcstr, & + "freshwater flux from sea-ice surface", & + "total flux of fresh water from sea-ice surface into underlying ocean", & + c1, c0, & + ns1, f_siflfwdrain, avg_ice_present='final', mask_ice_free_points=.true.) + + call define_hist_field(n_sifllattop,"sifllattop","W/m2",tstr2D, tcstr, & + "net latent heat flux over sea ice", & + "positive downward", c1, c0, & + ns1, f_sifllattop, avg_ice_present='init', mask_ice_free_points=.true.) + + call define_hist_field(n_sifllwdtop,"sifllwdtop","W/m2",tstr2D, tcstr, & + "downwelling longwave flux over sea ice", & + "downwelling longwave flux from the atmosphere to the sea-ice surface ", & + c1, c0, & + ns1, f_sifllwdtop, avg_ice_present='init', mask_ice_free_points=.true.) - call define_hist_field(n_sialb,"sialb","1",tstr2D, tcstr, & - "sea ice albedo", & - "none", c1, c0, & - ns1, f_sialb, avg_ice_present=.true., mask_ice_free_points=.true.) + call define_hist_field(n_sifllwutop,"sifllwutop","W/m2",tstr2D, tcstr, & + "upwelling longwave flux over sea ice", & + "upward longwave flux from the sea-ice surface to the atmosphere", & + c1, c0, & + ns1, f_sifllwutop, avg_ice_present='init', mask_ice_free_points=.true.) - call define_hist_field(n_sihc,"sihc","J m-2",tstr2D, tcstr, & - "sea ice heat content", & - "none", c1, c0, & - ns1, f_sihc) + call define_hist_field(n_siflsaltbot,"siflsaltbot","kg m-2 s-1",tstr2D, tcstr, & + "salt flux from sea ice", & + "total flux of salt between ocean and sea ice", c1, c0, & + ns1, f_siflsaltbot, avg_ice_present='final', mask_ice_free_points=.true.) 
- call define_hist_field(n_sisnhc,"sisnhc","J m-2",tstr2D, tcstr, & - "snow heat content", & - "none", c1, c0, & - ns1, f_sisnhc) - - call define_hist_field(n_sidconcth,"sidconcth","1/s",tstr2D, tcstr, & - "sea ice area change from thermodynamics", & - "none", c1, c0, & - ns1, f_sidconcth) - - call define_hist_field(n_sidconcdyn,"sidconcdyn","1/s",tstr2D, tcstr, & - "sea ice area change from dynamics", & - "none", c1, c0, & - ns1, f_sidconcdyn) - - call define_hist_field(n_sidmassth,"sidmassth","kg m-2 s-1",tstr2D, tcstr, & - "sea ice mass change from thermodynamics", & - "none", c1, c0, & - ns1, f_sidmassth) - - call define_hist_field(n_sidmassdyn,"sidmassdyn","kg m-2 s-1",tstr2D, tcstr, & - "sea ice mass change from dynamics", & - "none", c1, c0, & - ns1, f_sidmassdyn) - - call define_hist_field(n_sidmassgrowthwat,"sidmassgrowthwat","kg m-2 s-1",tstr2D, tcstr, & - "sea ice mass change from frazil", & - "none", c1, c0, & - ns1, f_sidmassgrowthwat) - - call define_hist_field(n_sidmassgrowthbot,"sidmassgrowthbot","kg m-2 s-1",tstr2D, tcstr, & - "sea ice mass change from basal growth", & - "none", c1, c0, & - ns1, f_sidmassgrowthbot) - - call define_hist_field(n_sidmasssi,"sidmasssi","kg m-2 s-1",tstr2D, tcstr, & - "sea ice mass change from snow-ice formation", & - "none", c1, c0, & - ns1, f_sidmasssi) - - call define_hist_field(n_sidmassevapsubl,"sidmassevapsubl","kg m-2 s-1",tstr2D, tcstr, & - "sea ice mass change from evaporation and sublimation", & - "none", c1, c0, & - ns1, f_sidmassevapsubl) - - call define_hist_field(n_sndmasssubl,"sndmasssubl","kg m-2 s-1",tstr2D, tcstr, & - "snow mass change from evaporation and sublimation", & - "none", c1, c0, & - ns1, f_sndmasssubl) - - call define_hist_field(n_sidmassmelttop,"sidmassmelttop","kg m-2 s-1",tstr2D, tcstr, & - "sea ice mass change top melt", & - "none", c1, c0, & - ns1, f_sidmassmelttop) - - call define_hist_field(n_sidmassmeltbot,"sidmassmeltbot","kg m-2 s-1",tstr2D, tcstr, & - "sea ice mass change bottom melt", & - "none", c1, c0, & - ns1, f_sidmassmeltbot) - - call define_hist_field(n_sidmasslat,"sidmasslat","kg m-2 s-1",tstr2D, tcstr, & - "sea ice mass change lateral melt", & - "none", c1, c0, & - ns1, f_sidmasslat) - - call define_hist_field(n_sndmasssnf,"sndmasssnf","kg m-2 s-1",tstr2D, tcstr, & - "snow mass change from snow fall", & - "none", c1, c0, & - ns1, f_sndmasssnf) - - call define_hist_field(n_sndmassmelt,"sndmassmelt","kg m-2 s-1",tstr2D, tcstr, & - "snow mass change from snow melt", & - "none", c1, c0, & - ns1, f_sndmassmelt) - - call define_hist_field(n_sndmassdyn,"sndmassdyn","kg m-2 s-1",tstr2D, tcstr, & - "snow mass change from dynamics ridging", & - "none", c1, c0, & - ns1, f_sndmassdyn) - - call define_hist_field(n_siflswdtop,"siflswdtop","W/m2",tstr2D, tcstr, & - "down shortwave flux over sea ice", & - "positive downward", c1, c0, & - ns1, f_siflswdtop, avg_ice_present=.true., mask_ice_free_points=.true.) + call define_hist_field(n_siflsensbot,"siflsensbot","W/m2",tstr2D, tcstr, & + "net upward sensible heat flux under sea ice", & + "net sensible heat flux under sea ice from or to the ocean", c1, c0, & + ns1, f_siflsensbot, avg_ice_present='init', mask_ice_free_points=.true.) - call define_hist_field(n_siflswutop,"siflswutop","W/m2",tstr2D, tcstr, & - "upward shortwave flux over sea ice", & - "positive downward", c1, c0, & - ns1, f_siflswutop, avg_ice_present=.true., mask_ice_free_points=.true.) 
+ call define_hist_field(n_siflsenstop,"siflsenstop","W/m2",tstr2D, tcstr, & + "net downward sensible heat flux over sea ice", & + "positive downward", c1, c0, & + ns1, f_siflsenstop, avg_ice_present='init', mask_ice_free_points=.true.) call define_hist_field(n_siflswdbot,"siflswdbot","W/m2",tstr2D, tcstr, & - "down shortwave flux at bottom of ice", & - "positive downward", c1, c0, & - ns1, f_siflswdbot, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sifllwdtop,"sifllwdtop","W/m2",tstr2D, tcstr, & - "down longwave flux over sea ice", & - "positive downward", c1, c0, & - ns1, f_sifllwdtop, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sifllwutop,"sifllwutop","W/m2",tstr2D, tcstr, & - "upward longwave flux over sea ice", & - "positive downward", c1, c0, & - ns1, f_sifllwutop, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_siflsenstop,"siflsenstop","W/m2",tstr2D, tcstr, & - "sensible heat flux over sea ice", & - "positive downward", c1, c0, & - ns1, f_siflsenstop, avg_ice_present=.true., mask_ice_free_points=.true.) + "downwelling shortwave flux underneath sea ice", & + "amount of shortwave radiation that penetrates the sea ice", & + c1, c0, & + ns1, f_siflswdbot, avg_ice_present='init', mask_ice_free_points=.true.) + + call define_hist_field(n_siflswdtop,"siflswdtop","W/m2",tstr2D, tcstr, & + "downwelling shortwave flux over sea ice", & + "downwelling shortwave flux from the atmosphere to the sea-ice surface", & + c1, c0, & + ns1, f_siflswdtop, avg_ice_present='init', mask_ice_free_points=.true.) + + call define_hist_field(n_siflswutop,"siflswutop","W/m2",tstr2D, tcstr, & + "upwelling shortwave flux over sea ice", & + "upward shortwave flux from the sea-ice surface to the atmosphere", & + c1, c0, & + ns1, f_siflswutop, avg_ice_present='init', mask_ice_free_points=.true.) - call define_hist_field(n_siflsensupbot,"siflsensupbot","W/m2",tstr2D, tcstr, & - "sensible heat flux at bottom of sea ice", & - "positive downward", c1, c0, & - ns1, f_siflsensupbot, avg_ice_present=.true., mask_ice_free_points=.true.) + call define_hist_field(n_siforcecoriolx,"siforcecoriolx","N m-2",tstr2D, tcstr, & + "coriolis force term in force balance (x-component)", & + "x-component of the force on sea ice caused by the Coriolis force", & + c1, c0, & + ns1, f_siforcecoriolx, avg_ice_present='final', mask_ice_free_points=.true.) - call define_hist_field(n_sifllatstop,"sifllatstop","W/m2",tstr2D, tcstr, & - "latent heat flux over sea ice", & - "positive downward", c1, c0, & - ns1, f_sifllatstop, avg_ice_present=.true., mask_ice_free_points=.true.) + call define_hist_field(n_siforcecorioly,"siforcecorioly","N m-2",tstr2D, tcstr, & + "coriolis force term in force balance (y-component)", & + "y-component of the force on sea ice caused by the Coriolis force", & + c1, c0, & + ns1, f_siforcecorioly, avg_ice_present='final', mask_ice_free_points=.true.) + + call define_hist_field(n_siforceintstrx,"siforceintstrx","N m-2",tstr2D, tcstr, & + "internal stress force term in force balance (x-component)", & + "x-component of the force on sea ice caused by internal stress (divergence of sigma)", & + c1, c0, & + ns1, f_siforceintstrx, avg_ice_present='final', mask_ice_free_points=.true.) 
+ + call define_hist_field(n_siforceintstry,"siforceintstry","N m-2",tstr2D, tcstr, & + "internal stress force term in force balance (y-component)", & + "y-component of the force on sea ice caused by internal stress (divergence of sigma)", & + c1, c0, & + ns1, f_siforceintstry, avg_ice_present='final', mask_ice_free_points=.true.) - call define_hist_field(n_siflcondtop,"siflcondtop","W/m2",tstr2D, tcstr, & - "conductive heat flux at top of sea ice", & - "positive downward", c1, c0, & - ns1, f_siflcondtop, avg_ice_present=.true., mask_ice_free_points=.true.) + call define_hist_field(n_siforcetiltx,"siforcetiltx","N m-2",tstr2D, tcstr, & + "sea-surface tilt term in force balance (x-component)", & + "x-component of the force on sea ice caused by sea-surface tilt", & + c1, c0, & + ns1, f_siforcetiltx, avg_ice_present='init', mask_ice_free_points=.true.) - call define_hist_field(n_siflcondbot,"siflcondbot","W/m2",tstr2D, tcstr, & - "conductive heat flux at bottom of sea ice", & - "positive downward", c1, c0, & - ns1, f_siflcondbot, avg_ice_present=.true., mask_ice_free_points=.true.) + call define_hist_field(n_siforcetilty,"siforcetilty","N m-2",tstr2D, tcstr, & + "sea surface tilt term in force balance (y-component)", & + "y-component of the force on sea ice caused by sea-surface tilt", & + c1, c0, & + ns1, f_siforcetilty, avg_ice_present='init', mask_ice_free_points=.true.) call define_hist_field(n_sipr,"sipr","kg m-2 s-1",tstr2D, tcstr, & - "rainfall over sea ice", & - "none", c1, c0, & - ns1, f_sipr, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sifb,"sifb","m",tstr2D, tcstr, & - "sea ice freeboard above sea level", & - "none", c1, c0, & - ns1, f_sifb, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_siflsaltbot,"siflsaltbot","kg m-2 s-1",tstr2D, tcstr, & - "salt flux from sea ice", & - "positive downward", c1, c0, & - ns1, f_siflsaltbot, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_siflfwbot,"siflfwbot","kg m-2 s-1",tstr2D, tcstr, & - "fresh water flux from sea ice", & - "positive downward", c1, c0, & - ns1, f_siflfwbot, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_siflfwdrain,"siflfwdrain","kg m-2 s-1",tstr2D, tcstr, & - "fresh water drainage through sea ice", & - "positive downward", c1, c0, & - ns1, f_siflfwdrain, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sidragtop,"sidragtop","1",tstr2D, tcstr, & - "atmospheric drag over sea ice", & - "none", c1, c0, & - ns1, f_sidragtop, avg_ice_present=.true., mask_ice_free_points=.true.) + "rainfall rate over sea ice", & + "mass of liquid precipitation falling onto sea ice", c1, c0, & + ns1, f_sipr, avg_ice_present='init', mask_ice_free_points=.true.) - call define_hist_field(n_sirdgthick,"sirdgthick","m",tstr2D, tcstr, & - "sea ice ridge thickness", & - "vrdg divided by ardg", c1, c0, & - ns1, f_sirdgthick, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_siforcetiltx,"siforcetiltx","N m-2",tstr2D, tcstr, & - "sea surface tilt term", & - "none", c1, c0, & - ns1, f_siforcetiltx, avg_ice_present=.true., mask_ice_free_points=.true.) + call define_hist_field(n_sisali,"sisali","ppt",tstr2D, tcstr, & + "sea-ice salinity", & + "mean sea-ice salinity of all sea ice in grid cell", & + c1, c0, & + ns1, f_sisali, avg_ice_present='final', mask_ice_free_points=.true.) 
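For orientation, the six force-balance diagnostics defined above map one-to-one onto the terms of the sea-ice momentum balance, m du/dt = tau_air + tau_ocean + Coriolis + sea-surface tilt + div(sigma). The comment block below restates (it is not new logic) the per-cell quantities accumulated for them in accum_hist later in this patch, so the sign conventions are visible in one place; worka is the scratch array used there.

      !   siforcecoriolx :  worka(i,j) =  aice(i,j,iblk)*fmU(i,j,iblk)*vvel(i,j,iblk)
      !   siforcecorioly :  worka(i,j) = -aice(i,j,iblk)*fmU(i,j,iblk)*uvel(i,j,iblk)
      !   siforcetiltx   :  aice_init(i,j,iblk)*strtltxU(i,j,iblk)
      !   siforcetilty   :  aice_init(i,j,iblk)*strtltyU(i,j,iblk)
      !   siforceintstrx :  aice(i,j,iblk)*strintxU(i,j,iblk)
      !   siforceintstry :  aice(i,j,iblk)*strintyU(i,j,iblk)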
- call define_hist_field(n_siforcetilty,"siforcetilty","N m-2",tstr2D, tcstr, & - "sea surface tile term", & - "none", c1, c0, & - ns1, f_siforcetilty, avg_ice_present=.true., mask_ice_free_points=.true.) + call define_hist_field(n_sispeed,"sispeed","m/s",ustr2D, ucstr, & + "sea-ice speed", & + "speed of ice (i.e. mean absolute velocity)", c1, c0, & + ns1, f_sispeed, avg_ice_present='final', mask_ice_free_points=.true.) + +! sidir is not actually in the CMIP7 table +! call define_hist_field(n_sidir,"sidir","deg",ustr2D, ucstr, & +! "ice direction", & +! "vector direction - going to", c1, c0, & +! ns1, f_sidir) + + call define_hist_field(n_sisnthick,"sisnthick","m",tstr2D, tcstr, & + "snow thickness", & + "snow volume divided by sea-ice area", c1, c0, & + ns1, f_sisnthick, avg_ice_present='final', mask_ice_free_points=.true.) + + call define_hist_field(n_sistrxdtop,"sistrxdtop","N m-2",ustr2D, ucstr, & + "x-component of atmospheric stress on sea ice", & + "x-component of the atmospheric stress on the surface of sea ice", & + c1, c0, & + ns1, f_sistrxdtop, avg_ice_present='final', mask_ice_free_points=.true.) + + call define_hist_field(n_sistrydtop,"sistrydtop","N m-2",ustr2D, ucstr, & + "y-component of atmospheric stress on sea ice", & + "y-component of the atmospheric stress on the surface of sea ice", & + c1, c0, & + ns1, f_sistrydtop, avg_ice_present='final', mask_ice_free_points=.true.) + + call define_hist_field(n_sistrxubot,"sistrxubot","N m-2",ustr2D, ucstr, & + "x-component of ocean stress on sea ice", & + "x-component of the ocean stress on the sea ice bottom ", c1, c0, & + ns1, f_sistrxubot, avg_ice_present='final', mask_ice_free_points=.true.) + + call define_hist_field(n_sistryubot,"sistryubot","N m-2",ustr2D, ucstr, & + "y-component of ocean stress on sea ice", & + "y-component of the ocean stress on the sea ice bottom ", c1, c0, & + ns1, f_sistryubot, avg_ice_present='final', mask_ice_free_points=.true.) + + call define_hist_field(n_sitempbot,"sitempbot","K",tstr2D, tcstr, & + "temperature at ice-ocean interface", & + "mean temperature at the base of the sea ice", c1, Tffresh, & + ns1, f_sitempbot, avg_ice_present='init', mask_ice_free_points=.true.) + + call define_hist_field(n_sitempsnic,"sitempsnic","K",tstr2D, tcstr, & + "temperature at snow-ice interface", & + "surface temperature when no snow present", c1, Tffresh, & + ns1, f_sitempsnic, avg_ice_present='init', mask_ice_free_points=.true.) + + call define_hist_field(n_sitemptop,"sitemptop","K",tstr2D, tcstr, & + "surface temperature of sea ice", & + "mean surface temperature of the sea-ice covered part of the grid cell", & + c1, Tffresh, & + ns1, f_sitemptop, avg_ice_present='final', mask_ice_free_points=.true.) - call define_hist_field(n_siforcecoriolx,"siforcecoriolx","N m-2",tstr2D, tcstr, & - "coriolis term", & - "none", c1, c0, & - ns1, f_siforcecoriolx, avg_ice_present=.true., mask_ice_free_points=.true.) + call define_hist_field(n_sithick,"sithick","m",tstr2D, tcstr, & + "sea-ice thickness", & + "volume divided by sea-ice area", c1, c0, & + ns1, f_sithick, avg_ice_present='final', mask_ice_free_points=.true.) + + call define_hist_field(n_siu,"siu","m/s",ustr2D, ucstr, & + "x-component of sea-ice velocity", & + "on native model grid", c1, c0, & + ns1, f_siu, avg_ice_present='final', mask_ice_free_points=.true.) + + call define_hist_field(n_siv,"siv","m/s",ustr2D, ucstr, & + "y-component of sea-ice velocity", & + "on native model grid", c1, c0, & + ns1, f_siv, avg_ice_present='final', mask_ice_free_points=.true.) 
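A note on units for the three temperature diagnostics above (sitempbot, sitempsnic, sitemptop): they pass cona = c1 and conb = Tffresh, so the fields are accumulated as area-weighted temperatures in degrees Celsius and the Kelvin offset is added once at output time, after the division by the accumulated ice area. The tiny program below illustrates the arithmetic with made-up numbers; only Tffresh (nominally 273.15 K) is taken from the model.

      program kelvin_offset_sketch
         ! Illustration only: area-weighted Celsius accumulation, Kelvin offset at output.
         implicit none
         integer, parameter :: dbl_kind = kind(1.0d0)
         real (kind=dbl_kind), parameter :: Tffresh = 273.15_dbl_kind
         real (kind=dbl_kind) :: accum, area, sitemptop_K
         accum = 0.9_dbl_kind*(-5.0_dbl_kind) + 0.8_dbl_kind*(-3.0_dbl_kind)  ! sum of aice*Tsfc (deg C) over two samples
         area  = 0.9_dbl_kind + 0.8_dbl_kind                                  ! sum of aice over the same samples
         sitemptop_K = accum/area + Tffresh                                   ! cona = 1: weighted mean, then offset to K
         print *, 'example sitemptop (K): ', sitemptop_K
      end program kelvin_offset_sketch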
+ + ! CMIP 2D extensive variables + + call define_hist_field(n_siconc,"siconc","%",tstr2D, tcstr, & + "sea-ice area percentage (ocean grid)", & + "percentage of a given grid cell that is covered by sea ice on the ocean grid", & + c100, c0, & + ns1, f_siconc, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sidconcdyn,"sidconcdyn","1/s",tstr2D, tcstr, & + "sea-ice area fraction tendency due to dynamics", & + "total rate of change in sea-ice area fraction through dynamics-related processes", & + c1, c0, & + ns1, f_sidconcdyn, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sidconcth,"sidconcth","1/s",tstr2D, tcstr, & + "sea-ice area fraction tendency due to thermodynamics", & + "total rate of change in sea-ice area fraction through thermodynamic processes", & + c1, c0, & + ns1, f_sidconcth, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sidmassdyn,"sidmassdyn","kg m-2 s-1",tstr2D, tcstr, & + "sea-ice mass change from dynamics", & + "total rate of change in sea-ice mass through dynamics-related processes", & + c1, c0, & + ns1, f_sidmassdyn, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sidmassevapsubl,"sidmassevapsubl","kg m-2 s-1",tstr2D, tcstr, & + "sea-ice mass change through evaporation and sublimation", & + "rate of change of sea-ice mass through evaporation and sublimation", & + c1, c0, & + ns1, f_sidmassevapsubl, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sidmassgrowthbot,"sidmassgrowthbot","kg m-2 s-1",tstr2D, tcstr, & + "sea-ice mass change through basal growth", & + "rate of change of sea-ice mass due to vertical growth of existing sea ice at its base", & + c1, c0, & + ns1, f_sidmassgrowthbot, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sidmassgrowthsi,"sidmassgrowthsi","kg m-2 s-1",tstr2D, tcstr, & + "sea-ice mass change from snow-to-ice conversion", & + "rate of change of sea-ice mass due to transformation of snow to sea ice", & + c1, c0, & + ns1, f_sidmassgrowthsi, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sidmassgrowthwat,"sidmassgrowthwat","kg m-2 s-1",tstr2D, tcstr, & + "sea-ice mass change through growth in supercooled open water (frazil)", & + "always positive or zero", c1, c0, & + ns1, f_sidmassgrowthwat, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sidmassmeltbot,"sidmassmeltbot","kg m-2 s-1",tstr2D, tcstr, & + "sea-ice mass change through bottom melting", & + "rate of change of sea-ice mass through melting/dissolution at the ice bottom", & + c1, c0, & + ns1, f_sidmassmeltbot, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sidmassmeltlat,"sidmassmeltlat","kg m-2 s-1",tstr2D, tcstr, & + "sea-ice mass change through lateral melting", & + "rate of change of sea-ice mass through lateral melting/dissolution", c1, c0, & + ns1, f_sidmassmeltlat, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sidmassmelttop,"sidmassmelttop","kg m-2 s-1",tstr2D, tcstr, & + "sea-ice mass change through surface melting", & + "rate of change of sea-ice mass through melting at the ice surface", c1, c0, & + ns1, f_sidmassmelttop, avg_ice_present='none', mask_ice_free_points=.false.) 
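The growth and melt terms above follow the convention that a sea-ice mass change is positive when the ice gains mass, so the melt terms are accumulated with an explicit minus sign in accum_hist further down. Restated from that code (not new logic), with rho_ice the bulk ice density array and dt the timestep:

      !   sidmassgrowthwat :  rho_ice(:,:)*frazil(:,:,iblk)/dt    ! gain, >= 0
      !   sidmassgrowthbot :  rho_ice(:,:)*congel(:,:,iblk)/dt    ! gain, >= 0
      !   sidmassgrowthsi  :  rho_ice(:,:)*snoice(:,:,iblk)/dt    ! gain, >= 0
      !   sidmassmelttop   : -rho_ice(:,:)*meltt(:,:,iblk)/dt     ! loss, <= 0
      !   sidmassmeltbot   : -rho_ice(:,:)*meltb(:,:,iblk)/dt     ! loss, <= 0
      !   sidmassmeltlat   : -rho_ice(:,:)*meltl(:,:,iblk)/dt     ! loss, <= 0
      !   sidmassevapsubl  :  rho_ice(:,:)*evapi(:,:,iblk)        ! sign follows evapi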
+ + call define_hist_field(n_sidmassth,"sidmassth","kg m-2 s-1",tstr2D, tcstr, & + "sea-ice mass change from thermodynamics", & + "total rate of change in sea-ice mass from thermodynamic processes", & + c1, c0, & + ns1, f_sidmassth, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sidmasstranx,"sidmasstranx","kg/s",ustr2D, ucstr, & + "x-component of sea-ice mass transport", & + "x-component of the sea-ice drift-induced transport of snow and sea ice mass", & + c1, c0, & + ns1, f_sidmasstranx, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sidmasstrany,"sidmasstrany","kg/s",ustr2D, ucstr, & + "y-component of sea-ice mass transport", & + "y-component of the sea-ice drift-induced transport of snow and sea ice mass", & + c1, c0, & + ns1, f_sidmasstrany, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sihc,"sihc","J m-2",tstr2D, tcstr, & + "sea-ice heat content", & + "heat content of all ice in grid cell", c1, c0, & + ns1, f_sihc, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_simass,"simass","kg m-2",tstr2D, tcstr, & + "sea-ice mass per area", & + "total mass of sea ice", c1, c0, & + ns1, f_simass, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sisaltmass,"sisaltmass","kg m-2",tstr2D, tcstr, & + "mass of salt in sea-ice per area", & + "total mass of all salt in sea ice", c1, c0, & + ns1, f_sisaltmass, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sisnconc,"sisnconc","%",tstr2D, tcstr, & + "snow area percentage", & + "percentage of the sea-ice surface that is covered by snow", & + c100, c0, & + ns1, f_sisnconc, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sisndmassdyn,"sisndmassdyn","kg m-2 s-1",tstr2D, tcstr, & + "snow mass rate of change through advection by sea-ice dynamics", & + "rate of change of snow mass due to sea ice dynamics", c1, c0, & + ns1, f_sisndmassdyn, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sisndmassmelt,"sisndmassmelt","kg m-2 s-1",tstr2D, tcstr, & + "snow mass rate of change through melt", & + "always negative or zero", c1, c0, & + ns1, f_sisndmassmelt, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sisndmasssi,"sisndmasssi","kg m-2 s-1",tstr2D, tcstr, & + "snow mass rate of change through snow-to-ice conversion", & + "always negative or zero", c1, c0, & + ns1, f_sisndmasssi, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sisndmasssnf,"sisndmasssnf","kg m-2 s-1",tstr2D, tcstr, & + "snow mass change through snowfall", & + "rate of change of snow mass due to solid precipitation falling onto sea ice", & + c1, c0, & + ns1, f_sisndmasssnf, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sisndmasssubl,"sisndmasssubl","kg m-2 s-1",tstr2D, tcstr, & + "snow mass rate of change through evaporation or sublimation", & + "none", c1, c0, & + ns1, f_sisndmasssubl, avg_ice_present='none', mask_ice_free_points=.false.) - call define_hist_field(n_siforcecorioly,"siforcecorioly","N m-2",tstr2D, tcstr, & - "coriolis term", & - "none", c1, c0, & - ns1, f_siforcecorioly, avg_ice_present=.true., mask_ice_free_points=.true.) 
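The area-percentage fields in this group (siconc, sisnconc, and the category version siitdconc below) accumulate the model's fractional fields and rely on cona = c100 plus the plain 'none' time mean to come out in percent. A minimal numeric sketch, with made-up values; only c100 and the averaging rule are taken from the patch.

      program percent_fields_sketch
         implicit none
         integer, parameter :: dbl_kind = kind(1.0d0)
         real (kind=dbl_kind), parameter :: c100 = 100.0_dbl_kind
         real (kind=dbl_kind) :: accum, siconc_pct
         accum      = 0.40_dbl_kind + 0.45_dbl_kind + 0.50_dbl_kind  ! sum of aice over three samples
         siconc_pct = c100*accum/3.0_dbl_kind                        ! plain time mean, then percent (45.0 here)
         print *, 'example siconc (%): ', siconc_pct
      end program percent_fields_sketch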
- - call define_hist_field(n_siforceintstrx,"siforceintstrx","N m-2",tstr2D, tcstr, & - "internal stress term", & - "none", c1, c0, & - ns1, f_siforceintstrx, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_siforceintstry,"siforceintstry","N m-2",tstr2D, tcstr, & - "internal stress term", & - "none", c1, c0, & - ns1, f_siforceintstry, avg_ice_present=.true., mask_ice_free_points=.true.) - - call define_hist_field(n_sistreave,"sistreave","N m-1",ustr2D, ucstr, & - "average normal stress", & - "sistreave is instantaneous", c1, c0, & - ns1, f_sistreave) - - call define_hist_field(n_sistremax,"sistremax","N m-1",ustr2D, ucstr, & - "maximum shear stress", & - "sistremax is instantaneous", c1, c0, & - ns1, f_sistremax) + call define_hist_field(n_sisnhc,"sisnhc","J m-2",tstr2D, tcstr, & + "snow heat content", & + "heat content of all snow in grid cell", c1, c0, & + ns1, f_sisnhc, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sisnmass,"sisnmass","kg m-2",tstr2D, tcstr, & + "snow mass per area", & + "total mass of snow on sea ice", c1, c0, & + ns1, f_sisnmass, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sitimefrac,"sitimefrac","1",tstr2D, tcstr, & + "fraction of time steps with sea ice", & + "averaging period during which sea ice is present (siconc > 0) in a grid cell", & + c1, c0, & + ns1, f_sitimefrac, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sivol,"sivol","m",tstr2D, tcstr, & + "sea-ice volume per area", & + "total volume of sea ice divided by grid-cell area (equivalent thickness)", & + c1, c0, & + ns1, f_sivol, avg_ice_present='none', mask_ice_free_points=.false.) + + ! CMIP 2D instantaneous fields + + call define_hist_field(n_sidivvel,"sidivvel","s-1",ustr2D, ucstr, & + "divergence of the sea-ice velocity field", & + "sidivvel is instantaneous", c1, c0, & + ns1, f_sidivvel, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sishearvel,"sishearvel","s-1",ustr2D, ucstr, & + "maximum shear of the sea-ice velocity field", & + "sishearvel is instantaneous", c1, c0, & + ns1, f_sishearvel, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sistressave,"sistressave","N m-1",ustr2D, ucstr, & + "average normal stress in sea ice", & + "sistressave is instantaneous", c1, c0, & + ns1, f_sistressave, avg_ice_present='none', mask_ice_free_points=.false.) + + call define_hist_field(n_sistressmax,"sistressmax","N m-1",ustr2D, ucstr, & + "maximum shear stress in sea ice", & + "sistressmax is instantaneous", c1, c0, & + ns1, f_sistressmax, avg_ice_present='none', mask_ice_free_points=.false.) endif ! if (histfreq(ns1) /= 'x') then enddo ! ns1 @@ -1890,17 +1925,29 @@ subroutine init_hist (dt) ns1, f_keffn_top) ! CMIP 3D - call define_hist_field(n_siitdconc,"siitdconc","1",tstr3Dc, tcstr, & - "ice area, categories","none", c1, c0, & - ns1, f_siitdconc) - - call define_hist_field(n_siitdthick,"siitdthick","m",tstr3Dc, tcstr, & - "ice thickness, categories","none", c1, c0, & - ns1, f_siitdthick, avg_ice_present=.true.) + call define_hist_field(n_siitdconc,"siitdconc","%",tstr3Dc, tcstr, & + "sea-ice area percentages in ice thickness categories", & + "percentage of grid cell covered by each ice thickness category", & + c100, c0, & + ns1, f_siitdconc, avg_ice_present='none', mask_ice_free_points=.false.) 
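The category (ncat) fields here are normalized in two different ways at output time, restated below from the 3Dc averaging loop added later in this patch: the thickness-type fields (avg_ice_present='final') are divided by the per-category accumulated ice area, while the percentage-type fields ('none') are plain time means scaled by c100.

      !   'final' (siitdthick, siitdsnthick):
      !      a3Dc(i,j,k,n,iblk) = cona*a3Dc(i,j,k,n,iblk)*ravgipn(i,j,k) + conb
      !         with ravgipn(i,j,k) = 1 / accumulated aicen for category k
      !   'none'  (siitdconc, siitdsnconc):
      !      a3Dc(i,j,k,n,iblk) = cona*a3Dc(i,j,k,n,iblk)*ravgct + conb
      !         with ravgct = 1 / number of accumulated samples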
+ + call define_hist_field(n_siitdsnconc,"siitdsnconc","%",tstr3Dc, tcstr, & + "snow area percentages in ice thickness categories", & + "percentage of grid cell covered by snow in each ice thickness category", & + c100, c0, & + ns1, f_siitdsnconc, avg_ice_present='none', mask_ice_free_points=.false.) call define_hist_field(n_siitdsnthick,"siitdsnthick","m",tstr3Dc, tcstr, & - "snow thickness, categories","none", c1, c0, & - ns1, f_siitdsnthick, avg_ice_present=.true.) + "snow thickness in ice thickness categories", & + "actual thickness of snow in each ice thickness category", & + c1, c0, & + ns1, f_siitdsnthick, avg_ice_present='final', mask_ice_free_points=.false.) + + call define_hist_field(n_siitdthick,"siitdthick","m",tstr3Dc, tcstr, & + "sea-ice thickness in ice thickness categories", & + "actual (floe) thickness of sea ice in each thickness category", & + c1, c0, & + ns1, f_siitdthick, avg_ice_present='final', mask_ice_free_points=.false.) endif ! if (histfreq(ns1) /= 'x') then enddo ! ns1 @@ -2157,14 +2204,14 @@ subroutine accum_hist (dt) use ice_domain, only: blocks_ice, nblocks use ice_domain_size, only: nfsd use ice_grid, only: tmask, lmask_n, lmask_s, dxU, dyU, grid_ice - use ice_calendar, only: new_year, write_history, & + use ice_calendar, only: new_year, write_history, write_restart, & write_ic, timesecs, histfreq, nstreams, mmonth, & - new_month + new_month, write_histrest use ice_dyn_eap, only: a11, a12, e11, e12, e22, s11, s12, s22, & yieldstress11, yieldstress12, yieldstress22 use ice_dyn_shared, only: kdyn, principal_stress use ice_flux, only: fsw, flw, fsnow, frain, sst, sss, uocn, vocn, & - frzmlt_init, scale_factor, fswabs, fswthru, alvdr, alvdf, alidr, alidf, & + frzmlt_init, scale_factor, fswabs, fswup, fswthru, alvdr, alvdf, alidr, alidf, & albice, albsno, albpnd, coszen, flat, fsens, flwout, evap, evaps, evapi, & Tair, Tref, Qref, congel, frazil, frazil_diag, snoice, dsnow, & melts, meltb, meltt, meltl, fresh, fsalt, fresh_ai, fsalt_ai, & @@ -2183,17 +2230,17 @@ subroutine accum_hist (dt) mlt_onset, frz_onset, dagedtt, dagedtd, fswint_ai, keffn_top, & snowfrac, alvdr_ai, alvdf_ai, alidr_ai, alidf_ai, update_ocn_f, & cpl_frazil - use ice_arrays_column, only: snowfracn, Cdn_atm + use ice_arrays_column, only: snowfracn, Cdn_atm, Cdn_ocn use ice_history_shared ! almost everything use ice_history_write, only: ice_write_hist use ice_history_bgc, only: accum_hist_bgc use ice_history_mechred, only: accum_hist_mechred + use ice_history_mechred, only: n_alvl, n_ardg, f_ardg use ice_history_pond, only: accum_hist_pond + use ice_history_pond, only: n_apond_ai, f_apond_ai use ice_history_snow, only: accum_hist_snow, & f_rhos_cmp, f_rhos_cnt, n_rhos_cmp, n_rhos_cnt use ice_history_drag, only: accum_hist_drag - use icepack_intfc, only: icepack_mushy_density_brine, icepack_mushy_liquid_fraction - use icepack_intfc, only: icepack_mushy_temperature_mush use ice_history_fsd, only: accum_hist_fsd use ice_state ! almost everything use ice_timers, only: ice_timer_start, ice_timer_stop, timer_readwrite @@ -2219,19 +2266,19 @@ subroutine accum_hist (dt) sn ! 
temporary variable for salinity real (kind=dbl_kind), dimension (nx_block,ny_block) :: & - worka, workb, ravgip + worka, workb, ravgip, ravgip_init, ravgip_pond, ravgip_ridge, rho_ice, rho_ocn, sal_ice real (kind=dbl_kind), dimension (nx_block,ny_block,ncat_hist) :: & - ravgipn, worka3 + ravgipn real (kind=dbl_kind) :: awtvdr, awtidr, awtvdf, awtidf, puny, secday, rad_to_deg real (kind=dbl_kind) :: Tffresh, rhoi, rhos, rhow, ice_ref_salinity - real (kind=dbl_kind) :: rho_ice, rho_ocn, Tice, Sbr, phi, rhob, dfresh, dfsalt, sicen + real (kind=dbl_kind) :: dfresh, dfsalt, sicen logical (kind=log_kind) :: formdrag, skl_bgc - logical (kind=log_kind) :: tr_pond, tr_aero, tr_brine, tr_snow + logical (kind=log_kind) :: tr_pond, tr_aero, tr_brine, tr_snow, tr_pond_topo integer (kind=int_kind) :: ktherm integer (kind=int_kind) :: nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY, nt_Tsfc, & - nt_alvl, nt_vlvl + nt_alvl, nt_apnd character (len=char_len) :: saltflux_option type (block) :: & @@ -2245,11 +2292,11 @@ subroutine accum_hist (dt) rhow_out=rhow, ice_ref_salinity_out=ice_ref_salinity) call icepack_query_parameters(formdrag_out=formdrag, skl_bgc_out=skl_bgc, ktherm_out=ktherm) call icepack_query_parameters(saltflux_option_out=saltflux_option) - call icepack_query_tracer_flags(tr_pond_out=tr_pond, tr_aero_out=tr_aero, & - tr_brine_out=tr_brine, tr_snow_out=tr_snow) + call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_brine_out=tr_brine, tr_snow_out=tr_snow, & + tr_pond_out=tr_pond, tr_pond_topo_out=tr_pond_topo) call icepack_query_tracer_indices(nt_sice_out=nt_sice, nt_qice_out=nt_qice, & nt_qsno_out=nt_qsno, nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_Tsfc_out=nt_Tsfc, & - nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl) + nt_alvl_out=nt_alvl, nt_apnd_out=nt_apnd) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -2330,8 +2377,8 @@ subroutine accum_hist (dt) #ifndef __INTEL_LLVM_COMPILER !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block, & - !$OMP k,n,qn,ns,sn,rho_ocn,rho_ice,Tice,Sbr,phi,rhob,dfresh,dfsalt,sicen, & - !$OMP worka,workb,worka3,Tinz4d,Sinz4d,Tsnz4d) + !$OMP k,n,qn,ns,sn,rho_ocn,rho_ice,sal_ice,dfresh,dfsalt,sicen, & + !$OMP worka,workb,Tinz4d,Sinz4d,Tsnz4d) #endif do iblk = 1, nblocks @@ -2357,6 +2404,8 @@ subroutine accum_hist (dt) call accum_hist_field(n_Tsfc, iblk, trcr(:,:,nt_Tsfc,iblk), a2D) if (f_aice (1:1) /= 'x') & call accum_hist_field(n_aice, iblk, aice(:,:,iblk), a2D) + if (f_aice_init (1:1) /= 'x') & + call accum_hist_field(n_aice_init, iblk, aice_init(:,:,iblk), a2D) if (f_uvel (1:1) /= 'x') & call accum_hist_field(n_uvel, iblk, uvel(:,:,iblk), a2D) if (f_vvel (1:1) /= 'x') & @@ -2447,8 +2496,7 @@ subroutine accum_hist (dt) endif if (f_fswup(1:1) /= 'x') & - call accum_hist_field(n_fswup, iblk, & - (fsw(:,:,iblk)-fswabs(:,:,iblk)*workb(:,:)), a2D) + call accum_hist_field(n_fswup, iblk, fswup(:,:,iblk), a2D) if (f_fswdn (1:1) /= 'x') & call accum_hist_field(n_fswdn, iblk, fsw(:,:,iblk), a2D) if (f_flwdn (1:1) /= 'x') & @@ -2501,14 +2549,16 @@ subroutine accum_hist (dt) call accum_hist_field(n_fswint_ai, iblk, fswint_ai(:,:,iblk), a2D) if (f_fswabs_ai(1:1)/= 'x') & - call accum_hist_field(n_fswabs_ai, iblk, fswabs(:,:,iblk)*workb(:,:), a2D) + call accum_hist_field(n_fswabs_ai, iblk, fswabs(:,:,iblk)*aice(:,:,iblk), a2D) - if (f_albsni (1:1) /= 'x') & + if (f_albsni(1:1) /= 'x') then call accum_hist_field(n_albsni, iblk, & - (awtvdr*alvdr(:,:,iblk) & - + awtidr*alidr(:,:,iblk) & 
- + awtvdf*alvdf(:,:,iblk) & - + awtidf*alidf(:,:,iblk))*workb(:,:), a2D) + (awtvdr*alvdr_ai(:,:,iblk) & + + awtidr*alidr_ai(:,:,iblk) & + + awtvdf*alvdf_ai(:,:,iblk) & + + awtidf*alidf_ai(:,:,iblk)), a2D) + endif + if (f_alvdr (1:1) /= 'x') & call accum_hist_field(n_alvdr, iblk, alvdr(:,:,iblk), a2D) if (f_alidr (1:1) /= 'x') & @@ -2538,19 +2588,19 @@ subroutine accum_hist (dt) if (f_flat (1:1) /= 'x') & call accum_hist_field(n_flat, iblk, flat(:,:,iblk), a2D) if (f_flat_ai(1:1) /= 'x') & - call accum_hist_field(n_flat_ai,iblk, flat(:,:,iblk)*workb(:,:), a2D) + call accum_hist_field(n_flat_ai,iblk, flat(:,:,iblk)*aice(:,:,iblk), a2D) if (f_fsens (1:1) /= 'x') & call accum_hist_field(n_fsens, iblk, fsens(:,:,iblk), a2D) if (f_fsens_ai(1:1)/= 'x') & - call accum_hist_field(n_fsens_ai,iblk, fsens(:,:,iblk)*workb(:,:), a2D) + call accum_hist_field(n_fsens_ai,iblk, fsens(:,:,iblk)*aice(:,:,iblk), a2D) if (f_flwup (1:1) /= 'x') & call accum_hist_field(n_flwup, iblk, flwout(:,:,iblk), a2D) if (f_flwup_ai(1:1)/= 'x') & - call accum_hist_field(n_flwup_ai,iblk, flwout(:,:,iblk)*workb(:,:), a2D) + call accum_hist_field(n_flwup_ai,iblk, flwout(:,:,iblk)*aice(:,:,iblk), a2D) if (f_evap (1:1) /= 'x') & call accum_hist_field(n_evap, iblk, evap(:,:,iblk), a2D) if (f_evap_ai(1:1) /= 'x') & - call accum_hist_field(n_evap_ai,iblk, evap(:,:,iblk)*workb(:,:), a2D) + call accum_hist_field(n_evap_ai,iblk, evap(:,:,iblk)*aice(:,:,iblk), a2D) if (f_Tair (1:1) /= 'x') & call accum_hist_field(n_Tair, iblk, Tair(:,:,iblk), a2D) @@ -2701,10 +2751,10 @@ subroutine accum_hist (dt) call accum_hist_field(n_dagedtd, iblk, dagedtd(:,:,iblk), a2D) if (f_fsurf_ai(1:1)/= 'x') & - call accum_hist_field(n_fsurf_ai,iblk, fsurf(:,:,iblk)*workb(:,:), a2D) + call accum_hist_field(n_fsurf_ai,iblk, fsurf(:,:,iblk)*aice(:,:,iblk), a2D) if (f_fcondtop_ai(1:1)/= 'x') & call accum_hist_field(n_fcondtop_ai, iblk, & - fcondtop(:,:,iblk)*workb(:,:), a2D) + fcondtop(:,:,iblk)*aice(:,:,iblk), a2D) if (f_icepresent(1:1) /= 'x') then worka(:,:) = c0 @@ -2718,126 +2768,124 @@ subroutine accum_hist (dt) ! 
2D CMIP fields - if (f_sithick(1:1) /= 'x') then + if (f_sitimefrac(1:1) /= 'x') then worka(:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) worka(i,j) = vice(i,j,iblk) + if (aice(i,j,iblk) > puny) worka(i,j) = c1 enddo enddo - call accum_hist_field(n_sithick, iblk, worka(:,:), a2D) + call accum_hist_field(n_sitimefrac, iblk, worka(:,:), a2D) + endif + + if (f_sithick(1:1) /= 'x') then + call accum_hist_field(n_sithick, iblk, vice(:,:,iblk), a2D) + endif + + if (f_sivol(1:1) /= 'x') then + call accum_hist_field(n_sivol, iblk, vice(:,:,iblk), a2D) + endif + + rho_ice(:,:) = rhoi + rho_ocn(:,:) = rhow + sal_ice(:,:) = ice_ref_salinity + + if (ktherm == 2) then + do j = jlo, jhi + do i = ilo, ihi + call ice_brine_density(trcr(i,j,nt_qice:nt_qice+nzilyr-1,iblk),trcr(i,j,nt_sice:nt_sice+nzilyr-1,iblk), & + sss(i,j,iblk), rho_ice(i,j), rho_ocn(i,j), sal_ice(i,j)) + enddo + enddo + endif + + if (f_simass(1:1) /= 'x') then + call accum_hist_field(n_simass, iblk, rho_ice(:,:)*vice(:,:,iblk), a2D) + endif + + if (f_sisaltmass(1:1) /= 'x') then + call accum_hist_field(n_sisaltmass, iblk, rho_ice(:,:)*sal_ice(:,:)*vice(:,:,iblk), a2D) + endif + + if (f_sisali(1:1) /= 'x') then + call accum_hist_field(n_sisali, iblk, aice(:,:,iblk)*sal_ice(:,:), a2D) + endif + + if (f_siconc(1:1) /= 'x') then + call accum_hist_field(n_siconc, iblk, aice(:,:,iblk), a2D) + endif + + if (f_sisnconc(1:1) /= 'x') then + call accum_hist_field(n_sisnconc, iblk, snowfrac(:,:,iblk), a2D) + endif + + if (f_sisnmass(1:1) /= 'x') then + call accum_hist_field(n_sisnmass, iblk, rhos*vsno(:,:,iblk), a2D) endif if (f_siage(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) worka(i,j) = aice(i,j,iblk)*trcr(i,j,nt_iage,iblk) - enddo - enddo - call accum_hist_field(n_siage, iblk, worka(:,:), a2D) + call accum_hist_field(n_siage, iblk, aice(:,:,iblk)*trcr(:,:,nt_iage,iblk), a2D) endif if (f_sisnthick(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (vsno(i,j,iblk) > puny) & - worka(i,j) = vsno(i,j,iblk) - enddo - enddo - call accum_hist_field(n_sisnthick, iblk, worka(:,:), a2D) + call accum_hist_field(n_sisnthick, iblk, vsno(:,:,iblk), a2D) endif if (f_sitemptop(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) & - worka(i,j) = aice(i,j,iblk)*(trcr(i,j,nt_Tsfc,iblk)+Tffresh) - enddo - enddo - call accum_hist_field(n_sitemptop, iblk, worka(:,:), a2D) + call accum_hist_field(n_sitemptop, iblk, aice(:,:,iblk)*trcr(:,:,nt_Tsfc,iblk), a2D) endif + ! Tsnice is already multiplied by aicen in icepack. if (f_sitempsnic(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (vsno(i,j,iblk) > puny .and. 
aice_init(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*(Tsnice(i,j,iblk)/aice_init(i,j,iblk)+Tffresh) - else - worka(i,j) = aice(i,j,iblk)*(trcr(i,j,nt_Tsfc,iblk)+Tffresh) - endif - enddo - enddo - call accum_hist_field(n_sitempsnic, iblk, worka(:,:), a2D) + call accum_hist_field(n_sitempsnic, iblk, Tsnice(:,:,iblk), a2D) endif if (f_sitempbot(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice_init(i,j,iblk) > puny) & - worka(i,j) = aice(i,j,iblk)*(Tbot(i,j,iblk)/aice_init(i,j,iblk)+Tffresh) - enddo - enddo - call accum_hist_field(n_sitempbot, iblk, worka(:,:), a2D) + call accum_hist_field(n_sitempbot, iblk, aice_init(:,:,iblk)*Tbot(:,:,iblk), a2D) endif if (f_siu(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) worka(i,j) = aice(i,j,iblk)*uvel(i,j,iblk) - enddo - enddo - call accum_hist_field(n_siu, iblk, worka(:,:), a2D) + call accum_hist_field(n_siu, iblk, aice(:,:,iblk)*uvel(:,:,iblk), a2D) endif if (f_siv(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) worka(i,j) = aice(i,j,iblk)*vvel(i,j,iblk) - enddo - enddo - call accum_hist_field(n_siv, iblk, worka(:,:), a2D) + call accum_hist_field(n_siv, iblk, aice(:,:,iblk)*vvel(:,:,iblk), a2D) endif if (f_sispeed(1:1) /= 'x') then worka(:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) worka(i,j) = aice(i,j,iblk) & + worka(i,j) = aice(i,j,iblk) & * sqrt(uvel(i,j,iblk)*uvel(i,j,iblk)+vvel(i,j,iblk)*vvel(i,j,iblk)) enddo enddo call accum_hist_field(n_sispeed, iblk, worka(:,:), a2D) endif - if (f_sidir(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (abs(uvel(i,j,iblk)) > puny .or. abs(vvel(i,j,iblk)) > puny) & - worka(i,j) = atan2(uvel(i,j,iblk),vvel(i,j,iblk))*rad_to_deg - if (worka(i,j) < 0.0 ) then - worka(i,j) = worka(i,j) + c360 - else - worka(i,j) = worka(i,j) * c1 - endif - enddo - enddo - call accum_hist_field(n_sidir, iblk, worka(:,:), a2D) - endif + +! if (f_sidir(1:1) /= 'x') then +! worka(:,:) = c0 +! do j = jlo, jhi +! do i = ilo, ihi +! if (abs(uvel(i,j,iblk)) > puny .or. abs(vvel(i,j,iblk)) > puny) & +! worka(i,j) = atan2(uvel(i,j,iblk),vvel(i,j,iblk))*rad_to_deg +! if (worka(i,j) < 0.0 ) then +! worka(i,j) = worka(i,j) + c360 +! else +! worka(i,j) = worka(i,j) * c1 +! endif +! enddo +! enddo +! call accum_hist_field(n_sidir, iblk, worka(:,:), a2D) +! 
endif + if (f_sidmasstranx(1:1) /= 'x') then worka(:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) & - worka(i,j) = (rhoi*p5*(vice(i+1,j,iblk)+vice(i,j,iblk))*dyU(i,j,iblk) & - + rhos*p5*(vsno(i+1,j,iblk)+vsno(i,j,iblk))*dyU(i,j,iblk)) & - * p5*(uvel(i,j-1,iblk)+uvel(i,j,iblk)) + worka(i,j) = (rho_ice(i,j)*p5*(vice(i+1,j,iblk)+vice(i,j,iblk))*dyU(i,j,iblk) & + + rhos*p5*(vsno(i+1,j,iblk)+vsno(i,j,iblk))*dyU(i,j,iblk)) & + * p5*(uvel(i,j-1,iblk)+uvel(i,j,iblk)) enddo enddo call accum_hist_field(n_sidmasstranx, iblk, worka(:,:), a2D) @@ -2847,81 +2895,32 @@ subroutine accum_hist (dt) worka(:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) & - worka(i,j) = (rhoi*p5*(vice(i,j+1,iblk)+vice(i,j,iblk))*dxU(i,j,iblk) & - + rhos*p5*(vsno(i,j+1,iblk)+vsno(i,j,iblk))*dxU(i,j,iblk)) & - * p5*(vvel(i-1,j,iblk)+vvel(i,j,iblk)) + worka(i,j) = (rho_ice(i,j)*p5*(vice(i,j+1,iblk)+vice(i,j,iblk))*dxU(i,j,iblk) & + + rhos*p5*(vsno(i,j+1,iblk)+vsno(i,j,iblk))*dxU(i,j,iblk)) & + * p5*(vvel(i-1,j,iblk)+vvel(i,j,iblk)) enddo enddo call accum_hist_field(n_sidmasstrany, iblk, worka(:,:), a2D) endif if (f_sistrxdtop(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice_init(i,j,iblk) > puny) & - worka(i,j) = aice(i,j,iblk)*(aice(i,j,iblk)*strairxU(i,j,iblk)/aice_init(i,j,iblk)) - enddo - enddo - call accum_hist_field(n_sistrxdtop, iblk, worka(:,:), a2D) + call accum_hist_field(n_sistrxdtop, iblk, aice(:,:,iblk)*strairxU(:,:,iblk), a2D) endif if (f_sistrydtop(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice_init(i,j,iblk) > puny) & - worka(i,j) = aice(i,j,iblk)*(aice(i,j,iblk)*strairyU(i,j,iblk)/aice_init(i,j,iblk)) - enddo - enddo - call accum_hist_field(n_sistrydtop, iblk, worka(:,:), a2D) + call accum_hist_field(n_sistrydtop, iblk, aice(:,:,iblk)*strairyU(:,:,iblk), a2D) endif if (f_sistrxubot(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) & - worka(i,j) = aice(i,j,iblk)*strocnxU(i,j,iblk) - enddo - enddo - call accum_hist_field(n_sistrxubot, iblk, worka(:,:), a2D) + call accum_hist_field(n_sistrxubot, iblk, aice(:,:,iblk)*strocnxU(:,:,iblk), a2D) endif if (f_sistryubot(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) & - worka(i,j) = aice(i,j,iblk)*strocnyU(i,j,iblk) - enddo - enddo - call accum_hist_field(n_sistryubot, iblk, worka(:,:), a2D) + call accum_hist_field(n_sistryubot, iblk, aice(:,:,iblk)*strocnyU(:,:,iblk), a2D) endif if (f_sicompstren(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) & - worka(i,j) = aice(i,j,iblk)*strength(i,j,iblk) - enddo - enddo - call accum_hist_field(n_sicompstren, iblk, worka(:,:), a2D) - endif - - if (f_sialb(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (fsw(i,j,iblk) > puny .and. 
aice_init(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*(fsw(i,j,iblk)-fswabs(i,j,iblk) & - * aice(i,j,iblk)/aice_init(i,j,iblk)) / fsw(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_sialb, iblk, worka(:,:), a2D) + call accum_hist_field(n_sicompstren, iblk, aice(:,:,iblk)*strength(:,:,iblk), a2D) endif if (f_sihc(1:1) /= 'x') then @@ -2949,342 +2948,127 @@ subroutine accum_hist (dt) endif if (f_sidconcth(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = daidtt(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_sidconcth, iblk, worka(:,:), a2D) + call accum_hist_field(n_sidconcth, iblk, daidtt(:,:,iblk), a2D) endif if (f_sidconcdyn(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = daidtd(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_sidconcdyn, iblk, worka(:,:), a2D) + call accum_hist_field(n_sidconcdyn, iblk, daidtd(:,:,iblk), a2D) endif if (f_sidmassth(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = dvidtt(i,j,iblk) * rhoi - endif - enddo - enddo - call accum_hist_field(n_sidmassth, iblk, worka(:,:), a2D) + call accum_hist_field(n_sidmassth, iblk, rho_ice(:,:)*dvidtt(:,:,iblk), a2D) endif if (f_sidmassdyn(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = dvidtd(i,j,iblk) * rhoi - endif - enddo - enddo - call accum_hist_field(n_sidmassdyn, iblk, worka(:,:), a2D) + call accum_hist_field(n_sidmassdyn, iblk, rho_ice(:,:)*dvidtd(:,:,iblk), a2D) endif if (f_sidmassgrowthwat(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice_init(i,j,iblk) > puny) then - worka(i,j) = frazil(i,j,iblk)*rhoi/dt - endif - enddo - enddo - call accum_hist_field(n_sidmassgrowthwat, iblk, worka(:,:), a2D) + call accum_hist_field(n_sidmassgrowthwat, iblk, rho_ice(:,:)*frazil(:,:,iblk)/dt, a2D) endif if (f_sidmassgrowthbot(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = congel(i,j,iblk)*rhoi/dt - endif - enddo - enddo - call accum_hist_field(n_sidmassgrowthbot, iblk, worka(:,:), a2D) + call accum_hist_field(n_sidmassgrowthbot, iblk, rho_ice(:,:)*congel(:,:,iblk)/dt, a2D) endif - if (f_sidmasssi(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = snoice(i,j,iblk)*rhoi/dt - endif - enddo - enddo - call accum_hist_field(n_sidmasssi, iblk, worka(:,:), a2D) + if (f_sidmassgrowthsi(1:1) /= 'x') then + call accum_hist_field(n_sidmassgrowthsi, iblk, rho_ice(:,:)*snoice(:,:,iblk)/dt, a2D) + endif + + if (f_sisndmasssi(1:1) /= 'x') then + call accum_hist_field(n_sisndmasssi, iblk, -snoice(:,:,iblk)*rhos/dt, a2D) endif if (f_sidmassevapsubl(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = evapi(i,j,iblk)*rhoi - endif - enddo - enddo - call accum_hist_field(n_sidmassevapsubl, iblk, worka(:,:), a2D) + call accum_hist_field(n_sidmassevapsubl, iblk, rho_ice(:,:)*evapi(:,:,iblk), a2D) endif if (f_sidmassmelttop(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = meltt(i,j,iblk)*rhoi/dt - endif - enddo - enddo - call accum_hist_field(n_sidmassmelttop, iblk, worka(:,:), a2D) + call 
accum_hist_field(n_sidmassmelttop, iblk, -rho_ice(:,:)*meltt(:,:,iblk)/dt, a2D) endif if (f_sidmassmeltbot(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = meltb(i,j,iblk)*rhoi/dt - endif - enddo - enddo - call accum_hist_field(n_sidmassmeltbot, iblk, worka(:,:), a2D) + call accum_hist_field(n_sidmassmeltbot, iblk, -rho_ice(:,:)*meltb(:,:,iblk)/dt, a2D) endif - if (f_sidmasslat(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = meltl(i,j,iblk)*rhoi/dt - endif - enddo - enddo - call accum_hist_field(n_sidmasslat, iblk, worka(:,:), a2D) + if (f_sidmassmeltlat(1:1) /= 'x') then + call accum_hist_field(n_sidmassmeltlat, iblk, -rho_ice(:,:)*meltl(:,:,iblk)/dt, a2D) endif - if (f_sndmasssubl(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = evaps(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_sndmasssubl, iblk, worka(:,:), a2D) + if (f_sisndmasssubl(1:1) /= 'x') then + call accum_hist_field(n_sisndmasssubl, iblk, evaps(:,:,iblk), a2D) endif - if (f_sndmasssnf(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*fsnow(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_sndmasssnf, iblk, worka(:,:), a2D) + if (f_sisndmasssnf(1:1) /= 'x') then + call accum_hist_field(n_sisndmasssnf, iblk, fsnow(:,:,iblk), a2D) endif - if (f_sndmassmelt(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = melts(i,j,iblk)*rhos/dt - endif - enddo - enddo - call accum_hist_field(n_sndmassmelt, iblk, worka(:,:), a2D) + if (f_sisndmassmelt(1:1) /= 'x') then + call accum_hist_field(n_sisndmassmelt, iblk, -melts(:,:,iblk)*rhos/dt, a2D) endif - if (f_sndmassdyn(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = dvsdtd(i,j,iblk)*rhos - endif - enddo - enddo - call accum_hist_field(n_sndmassdyn, iblk, worka(:,:), a2D) + if (f_sisndmassdyn(1:1) /= 'x') then + call accum_hist_field(n_sisndmassdyn, iblk, dvsdtd(:,:,iblk)*rhos, a2D) endif if (f_siflswdtop(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (fsw(i,j,iblk) > puny .and. aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*fsw(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_siflswdtop, iblk, worka(:,:), a2D) + call accum_hist_field(n_siflswdtop, iblk, aice_init(:,:,iblk)*fsw(:,:,iblk), a2D) endif if (f_siflswutop(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (fsw(i,j,iblk) > puny .and. 
aice_init(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*(fsw(i,j,iblk)-fswabs(i,j,iblk) & - * aice(i,j,iblk)/aice_init(i,j,iblk)) - endif - enddo - enddo - call accum_hist_field(n_siflswutop, iblk, worka(:,:), a2D) + call accum_hist_field(n_siflswutop, iblk, fswup(:,:,iblk), a2D) endif if (f_siflswdbot(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*fswthru(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_siflswdbot, iblk, worka(:,:), a2D) + call accum_hist_field(n_siflswdbot, iblk, aice_init(:,:,iblk)*fswthru(:,:,iblk), a2D) endif if (f_sifllwdtop(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*flw(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_sifllwdtop, iblk, worka(:,:), a2D) + call accum_hist_field(n_sifllwdtop, iblk, aice_init(:,:,iblk)*flw(:,:,iblk), a2D) endif if (f_sifllwutop(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*flwout(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_sifllwutop, iblk, worka(:,:), a2D) + call accum_hist_field(n_sifllwutop, iblk, aice(:,:,iblk)*flwout(:,:,iblk), a2D) endif if (f_siflsenstop(1:1) /= 'x') then worka(:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*fsens(i,j,iblk) - endif + worka(i,j) = aice(i,j,iblk)*fsens(i,j,iblk) enddo enddo - call accum_hist_field(n_siflsenstop, iblk, worka(:,:), a2D) + call accum_hist_field(n_siflsenstop, iblk, aice(:,:,iblk)*fsens(:,:,iblk), a2D) endif - if (f_siflsensupbot(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*fhocn(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_siflsensupbot, iblk, worka(:,:), a2D) + if (f_siflsensbot(1:1) /= 'x') then + call accum_hist_field(n_siflsensbot, iblk, aice(:,:,iblk)*fhocn(:,:,iblk), a2D) endif - if (f_sifllatstop(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*flat(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_sifllatstop, iblk, worka(:,:), a2D) + if (f_sifllattop(1:1) /= 'x') then + call accum_hist_field(n_sifllattop, iblk, aice(:,:,iblk)*flat(:,:,iblk), a2D) endif if (f_siflcondtop(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*fcondtop(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_siflcondtop, iblk, worka(:,:), a2D) + call accum_hist_field(n_siflcondtop, iblk, aice(:,:,iblk)*fcondtop(:,:,iblk), a2D) endif if (f_siflcondbot(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice_init(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*fcondbot(i,j,iblk)/aice_init(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_siflcondbot, iblk, worka(:,:), a2D) + call accum_hist_field(n_siflcondbot, iblk, aice(:,:,iblk)*fcondbot(:,:,iblk), a2D) endif if (f_sipr(1:1) /= 'x') then worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*frain(i,j,iblk) - endif - enddo - enddo + if (tr_pond_topo) worka(:,:) = aice_init(:,:,iblk)*frain(:,:,iblk) call accum_hist_field(n_sipr, iblk, worka(:,:), a2D) endif if (f_sifb(1:1) /= 'x') then - 
worka(:,:) = c0 - rho_ice = rhoi - rho_ocn = rhow do j = jlo, jhi do i = ilo, ihi if (aice(i,j,iblk) > puny) then - if (ktherm == 2) then - rho_ocn = icepack_mushy_density_brine(sss(i,j,iblk)) - rho_ice = c0 - do k = 1, nzilyr - Tice = icepack_mushy_temperature_mush(trcr(i,j,nt_qice+k-1,iblk),trcr(i,j,nt_sice+k-1,iblk)) - Sbr = trcr(i,j,nt_sice+k-1,iblk) - phi = icepack_mushy_liquid_fraction(Tice,Sbr) - rhob = icepack_mushy_density_brine(Sbr) - rho_ice = rho_ice + min(phi*rhob+(c1-phi)*rhoi,rho_ocn) - enddo - rho_ice = rho_ice / real(nzilyr,kind=dbl_kind) - endif - worka(i,j) = ((rho_ocn-rho_ice)*vice(i,j,iblk) - rhos*vsno(i,j,iblk))/rho_ocn -! if (worka(i,j) < c0) then -! write(nu_diag,*) 'negative fb',rho_ocn,rho_ice,rhos -! write(nu_diag,*) vice(i,j,iblk),vsno(i,j,iblk) -! endif + worka(i,j) = max(((rho_ocn(i,j)-rho_ice(i,j))*vice(i,j,iblk)-rhos*vsno(i,j,iblk)) & + / rho_ocn(i,j), c0) endif enddo enddo @@ -3345,70 +3129,33 @@ subroutine accum_hist (dt) worka(:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*(frain(i,j,iblk)+melts(i,j,iblk)+meltt(i,j,iblk)) - endif + worka(i,j) = aice(i,j,iblk)*(melts(i,j,iblk)+meltt(i,j,iblk)) enddo enddo call accum_hist_field(n_siflfwdrain, iblk, worka(:,:), a2D) endif if (f_sidragtop(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*Cdn_atm(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_sidragtop, iblk, worka(:,:), a2D) + call accum_hist_field(n_sidragtop, iblk, aice_init(:,:,iblk)*Cdn_atm(:,:,iblk), a2D) endif - if (f_sirdgthick(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk)*(c1 - trcr(i,j,nt_alvl,iblk)) > puny) then - worka(i,j) = vice(i,j,iblk) * (c1 - trcr(i,j,nt_vlvl,iblk)) & - / (aice(i,j,iblk) * (c1 - trcr(i,j,nt_alvl,iblk))) - endif - enddo - enddo - call accum_hist_field(n_sirdgthick, iblk, worka(:,:), a2D) + if (f_sidragbot(1:1) /= 'x') then + call accum_hist_field(n_sidragbot, iblk, aice_init(:,:,iblk)*Cdn_ocn(:,:,iblk), a2D) endif if (f_siforcetiltx(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*strtltxU(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_siforcetiltx, iblk, worka(:,:), a2D) + call accum_hist_field(n_siforcetiltx, iblk, aice_init(:,:,iblk)*strtltxU(:,:,iblk), a2D) endif if (f_siforcetilty(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*strtltyU(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_siforcetilty, iblk, worka(:,:), a2D) + call accum_hist_field(n_siforcetilty, iblk, aice_init(:,:,iblk)*strtltyU(:,:,iblk), a2D) endif if (f_siforcecoriolx(1:1) /= 'x') then worka(:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*fmU(i,j,iblk)*vvel(i,j,iblk) - endif + worka(i,j) = aice(i,j,iblk)*fmU(i,j,iblk)*vvel(i,j,iblk) enddo enddo call accum_hist_field(n_siforcecoriolx, iblk, worka(:,:), a2D) @@ -3418,36 +3165,18 @@ subroutine accum_hist (dt) worka(:,:) = c0 do j = jlo, jhi do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = -aice(i,j,iblk)*fmU(i,j,iblk)*uvel(i,j,iblk) - endif + worka(i,j) = -aice(i,j,iblk)*fmU(i,j,iblk)*uvel(i,j,iblk) enddo enddo call accum_hist_field(n_siforcecorioly, iblk, worka(:,:), a2D) endif if (f_siforceintstrx(1:1) /= 'x') then - worka(:,:) = c0 - 
do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*strintxU(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_siforceintstrx, iblk, worka(:,:), a2D) + call accum_hist_field(n_siforceintstrx, iblk, aice(:,:,iblk)*strintxU(:,:,iblk), a2D) endif if (f_siforceintstry(1:1) /= 'x') then - worka(:,:) = c0 - do j = jlo, jhi - do i = ilo, ihi - if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*strintyU(i,j,iblk) - endif - enddo - enddo - call accum_hist_field(n_siforceintstry, iblk, worka(:,:), a2D) + call accum_hist_field(n_siforceintstry, iblk, aice(:,:,iblk)*strintyU(:,:,iblk), a2D) endif endif ! if (allocated(a2D)) @@ -3493,45 +3222,19 @@ subroutine accum_hist (dt) *aicen_init(:,:,1:ncat_hist,iblk), a3Dc) if (f_siitdconc (1:1) /= 'x') then - worka3(:,:,:) = c0 - do n = 1,ncat_hist - do j = jlo, jhi - do i = ilo, ihi - if (aicen(i,j,n,iblk) > puny) then - worka3(i,j,n) = aicen(i,j,n,iblk) - endif - enddo - enddo - enddo - call accum_hist_field(n_siitdconc-n2D, iblk, ncat_hist, worka3(:,:,:), a3Dc) + call accum_hist_field(n_siitdconc-n2D, iblk, ncat_hist, aicen(:,:,:,iblk), a3Dc) + endif + + if (f_siitdsnconc (1:1) /= 'x') then + call accum_hist_field(n_siitdsnconc-n2D, iblk, ncat_hist, snowfracn(:,:,:,iblk), a3Dc) endif if (f_siitdthick (1:1) /= 'x') then - worka3(:,:,:) = c0 - do n = 1,ncat_hist - do j = jlo, jhi - do i = ilo, ihi - if (aicen(i,j,n,iblk) > puny) then - worka3(i,j,n) = vicen(i,j,n,iblk) - endif - enddo - enddo - enddo - call accum_hist_field(n_siitdthick-n2D, iblk, ncat_hist, worka3(:,:,:), a3Dc) + call accum_hist_field(n_siitdthick-n2D, iblk, ncat_hist, vicen(:,:,:,iblk), a3Dc) endif if (f_siitdsnthick (1:1) /= 'x') then - worka3(:,:,:) = c0 - do n = 1,ncat_hist - do j = jlo, jhi - do i = ilo, ihi - if (aicen(i,j,n,iblk) > puny) then - worka3(i,j,n) = vsnon(i,j,n,iblk) - endif - enddo - enddo - enddo - call accum_hist_field(n_siitdsnthick-n2D, iblk, ncat_hist, worka3(:,:,:), a3Dc) + call accum_hist_field(n_siitdsnthick-n2D, iblk, ncat_hist, vsnon(:,:,:,iblk), a3Dc) endif endif ! if (allocated(a3Dc)) @@ -3666,7 +3369,8 @@ subroutine accum_hist (dt) ravgct = c1/avgct(ns) !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block, & - !$OMP n,nn,ravgctz,ravgip,ravgipn) + !$OMP n,nn,ravgctz,ravgip,ravgip_init,ravgip_pond, & + !$OMP ravgip_ridge,ravgipn) do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -3677,129 +3381,214 @@ subroutine accum_hist (dt) ! Ice fraction really needs to be on one of the history ! streams, but in case it is not. - if (n_aice(ns) > 0) then - do j = jlo, jhi - do i = ilo, ihi - if (a2D(i,j,n_aice(ns),iblk) > puny) then - ravgip(i,j) = c1/(a2D(i,j,n_aice(ns),iblk)) - else - ravgip(i,j) = c0 - endif - enddo ! i - enddo ! j - endif - if (n_aicen(ns) > n2D) then - do k=1,ncat_hist - do j = jlo, jhi - do i = ilo, ihi - if (a3Dc(i,j,k,n_aicen(ns)-n2D,iblk) > puny) then - ravgipn(i,j,k) = c1/(a3Dc(i,j,k,n_aicen(ns)-n2D,iblk)) - else - ravgipn(i,j,k) = c0 - endif - enddo ! i - enddo ! j - enddo ! k + if (n_aice(ns) > 0 .and. any(avail_hist_fields(:)%avg_ice_present == 'final')) then + do j = jlo, jhi + do i = ilo, ihi + if (a2D(i,j,n_aice(ns),iblk) > puny) then + ravgip(i,j) = c1/(a2D(i,j,n_aice(ns),iblk)) + else + ravgip(i,j) = c0 + endif + enddo ! i + enddo ! j + elseif (f_aice(1:1) == 'x' .and. 
any(avail_hist_fields(:)%avg_ice_present == 'final')) then + call abort_ice(subname//' ERROR: f_aice must be defined', file=__FILE__, line=__LINE__) endif - - do n = 1, num_avail_hist_fields_2D - if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then - + if (n_aice_init(ns) > 0 .and. any(avail_hist_fields(:)%avg_ice_present == 'init')) then do j = jlo, jhi do i = ilo, ihi - if (.not. tmask(i,j,iblk)) then ! mask out land points - a2D(i,j,n,iblk) = spval_dbl - else ! convert units - a2D(i,j,n,iblk) = avail_hist_fields(n)%cona*a2D(i,j,n,iblk) & - * ravgct + avail_hist_fields(n)%conb + if (a2D(i,j,n_aice_init(ns),iblk) > puny) then + ravgip_init(i,j) = c1/(a2D(i,j,n_aice_init(ns),iblk)) + else + ravgip_init(i,j) = c0 endif enddo ! i enddo ! j - - ! Only average for timesteps when ice present - if (avail_hist_fields(n)%avg_ice_present) then - do j = jlo, jhi - do i = ilo, ihi - if (tmask(i,j,iblk)) then - a2D(i,j,n,iblk) = & - a2D(i,j,n,iblk)*avgct(ns)*ravgip(i,j) - endif - ! Mask ice-free points - if (avail_hist_fields(n)%mask_ice_free_points) then - if (ravgip(i,j) == c0) a2D(i,j,n,iblk) = spval_dbl - endif - enddo ! i - enddo ! j - endif - - ! CMIP albedo: also mask points below horizon - if (index(avail_hist_fields(n)%vname,'sialb') /= 0) then - do j = jlo, jhi - do i = ilo, ihi - if (albcnt(i,j,iblk,ns) <= puny) a2D(i,j,n,iblk) = spval_dbl - enddo ! i - enddo ! j - endif - - ! back out albedo/zenith angle dependence - if (avail_hist_fields(n)%vname(1:6) == 'albice') then + elseif (f_aice_init(1:1) == 'x' .and. any(avail_hist_fields(:)%avg_ice_present == 'init')) then + call abort_ice(subname//' ERROR: f_aice_init must be defined', file=__FILE__, line=__LINE__) + endif + if (n_apond_ai(ns) > 0 .and. any(avail_hist_fields(:)%avg_ice_present == 'pond')) then do j = jlo, jhi do i = ilo, ihi - if (tmask(i,j,iblk)) then - ravgctz = c0 - if (albcnt(i,j,iblk,ns) > puny) & - ravgctz = c1/albcnt(i,j,iblk,ns) - if (f_albice (1:1) /= 'x' .and. n_albice(ns) /= 0) & - a2D(i,j,n_albice(ns),iblk) = & - a2D(i,j,n_albice(ns),iblk)*avgct(ns)*ravgctz - if (f_albsno (1:1) /= 'x' .and. n_albsno(ns) /= 0) & - a2D(i,j,n_albsno(ns),iblk) = & - a2D(i,j,n_albsno(ns),iblk)*avgct(ns)*ravgctz - if (f_albpnd (1:1) /= 'x' .and. n_albpnd(ns) /= 0) & - a2D(i,j,n_albpnd(ns),iblk) = & - a2D(i,j,n_albpnd(ns),iblk)*avgct(ns)*ravgctz + if (a2D(i,j,n_apond_ai(ns),iblk) > puny) then + ravgip_pond(i,j) = c1/a2D(i,j,n_apond_ai(ns),iblk) + else + ravgip_pond(i,j) = c0 endif enddo ! i enddo ! j - endif - if (avail_hist_fields(n)%vname(1:6) == 'albsni') then + elseif (f_apond_ai(1:1) == 'x' .and. any(avail_hist_fields(:)%avg_ice_present == 'pond')) then + call abort_ice(subname//' ERROR: f_apond_ai must be defined', file=__FILE__, line=__LINE__) + endif + if (n_ardg(ns) > 0 .and. any(avail_hist_fields(:)%avg_ice_present == 'ridge')) then do j = jlo, jhi do i = ilo, ihi - if (tmask(i,j,iblk)) then - ravgctz = c0 - if (albcnt(i,j,iblk,ns) > puny) & - ravgctz = c1/albcnt(i,j,iblk,ns) - if (f_albsni (1:1) /= 'x' .and. n_albsni(ns) /= 0) & - a2D(i,j,n_albsni(ns),iblk) = & - a2D(i,j,n_albsni(ns),iblk)*avgct(ns)*ravgctz + if (a2D(i,j,n_ardg(ns),iblk) > puny) then + ravgip_ridge(i,j) = c1/a2D(i,j,n_ardg(ns),iblk) + else + ravgip_ridge(i,j) = c0 endif enddo ! i enddo ! j - endif - if (avail_hist_fields(n)%vname(1:8) == 'alvdr_ai') then + elseif (f_ardg(1:1) == 'x' .and. 
any(avail_hist_fields(:)%avg_ice_present == 'ridge')) then + call abort_ice(subname//' ERROR: f_ardg must be defined', file=__FILE__, line=__LINE__) + endif + if (n_aicen(ns) > n2D .and. any(avail_hist_fields(:)%avg_ice_present == 'final')) then + do k=1,ncat_hist do j = jlo, jhi do i = ilo, ihi - if (tmask(i,j,iblk)) then - ravgctz = c0 - if (albcnt(i,j,iblk,ns) > puny) & - ravgctz = c1/albcnt(i,j,iblk,ns) - if (f_alvdr_ai (1:1) /= 'x' .and. n_alvdr_ai(ns) /= 0) & - a2D(i,j,n_alvdr_ai(ns),iblk) = & - a2D(i,j,n_alvdr_ai(ns),iblk)*avgct(ns)*ravgctz - if (f_alvdf_ai (1:1) /= 'x' .and. n_alvdf_ai(ns) /= 0) & - a2D(i,j,n_alvdf_ai(ns),iblk) = & - a2D(i,j,n_alvdf_ai(ns),iblk)*avgct(ns)*ravgctz - if (f_alidr_ai (1:1) /= 'x' .and. n_alidr_ai(ns) /= 0) & - a2D(i,j,n_alidr_ai(ns),iblk) = & - a2D(i,j,n_alidr_ai(ns),iblk)*avgct(ns)*ravgctz - if (f_alidf_ai (1:1) /= 'x' .and. n_alidf_ai(ns) /= 0) & - a2D(i,j,n_alidf_ai(ns),iblk) = & - a2D(i,j,n_alidf_ai(ns),iblk)*avgct(ns)*ravgctz + if (a3Dc(i,j,k,n_aicen(ns)-n2D,iblk) > puny) then + ravgipn(i,j,k) = c1/(a3Dc(i,j,k,n_aicen(ns)-n2D,iblk)) + else + ravgipn(i,j,k) = c0 endif enddo ! i enddo ! j + enddo ! k + elseif (f_aicen(1:1) == 'x' .and. any(avail_hist_fields(:)%avg_ice_present == 'final')) then + call abort_ice(subname//' ERROR: f_aicen must be defined', file=__FILE__, line=__LINE__) + endif + + do n = 1, num_avail_hist_fields_2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + + ! Only average when/where ice present + if (trim(avail_hist_fields(n)%avg_ice_present) == 'final') then + do j = jlo, jhi + do i = ilo, ihi + if (.not. tmask(i,j,iblk)) then + a2D(i,j,n,iblk) = spval_dbl + else ! convert units + a2D(i,j,n,iblk) = avail_hist_fields(n)%cona*a2D(i,j,n,iblk) & + * ravgip(i,j) + avail_hist_fields(n)%conb + endif + enddo ! i + enddo ! j + elseif (trim(avail_hist_fields(n)%avg_ice_present) == 'init') then + do j = jlo, jhi + do i = ilo, ihi + if (.not. tmask(i,j,iblk)) then + a2D(i,j,n,iblk) = spval_dbl + else ! convert units + a2D(i,j,n,iblk) = avail_hist_fields(n)%cona*a2D(i,j,n,iblk) & + * ravgip_init(i,j) + avail_hist_fields(n)%conb + endif + enddo ! i + enddo ! j + elseif (trim(avail_hist_fields(n)%avg_ice_present) == 'pond') then + do j = jlo, jhi + do i = ilo, ihi + if (.not. tmask(i,j,iblk)) then + a2D(i,j,n,iblk) = spval_dbl + else ! convert units + a2D(i,j,n,iblk) = avail_hist_fields(n)%cona*a2D(i,j,n,iblk) & + * ravgip_pond(i,j) + avail_hist_fields(n)%conb + endif + enddo ! i + enddo ! j + elseif (trim(avail_hist_fields(n)%avg_ice_present) == 'ridge') then + do j = jlo, jhi + do i = ilo, ihi + if (.not. tmask(i,j,iblk)) then + a2D(i,j,n,iblk) = spval_dbl + else ! convert units + a2D(i,j,n,iblk) = avail_hist_fields(n)%cona*a2D(i,j,n,iblk) & + * ravgip_ridge(i,j) + avail_hist_fields(n)%conb + endif + enddo ! i + enddo ! j + else + do j = jlo, jhi + do i = ilo, ihi + if (.not. tmask(i,j,iblk)) then ! mask out land points + a2D(i,j,n,iblk) = spval_dbl + else ! convert units + a2D(i,j,n,iblk) = avail_hist_fields(n)%cona*a2D(i,j,n,iblk) & + * ravgct + avail_hist_fields(n)%conb + endif + enddo ! i + enddo ! j endif + ! Mask ice-free points + if (avail_hist_fields(n)%mask_ice_free_points .and. & + trim(avail_hist_fields(n)%avg_ice_present) == 'final') then + do j = jlo, jhi + do i = ilo, ihi + if (ravgip(i,j) == c0) a2D(i,j,n,iblk) = spval_dbl + enddo ! i + enddo ! j + endif + + if (avail_hist_fields(n)%mask_ice_free_points .and. 
& + trim(avail_hist_fields(n)%avg_ice_present) == 'init') then + do j = jlo, jhi + do i = ilo, ihi + if (ravgip_init(i,j) == c0) a2D(i,j,n,iblk) = spval_dbl + enddo ! i + enddo ! j + endif + endif + + ! back out albedo/zenith angle dependence +! if (avail_hist_fields(n)%vname(1:6) == 'albice') then +! do j = jlo, jhi +! do i = ilo, ihi +! if (tmask(i,j,iblk)) then +! ravgctz = c0 +! if (albcnt(i,j,iblk,ns) > puny) & +! ravgctz = c1/albcnt(i,j,iblk,ns) +! if (f_albice (1:1) /= 'x' .and. n_albice(ns) /= 0) & +! a2D(i,j,n_albice(ns),iblk) = & +! a2D(i,j,n_albice(ns),iblk)*avgct(ns)*ravgctz +! if (f_albsno (1:1) /= 'x' .and. n_albsno(ns) /= 0) & +! a2D(i,j,n_albsno(ns),iblk) = & +! a2D(i,j,n_albsno(ns),iblk)*avgct(ns)*ravgctz +! if (f_albpnd (1:1) /= 'x' .and. n_albpnd(ns) /= 0) & +! a2D(i,j,n_albpnd(ns),iblk) = & +! a2D(i,j,n_albpnd(ns),iblk)*avgct(ns)*ravgctz +! endif +! enddo ! i +! enddo ! j +! endif +! if (avail_hist_fields(n)%vname(1:6) == 'albsni') then +! do j = jlo, jhi +! do i = ilo, ihi +! if (tmask(i,j,iblk)) then +! ravgctz = c0 +! if (albcnt(i,j,iblk,ns) > puny) & +! ravgctz = c1/albcnt(i,j,iblk,ns) +! if (f_albsni (1:1) /= 'x' .and. n_albsni(ns) /= 0) & +! a2D(i,j,n_albsni(ns),iblk) = & +! a2D(i,j,n_albsni(ns),iblk)*avgct(ns)*ravgctz +! endif +! enddo ! i +! enddo ! j +! endif +! if (avail_hist_fields(n)%vname(1:8) == 'alvdr_ai') then +! do j = jlo, jhi +! do i = ilo, ihi +! if (tmask(i,j,iblk)) then +! ravgctz = c0 +! if (albcnt(i,j,iblk,ns) > puny) & +! ravgctz = c1/albcnt(i,j,iblk,ns) +! if (f_alvdr_ai (1:1) /= 'x' .and. n_alvdr_ai(ns) /= 0) & +! a2D(i,j,n_alvdr_ai(ns),iblk) = & +! a2D(i,j,n_alvdr_ai(ns),iblk)*avgct(ns)*ravgctz +! if (f_alvdf_ai (1:1) /= 'x' .and. n_alvdf_ai(ns) /= 0) & +! a2D(i,j,n_alvdf_ai(ns),iblk) = & +! a2D(i,j,n_alvdf_ai(ns),iblk)*avgct(ns)*ravgctz +! if (f_alidr_ai (1:1) /= 'x' .and. n_alidr_ai(ns) /= 0) & +! a2D(i,j,n_alidr_ai(ns),iblk) = & +! a2D(i,j,n_alidr_ai(ns),iblk)*avgct(ns)*ravgctz +! if (f_alidf_ai (1:1) /= 'x' .and. n_alidf_ai(ns) /= 0) & +! a2D(i,j,n_alidf_ai(ns),iblk) = & +! a2D(i,j,n_alidf_ai(ns),iblk)*avgct(ns)*ravgctz +! endif +! enddo ! i +! enddo ! j +! endif + ! snwcnt averaging is not working correctly ! for now, these history fields will have zeroes includes in the averages ! if (avail_hist_fields(n)%vname(1:8) == 'rhos_cmp') then @@ -3831,37 +3620,39 @@ subroutine accum_hist (dt) ! enddo ! j ! endif - endif ! avail_hist_fields(n)%vhistfreq == histfreq(ns) enddo ! n do n = 1, num_avail_hist_fields_3Dc nn = n2D + n if (avail_hist_fields(nn)%vhistfreq == histfreq(ns)) then - do k = 1, ncat_hist - do j = jlo, jhi - do i = ilo, ihi - if (.not. tmask(i,j,iblk)) then ! mask out land points - a3Dc(i,j,k,n,iblk) = spval_dbl - else ! convert units - a3Dc(i,j,k,n,iblk) = avail_hist_fields(nn)%cona*a3Dc(i,j,k,n,iblk) & - * ravgct + avail_hist_fields(nn)%conb + if (trim(avail_hist_fields(nn)%avg_ice_present) /= 'none') then + do k = 1, ncat_hist + do j = jlo, jhi + do i = ilo, ihi + if (.not. tmask(i,j,iblk)) then ! mask out land points + a3Dc(i,j,k,n,iblk) = spval_dbl + else ! convert units + a3Dc(i,j,k,n,iblk) = avail_hist_fields(nn)%cona*a3Dc(i,j,k,n,iblk) & + * ravgipn(i,j,k) + avail_hist_fields(nn)%conb + endif + enddo ! i + enddo ! j + enddo ! k + else + do k = 1, ncat_hist + do j = jlo, jhi + do i = ilo, ihi + if (.not. tmask(i,j,iblk)) then ! mask out land points + a3Dc(i,j,k,n,iblk) = spval_dbl + else ! 
convert units + a3Dc(i,j,k,n,iblk) = avail_hist_fields(nn)%cona*a3Dc(i,j,k,n,iblk) & + * ravgct + avail_hist_fields(nn)%conb + endif + enddo ! i + enddo ! j + enddo ! k endif - enddo ! i - enddo ! j - enddo ! k - if (avail_hist_fields(nn)%avg_ice_present) then - do k = 1, ncat_hist - do j = jlo, jhi - do i = ilo, ihi - if (tmask(i,j,iblk)) then - a3Dc(i,j,k,n,iblk) = & - a3Dc(i,j,k,n,iblk)*avgct(ns)*ravgipn(i,j,k) - endif - enddo ! i - enddo ! j - enddo ! k - endif endif @@ -3885,6 +3676,7 @@ subroutine accum_hist (dt) enddo ! k endif enddo ! n + do n = 1, num_avail_hist_fields_3Db nn = n3Dzcum + n if (avail_hist_fields(nn)%vhistfreq == histfreq(ns)) then @@ -4034,8 +3826,10 @@ subroutine accum_hist (dt) if (n_sig1 (ns) /= 0) a2D(i,j,n_sig1(ns), iblk) = spval_dbl if (n_sig2 (ns) /= 0) a2D(i,j,n_sig2(ns), iblk) = spval_dbl if (n_sigP (ns) /= 0) a2D(i,j,n_sigP(ns), iblk) = spval_dbl - if (n_sistreave(ns) /= 0) a2D(i,j,n_sistreave(ns),iblk) = spval_dbl - if (n_sistremax(ns) /= 0) a2D(i,j,n_sistremax(ns),iblk) = spval_dbl + if (n_sistressave(ns) /= 0) a2D(i,j,n_sistressave(ns),iblk) = spval_dbl + if (n_sistressmax(ns) /= 0) a2D(i,j,n_sistressmax(ns),iblk) = spval_dbl + if (n_sidivvel(ns) /= 0) a2D(i,j,n_sidivvel(ns),iblk) = spval_dbl + if (n_sishearvel(ns) /= 0) a2D(i,j,n_sishearvel(ns),iblk) = spval_dbl if (n_mlt_onset(ns) /= 0) a2D(i,j,n_mlt_onset(ns),iblk) = spval_dbl if (n_frz_onset(ns) /= 0) a2D(i,j,n_frz_onset(ns),iblk) = spval_dbl if (n_hisnap (ns) /= 0) a2D(i,j,n_hisnap(ns), iblk) = spval_dbl @@ -4068,10 +3862,14 @@ subroutine accum_hist (dt) sig2 (i,j,iblk)*avail_hist_fields(n_sig2(ns))%cona if (n_sigP (ns) /= 0) a2D(i,j,n_sigP(ns),iblk) = & sigP (i,j,iblk)*avail_hist_fields(n_sigP(ns))%cona - if (n_sistreave(ns) /= 0) a2D(i,j,n_sistreave(ns),iblk) = & - p5*(sig1(i,j,iblk)+sig2(i,j,iblk))*avail_hist_fields(n_sistreave(ns))%cona - if (n_sistremax(ns) /= 0) a2D(i,j,n_sistremax(ns),iblk) = & - p5*(sig1(i,j,iblk)-sig2(i,j,iblk))*avail_hist_fields(n_sistremax(ns))%cona + if (n_sistressave(ns) /= 0) a2D(i,j,n_sistressave(ns),iblk) = & + p5*(sig1(i,j,iblk)+sig2(i,j,iblk))*avail_hist_fields(n_sistressave(ns))%cona + if (n_sistressmax(ns) /= 0) a2D(i,j,n_sistressmax(ns),iblk) = & + p5*(sig1(i,j,iblk)-sig2(i,j,iblk))*avail_hist_fields(n_sistressmax(ns))%cona + if (n_sidivvel(ns) /= 0) a2D(i,j,n_sidivvel(ns),iblk) = & + divu (i,j,iblk)*avail_hist_fields(n_sidivvel(ns))%cona + if (n_sishearvel(ns) /= 0) a2D(i,j,n_sishearvel(ns),iblk) = & + shear (i,j,iblk)*avail_hist_fields(n_sishearvel(ns))%cona if (n_mlt_onset(ns) /= 0) a2D(i,j,n_mlt_onset(ns),iblk) = & mlt_onset(i,j,iblk) if (n_frz_onset(ns) /= 0) a2D(i,j,n_frz_onset(ns),iblk) = & @@ -4239,6 +4037,28 @@ subroutine accum_hist (dt) enddo ! iblk !$OMP END PARALLEL DO + !--------------------------------------------------------------- + ! write history restarts + !--------------------------------------------------------------- + + if (write_histrest .and. write_restart == 1) then + ! turn on histrest features + write_histrest_now = .true. + + ! write history restarts + call ice_timer_start(timer_readwrite) ! reading/writing + do ns = 1, nstrm + ! only write avg history file when something has accumulated + if (hist_avg(ns) .and. avgct(ns)>0) then + call ice_write_hist (ns) + endif + enddo + call ice_timer_stop(timer_readwrite) ! reading/writing + + ! turn off histrest features + write_histrest_now = .false. 
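! Sketch of the averaging rule used above: with avg_ice_present set to
! 'init', 'final', 'pond' or 'ridge', the accumulated field is normalized by
! the matching accumulated area stream (aice_init, aice, apond_ai, ardg or
! aicen) instead of by the number of accumulation steps, i.e.
!    xbar = sum_t[ a(t)*x(t) ] / sum_t[ a(t) ]
! rather than sum_t[ a(t)*x(t) ] / nsteps.  The standalone program below is
! a minimal illustration with assumed toy values (the puny threshold and the
! data are placeholders, not the CICE parameters or model fields):
      program weighted_avg_sketch
         implicit none
         integer, parameter :: dbl_kind = selected_real_kind(13)
         real (kind=dbl_kind), parameter :: puny = 1.0e-11_dbl_kind
         real (kind=dbl_kind), dimension(4) :: a, ax
         real (kind=dbl_kind) :: ravgct, ravgip, xbar_plain, xbar_weighted
         a  = (/ 0.9_dbl_kind, 0.5_dbl_kind, 0.0_dbl_kind, 0.2_dbl_kind /) ! weight at each step (e.g. ice area)
         ax = (/ 1.8_dbl_kind, 0.8_dbl_kind, 0.0_dbl_kind, 0.3_dbl_kind /) ! accumulated a(t)*x(t) increments
         ravgct = 1.0_dbl_kind / real(size(a),kind=dbl_kind)  ! 1/nsteps, as in the unweighted branch
         if (sum(a) > puny) then
            ravgip = 1.0_dbl_kind / sum(a)                    ! 1/accumulated weight
         else
            ravgip = 0.0_dbl_kind                             ! such points are masked with spval_dbl
         endif
         xbar_plain    = sum(ax) * ravgct   ! average over all timesteps
         xbar_weighted = sum(ax) * ravgip   ! average only when/where the weight is present
         write(*,*) 'plain average    = ', xbar_plain
         write(*,*) 'weighted average = ', xbar_weighted
      end program weighted_avg_sketch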
+ endif + end subroutine accum_hist !======================================================================= diff --git a/cicecore/cicedyn/analysis/ice_history_bgc.F90 b/cicecore/cicedyn/analysis/ice_history_bgc.F90 index 7c87c1f70..85b8c4821 100644 --- a/cicecore/cicedyn/analysis/ice_history_bgc.F90 +++ b/cicecore/cicedyn/analysis/ice_history_bgc.F90 @@ -3398,7 +3398,7 @@ subroutine init_history_bgc use ice_arrays_column, only: PP_net, grow_net, hbri, & ice_bio_net, snow_bio_net, fbio_snoice, fbio_atmice, & - zfswin + upNO, upNH, bTiz, bphi, zfswin, iDi, iki use ice_flux_bgc, only: flux_bio, flux_bio_ai, fnit, fsil, & famm, fdmsp, fdms, fhum, fdust, falgalN, fdoc, fdic, & fdon, ffep, ffed @@ -3407,6 +3407,8 @@ subroutine init_history_bgc PP_net (:,:,:) = c0 grow_net (:,:,:) = c0 + upNO (:,:,:) = c0 + upNH (:,:,:) = c0 hbri (:,:,:) = c0 flux_bio (:,:,:,:) = c0 flux_bio_ai (:,:,:,:) = c0 @@ -3414,7 +3416,11 @@ subroutine init_history_bgc snow_bio_net(:,:,:,:) = c0 fbio_snoice (:,:,:,:) = c0 fbio_atmice (:,:,:,:) = c0 + bTiz (:,:,:,:,:) = c0 + bphi (:,:,:,:,:) = c0 zfswin (:,:,:,:,:) = c0 + iDi (:,:,:,:,:) = c0 + iki (:,:,:,:,:) = c0 fnit (:,:,:) = c0 fsil (:,:,:) = c0 famm (:,:,:) = c0 diff --git a/cicecore/cicedyn/analysis/ice_history_mechred.F90 b/cicecore/cicedyn/analysis/ice_history_mechred.F90 index e0d15fcf2..318132a92 100644 --- a/cicecore/cicedyn/analysis/ice_history_mechred.F90 +++ b/cicecore/cicedyn/analysis/ice_history_mechred.F90 @@ -37,6 +37,11 @@ module ice_history_mechred f_aredistn = 'x', f_vredistn = 'x', & f_araftn = 'x', f_vraftn = 'x' + ! CMIP ridging variables. + + character (len=max_nstrm), public :: & + f_sirdgconc = 'm', f_sirdgthick = 'm' + !--------------------------------------------------------------- ! namelist variables !--------------------------------------------------------------- @@ -51,13 +56,14 @@ module ice_history_mechred f_dvirdgndt, & f_aparticn, f_krdgn , & f_aredistn, f_vredistn , & - f_araftn, f_vraftn + f_araftn, f_vraftn , & + f_sirdgconc, f_sirdgthick !--------------------------------------------------------------- ! field indices !--------------------------------------------------------------- - integer (kind=int_kind), dimension(max_nstrm) :: & + integer (kind=int_kind), dimension(max_nstrm), public :: & n_ardg , n_vrdg , & n_alvl , n_vlvl , & n_dardg1dt , n_dardg2dt , & @@ -69,6 +75,9 @@ module ice_history_mechred n_aredistn , n_vredistn , & n_araftn , n_vraftn + integer (kind=int_kind), dimension(max_nstrm) :: & + n_sirdgconc, n_sirdgthick + !======================================================================= contains @@ -152,6 +161,8 @@ subroutine init_hist_mechred_2D f_vrdgn = 'x' f_araftn = 'x' f_vraftn = 'x' + f_sirdgconc = 'x' + f_sirdgthick = 'x' endif if (f_araftn /= 'x' .or. f_vraftn /= 'x') f_ardgn = f_araftn @@ -174,6 +185,8 @@ subroutine init_hist_mechred_2D call broadcast_scalar (f_vredistn, master_task) call broadcast_scalar (f_araftn, master_task) call broadcast_scalar (f_vraftn, master_task) + call broadcast_scalar (f_sirdgconc, master_task) + call broadcast_scalar (f_sirdgthick, master_task) ! 
2D variables @@ -182,49 +195,63 @@ subroutine init_hist_mechred_2D if (f_alvl(1:1) /= 'x') & call define_hist_field(n_alvl,"alvl","1",tstr2D, tcstr, & - "level ice area fraction", & - "none", c1, c0, & + "level ice area fraction", & + "none", c1, c0, & ns, f_alvl) if (f_vlvl(1:1) /= 'x') & call define_hist_field(n_vlvl,"vlvl","m",tstr2D, tcstr, & - "level ice volume", & - "grid cell mean level ice thickness", c1, c0, & + "level ice volume", & + "grid cell mean level ice thickness", c1, c0, & ns, f_vlvl) if (f_ardg(1:1) /= 'x') & call define_hist_field(n_ardg,"ardg","1",tstr2D, tcstr, & - "ridged ice area fraction", & - "none", c1, c0, & + "ridged ice area fraction", & + "none", c1, c0, & ns, f_ardg) if (f_vrdg(1:1) /= 'x') & call define_hist_field(n_vrdg,"vrdg","m",tstr2D, tcstr, & - "ridged ice volume", & - "grid cell mean level ridged thickness", c1, c0, & + "ridged ice volume", & + "grid cell mean level ridged thickness", c1, c0, & ns, f_vrdg) if (f_dardg1dt(1:1) /= 'x') & call define_hist_field(n_dardg1dt,"dardg1dt","%/day",tstr2D, tcstr, & - "ice area ridging rate", & - "none", secday*c100, c0, & + "ice area ridging rate", & + "none", secday*c100, c0, & ns, f_dardg1dt) if (f_dardg2dt(1:1) /= 'x') & call define_hist_field(n_dardg2dt,"dardg2dt","%/day",tstr2D, tcstr, & - "ridge area formation rate", & - "none", secday*c100, c0, & + "ridge area formation rate", & + "none", secday*c100, c0, & ns, f_dardg2dt) if (f_dvirdgdt(1:1) /= 'x') & call define_hist_field(n_dvirdgdt,"dvirdgdt","cm/day",tstr2D, tcstr, & - "ice volume ridging rate", & - "none", mps_to_cmpdy, c0, & + "ice volume ridging rate", & + "none", mps_to_cmpdy, c0, & ns, f_dvirdgdt) if (f_opening(1:1) /= 'x') & call define_hist_field(n_opening,"opening","%/day",tstr2D, tcstr, & - "lead area opening rate", & - "none", secday*c100, c0, & + "lead area opening rate", & + "none", secday*c100, c0, & ns, f_opening) + if (f_sirdgconc(1:1) /= 'x') & + call define_hist_field(n_sirdgconc,"sirdgconc","%",tstr2D, tcstr, & + "percentage of ridged sea ice", & + "area percentage of sea ice surface that is ridged sea ice", & + c100, c0, & + ns, f_sirdgconc, avg_ice_present='none', mask_ice_free_points=.false.) + + if (f_sirdgthick(1:1) /= 'x') & + call define_hist_field(n_sirdgthick,"sirdgthick","m",tstr2D, tcstr, & + "ridged ice thickness", & + "total volume of ridged sea ice divided by area of ridges", & + c1, c0, & + ns, f_sirdgthick, avg_ice_present='ridge', mask_ice_free_points=.true.) + endif ! histfreq(ns) /= 'x' enddo ! nstreams @@ -331,6 +358,7 @@ end subroutine init_hist_mechred_3Dc subroutine accum_hist_mechred (iblk) + use ice_blocks, only: nx_block, ny_block use ice_history_shared, only: n2D, a2D, a3Dc, ncat_hist, & accum_hist_field use ice_state, only: aice, vice, trcr, aicen, vicen, trcrn @@ -380,6 +408,15 @@ subroutine accum_hist_mechred (iblk) if (f_opening(1:1) /= 'x') & call accum_hist_field(n_opening, iblk, opening(:,:,iblk), a2D) + if (f_sirdgconc(1:1)/= 'x') & + call accum_hist_field(n_sirdgconc, iblk, & + aice(:,:,iblk) * (c1 - trcr(:,:,nt_alvl,iblk)), a2D) + + if (f_sirdgthick(1:1)/= 'x') then + call accum_hist_field(n_sirdgthick, iblk, & + vice(:,:,iblk) * (c1 - trcr(:,:,nt_vlvl,iblk)), a2D) + endif + endif ! allocated(a2D) ! 
3D category fields diff --git a/cicecore/cicedyn/analysis/ice_history_pond.F90 b/cicecore/cicedyn/analysis/ice_history_pond.F90 index 88b5fa899..17d1c50b3 100644 --- a/cicecore/cicedyn/analysis/ice_history_pond.F90 +++ b/cicecore/cicedyn/analysis/ice_history_pond.F90 @@ -8,7 +8,7 @@ module ice_history_pond use ice_kinds_mod use ice_domain_size, only: max_nstrm - use ice_constants, only: c0, c1 + use ice_constants, only: c0, c1, c100 use ice_fileunits, only: nu_nml, nml_filename, & get_fileunit, release_fileunit use ice_fileunits, only: nu_diag @@ -40,6 +40,11 @@ module ice_history_pond f_dpnd_freebdn= 'x', f_dpnd_initialn= 'x', & f_dpnd_dlidn = 'x' + ! CMIP related pond variables + character (len=max_nstrm), public :: & + f_simpconc = 'm', f_simpeffconc = 'm', & + f_simpthick = 'm', f_simprefrozen = 'm' + !--------------------------------------------------------------- ! namelist variables !--------------------------------------------------------------- @@ -62,13 +67,17 @@ module ice_history_pond f_dpnd_exponn , & f_dpnd_freebdn , & f_dpnd_initialn , & - f_dpnd_dlidn + f_dpnd_dlidn , & + f_simpconc , & + f_simpeffconc , & + f_simpthick , & + f_simprefrozen !--------------------------------------------------------------- ! field indices !--------------------------------------------------------------- - integer (kind=int_kind), dimension(max_nstrm) :: & + integer (kind=int_kind), dimension(max_nstrm), public :: & n_apondn , n_apeffn , & n_hpondn , & n_apond , n_apond_ai, & @@ -83,6 +92,11 @@ module ice_history_pond n_dpnd_freebdn, n_dpnd_initialn, & n_dpnd_dlidn + ! CMIP related melt pond variables + integer (kind=int_kind), dimension(max_nstrm) :: & + n_simpconc , n_simpeffconc, & + n_simpthick , n_simprefrozen + !======================================================================= contains @@ -177,6 +191,10 @@ subroutine init_hist_pond_2D f_dpnd_freebdn = 'x' f_dpnd_initialn= 'x' f_dpnd_dlidn = 'x' + f_simpconc = 'x' + f_simpeffconc = 'x' + f_simpthick = 'x' + f_simprefrozen = 'x' endif if (tr_pond_topo) then @@ -203,6 +221,10 @@ subroutine init_hist_pond_2D call broadcast_scalar (f_hpond, master_task) call broadcast_scalar (f_ipond, master_task) call broadcast_scalar (f_apeff, master_task) + call broadcast_scalar (f_simpconc, master_task) + call broadcast_scalar (f_simpeffconc, master_task) + call broadcast_scalar (f_simpthick, master_task) + call broadcast_scalar (f_simprefrozen, master_task) call broadcast_scalar (f_apond_ai, master_task) call broadcast_scalar (f_hpond_ai, master_task) call broadcast_scalar (f_ipond_ai, master_task) @@ -234,8 +256,8 @@ subroutine init_hist_pond_2D if (f_apond_ai(1:1) /= 'x') & call define_hist_field(n_apond_ai,"apond_ai","1",tstr2D, tcstr, & - "melt pond fraction of grid cell", & - "weighted by ice area", c1, c0, & + "melt pond fraction of grid cell", & + "weighted by ice area", c1, c0, & ns, f_apond_ai) if (f_hpond(1:1) /= 'x') & @@ -246,8 +268,8 @@ subroutine init_hist_pond_2D if (f_hpond_ai(1:1) /= 'x') & call define_hist_field(n_hpond_ai,"hpond_ai","m",tstr2D, tcstr, & - "mean melt pond depth over grid cell", & - "weighted by ice area", c1, c0, & + "mean melt pond depth over grid cell", & + "weighted by ice area", c1, c0, & ns, f_hpond) if (f_ipond(1:1) /= 'x') & @@ -258,64 +280,93 @@ subroutine init_hist_pond_2D if (f_ipond_ai(1:1) /= 'x') & call define_hist_field(n_ipond_ai,"ipond_ai","m",tstr2D, tcstr, & - "mean pond ice thickness over grid cell", & - "weighted by ice area", c1, c0, & + "mean pond ice thickness over grid cell", & + "weighted 
by ice area", c1, c0, & ns, f_ipond_ai) if (f_apeff(1:1) /= 'x') & call define_hist_field(n_apeff,"apeff","1",tstr2D, tcstr, & "radiation-effective pond area fraction of sea ice", & - "none", c1, c0, & + "none", c1, c0, & ns, f_apeff) if (f_apeff_ai(1:1) /= 'x') & call define_hist_field(n_apeff_ai,"apeff_ai","1",tstr2D, tcstr, & - "radiation-effective pond area fraction over grid cell", & - "weighted by ice area", c1, c0, & + "radiation-effective pond area fraction over grid cell", & + "weighted by ice area", c1, c0, & ns, f_apeff_ai) if (f_dpnd_flush(1:1) /= 'x') & call define_hist_field(n_dpnd_flush,"dpnd_flush","m/s",tstr2D, tcstr, & - "pond flushing rate due to ice permeability", & - "none", c1, c0, & + "pond flushing rate due to ice permeability", & + "none", c1, c0, & ns, f_dpnd_flush) if (f_dpnd_expon(1:1) /= 'x') & call define_hist_field(n_dpnd_expon,"dpnd_expon","m/s",tstr2D, tcstr, & - "exponential pond drainage rate", & - "none", c1, c0, & + "exponential pond drainage rate", & + "none", c1, c0, & ns, f_dpnd_expon) if (f_dpnd_freebd(1:1) /= 'x') & call define_hist_field(n_dpnd_freebd,"dpnd_freebd","m/s",tstr2D, tcstr, & - "pond drainage rate due to freeboard constraint", & - "none", c1, c0, & + "pond drainage rate due to freeboard constraint", & + "none", c1, c0, & ns, f_dpnd_freebd) if (f_dpnd_initial(1:1) /= 'x') & call define_hist_field(n_dpnd_initial,"dpnd_initial","m/s",tstr2D, tcstr, & - "runoff rate due to rfrac", & - "none", c1, c0, & + "runoff rate due to rfrac", & + "none", c1, c0, & ns, f_dpnd_initial) if (f_dpnd_dlid(1:1) /= 'x') & call define_hist_field(n_dpnd_dlid,"dpnd_dlid","m/s",tstr2D, tcstr, & - "pond loss / gain to ice lid freezing / melting", & - "none", c1, c0, & + "pond loss / gain to ice lid freezing / melting", & + "none", c1, c0, & ns, f_dpnd_dlid) if (f_dpnd_melt(1:1) /= 'x') & call define_hist_field(n_dpnd_melt,"dpnd_melt","m/s",tstr2D, tcstr, & - "pond drainage due to ice melting", & - "none", c1, c0, & + "pond drainage due to ice melting", & + "none", c1, c0, & ns, f_dpnd_melt) if (f_dpnd_ridge(1:1) /= 'x') & call define_hist_field(n_dpnd_ridge,"dpnd_ridge","m",tstr2D, tcstr, & - "pond drainage due to ridging", & - "none", c1, c0, & + "pond drainage due to ridging", & + "none", c1, c0, & ns, f_dpnd_ridge) + ! CMIP melt pond variables + if (f_simpconc(1:1) /= 'x') & + call define_hist_field(n_simpconc,"simpconc","%",tstr2D, tcstr, & + "percentage of sea ice covered by melt ponds", & + "area percentage of sea-ice surface that is covered by melt ponds", & + c100, c0, & + ns, f_simpconc, avg_ice_present='none', mask_ice_free_points=.false.) + + if (f_simpeffconc(1:1) /= 'x') & + call define_hist_field(n_simpeffconc,"simpeffconc","%",tstr2D, tcstr, & + "percentage of sea ice covered by effective melt ponds", & + "area percentage of sea-ice surface that is covered by open melt ponds", & + c100, c0, & + ns, f_simpeffconc, avg_ice_present='none', mask_ice_free_points=.false.) + + if (f_simprefrozen(1:1) /= 'x') & + call define_hist_field(n_simprefrozen,"simprefrozen","m",tstr2D, tcstr, & + "thickness of refrozen ice on melt ponds", & + "volume of refrozen ice on melt ponds divided by melt pond covered area", & + c1, c0, & + ns, f_simprefrozen, avg_ice_present='pond', mask_ice_free_points=.true.) 
+ + if (f_simpthick(1:1) /= 'x') & + call define_hist_field(n_simpthick,"simpthick","m",tstr2D, tcstr, & + "melt pond depth", & + "average depth of melt ponds on sea ice, that is melt pond volume divided by melt pond area", & + c1, c0, & + ns, f_simpthick, avg_ice_present='pond', mask_ice_free_points=.true.) + endif ! histfreq(ns) /= 'x' enddo ! nstreams @@ -413,7 +464,7 @@ subroutine accum_hist_pond (iblk) use ice_flux, only: dpnd_flushn, dpnd_exponn, dpnd_freebdn, dpnd_initialn use ice_history_shared, only: n2D, a2D, a3Dc, ncat_hist, & accum_hist_field - use ice_state, only: aice, trcr, trcrn + use ice_state, only: aice, aice_init, trcr, trcrn integer (kind=int_kind), intent(in) :: & iblk ! block index @@ -455,59 +506,38 @@ subroutine accum_hist_pond (iblk) if (allocated(a2D)) then + worka(:,:) = c0 if (tr_pond_lvl) then + worka(:,:) = trcr(:,:,nt_alvl,iblk) * trcr(:,:,nt_apnd,iblk) + elseif (tr_pond_sealvl .or. tr_pond_topo) then + worka(:,:) = trcr(:,:,nt_apnd,iblk) + endif if (f_apond(1:1)/= 'x') & - call accum_hist_field(n_apond, iblk, & - trcr(:,:,nt_alvl,iblk) * trcr(:,:,nt_apnd,iblk), a2D) + call accum_hist_field(n_apond, iblk, worka(:,:), a2D) if (f_apond_ai(1:1)/= 'x') & - call accum_hist_field(n_apond_ai, iblk, & - aice(:,:,iblk) & - * trcr(:,:,nt_alvl,iblk) * trcr(:,:,nt_apnd,iblk), a2D) + call accum_hist_field(n_apond_ai, iblk, aice_init(:,:,iblk)*worka(:,:), a2D) if (f_hpond(1:1)/= 'x') & - call accum_hist_field(n_hpond, iblk, & - trcr(:,:,nt_alvl,iblk) * trcr(:,:,nt_apnd,iblk) & - * trcr(:,:,nt_hpnd,iblk), a2D) + call accum_hist_field(n_hpond, iblk, worka(:,:)*trcr(:,:,nt_hpnd,iblk), a2D) if (f_hpond_ai(1:1)/= 'x') & - call accum_hist_field(n_hpond_ai, iblk, & - aice(:,:,iblk) & - * trcr(:,:,nt_alvl,iblk) * trcr(:,:,nt_apnd,iblk) & - * trcr(:,:,nt_hpnd,iblk), a2D) + call accum_hist_field(n_hpond_ai, iblk, aice_init(:,:,iblk)*worka(:,:)*trcr(:,:,nt_hpnd,iblk), a2D) if (f_ipond(1:1)/= 'x') & - call accum_hist_field(n_ipond, iblk, & - trcr(:,:,nt_alvl,iblk) * trcr(:,:,nt_apnd,iblk) & - * trcr(:,:,nt_ipnd,iblk), a2D) + call accum_hist_field(n_ipond, iblk, worka(:,:)*trcr(:,:,nt_ipnd,iblk), a2D) if (f_ipond_ai(1:1)/= 'x') & - call accum_hist_field(n_ipond_ai, iblk, & - aice(:,:,iblk) & - * trcr(:,:,nt_alvl,iblk) * trcr(:,:,nt_apnd,iblk) & - * trcr(:,:,nt_ipnd,iblk), a2D) + call accum_hist_field(n_ipond_ai, iblk, aice_init(:,:,iblk)*worka(:,:)*trcr(:,:,nt_ipnd,iblk), a2D) - elseif (tr_pond_topo .or. tr_pond_sealvl) then + ! CMIP pond related variables + if (f_simpeffconc (1:1) /= 'x') & + call accum_hist_field(n_simpeffconc, iblk, apeff_ai(:,:,iblk), a2D) - if (f_apond(1:1)/= 'x') & - call accum_hist_field(n_apond, iblk, & - trcr(:,:,nt_apnd,iblk), a2D) - if (f_apond_ai(1:1)/= 'x') & - call accum_hist_field(n_apond_ai, iblk, & - aice(:,:,iblk) * trcr(:,:,nt_apnd,iblk), a2D) - if (f_hpond(1:1)/= 'x') & - call accum_hist_field(n_hpond, iblk, & - trcr(:,:,nt_apnd,iblk) & - * trcr(:,:,nt_hpnd,iblk), a2D) - if (f_hpond_ai(1:1)/= 'x') & - call accum_hist_field(n_hpond_ai, iblk, & - aice(:,:,iblk) * trcr(:,:,nt_apnd,iblk) & - * trcr(:,:,nt_hpnd,iblk), a2D) - if (f_ipond(1:1)/= 'x') & - call accum_hist_field(n_ipond, iblk, & - trcr(:,:,nt_apnd,iblk) & - * trcr(:,:,nt_ipnd,iblk), a2D) - if (f_ipond_ai(1:1)/= 'x') & - call accum_hist_field(n_ipond_ai, iblk, & - aice(:,:,iblk) * trcr(:,:,nt_apnd,iblk) & - * trcr(:,:,nt_ipnd,iblk), a2D) - endif ! 
ponds + if (f_simpconc(1:1)/= 'x') & + call accum_hist_field(n_simpconc, iblk, worka(:,:), a2D) + + if (f_simpthick(1:1)/= 'x') & + call accum_hist_field(n_simpthick, iblk, aice_init(:,:,iblk)*worka(:,:)*trcr(:,:,nt_hpnd,iblk), a2D) + + if (f_simprefrozen(1:1)/= 'x') & + call accum_hist_field(n_simprefrozen, iblk, aice_init(:,:,iblk)*worka(:,:)*trcr(:,:,nt_ipnd,iblk), a2D) this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo diff --git a/cicecore/cicedyn/analysis/ice_history_shared.F90 b/cicecore/cicedyn/analysis/ice_history_shared.F90 index daa805794..678284f2a 100644 --- a/cicecore/cicedyn/analysis/ice_history_shared.F90 +++ b/cicecore/cicedyn/analysis/ice_history_shared.F90 @@ -1,4 +1,4 @@ -!======================================================================= +! ======================================================================= ! ! Output files: netCDF or binary data, Fortran unformatted dumps ! @@ -34,11 +34,12 @@ module ice_history_shared implicit none private - public :: define_hist_field, accum_hist_field, icefields_nml, construct_filename + public :: define_hist_field, accum_hist_field, icefields_nml, construct_filename, ice_brine_density integer (kind=int_kind), public :: history_precision logical (kind=log_kind), public :: & + write_histrest_now = .false. , & ! true when writing history restarts hist_avg(max_nstrm) ! if true, write averaged data instead of snapshots character (len=char_len_long), public :: & @@ -80,22 +81,22 @@ module ice_history_shared !--------------------------------------------------------------- type, public :: ice_hist_field - character (len=16) :: vname ! variable name + character (len=24) :: vname ! variable name character (len=16) :: vunit ! variable units character (len=25) :: vcoord ! variable coordinates character (len=16) :: vcellmeas ! variable cell measures - character (len=55) :: vdesc ! variable description - character (len=55) :: vcomment ! variable description + character (len=80) :: vdesc ! variable description + character (len=80) :: vcomment ! variable description real (kind=dbl_kind) :: cona ! multiplicative conversion factor real (kind=dbl_kind) :: conb ! additive conversion factor character (len=1) :: vhistfreq ! frequency of history output integer (kind=int_kind) :: vhistfreq_n ! number of vhistfreq intervals - logical (kind=log_kind) :: avg_ice_present ! only average where ice is present + character (len=16) :: avg_ice_present ! only average where ice is present 'init', 'final', 'none' logical (kind=log_kind) :: mask_ice_free_points ! mask ice-free points end type integer (kind=int_kind), parameter, public :: & - max_avail_hist_fields = 800 ! Max number of history fields + max_avail_hist_fields = 1200 ! Max number of history fields integer (kind=int_kind), public :: & num_avail_hist_fields_tot = 0, & ! Current, total number of defined fields @@ -218,9 +219,11 @@ module ice_history_shared character (len=max_nstrm), public :: & ! 
f_example = 'md', & + f_CMIP = 'x', & f_hi = 'm', f_hs = 'm', & f_snowfrac = 'x', f_snowfracn = 'x', & f_Tsfc = 'm', f_aice = 'm', & + f_aice_init = 'x', & f_uvel = 'm', f_vvel = 'm', & f_icespd = 'm', f_icedir = 'm', & f_uvelE = 'x', f_vvelE = 'x', & @@ -289,40 +292,43 @@ module ice_history_shared f_mlt_onset = 'm', f_frz_onset = 'm', & f_iage = 'm', f_FY = 'm', & f_hisnap = 'm', f_aisnap = 'm', & - f_CMIP = 'x' , & f_sithick = 'x', f_sisnthick = 'x', & - f_siage = 'x', & + f_siage = 'x', f_siconc = 'x', & + f_sisnconc = 'x', f_sisnmass = 'x', & f_sitemptop = 'x', f_sitempsnic = 'x', & - f_sitempbot = 'x', & + f_sitempbot = 'x', f_sivol = 'x', & f_sispeed = 'x', f_sidir = 'x', & f_siu = 'x', f_siv = 'x', & f_sidmasstranx = 'x', f_sidmasstrany = 'x', & f_sistrxdtop = 'x', f_sistrydtop = 'x', & f_sistrxubot = 'x', f_sistryubot = 'x', & f_sicompstren = 'x', & - f_sialb = 'x', & + f_sisali = 'x', & f_sihc = 'x', f_sisnhc = 'x', & f_sidconcth = 'x', f_sidconcdyn = 'x', & f_sidmassth = 'x', f_sidmassdyn = 'x', & f_sidmassgrowthwat = 'x', & f_sidmassgrowthbot = 'x', & - f_sidmasssi = 'x', & + f_simass = 'x', & + f_sisaltmass = 'x', & + f_sidmassgrowthsi = 'x', & f_sidmassevapsubl = 'x', & - f_sndmasssubl = 'x', & + f_sisndmasssubl = 'x', & f_sidmassmelttop = 'x', & f_sidmassmeltbot = 'x', & - f_sidmasslat = 'x', & - f_sndmasssnf = 'x', & - f_sndmassmelt = 'x', & - f_sndmassdyn = 'x', & + f_sidmassmeltlat = 'x', & + f_sisndmasssnf = 'x', & + f_sisndmassmelt = 'x', & + f_sisndmassdyn = 'x', & + f_sisndmasssi = 'x', & f_siflswdtop = 'x', & f_siflswutop = 'x', & f_siflswdbot = 'x', & f_sifllwdtop = 'x', & f_sifllwutop = 'x', & f_siflsenstop = 'x', & - f_siflsensupbot = 'x', & - f_sifllatstop = 'x', & + f_siflsensbot = 'x', & + f_sifllattop = 'x', & f_siflcondtop = 'x', & f_siflcondbot = 'x', & f_sipr = 'x', & @@ -337,12 +343,16 @@ module ice_history_shared f_siforceintstrx = 'x', & f_siforceintstry = 'x', & f_siitdconc = 'x', & + f_siitdsnconc = 'x', & f_siitdthick = 'x', & f_siitdsnthick = 'x', & + f_sidragbot = 'x', & f_sidragtop = 'x', & - f_sirdgthick = 'x', & - f_sistreave = 'x', & - f_sistremax = 'x', & + f_sistressave = 'x', & + f_sistressmax = 'x', & + f_sidivvel = 'x', & + f_sishearvel = 'x', & + f_sitimefrac = 'x', & f_aicen = 'x', f_vicen = 'x', & f_vsnon = 'x', & f_trsig = 'm', f_icepresent = 'm', & @@ -389,9 +399,11 @@ module ice_history_shared f_VGRDb , f_VGRDa , & f_NFSD , & ! f_example , & + f_CMIP , & f_hi, f_hs , & f_snowfrac, f_snowfracn, & f_Tsfc, f_aice , & + f_aice_init, & f_uvel, f_vvel , & f_icespd, f_icedir , & ! 
For now, C and CD grid quantities are controlled by the generic (originally B-grid) namelist flag @@ -461,40 +473,43 @@ module ice_history_shared f_mlt_onset, f_frz_onset, & f_iage, f_FY , & f_hisnap, f_aisnap , & - f_CMIP, & f_sithick, f_sisnthick, & - f_siage, & + f_siage, f_siconc , & + f_sisnconc, f_sisnmass , & f_sitemptop, f_sitempsnic,& - f_sitempbot, & + f_sitempbot, f_sivol, & f_sispeed, f_sidir, & f_siu, f_siv, & f_sidmasstranx, f_sidmasstrany, & f_sistrxdtop, f_sistrydtop, & f_sistrxubot, f_sistryubot, & f_sicompstren, & - f_sialb, & + f_sisali, & f_sihc, f_sisnhc, & f_sidconcth, f_sidconcdyn,& f_sidmassth, f_sidmassdyn,& f_sidmassgrowthwat, & f_sidmassgrowthbot, & - f_sidmasssi, & + f_simass, & + f_sisaltmass, & + f_sidmassgrowthsi, & f_sidmassevapsubl, & - f_sndmasssubl, & + f_sisndmasssubl, & f_sidmassmelttop, & f_sidmassmeltbot, & - f_sidmasslat, & - f_sndmasssnf, & - f_sndmassmelt, & - f_sndmassdyn, & + f_sidmassmeltlat, & + f_sisndmasssnf, & + f_sisndmassmelt, & + f_sisndmassdyn, & + f_sisndmasssi, & f_siflswdtop, & f_siflswutop, & f_siflswdbot, & f_sifllwdtop, & f_sifllwutop, & f_siflsenstop, & - f_siflsensupbot, & - f_sifllatstop, & + f_siflsensbot, & + f_sifllattop, & f_siflcondtop, & f_siflcondbot, & f_sipr, & @@ -509,12 +524,16 @@ module ice_history_shared f_siforceintstrx, & f_siforceintstry, & f_siitdconc, & + f_siitdsnconc, & f_siitdthick, & f_siitdsnthick, & + f_sidragbot, & f_sidragtop, & - f_sirdgthick, & - f_sistreave, & - f_sistremax, & + f_sistressave, & + f_sistressmax, & + f_sidivvel, & + f_sishearvel, & + f_sitimefrac, & f_aicen, f_vicen , & f_vsnon, & f_trsig, f_icepresent,& @@ -593,6 +612,7 @@ module ice_history_shared n_hi , n_hs , & n_snowfrac , n_snowfracn , & n_Tsfc , n_aice , & + n_aice_init , & n_uvel , n_vvel , & n_icespd , n_icedir , & n_uvelE , n_vvelE , & @@ -663,38 +683,42 @@ module ice_history_shared n_mlt_onset , n_frz_onset , & n_hisnap , n_aisnap , & n_sithick , n_sisnthick , & - n_siage, & + n_siage , n_siconc , & + n_sisnconc , n_sisnmass , & n_sitemptop , n_sitempsnic , & - n_sitempbot , & + n_sitempbot , n_sivol , & n_sispeed , n_sidir , & - n_siu, n_siv, & + n_siu , n_siv , & n_sidmasstranx, n_sidmasstrany, & n_sistrxdtop, n_sistrydtop, & n_sistrxubot, n_sistryubot, & n_sicompstren, & - n_sialb, & + n_sisali, & n_sihc , n_sisnhc, & n_sidconcth , n_sidconcdyn, & n_sidmassth , n_sidmassdyn, & n_sidmassgrowthwat, & n_sidmassgrowthbot, & - n_sidmasssi, & + n_simass, & + n_sisaltmass, & + n_sidmassgrowthsi, & n_sidmassevapsubl, & - n_sndmasssubl, & + n_sisndmasssubl, & n_sidmassmelttop, & n_sidmassmeltbot, & - n_sidmasslat, & - n_sndmasssnf, & - n_sndmassmelt, & - n_sndmassdyn, & + n_sidmassmeltlat, & + n_sisndmasssnf, & + n_sisndmassmelt, & + n_sisndmassdyn, & + n_sisndmasssi, & n_siflswdtop, & n_siflswutop, & n_siflswdbot, & n_sifllwdtop, & n_sifllwutop, & n_siflsenstop, & - n_siflsensupbot, & - n_sifllatstop, & + n_siflsensbot, & + n_sifllattop, & n_siflcondtop, & n_siflcondbot, & n_sipr, & @@ -709,12 +733,16 @@ module ice_history_shared n_siforceintstrx, & n_siforceintstry, & n_siitdconc, & + n_siitdsnconc, & n_siitdthick, & n_siitdsnthick, & + n_sidragbot, & n_sidragtop, & - n_sirdgthick, & - n_sistreave, & - n_sistremax, & + n_sistressave, & + n_sistressmax, & + n_sidivvel, & + n_sishearvel, & + n_sitimefrac, & n_trsig , n_icepresent , & n_iage , n_FY , & n_fsurf_ai , & @@ -749,7 +777,7 @@ module ice_history_shared !======================================================================= - subroutine 
construct_filename(ncfile,suffix,ns) + subroutine construct_filename(ncfile,suffix,ns,option) use ice_calendar, only: msec, myear, mmonth, daymo, & mday, write_ic, histfreq, histfreq_n, & @@ -760,13 +788,23 @@ subroutine construct_filename(ncfile,suffix,ns) character (len=*), intent(inout) :: ncfile character (len=*), intent(in) :: suffix integer (kind=int_kind), intent(in) :: ns + character (len=*), intent(in), optional :: option integer (kind=int_kind) :: iyear, imonth, iday, isec integer (kind=int_kind) :: n - character (len=char_len) :: cstream + character (len=char_len) :: cstream, loption character (len=char_len_long), save :: ncfile_last(max_nstrm) = 'UnDefineD' character(len=*), parameter :: subname = '(construct_filename)' + loption = 'history' + if (present(option)) then + loption = option + endif + + if (loption /= 'history' .and. loption /= 'histrest') then + call abort_ice(subname//' ERROR: option invalid = '//trim(loption)) + endif + iyear = myear imonth = mmonth iday = mday @@ -780,6 +818,11 @@ subroutine construct_filename(ncfile,suffix,ns) write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & incond_file(1:lenstr(incond_file)),'.',iyear,'-', & imonth,'-',iday,'-',isec,'.',trim(suffix) + + elseif (loption == 'histrest') then + write(ncfile,'(a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & + history_file(1:lenstr(history_file))//trim(cstream)//'_r'//trim(histfreq(ns))//'.', & + iyear,'-',imonth,'-',iday,'-',msec,'.',trim(suffix) else if (hist_avg(ns)) then @@ -847,19 +890,21 @@ subroutine construct_filename(ncfile,suffix,ns) ! The current filename convention means we just have to check latest filename, ! not all filenames ever generated because of use of current model date/time in filename. - ! write(nu_diag,'(2a,i2,1x,a)') subname, 'debug ncfile= ',ns,trim(ncfile) - do n = 1,max_nstrm - ! write(nu_diag,'(2a,i2,1x,a)') subname, 'debug nfile_last= ',n,trim(ncfile_last(n)) - if (ncfile == ncfile_last(n)) then - write(nu_diag,*) subname,' history stream = ',ns - write(nu_diag,*) subname,' history filename = ',trim(ncfile) - write(nu_diag,*) subname,' filename in use for stream ',n - write(nu_diag,*) subname,' filename for stream ',trim(ncfile_last(n)) - write(nu_diag,*) subname,' Use namelist hist_suffix so history filenames are unique' - call abort_ice(subname//' ERROR: history filename already used for another history stream '//trim(ncfile)) - endif - enddo - ncfile_last(ns) = ncfile + if (loption /= 'histrest') then + ! write(nu_diag,'(2a,i2,1x,a)') subname, 'debug ncfile= ',ns,trim(ncfile) + do n = 1,max_nstrm + ! write(nu_diag,'(2a,i2,1x,a)') subname, 'debug nfile_last= ',n,trim(ncfile_last(n)) + if (ncfile == ncfile_last(n)) then + write(nu_diag,*) subname,' history stream = ',ns + write(nu_diag,*) subname,' history filename = ',trim(ncfile) + write(nu_diag,*) subname,' filename in use for stream ',n + write(nu_diag,*) subname,' filename for stream ',trim(ncfile_last(n)) + write(nu_diag,*) subname,' Use namelist hist_suffix so history filenames are unique' + call abort_ice(subname//' ERROR: history filename already used for another history stream '//trim(ncfile)) + endif + enddo + ncfile_last(ns) = ncfile + endif end subroutine construct_filename @@ -888,6 +933,9 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & vdesc , & ! variable descriptions vcomment ! variable comments + character (len=*), optional, intent(in) :: & + avg_ice_present ! compute average only when ice is present + real (kind=dbl_kind), intent(in) :: & cona , & ! 
multiplicative conversion factor conb ! additive conversion factor @@ -899,7 +947,6 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & ns ! history file stream index logical (kind=log_kind), optional, intent(in) :: & - avg_ice_present , & ! compute average only when ice is present mask_ice_free_points ! mask ice-free points integer (kind=int_kind) :: & @@ -908,13 +955,14 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & character (len=40) :: stmp + character (len=16) :: l_avg_ice_present ! compute average only when ice is present + logical (kind=log_kind) :: & - l_avg_ice_present , & ! compute average only when ice is present l_mask_ice_free_points ! mask ice-free points character(len=*), parameter :: subname = '(define_hist_field)' - l_avg_ice_present = .false. + l_avg_ice_present = 'none' l_mask_ice_free_points = .false. if(present(avg_ice_present)) l_avg_ice_present = avg_ice_present @@ -1199,6 +1247,55 @@ end subroutine accum_hist_field_4D !======================================================================= +! Computes total density of brine plus fresh ice. Used for mass +! related CMIP variables. +! +! 2025 Created by D. Bailey + + subroutine ice_brine_density (qice,sice,sss,rho_ice,rho_ocn,salt_ice) + + use ice_constants, only: c0, c1 + use icepack_intfc, only: icepack_mushy_density_brine, icepack_mushy_liquid_fraction + use icepack_intfc, only: icepack_mushy_temperature_mush, icepack_query_parameters + + real (kind=dbl_kind), intent(in), dimension(:) :: & + qice, & ! sea ice enthalpy of each layer (J m-3) + sice ! sea ice salinity in each layer (psu) + + real (kind=dbl_kind), intent(in) :: & + sss ! sea surface (ocean) salinity (psu) + + real (kind=dbl_kind), intent(out) :: & + rho_ice, & ! combined brine + ice density (kg m-3) + rho_ocn, & ! ocean density from sss (kg m-3) + salt_ice ! bulk salinity of brine + ice (psu) + + integer (kind=int_kind) :: k + real (kind=dbl_kind) :: rhoi ! constant fresh ice density (kg m-3) + real (kind=dbl_kind) :: & + Tice, & ! sea ice temperature in each layer (C) + Sbr, & ! salinity of brine in each layer (psu) + phi, & ! brine fraction in each layer + rhob ! density of brine (kg m-3) + + call icepack_query_parameters(rhoi_out=rhoi) + + rho_ocn = icepack_mushy_density_brine(sss) + rho_ice = c0 + salt_ice = c0 + do k = 1, nzilyr + Sbr = sice(k) + Tice = icepack_mushy_temperature_mush(qice(k),Sbr) + salt_ice = salt_ice + Sbr / real(nzilyr,kind=dbl_kind) + phi = icepack_mushy_liquid_fraction(Tice,Sbr) + rhob = icepack_mushy_density_brine(Sbr) + rho_ice = rho_ice + min(phi*rhob+(c1-phi)*rhoi,rho_ocn) + enddo + rho_ice = rho_ice / real(nzilyr,kind=dbl_kind) + + end subroutine ice_brine_density +!======================================================================= + end module ice_history_shared !======================================================================= diff --git a/cicecore/cicedyn/analysis/ice_history_snow.F90 b/cicecore/cicedyn/analysis/ice_history_snow.F90 index 19722b014..bb702defa 100644 --- a/cicecore/cicedyn/analysis/ice_history_snow.F90 +++ b/cicecore/cicedyn/analysis/ice_history_snow.F90 @@ -31,6 +31,9 @@ module ice_history_snow f_rsnw = 'm', f_rsnwn = 'x', & f_meltsliq = 'm', f_fsloss = 'x' + character (len=max_nstrm), public :: & + f_sisndmasswind = 'm' + !--------------------------------------------------------------- ! 
namelist variables !--------------------------------------------------------------- @@ -41,7 +44,8 @@ module ice_history_snow f_rhos_cmp, f_rhos_cmpn, & f_rhos_cnt, f_rhos_cntn, & f_rsnw, f_rsnwn, & - f_meltsliq, f_fsloss + f_meltsliq, f_fsloss, & + f_sisndmasswind !--------------------------------------------------------------- ! field indices @@ -55,6 +59,9 @@ module ice_history_snow n_rsnw, n_rsnwn, & n_meltsliq, n_fsloss + integer (kind=int_kind), dimension(max_nstrm), public :: & + n_sisndmasswind + !======================================================================= contains @@ -147,6 +154,7 @@ subroutine init_hist_snow_2D (dt) f_rsnwn = 'x' f_meltsliq = 'x' f_fsloss = 'x' + f_sisndmasswind = 'x' endif call broadcast_scalar (f_smassice, master_task) @@ -161,6 +169,7 @@ subroutine init_hist_snow_2D (dt) call broadcast_scalar (f_rsnwn, master_task) call broadcast_scalar (f_meltsliq, master_task) call broadcast_scalar (f_fsloss, master_task) + call broadcast_scalar (f_sisndmasswind, master_task) if (tr_snow) then @@ -210,6 +219,13 @@ subroutine init_hist_snow_2D (dt) "none", c1, c0, & ns, f_fsloss) + if (f_sisndmasswind(1:1) /= 'x') & + call define_hist_field(n_sisndmasswind,"sisndmasswind","kg/m^2/s",tstr2D, tcstr, & + "snow mass rate of change through wind drift of snow", & + "rate of change of snow mass due to wind-driven transport into the ocean", & + c1, c0, & + ns, f_sisndmasswind, avg_ice_present='none', mask_ice_free_points=.false.) + endif ! histfreq(ns) /= 'x' enddo ! nstreams endif ! tr_snow @@ -376,6 +392,10 @@ subroutine accum_hist_snow (iblk) call accum_hist_field(n_fsloss, iblk, & fsloss(:,:,iblk), a2D) + if (f_sisndmasswind(1:1)/= 'x') & + call accum_hist_field(n_sisndmasswind, iblk, & + fsloss(:,:,iblk), a2D) + endif ! allocated(a2D) ! 3D category fields diff --git a/cicecore/cicedyn/dynamics/ice_dyn_evp1d.F90 b/cicecore/cicedyn/dynamics/ice_dyn_evp1d.F90 index 223ef2849..18316640e 100644 --- a/cicecore/cicedyn/dynamics/ice_dyn_evp1d.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_evp1d.F90 @@ -94,7 +94,7 @@ subroutine dyn_evp1d_init endif ! gather from blks to global - call gather_static(G_uarear, G_dxT, G_dyT, G_tmask) + call gather_static(G_uarear, G_dxT, G_dyT, G_tmask) ! calculate number of water points (T and U). Only needed for the static version ! tmask in ocean/ice @@ -349,7 +349,6 @@ subroutine evp1d_alloc_static_na(na0) call abort_ice(subname//' ERROR: allocating', file=__FILE__, line=__LINE__) endif - allocate(indxTi(1:na0), & indxTj(1:na0), & stat=ierr) @@ -628,6 +627,11 @@ subroutine gather_static(G_uarear, G_dxT, G_dyT, G_Tmask) character(len=*), parameter :: subname = '(gather_static)' + G_uarear = c0 + G_dyT = c0 + G_dxT = c0 + G_tmask = .false. + ! 
copy from distributed I_* to G_* call gather_global_ext(G_uarear, uarear, master_task, distrb_info) call gather_global_ext(G_dxT , dxT , master_task, distrb_info) @@ -977,6 +981,37 @@ subroutine convert_1d_2d_dyn(na0 , navel0 , integer(kind=int_kind) :: lo, up, iw, i, j character(len=*), parameter :: subname = '(convert_1d_2d_dyn)' + G_stressp_1 = c0 + G_stressp_2 = c0 + G_stressp_3 = c0 + G_stressp_4 = c0 + G_stressm_1 = c0 + G_stressm_2 = c0 + G_stressm_3 = c0 + G_stressm_4 = c0 + G_stress12_1 = c0 + G_stress12_2 = c0 + G_stress12_3 = c0 + G_stress12_4 = c0 + G_strength = c0 + G_cdn_ocn = c0 + G_aiu = c0 + G_uocn = c0 + G_vocn = c0 + G_waterxU = c0 + G_wateryU = c0 + G_forcexU = c0 + G_forceyU = c0 + G_umassdti = c0 + G_fmU = c0 + G_strintxU = c0 + G_strintyU = c0 + G_Tbu = c0 + G_uvel = c0 + G_vvel = c0 + G_taubxU = c0 + G_taubyU = c0 + lo=1 up=na0 do iw = lo, up diff --git a/cicecore/cicedyn/dynamics/ice_dyn_shared.F90 b/cicecore/cicedyn/dynamics/ice_dyn_shared.F90 index 40f49877d..81d603124 100644 --- a/cicecore/cicedyn/dynamics/ice_dyn_shared.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_shared.F90 @@ -193,6 +193,13 @@ subroutine alloc_dyn_shared stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of memory') + uvel_init = c0 + vvel_init = c0 + iceTmask = .false. + iceUmask = .false. + fcor_blk = c0 + DminTarea = c0 + allocate( & fld2(nx_block,ny_block,2,max_blocks), & fld3(nx_block,ny_block,3,max_blocks), & @@ -200,6 +207,10 @@ subroutine alloc_dyn_shared stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of memory') + fld2 = c0 + fld3 = c0 + fld4 = c0 + allocate( & cyp(nx_block,ny_block,max_blocks), & ! 1.5*HTE - 0.5*HTW cxp(nx_block,ny_block,max_blocks), & ! 1.5*HTN - 0.5*HTS @@ -208,12 +219,19 @@ subroutine alloc_dyn_shared stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of memory') + cyp = c0 + cxp = c0 + cym = c0 + cxm = c0 + if (grid_ice == 'B' .and. evp_algorithm == "standard_2d") then allocate( & dxhy(nx_block,ny_block,max_blocks), & ! 0.5*(HTE - HTW) dyhx(nx_block,ny_block,max_blocks), & ! 0.5*(HTN - HTS) stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of memory') + dxhy = c0 + dyhx = c0 endif if (grid_ice == 'CD' .or. grid_ice == 'C') then @@ -228,6 +246,14 @@ subroutine alloc_dyn_shared fcorN_blk (nx_block,ny_block,max_blocks), & ! Coriolis stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of memory') + uvelE_init = c0 + vvelE_init = c0 + uvelN_init = c0 + vvelN_init = c0 + iceEmask = .false. + iceNmask = .false. + fcorE_blk = c0 + fcorN_blk = c0 endif end subroutine alloc_dyn_shared diff --git a/cicecore/cicedyn/general/ice_flux.F90 b/cicecore/cicedyn/general/ice_flux.F90 index ff71a4a4d..e6f38acda 100644 --- a/cicecore/cicedyn/general/ice_flux.F90 +++ b/cicecore/cicedyn/general/ice_flux.F90 @@ -400,6 +400,7 @@ module ice_flux vatmT , & ! vatm on T grid (m/s) wlat , & ! lateral heat rate (m/s) fsw , & ! incoming shortwave radiation (W/m^2) + fswup , & ! outgoing shortwave radiation (W/m^2) coszen , & ! cosine solar zenith angle, < 0 for sun below horizon rdg_conv, & ! convergence term for ridging (1/s) rdg_shear ! shear term for ridging (1/s) @@ -585,6 +586,7 @@ subroutine alloc_flux vatmT (nx_block,ny_block,max_blocks), & ! vatm on T grid wlat (nx_block,ny_block,max_blocks), & ! lateral melt rate (m/s) fsw (nx_block,ny_block,max_blocks), & ! incoming shortwave radiation (W/m^2) + fswup (nx_block,ny_block,max_blocks), & ! outgoing shortwave radiation (W/m^2) coszen (nx_block,ny_block,max_blocks), & ! 
cosine solar zenith angle, < 0 for sun below horizon rdg_conv (nx_block,ny_block,max_blocks), & ! convergence term for ridging (1/s) rdg_shear (nx_block,ny_block,max_blocks), & ! shear term for ridging (1/s) @@ -625,40 +627,271 @@ subroutine alloc_flux stat=ierr) if (ierr/=0) call abort_ice('(alloc_flux): Out of memory') - if (grid_ice == "CD" .or. grid_ice == "C") & + strax = c0 + stray = c0 + uocn = c0 + vocn = c0 + ss_tltx = c0 + ss_tlty = c0 + hwater = c0 + strairxT = c0 + strairyT = c0 + strocnxT_iavg= c0 + strocnyT_iavg= c0 + sig1 = c0 + sig2 = c0 + sigP = c0 + taubxU = c0 + taubyU = c0 + strairxU = c0 + strairyU = c0 + strocnxU = c0 + strocnyU = c0 + strtltxU = c0 + strtltyU = c0 + strintxU = c0 + strintyU = c0 + daidtd = c0 + dvidtd = c0 + dvsdtd = c0 + dagedtd = c0 + dardg1dt = c0 + dardg2dt = c0 + dvirdgdt = c0 + opening = c0 + stressp_1 = c0 + stressp_2 = c0 + stressp_3 = c0 + stressp_4 = c0 + stressm_1 = c0 + stressm_2 = c0 + stressm_3 = c0 + stressm_4 = c0 + stress12_1 = c0 + stress12_2 = c0 + stress12_3 = c0 + stress12_4 = c0 + fmU = c0 + TbU = c0 + zlvl = c0 + zlvs = c0 + uatm = c0 + vatm = c0 + wind = c0 + potT = c0 + Tair = c0 + Qa = c0 + rhoa = c0 + swvdr = c0 + swvdf = c0 + swidr = c0 + swidf = c0 + swuvrdr = c0 + swuvrdf = c0 + swpardr = c0 + swpardf = c0 + flw = c0 + frain = c0 + fsnow = c0 + sss = c0 + sst = c0 + frzmlt = c0 + frzmlt_init= c0 + Tf = c0 + qdp = c0 + hmix = c0 + daice_da = c0 + fsens = c0 + flat = c0 + fswabs = c0 + fswint_ai = c0 + flwout = c0 + Tref = c0 + Qref = c0 + Uref = c0 + evap = c0 + evaps = c0 + evapi = c0 + alvdr = c0 + alidr = c0 + alvdf = c0 + alidf = c0 + alvdr_ai = c0 + alidr_ai = c0 + alvdf_ai = c0 + alidf_ai = c0 + albice = c0 + albsno = c0 + albpnd = c0 + apeff_ai = c0 + snowfrac = c0 + alvdr_init = c0 + alidr_init = c0 + alvdf_init = c0 + alidf_init = c0 + fpond = c0 + fresh = c0 + fsalt = c0 + fhocn = c0 + fsloss = c0 + fswthru = c0 + fswthru_vdr= c0 + fswthru_vdf= c0 + fswthru_idr= c0 + fswthru_idf= c0 + fswthru_uvrdr = c0 + fswthru_uvrdf = c0 + fswthru_pardr = c0 + fswthru_pardf = c0 + scale_factor = c0 + strairx_ocn= c0 + strairy_ocn= c0 + fsens_ocn = c0 + flat_ocn = c0 + flwout_ocn = c0 + evap_ocn = c0 + alvdr_ocn = c0 + alidr_ocn = c0 + alvdf_ocn = c0 + alidf_ocn = c0 + Tref_ocn = c0 + Qref_ocn = c0 + fsurf = c0 + fcondtop = c0 + fcondbot = c0 + fbot = c0 + Tbot = c0 + Tsnice = c0 + congel = c0 + frazil = c0 + snoice = c0 + meltt = c0 + melts = c0 + meltb = c0 + meltl = c0 + dsnow = c0 + daidtt = c0 + dvidtt = c0 + dvsdtt = c0 + dagedtt = c0 + mlt_onset = c0 + frz_onset = c0 + frazil_diag= c0 + fresh_ai = c0 + fsalt_ai = c0 + fhocn_ai = c0 + fswthru_ai = c0 + fresh_da = c0 + fsalt_da = c0 + uatmT = c0 + vatmT = c0 + wlat = c0 + fsw = c0 + fswup = c0 + coszen = c0 + rdg_conv = c0 + rdg_shear = c0 + rsiden = c0 + dardg1ndt = c0 + dardg2ndt = c0 + dvirdgndt = c0 + aparticn = c0 + krdgn = c0 + ardgn = c0 + vrdgn = c0 + araftn = c0 + vraftn = c0 + aredistn = c0 + vredistn = c0 + fsurfn_f = c0 + fcondtopn_f= c0 + fsensn_f = c0 + flatn_f = c0 + evapn_f = c0 + dflatndTsfc_f = c0 + dfsurfndTsfc_f= c0 + meltsn = c0 + melttn = c0 + meltbn = c0 + congeln = c0 + snoicen = c0 + keffn_top = c0 + fsurfn = c0 + fcondtopn = c0 + fcondbotn = c0 + fsensn = c0 + flatn = c0 + albcnt = c0 + snwcnt = c0 + salinz = c0 + Tmltz = c0 + + if (grid_ice == "CD" .or. grid_ice == "C") then allocate( & - taubxN (nx_block,ny_block,max_blocks), & ! seabed stress (x) at N points (N/m^2) - taubyN (nx_block,ny_block,max_blocks), & ! 
seabed stress (y) at N points (N/m^2) - strairxN (nx_block,ny_block,max_blocks), & ! stress on ice by air, x-direction at N points - strairyN (nx_block,ny_block,max_blocks), & ! stress on ice by air, y-direction at N points - strocnxN (nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction at N points - strocnyN (nx_block,ny_block,max_blocks), & ! ice-ocean stress, y-direction at N points - strtltxN (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, x-direction at N points - strtltyN (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, y-direction at N points - strintxN (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, x at N points (N/m^2) - strintyN (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, y at N points (N/m^2) - fmN (nx_block,ny_block,max_blocks), & ! Coriolis param. * mass in N-cell (kg/s) - TbN (nx_block,ny_block,max_blocks), & ! factor for seabed stress (landfast ice) - taubxE (nx_block,ny_block,max_blocks), & ! seabed stress (x) at E points (N/m^2) - taubyE (nx_block,ny_block,max_blocks), & ! seabed stress (y) at E points (N/m^2) - strairxE (nx_block,ny_block,max_blocks), & ! stress on ice by air, x-direction at E points - strairyE (nx_block,ny_block,max_blocks), & ! stress on ice by air, y-direction at E points - strocnxE (nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction at E points - strocnyE (nx_block,ny_block,max_blocks), & ! ice-ocean stress, y-direction at E points - strtltxE (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, x-direction at E points - strtltyE (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, y-direction at E points - strintxE (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, x at E points (N/m^2) - strintyE (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, y at E points (N/m^2) - fmE (nx_block,ny_block,max_blocks), & ! Coriolis param. * mass in E-cell (kg/s) - TbE (nx_block,ny_block,max_blocks), & ! factor for seabed stress (landfast ice) - stresspT (nx_block,ny_block,max_blocks), & ! sigma11+sigma22 - stressmT (nx_block,ny_block,max_blocks), & ! sigma11-sigma22 - stress12T (nx_block,ny_block,max_blocks), & ! sigma12 - stresspU (nx_block,ny_block,max_blocks), & ! sigma11+sigma22 - stressmU (nx_block,ny_block,max_blocks), & ! sigma11-sigma22 - stress12U (nx_block,ny_block,max_blocks), & ! sigma12 - stat=ierr) - if (ierr/=0) call abort_ice('(alloc_flux): Out of memory (C or CD grid)') + taubxN (nx_block,ny_block,max_blocks), & ! seabed stress (x) at N points (N/m^2) + taubyN (nx_block,ny_block,max_blocks), & ! seabed stress (y) at N points (N/m^2) + strairxN (nx_block,ny_block,max_blocks), & ! stress on ice by air, x-direction at N points + strairyN (nx_block,ny_block,max_blocks), & ! stress on ice by air, y-direction at N points + strocnxN (nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction at N points + strocnyN (nx_block,ny_block,max_blocks), & ! ice-ocean stress, y-direction at N points + strtltxN (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, x-direction at N points + strtltyN (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, y-direction at N points + strintxN (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, x at N points (N/m^2) + strintyN (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, y at N points (N/m^2) + fmN (nx_block,ny_block,max_blocks), & ! Coriolis param. 
* mass in N-cell (kg/s) + TbN (nx_block,ny_block,max_blocks), & ! factor for seabed stress (landfast ice) + taubxE (nx_block,ny_block,max_blocks), & ! seabed stress (x) at E points (N/m^2) + taubyE (nx_block,ny_block,max_blocks), & ! seabed stress (y) at E points (N/m^2) + strairxE (nx_block,ny_block,max_blocks), & ! stress on ice by air, x-direction at E points + strairyE (nx_block,ny_block,max_blocks), & ! stress on ice by air, y-direction at E points + strocnxE (nx_block,ny_block,max_blocks), & ! ice-ocean stress, x-direction at E points + strocnyE (nx_block,ny_block,max_blocks), & ! ice-ocean stress, y-direction at E points + strtltxE (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, x-direction at E points + strtltyE (nx_block,ny_block,max_blocks), & ! stress due to sea surface slope, y-direction at E points + strintxE (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, x at E points (N/m^2) + strintyE (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, y at E points (N/m^2) + fmE (nx_block,ny_block,max_blocks), & ! Coriolis param. * mass in E-cell (kg/s) + TbE (nx_block,ny_block,max_blocks), & ! factor for seabed stress (landfast ice) + stresspT (nx_block,ny_block,max_blocks), & ! sigma11+sigma22 + stressmT (nx_block,ny_block,max_blocks), & ! sigma11-sigma22 + stress12T (nx_block,ny_block,max_blocks), & ! sigma12 + stresspU (nx_block,ny_block,max_blocks), & ! sigma11+sigma22 + stressmU (nx_block,ny_block,max_blocks), & ! sigma11-sigma22 + stress12U (nx_block,ny_block,max_blocks), & ! sigma12 + stat=ierr) + if (ierr/=0) call abort_ice('(alloc_flux): Out of memory (C or CD grid)') + + taubxN = c0 + taubyN = c0 + strairxN = c0 + strairyN = c0 + strocnxN = c0 + strocnyN = c0 + strtltxN = c0 + strtltyN = c0 + strintxN = c0 + strintyN = c0 + fmN = c0 + TbN = c0 + taubxE = c0 + taubyE = c0 + strairxE = c0 + strairyE = c0 + strocnxE = c0 + strocnyE = c0 + strtltxE = c0 + strtltyE = c0 + strintxE = c0 + strintyE = c0 + fmE = c0 + TbE = c0 + stresspT = c0 + stressmT = c0 + stress12T = c0 + stresspU = c0 + stressmU = c0 + stress12U = c0 + endif ! Pond diagnostics allocate( & @@ -677,6 +910,19 @@ subroutine alloc_flux stat=ierr) if (ierr/=0) call abort_ice('(alloc_flux): Out of memory (ponds)') + dpnd_flush = c0 + dpnd_expon = c0 + dpnd_freebd = c0 + dpnd_initial = c0 + dpnd_dlid = c0 + dpnd_melt = c0 + dpnd_ridge = c0 + dpnd_flushn = c0 + dpnd_exponn = c0 + dpnd_freebdn = c0 + dpnd_initialn= c0 + dpnd_dlidn = c0 + end subroutine alloc_flux !======================================================================= @@ -827,8 +1073,6 @@ subroutine init_coupler_flux flat (:,:,:) = c0 fswabs (:,:,:) = c0 fswint_ai(:,:,:) = c0 - flwout (:,:,:) = -stefan_boltzmann*Tffresh**4 - ! in case atm model diagnoses Tsfc from flwout evap (:,:,:) = c0 evaps (:,:,:) = c0 evapi (:,:,:) = c0 @@ -885,6 +1129,7 @@ subroutine init_coupler_flux coszen (:,:,:) = c0 ! Cosine of the zenith angle fsw (:,:,:) = c0 ! shortwave radiation (W/m^2) + fswup (:,:,:) = c0 ! shortwave radiation (W/m^2) scale_factor(:,:,:) = c1 ! shortwave scaling factor wind (:,:,:) = sqrt(uatm(:,:,:)**2 & + vatm(:,:,:)**2) ! 
wind speed, (m/s) @@ -927,6 +1172,8 @@ subroutine init_flux_atm fswabs (:,:,:) = c0 flwout (:,:,:) = c0 evap (:,:,:) = c0 + evaps (:,:,:) = c0 + evapi (:,:,:) = c0 Tref (:,:,:) = c0 Qref (:,:,:) = c0 Uref (:,:,:) = c0 @@ -1320,9 +1567,6 @@ subroutine scale_fluxes (nx_block, ny_block, & fsens (i,j) = fsens (i,j) * ar flat (i,j) = flat (i,j) * ar fswabs (i,j) = fswabs (i,j) * ar - ! Special case where aice_init was zero and aice > 0. - if (flwout(i,j) > -puny) & - flwout (i,j) = -stefan_boltzmann *(Tf(i,j) + Tffresh)**4 flwout (i,j) = flwout (i,j) * ar evap (i,j) = evap (i,j) * ar Tref (i,j) = Tref (i,j) * ar diff --git a/cicecore/cicedyn/general/ice_flux_bgc.F90 b/cicecore/cicedyn/general/ice_flux_bgc.F90 index 9c07971ff..7aaaf2baa 100644 --- a/cicecore/cicedyn/general/ice_flux_bgc.F90 +++ b/cicecore/cicedyn/general/ice_flux_bgc.F90 @@ -7,6 +7,7 @@ module ice_flux_bgc use ice_kinds_mod + use ice_constants, only: c0 use ice_blocks, only: nx_block, ny_block use ice_domain_size, only: max_blocks, ncat use ice_fileunits, only: nu_diag @@ -161,6 +162,48 @@ subroutine alloc_flux_bgc stat=ierr) if (ierr/=0) call abort_ice('(alloc_flux_bgc): Out of memory') + nit = c0 + amm = c0 + sil = c0 + dmsp = c0 + dms = c0 + hum = c0 + fnit = c0 + famm = c0 + fsil = c0 + fdmsp = c0 + fdms = c0 + fhum = c0 + fdust = c0 + hin_old = c0 + dsnown = c0 + HDO_ocn = c0 + H2_16O_ocn = c0 + H2_18O_ocn = c0 + Qa_iso = c0 + Qref_iso = c0 + fiso_atm = c0 + fiso_evap = c0 + fiso_ocn = c0 + faero_atm = c0 + faero_ocn = c0 + zaeros = c0 + flux_bio_atm= c0 + flux_bio = c0 + flux_bio_ai = c0 + algalN = c0 + falgalN = c0 + doc = c0 + fdoc = c0 + don = c0 + fdon = c0 + dic = c0 + fdic = c0 + fed = c0 + fep = c0 + ffed = c0 + ffep = c0 + end subroutine alloc_flux_bgc !======================================================================= diff --git a/cicecore/cicedyn/general/ice_forcing.F90 b/cicecore/cicedyn/general/ice_forcing.F90 index e0f1b736a..d165b612a 100755 --- a/cicecore/cicedyn/general/ice_forcing.F90 +++ b/cicecore/cicedyn/general/ice_forcing.F90 @@ -209,37 +209,59 @@ subroutine alloc_forcing if (local_debug .and. my_task == master_task) write(nu_diag,*) subname,'fdbg start' allocate ( & - cldf(nx_block,ny_block, max_blocks), & ! cloud fraction - fsw_data(nx_block,ny_block,2,max_blocks), & ! field values at 2 temporal data points - cldf_data(nx_block,ny_block,2,max_blocks), & - fsnow_data(nx_block,ny_block,2,max_blocks), & - Tair_data(nx_block,ny_block,2,max_blocks), & - uatm_data(nx_block,ny_block,2,max_blocks), & - vatm_data(nx_block,ny_block,2,max_blocks), & - wind_data(nx_block,ny_block,2,max_blocks), & - strax_data(nx_block,ny_block,2,max_blocks), & - stray_data(nx_block,ny_block,2,max_blocks), & - Qa_data(nx_block,ny_block,2,max_blocks), & - rhoa_data(nx_block,ny_block,2,max_blocks), & - flw_data(nx_block,ny_block,2,max_blocks), & - sst_data(nx_block,ny_block,2,max_blocks), & - sss_data(nx_block,ny_block,2,max_blocks), & - uocn_data(nx_block,ny_block,2,max_blocks), & - vocn_data(nx_block,ny_block,2,max_blocks), & - sublim_data(nx_block,ny_block,2,max_blocks), & - frain_data(nx_block,ny_block,2,max_blocks), & - topmelt_data(nx_block,ny_block,2,max_blocks,ncat), & - botmelt_data(nx_block,ny_block,2,max_blocks,ncat), & - ocn_frc_m(nx_block,ny_block, max_blocks,nfld,12), & ! ocn data for 12 months - topmelt_file(ncat), & - botmelt_file(ncat), & - wave_spectrum_data(nx_block,ny_block,nfreq,2,max_blocks), & + cldf (nx_block,ny_block, max_blocks), & ! cloud fraction + fsw_data (nx_block,ny_block,2,max_blocks), & ! 
field values at 2 temporal data points + cldf_data (nx_block,ny_block,2,max_blocks), & + fsnow_data (nx_block,ny_block,2,max_blocks), & + Tair_data (nx_block,ny_block,2,max_blocks), & + uatm_data (nx_block,ny_block,2,max_blocks), & + vatm_data (nx_block,ny_block,2,max_blocks), & + wind_data (nx_block,ny_block,2,max_blocks), & + strax_data (nx_block,ny_block,2,max_blocks), & + stray_data (nx_block,ny_block,2,max_blocks), & + Qa_data (nx_block,ny_block,2,max_blocks), & + rhoa_data (nx_block,ny_block,2,max_blocks), & + flw_data (nx_block,ny_block,2,max_blocks), & + sst_data (nx_block,ny_block,2,max_blocks), & + sss_data (nx_block,ny_block,2,max_blocks), & + uocn_data (nx_block,ny_block,2,max_blocks), & + vocn_data (nx_block,ny_block,2,max_blocks), & + sublim_data (nx_block,ny_block,2,max_blocks), & + frain_data (nx_block,ny_block,2,max_blocks), & + topmelt_data(nx_block,ny_block,2,max_blocks,ncat), & + botmelt_data(nx_block,ny_block,2,max_blocks,ncat), & + ocn_frc_m (nx_block,ny_block, max_blocks,nfld,12), & ! ocn data for 12 months + topmelt_file(ncat), & + botmelt_file(ncat), & + wave_spectrum_data(nx_block,ny_block,nfreq,2,max_blocks), & stat=ierr) if (ierr/=0) call abort_ice('(alloc_forcing): Out of Memory') -! initialize this, not set in box2001 (and some other forcings?) - - cldf = c0 + cldf = c0 + fsw_data = c0 + cldf_data = c0 + fsnow_data = c0 + Tair_data = c0 + uatm_data = c0 + vatm_data = c0 + wind_data = c0 + strax_data = c0 + stray_data = c0 + Qa_data = c0 + rhoa_data = c0 + flw_data = c0 + sst_data = c0 + sss_data = c0 + uocn_data = c0 + vocn_data = c0 + sublim_data = c0 + frain_data = c0 + topmelt_data = c0 + botmelt_data = c0 + ocn_frc_m = c0 + topmelt_file = '' + botmelt_file = '' + wave_spectrum_data = c0 end subroutine alloc_forcing @@ -711,13 +733,13 @@ subroutine get_forcing_atmo call ice_timer_start(timer_bound) call ice_HaloUpdate (swvdr, halo_info, & - field_loc_center, field_type_scalar) + field_loc_center, field_type_scalar, fillvalue=c0) call ice_HaloUpdate (swvdf, halo_info, & - field_loc_center, field_type_scalar) + field_loc_center, field_type_scalar, fillvalue=c0) call ice_HaloUpdate (swidr, halo_info, & - field_loc_center, field_type_scalar) + field_loc_center, field_type_scalar, fillvalue=c0) call ice_HaloUpdate (swidf, halo_info, & - field_loc_center, field_type_scalar) + field_loc_center, field_type_scalar, fillvalue=c0) call ice_timer_stop(timer_bound) call ice_timer_stop(timer_forcing) diff --git a/cicecore/cicedyn/general/ice_forcing_bgc.F90 b/cicecore/cicedyn/general/ice_forcing_bgc.F90 index 69c3ea311..d12df6417 100644 --- a/cicecore/cicedyn/general/ice_forcing_bgc.F90 +++ b/cicecore/cicedyn/general/ice_forcing_bgc.F90 @@ -65,6 +65,11 @@ subroutine alloc_forcing_bgc stat=ierr) if (ierr/=0) call abort_ice('(alloc_forcing_bgc): Out of memory') + nitdat = c0 + sildat = c0 + nit_data= c0 + sil_data= c0 + end subroutine alloc_forcing_bgc !======================================================================= diff --git a/cicecore/cicedyn/general/ice_init.F90 b/cicecore/cicedyn/general/ice_init.F90 index f716e59a9..24c67d95f 100644 --- a/cicecore/cicedyn/general/ice_init.F90 +++ b/cicecore/cicedyn/general/ice_init.F90 @@ -74,7 +74,7 @@ subroutine input_data istep0, histfreq, histfreq_n, histfreq_base, & dumpfreq, dumpfreq_n, diagfreq, dumpfreq_base, & npt, dt, ndtd, days_per_year, use_leap_years, & - write_ic, dump_last, npt_unit + write_ic, dump_last, npt_unit, write_histrest use ice_arrays_column, only: oceanmixed_ice use ice_restart_column, only: & 
restart_age, restart_FY, restart_lvl, & @@ -84,7 +84,7 @@ subroutine input_data restart, restart_ext, restart_coszen, use_restart_time, & runtype, restart_file, restart_dir, runid, pointer_file, & restart_format, restart_rearranger, restart_iotasks, restart_root, & - restart_stride, restart_deflate, restart_chunksize + restart_stride, restart_deflate, restart_chunksize, restart_mod use ice_history_shared, only: & history_precision, hist_avg, history_format, history_file, incond_file, & history_dir, incond_dir, version_name, history_rearranger, & @@ -157,13 +157,14 @@ subroutine input_data phi_c_slow_mode, phi_i_mushy, kalg, atmiter_conv, Pstar, Cstar, & sw_frac, sw_dtemp, floediam, hfrazilmin, iceruf, iceruf_ocn, & rsnw_fall, rsnw_tmax, rhosnew, rhosmin, rhosmax, Tliquidus_max, & - windmin, drhosdwind, snwlvlfac, tscale_pnd_drain + windmin, drhosdwind, snwlvlfac, tscale_pnd_drain, itd_area_min, itd_mass_min integer (kind=int_kind) :: ktherm, kstrength, krdg_partic, krdg_redist, natmiter, & kitd, kcatbound, ktransport character (len=char_len) :: shortwave, albedo_type, conduct, fbot_xfer_type, & - tfrz_option, saltflux_option, frzpnd, atmbndy, wave_spec_type, snwredist, snw_aging_table, & + tfrz_option, saltflux_option, frzpnd, atmbndy, wave_spec_type, wave_height_type, & + snwredist, snw_aging_table, & congel_freeze, capping_method, snw_ssp_table logical (kind=log_kind) :: calc_Tsfc, formdrag, highfreq, calc_strair, wave_spec, & @@ -178,6 +179,7 @@ subroutine input_data integer (kind=int_kind) :: rplvl, rptopo, rpsealvl real (kind=dbl_kind) :: Cf, ksno, puny, ice_ref_salinity, Tocnfrz + real (kind=dbl_kind), parameter :: ice_init_spval = -999._dbl_kind character (len=char_len) :: abort_list character (len=char_len) :: nml_name ! namelist name @@ -196,14 +198,14 @@ subroutine input_data ice_ic, restart, restart_dir, restart_file, & restart_ext, use_restart_time, restart_format, lcdf64, & restart_root, restart_stride, restart_iotasks, restart_rearranger, & - restart_deflate, restart_chunksize, & + restart_deflate, restart_chunksize, restart_mod, write_histrest,& pointer_file, dumpfreq, dumpfreq_n, dump_last, & diagfreq, diag_type, diag_file, history_format,& history_root, history_stride, history_iotasks, history_rearranger, & hist_time_axis, & print_global, print_points, latpnt, lonpnt, & debug_forcing, histfreq, histfreq_n, hist_avg, & - hist_suffix, history_deflate, history_chunksize, & + hist_suffix, history_deflate, history_chunksize, & history_dir, history_file, history_precision, cpl_bgc, & histfreq_base, dumpfreq_base, timer_stats, memory_stats, & conserv_check, debug_model, debug_model_step, & @@ -240,8 +242,7 @@ subroutine input_data a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, & dSdt_slow_mode, phi_c_slow_mode, phi_i_mushy, & floediam, hfrazilmin, Tliquidus_max, hi_min, & - tscale_pnd_drain - + itd_area_min, itd_mass_min, tscale_pnd_drain namelist /dynamics_nml/ & kdyn, ndte, revised_evp, yield_curve, & @@ -287,7 +288,8 @@ subroutine input_data fbot_xfer_type, update_ocn_f, l_mpond_fresh, tfrz_option, & saltflux_option,ice_ref_salinity,cpl_frazil, congel_freeze, & oceanmixed_ice, restore_ice, restore_ocn, trestore, & - precip_units, default_season, wave_spec_type,nfreq, & + precip_units, default_season, & + wave_spec_type, nfreq, wave_height_type, & atm_data_type, ocn_data_type, bgc_data_type, fe_data_type, & ice_data_type, ice_data_conc, ice_data_dist, & fyear_init, ycycle, wave_spec_file,restart_coszen, & @@ -359,6 +361,7 @@ subroutine input_data history_deflate = 0 ! 
compression level for netcdf4 history_chunksize(:) = 0 ! chunksize for netcdf4 write_ic = .false. ! write out initial condition + write_histrest = .true.! write out history restart files if needed cpl_bgc = .false. ! couple bgc thru driver incond_dir = history_dir ! write to history dir for default incond_file = 'iceh_ic'! file prefix @@ -419,8 +422,8 @@ subroutine input_data kstrength = 1 ! 1 = Rothrock 75 strength, 0 = Hibler 79 Pstar = 2.75e4_dbl_kind ! constant in Hibler strength formula (kstrength = 0) Cstar = 20._dbl_kind ! constant in Hibler strength formula (kstrength = 0) - dyn_area_min = p001 ! minimum ice area concentration to activate dynamics - dyn_mass_min = p01 ! minimum ice mass to activate dynamics (kg/m^2) + dyn_area_min = 1.e-11_dbl_kind ! minimum ice area concentration to activate dynamics + dyn_mass_min = 1.e-10_dbl_kind ! minimum ice mass to activate dynamics (kg/m^2) krdg_partic = 1 ! 1 = new participation, 0 = Thorndike et al 75 krdg_redist = 1 ! 1 = new redistribution, 0 = Hibler 80 mu_rdg = 3 ! e-folding scale of ridged ice, krdg_partic=1 (m^0.5) @@ -487,6 +490,8 @@ subroutine input_data cpl_frazil = 'fresh_ice_correction' ! type of coupling for frazil ice ustar_min = 0.005 ! minimum friction velocity for ocean heat flux (m/s) hi_min = p01 ! minimum ice thickness allowed (m) + itd_area_min = ice_init_spval ! zap residual ice below a minimum area + itd_mass_min = ice_init_spval ! zap residual ice below a minimum mass iceruf = 0.0005_dbl_kind ! ice surface roughness at atmosphere interface (m) iceruf_ocn = 0.03_dbl_kind ! under-ice roughness (m) calc_dragio = .false. ! compute dragio from iceruf_ocn and thickness of first ocean level @@ -557,6 +562,7 @@ subroutine input_data saltflux_option = 'constant' ! saltflux calculation ice_ref_salinity = 4.0_dbl_kind ! Ice reference salinity for coupling oceanmixed_ice = .false. ! if true, use internal ocean mixed layer + wave_height_type= 'internal'! type of wave height forcing wave_spec_type = 'none' ! type of wave spectrum forcing nfreq = 25 ! number of wave frequencies wave_spec_file = ' ' ! wave forcing file name @@ -573,6 +579,7 @@ subroutine input_data restore_ocn = .false. ! restore sst if true trestore = 90 ! restoring timescale, days (0 instantaneous) restore_ice = .false. ! restore ice state on grid edges if true + restart_mod = 'none' ! restart modification option debug_forcing = .false. ! true writes diagnostics for input forcing latpnt(1) = 90._dbl_kind ! latitude of diagnostic point 1 (deg) @@ -921,6 +928,24 @@ subroutine input_data if (trim(diag_type) == 'file') call get_fileunit(nu_diag) #endif + ! To remove small amounts of residual ice that are not handled by either dynamics or + ! column physics, the minimum area and mass parameters should be set to the same values + ! in both places. The default sets the column physics (itd) parameters to the dynamics + ! values (available in namelist). itd_area_min and itd_mass_min can be added to the + ! namelist file ice_in and set to different values, if desired. Setting them to + ! zero turns off residual zapping completely. + if (itd_area_min /= ice_init_spval .or. itd_mass_min /= ice_init_spval) then + ! allow itd and dyn parameters to be different + write(nu_diag,*) subname//' WARNING: zap_residual parameters are reset in namelist' + elseif (itd_area_min == c0 .or. itd_mass_min == c0) then + ! turn off residual zapping in Icepack + write(nu_diag,*) subname//' WARNING: zap_residual is turned off' + else + ! 
itd and dyn parameters are the same by default + itd_area_min = dyn_area_min ! zap residual ice below dynamics minimum area + itd_mass_min = dyn_mass_min ! zap residual ice below dynamics minimum mass + endif + !----------------------------------------------------------------- ! broadcast namelist settings !----------------------------------------------------------------- @@ -973,6 +998,7 @@ subroutine input_data call broadcast_scalar(history_deflate, master_task) call broadcast_array(history_chunksize, master_task) call broadcast_scalar(write_ic, master_task) + call broadcast_scalar(write_histrest, master_task) call broadcast_scalar(cpl_bgc, master_task) call broadcast_scalar(incond_dir, master_task) call broadcast_scalar(incond_file, master_task) @@ -991,6 +1017,7 @@ subroutine input_data call broadcast_scalar(restart_rearranger, master_task) call broadcast_scalar(restart_deflate, master_task) call broadcast_array(restart_chunksize, master_task) + call broadcast_scalar(restart_mod, master_task) call broadcast_scalar(lcdf64, master_task) call broadcast_scalar(pointer_file, master_task) call broadcast_scalar(ice_ic, master_task) @@ -1143,6 +1170,8 @@ subroutine input_data call broadcast_scalar(l_mpond_fresh, master_task) call broadcast_scalar(ustar_min, master_task) call broadcast_scalar(hi_min, master_task) + call broadcast_scalar(itd_area_min, master_task) + call broadcast_scalar(itd_mass_min, master_task) call broadcast_scalar(iceruf, master_task) call broadcast_scalar(iceruf_ocn, master_task) call broadcast_scalar(calc_dragio, master_task) @@ -1150,6 +1179,7 @@ subroutine input_data call broadcast_scalar(fbot_xfer_type, master_task) call broadcast_scalar(precip_units, master_task) call broadcast_scalar(oceanmixed_ice, master_task) + call broadcast_scalar(wave_height_type, master_task) call broadcast_scalar(wave_spec_type, master_task) call broadcast_scalar(wave_spec_file, master_task) call broadcast_scalar(nfreq, master_task) @@ -1235,7 +1265,8 @@ subroutine input_data if (trim(ice_data_type) == 'default') ice_data_type = 'latsst' ! For backward compatibility - if (grid_format == 'nc') grid_format = 'pop_nc' + if (grid_format == 'nc' ) grid_format = 'pop_nc' + if (grid_format == 'nc_ext') grid_format = 'pop_nc_ext' !----------------------------------------------------------------- ! verify inputs @@ -1443,6 +1474,14 @@ subroutine input_data endif endif + if (close_boundaries) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: close_boundaries deprecated, '// & + 'use ew_boundary_type=closed and/or ns_boundary_type=closed' + endif + abort_list = trim(abort_list)//":49" + endif + if (grid_ice == 'CD') then if (my_task == master_task) then write(nu_diag,*) subname//' ERROR: grid_ice = CD not supported yet' @@ -1883,12 +1922,23 @@ subroutine input_data file=__FILE__, line=__LINE__) wave_spec = .false. - if (tr_fsd .and. (trim(wave_spec_type) /= 'none')) wave_spec = .true. - if (tr_fsd .and. (trim(wave_spec_type) == 'none')) then - if (my_task == master_task) then - write(nu_diag,*) subname//' WARNING: tr_fsd=T but wave_spec=F - not recommended' + if (tr_fsd) then + if (trim(wave_spec_type) /= 'none') then + if (trim(wave_height_type) /= 'none') wave_spec = .true. + if (trim(wave_height_type) /= 'internal') then + ! 
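! A hedged example of how the residual-ice zapping thresholds described in the
! comment block above might be overridden in the ice_in namelist file. The values
! shown are illustrative only (they mirror the dyn_area_min / dyn_mass_min defaults
! set earlier in this routine); by default itd_area_min and itd_mass_min follow the
! dynamics values, and setting them to zero turns residual zapping off entirely.
!
! &thermo_nml
!    ...
!    itd_area_min = 1.e-11
!    itd_mass_min = 1.e-10
! /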
wave_height_type=coupled is not yet implemented in CICE + write (nu_diag,*) 'WARNING: set wave_height_type=internal' + call abort_ice(error_message=subname//'Wave configuration', & + file=__FILE__, line=__LINE__) endif - end if + endif + if (.not.(wave_spec)) then + write (nu_diag,*) 'WARNING: tr_fsd=T but wave_spec=F - not recommended' + if (trim(wave_height_type) /= 'none') then + write (nu_diag,*) 'WARNING: Wave_spec=F, wave_height_type/=none, wave_sig_ht = 0' + endif + endif + endif ! compute grid locations for thermo, u and v fields @@ -2390,6 +2440,10 @@ subroutine input_data write(nu_diag,1030) ' fbot_xfer_type = ', trim(fbot_xfer_type),trim(tmpstr2) write(nu_diag,1000) ' ustar_min = ', ustar_min,' : minimum value of ocean friction velocity' write(nu_diag,1000) ' hi_min = ', hi_min,' : minimum ice thickness allowed (m)' + write(nu_diag,1000) ' puny = ', puny,' : general-use minimum value' + write(nu_diag,*) ' Ice thickness category areas smaller than puny are always removed.' + write(nu_diag,1003) ' itd_area_min = ', itd_area_min,' : zap residual ice below a minimum area' + write(nu_diag,1003) ' itd_mass_min = ', itd_mass_min,' : zap residual ice below a minimum mass' if (calc_dragio) then tmpstr2 = ' : dragio computed from iceruf_ocn' else @@ -2400,8 +2454,11 @@ subroutine input_data write(nu_diag,1002) ' iceruf_ocn = ', iceruf_ocn,' : under-ice roughness length' endif + write(nu_diag,*) ' ' + write(nu_diag,*) ' Floe size distribution and waves' + write(nu_diag,*) '---------------------------------' + write(nu_diag,1002) ' floediam = ', floediam, ' : constant floe diameter' if (tr_fsd) then - write(nu_diag,1002) ' floediam = ', floediam, ' constant floe diameter' if (wave_spec) then tmpstr2 = ' : use wave spectrum for floe size distribution' else @@ -2426,6 +2483,14 @@ subroutine input_data write(nu_diag,1030) ' wave_spec_type = ', trim(wave_spec_type),trim(tmpstr2) endif write(nu_diag,1020) ' nfreq = ', nfreq,' : number of wave spectral forcing frequencies' + if (trim(wave_height_type) == 'internal') then + tmpstr2 = ' : use internally generated wave height' + elseif (trim(wave_height_type) == 'coupled') then + tmpstr2 = ' : use wave height from external coupled model' + elseif (trim(wave_height_type) == 'none') then + tmpstr2 = ' : no wave height data available, default==0' + endif + write(nu_diag,1030) ' wave_height_type = ', trim(wave_height_type),trim(tmpstr2) endif write(nu_diag,*) ' ' @@ -2620,6 +2685,7 @@ subroutine input_data write(nu_diag,1011) ' restart = ', restart write(nu_diag,1031) ' restart_dir = ', trim(restart_dir) write(nu_diag,1011) ' restart_ext = ', restart_ext + write(nu_diag,1031) ' restart_mod = ', trim(restart_mod) write(nu_diag,1011) ' restart_coszen = ', restart_coszen write(nu_diag,1031) ' restart_format = ', trim(restart_format) write(nu_diag,1021) ' restart_deflate = ', restart_deflate @@ -2632,6 +2698,7 @@ subroutine input_data write(nu_diag,1031) ' restart_file = ', trim(restart_file) write(nu_diag,1031) ' pointer_file = ', trim(pointer_file) write(nu_diag,1011) ' use_restart_time = ', use_restart_time + write(nu_diag,1011) ' write_histrest = ', write_histrest write(nu_diag,1031) ' ice_ic = ', trim(ice_ic) if (trim(grid_type) /= 'rectangular' .or. & trim(grid_type) /= 'column') then @@ -2717,6 +2784,7 @@ subroutine input_data endif ! my_task = master_task if (grid_format /= 'pop_nc' .and. & + grid_format /= 'pop_nc_ext' .and. & grid_format /= 'mom_nc' .and. & grid_format /= 'geosnc' .and. & grid_format /= 'meshnc' .and. 
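! A hedged sketch of the replacement settings for the deprecated close_boundaries
! flag aborted on above: per the error message, closed boundaries are now requested
! through ew_boundary_type and/or ns_boundary_type. The namelist group is assumed
! here to be domain_nml in ice_in; it is not shown in this patch.
!
! &domain_nml
!    ...
!    ew_boundary_type = 'closed'
!    ns_boundary_type = 'closed'
! /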
& @@ -2788,13 +2856,14 @@ subroutine input_data atmbndy_in=atmbndy, calc_strair_in=calc_strair, formdrag_in=formdrag, highfreq_in=highfreq, & kitd_in=kitd, kcatbound_in=kcatbound, hs0_in=hs0, dpscale_in=dpscale, frzpnd_in=frzpnd, & rfracmin_in=rfracmin, rfracmax_in=rfracmax, pndaspect_in=pndaspect, hs1_in=hs1, hp1_in=hp1, & - apnd_sl_in=apnd_sl, & + apnd_sl_in=apnd_sl, itd_area_min_in=itd_area_min, itd_mass_min_in=itd_mass_min, & ktherm_in=ktherm, calc_Tsfc_in=calc_Tsfc, conduct_in=conduct, semi_implicit_Tsfc_in=semi_implicit_Tsfc, & a_rapid_mode_in=a_rapid_mode, Rac_rapid_mode_in=Rac_rapid_mode, vapor_flux_correction_in=vapor_flux_correction, & floediam_in=floediam, hfrazilmin_in=hfrazilmin, Tliquidus_max_in=Tliquidus_max, & aspect_rapid_mode_in=aspect_rapid_mode, dSdt_slow_mode_in=dSdt_slow_mode, & phi_c_slow_mode_in=phi_c_slow_mode, phi_i_mushy_in=phi_i_mushy, conserv_check_in=conserv_check, & wave_spec_type_in = wave_spec_type, wave_spec_in=wave_spec, nfreq_in=nfreq, & + wave_height_type_in = wave_height_type, & update_ocn_f_in=update_ocn_f, cpl_frazil_in=cpl_frazil, congel_freeze_in=congel_freeze, & tfrz_option_in=tfrz_option, kalg_in=kalg, fbot_xfer_type_in=fbot_xfer_type, & saltflux_option_in=saltflux_option, ice_ref_salinity_in=ice_ref_salinity, & @@ -3068,14 +3137,14 @@ subroutine init_state ! Halo update on North, East faces call ice_HaloUpdate(uvelN, halo_info, & - field_loc_Nface, field_type_scalar) + field_loc_Nface, field_type_scalar, fillvalue=c0) call ice_HaloUpdate(vvelN, halo_info, & - field_loc_Nface, field_type_scalar) + field_loc_Nface, field_type_scalar, fillvalue=c0) call ice_HaloUpdate(uvelE, halo_info, & - field_loc_Eface, field_type_scalar) + field_loc_Eface, field_type_scalar, fillvalue=c0) call ice_HaloUpdate(vvelE, halo_info, & - field_loc_Eface, field_type_scalar) + field_loc_Eface, field_type_scalar, fillvalue=c0) endif diff --git a/cicecore/cicedyn/general/ice_init.F90.orig b/cicecore/cicedyn/general/ice_init.F90.orig new file mode 100644 index 000000000..5cbaedcb6 --- /dev/null +++ b/cicecore/cicedyn/general/ice_init.F90.orig @@ -0,0 +1,3584 @@ +!======================================================================= + +! parameter and variable initializations +! +! authors Elizabeth C. Hunke and William H. Lipscomb, LANL +! C. M. Bitz, UW +! +! 2004 WHL: Block structure added +! 2006 ECH: Added namelist variables, warnings. +! Replaced old default initial ice conditions with 3.14 version. +! Converted to free source form (F90). 
+ + module ice_init + + use ice_kinds_mod + use ice_communicate, only: my_task, master_task, ice_barrier + use ice_constants, only: c0, c1, c2, c3, c5, c12, p01, p2, p3, p5, p75, p166, & + cm_to_m + use ice_exit, only: abort_ice + use ice_fileunits, only: nu_nml, nu_diag, nml_filename, diag_type, & + ice_stdout, get_fileunit, release_fileunit, bfbflag, flush_fileunit, & + ice_IOUnitsMinUnit, ice_IOUnitsMaxUnit +#ifdef CESMCOUPLED + use ice_fileunits, only: inst_suffix, nu_diag_set +#endif + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_aggregate + use icepack_intfc, only: icepack_init_trcr + use icepack_intfc, only: icepack_init_parameters + use icepack_intfc, only: icepack_init_tracer_flags + use icepack_intfc, only: icepack_init_tracer_sizes + use icepack_intfc, only: icepack_query_tracer_flags + use icepack_intfc, only: icepack_query_tracer_sizes + use icepack_intfc, only: icepack_query_tracer_indices + use icepack_intfc, only: icepack_query_parameters + + implicit none + private + + character(len=char_len_long), public :: & + ice_ic ! method of ice cover initialization + ! 'internal' => set from ice_data_ namelist + ! 'none' => no ice + ! filename => read file + + public :: input_data, init_state, set_state_var + +!======================================================================= + + contains + +!======================================================================= + +! Namelist variables, set to default values; may be altered +! at run time +! +! author Elizabeth C. Hunke, LANL + + subroutine input_data + + use ice_broadcast, only: broadcast_scalar, broadcast_array + use ice_diagnostics, only: & + diag_file, print_global, print_points, latpnt, lonpnt, & + debug_model, debug_model_step, debug_model_task, & + debug_model_i, debug_model_j, debug_model_iblk + use ice_domain, only: close_boundaries + use ice_domain_size, only: & + ncat, nilyr, nslyr, nblyr, nfsd, nfreq, & + n_iso, n_aero, n_zaero, n_algae, & + n_doc, n_dic, n_don, n_fed, n_fep, & + max_nstrm + use ice_calendar, only: & + year_init, month_init, day_init, sec_init, & + istep0, histfreq, histfreq_n, histfreq_base, & + dumpfreq, dumpfreq_n, diagfreq, dumpfreq_base, & + npt, dt, ndtd, days_per_year, use_leap_years, & + write_ic, dump_last, npt_unit + use ice_arrays_column, only: oceanmixed_ice + use ice_restart_column, only: & + restart_age, restart_FY, restart_lvl, & + restart_pond_lvl, restart_pond_topo, restart_aero, & + restart_fsd, restart_iso, restart_snow + use ice_restart_shared, only: & + restart, restart_ext, restart_coszen, use_restart_time, & + runtype, restart_file, restart_dir, runid, pointer_file, & + restart_format, restart_rearranger, restart_iotasks, restart_root, & + restart_stride, restart_deflate, restart_chunksize + use ice_history_shared, only: & + history_precision, hist_avg, history_format, history_file, incond_file, & + history_dir, incond_dir, version_name, history_rearranger, & + hist_suffix, history_iotasks, history_root, history_stride, & + history_deflate, history_chunksize, hist_time_axis + use ice_flux, only: update_ocn_f, cpl_frazil, l_mpond_fresh + use ice_flux, only: default_season + use ice_flux_bgc, only: cpl_bgc + use ice_forcing, only: & + ycycle, fyear_init, debug_forcing, & + atm_data_type, atm_data_dir, precip_units, rotate_wind, & + atm_data_format, ocn_data_format, atm_data_version, & + bgc_data_type, & + ocn_data_type, ocn_data_dir, wave_spec_file, & + oceanmixed_file, restore_ocn, trestore, & + ice_data_type, 
ice_data_conc, ice_data_dist, & + snw_filename, & + snw_tau_fname, snw_kappa_fname, snw_drdt0_fname, & + snw_rhos_fname, snw_Tgrd_fname, snw_T_fname + use ice_arrays_column, only: bgc_data_dir, fe_data_type + use ice_grid, only: & + grid_file, gridcpl_file, kmt_file, & + bathymetry_file, use_bathymetry, & + bathymetry_format, kmt_type, & + grid_type, grid_format, & + grid_ice, grid_ice_thrm, grid_ice_dynu, grid_ice_dynv, & + grid_ocn, grid_ocn_thrm, grid_ocn_dynu, grid_ocn_dynv, & + grid_atm, grid_atm_thrm, grid_atm_dynu, grid_atm_dynv, & + dxrect, dyrect, dxscale, dyscale, scale_dxdy, & + lonrefrect, latrefrect, save_ghte_ghtn + use ice_dyn_shared, only: & + ndte, kdyn, revised_evp, yield_curve, & + evp_algorithm, visc_method, & + seabed_stress, seabed_stress_method, & + k1, k2, alphab, threshold_hw, Ktens, & + e_yieldcurve, e_plasticpot, coriolis, & + ssh_stress, kridge, brlx, arlx, & + deltaminEVP, deltaminVP, capping, & + elasticDamp + use ice_dyn_vp, only: & + maxits_nonlin, precond, dim_fgmres, dim_pgmres, maxits_fgmres, & + maxits_pgmres, monitor_nonlin, monitor_fgmres, & + monitor_pgmres, reltol_nonlin, reltol_fgmres, reltol_pgmres, & + algo_nonlin, fpfunc_andacc, dim_andacc, reltol_andacc, & + damping_andacc, start_andacc, use_mean_vrel, ortho_type + use ice_transport_driver, only: advection, conserv_check + use ice_restoring, only: restore_ice + use ice_timers, only: timer_stats + use ice_memusage, only: memory_stats + use ice_fileunits, only: goto_nml + +#ifdef CESMCOUPLED + use shr_file_mod, only: shr_file_setIO +#endif + + ! local variables + + integer (kind=int_kind) :: & + nml_error, & ! namelist i/o error flag + n ! loop index + +#ifdef CESMCOUPLED + logical :: exists +#endif + + real (kind=dbl_kind) :: ustar_min, albicev, albicei, albsnowv, albsnowi, & + ahmax, R_ice, R_pnd, R_snw, dT_mlt, rsnw_mlt, emissivity, hi_min, & + mu_rdg, hs0, dpscale, rfracmin, rfracmax, pndaspect, hs1, hp1, & + a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, dSdt_slow_mode, & + phi_c_slow_mode, phi_i_mushy, kalg, atmiter_conv, Pstar, Cstar, & + sw_frac, sw_dtemp, floediam, hfrazilmin, iceruf, iceruf_ocn, & + rsnw_fall, rsnw_tmax, rhosnew, rhosmin, rhosmax, Tliquidus_max, & + windmin, drhosdwind, snwlvlfac + + integer (kind=int_kind) :: ktherm, kstrength, krdg_partic, krdg_redist, natmiter, & + kitd, kcatbound, ktransport + + character (len=char_len) :: shortwave, albedo_type, conduct, fbot_xfer_type, & + tfrz_option, saltflux_option, frzpnd, atmbndy, wave_spec_type, snwredist, snw_aging_table, & + congel_freeze, capping_method, snw_ssp_table + + logical (kind=log_kind) :: calc_Tsfc, formdrag, highfreq, calc_strair, wave_spec, & + sw_redist, calc_dragio, use_smliq_pnd, snwgrain + + logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_pond + logical (kind=log_kind) :: tr_iso, tr_aero, tr_fsd, tr_snow + logical (kind=log_kind) :: tr_pond_lvl, tr_pond_topo + integer (kind=int_kind) :: numin, numax ! unit number limits + logical (kind=log_kind) :: lcdf64 ! deprecated, backwards compatibility + logical (kind=log_kind) :: orca_halogrid !deprecated + + integer (kind=int_kind) :: rplvl, rptopo + real (kind=dbl_kind) :: Cf, ksno, puny, ice_ref_salinity, Tocnfrz + + character (len=char_len) :: abort_list + character (len=char_len) :: nml_name ! namelist name + character (len=char_len_long) :: tmpstr2 + + character(len=*), parameter :: subname='(input_data)' + + !----------------------------------------------------------------- + ! 
Namelist variables + !----------------------------------------------------------------- + + namelist /setup_nml/ & + days_per_year, use_leap_years, istep0, npt_unit, & + dt, npt, ndtd, numin, & + runtype, runid, bfbflag, numax, & + ice_ic, restart, restart_dir, restart_file, & + restart_ext, use_restart_time, restart_format, lcdf64, & + restart_root, restart_stride, restart_iotasks, restart_rearranger, & + restart_deflate, restart_chunksize, & + pointer_file, dumpfreq, dumpfreq_n, dump_last, & + diagfreq, diag_type, diag_file, history_format,& + history_root, history_stride, history_iotasks, history_rearranger, & + hist_time_axis, & + print_global, print_points, latpnt, lonpnt, & + debug_forcing, histfreq, histfreq_n, hist_avg, & + hist_suffix, history_deflate, history_chunksize, & + history_dir, history_file, history_precision, cpl_bgc, & + histfreq_base, dumpfreq_base, timer_stats, memory_stats, & + conserv_check, debug_model, debug_model_step, & + debug_model_i, debug_model_j, debug_model_iblk, debug_model_task, & + year_init, month_init, day_init, sec_init, & + write_ic, incond_dir, incond_file, version_name + + namelist /grid_nml/ & + grid_format, grid_type, grid_file, kmt_file, & + bathymetry_file, use_bathymetry, nfsd, bathymetry_format, & + ncat, nilyr, nslyr, nblyr, & + kcatbound, gridcpl_file, dxrect, dyrect, & + dxscale, dyscale, lonrefrect, latrefrect, & + scale_dxdy, & + close_boundaries, orca_halogrid, grid_ice, kmt_type, & + grid_atm, grid_ocn + + namelist /tracer_nml/ & + tr_iage, restart_age, & + tr_FY, restart_FY, & + tr_lvl, restart_lvl, & + tr_pond_lvl, restart_pond_lvl, & + tr_pond_topo, restart_pond_topo, & + tr_snow, restart_snow, & + tr_iso, restart_iso, & + tr_aero, restart_aero, & + tr_fsd, restart_fsd, & + n_iso, n_aero, n_zaero, n_algae, & + n_doc, n_dic, n_don, n_fed, n_fep + + namelist /thermo_nml/ & + kitd, ktherm, conduct, ksno, & + a_rapid_mode, Rac_rapid_mode, aspect_rapid_mode, & + dSdt_slow_mode, phi_c_slow_mode, phi_i_mushy, & + floediam, hfrazilmin, Tliquidus_max, hi_min + + namelist /dynamics_nml/ & + kdyn, ndte, revised_evp, yield_curve, & + evp_algorithm, elasticDamp, & + brlx, arlx, ssh_stress, & + advection, coriolis, kridge, ktransport, & + kstrength, krdg_partic, krdg_redist, mu_rdg, & + e_yieldcurve, e_plasticpot, visc_method, & + maxits_nonlin, precond, dim_fgmres, & + dim_pgmres, maxits_fgmres, maxits_pgmres, monitor_nonlin, & + monitor_fgmres, monitor_pgmres, reltol_nonlin, reltol_fgmres, & + reltol_pgmres, algo_nonlin, dim_andacc, reltol_andacc, & + damping_andacc, start_andacc, fpfunc_andacc, use_mean_vrel, & + ortho_type, seabed_stress, seabed_stress_method, & + k1, k2, alphab, threshold_hw, & + deltaminEVP, deltaminVP, capping_method, & + Cf, Pstar, Cstar, Ktens + + namelist /shortwave_nml/ & + shortwave, albedo_type, snw_ssp_table, & + albicev, albicei, albsnowv, albsnowi, & + ahmax, R_ice, R_pnd, R_snw, & + sw_redist, sw_frac, sw_dtemp, & + dT_mlt, rsnw_mlt, kalg + + namelist /ponds_nml/ & + hs0, dpscale, frzpnd, & + rfracmin, rfracmax, pndaspect, hs1, & + hp1 + + namelist /snow_nml/ & + snwredist, snwgrain, rsnw_fall, rsnw_tmax, & + rhosnew, rhosmin, rhosmax, snwlvlfac, & + windmin, drhosdwind, use_smliq_pnd, snw_aging_table,& + snw_filename, snw_rhos_fname, snw_Tgrd_fname,snw_T_fname, & + snw_tau_fname, snw_kappa_fname, snw_drdt0_fname + + namelist /forcing_nml/ & + formdrag, atmbndy, calc_strair, calc_Tsfc, & + highfreq, natmiter, atmiter_conv, calc_dragio, & + ustar_min, emissivity, iceruf, iceruf_ocn, & + fbot_xfer_type, 
update_ocn_f, l_mpond_fresh, tfrz_option, & + saltflux_option,ice_ref_salinity,cpl_frazil, congel_freeze, & + oceanmixed_ice, restore_ice, restore_ocn, trestore, & + precip_units, default_season, wave_spec_type,nfreq, & + atm_data_type, ocn_data_type, bgc_data_type, fe_data_type, & + ice_data_type, ice_data_conc, ice_data_dist, & + fyear_init, ycycle, wave_spec_file,restart_coszen, & + atm_data_dir, ocn_data_dir, bgc_data_dir, & + atm_data_format, ocn_data_format, rotate_wind, & + oceanmixed_file, atm_data_version + + !----------------------------------------------------------------- + ! default values + !----------------------------------------------------------------- + + abort_list = "" + + call icepack_query_parameters(puny_out=puny,Tocnfrz_out=Tocnfrz) +! nu_diag not yet defined +! call icepack_warnings_flush(nu_diag) +! if (icepack_warnings_aborted()) call abort_ice(error_message=subname//'Icepack Abort0', & +! file=__FILE__, line=__LINE__) + + days_per_year = 365 ! number of days in a year + use_leap_years= .false.! if true, use leap years (Feb 29) + year_init = 0 ! initial year + month_init = 1 ! initial month + day_init = 1 ! initial day + sec_init = 0 ! initial second + istep0 = 0 ! no. of steps taken in previous integrations, + ! real (dumped) or imagined (to set calendar) +#ifndef CESMCOUPLED + dt = 3600.0_dbl_kind ! time step, s +#endif + numin = 11 ! min allowed unit number + numax = 99 ! max allowed unit number + npt = 99999 ! total number of time steps (dt) + npt_unit = '1' ! units of npt 'y', 'm', 'd', 's', '1' + diagfreq = 24 ! how often diag output is written + debug_model = .false. ! debug output + debug_model_step = 0 ! debug model after this step number + debug_model_i = -1 ! debug model local i index + debug_model_j = -1 ! debug model local j index + debug_model_iblk = -1 ! debug model local iblk number + debug_model_task = -1 ! debug model local task number + print_points = .false. ! if true, print point data + print_global = .true. ! if true, print global diagnostic data + timer_stats = .false. ! if true, print out detailed timer statistics + memory_stats = .false. ! if true, print out memory information + bfbflag = 'off' ! off = optimized + diag_type = 'stdout' + diag_file = 'ice_diag.d' + histfreq(1) = '1' ! output frequency option for different streams + histfreq(2) = 'h' ! output frequency option for different streams + histfreq(3) = 'd' ! output frequency option for different streams + histfreq(4) = 'm' ! output frequency option for different streams + histfreq(5) = 'y' ! output frequency option for different streams + histfreq_n(:) = 1 ! output frequency + histfreq_base(:) = 'zero' ! output frequency reference date + hist_avg(:) = .true. ! if true, write time-averages (not snapshots) + hist_suffix(:) = 'x' ! appended to 'history_file' in filename when not 'x' + history_format = 'cdf1'! history file format + history_root = -99 ! history iotasks, root, stride sets pes for pio + history_stride = -99 ! history iotasks, root, stride sets pes for pio + history_iotasks = -99 ! history iotasks, root, stride sets pes for pio + history_rearranger = 'default' ! history rearranger for pio + hist_time_axis = 'end' ! History file time axis averaging interval position + history_dir = './' ! write to executable dir for default + history_file = 'iceh' ! history file name prefix + history_precision = 4 ! precision of history files + history_deflate = 0 ! compression level for netcdf4 + history_chunksize(:) = 0 ! chunksize for netcdf4 + write_ic = .false. ! 
write out initial condition + cpl_bgc = .false. ! couple bgc thru driver + incond_dir = history_dir ! write to history dir for default + incond_file = 'iceh_ic'! file prefix + dumpfreq(:) = 'x' ! restart frequency option + dumpfreq_n(:) = 1 ! restart frequency + dumpfreq_base(:) = 'init' ! restart frequency reference date + dumpfreq(1) = 'y' ! restart frequency option + dumpfreq_n(1) = 1 ! restart frequency + dump_last = .false. ! write restart on last time step + restart_dir = './' ! write to executable dir for default + restart_file = 'iced' ! restart file name prefix + restart_ext = .false. ! if true, read/write ghost cells + restart_coszen = .false. ! if true, read/write coszen + pointer_file = 'ice.restart_file' + restart_format = 'cdf1' ! restart file format + restart_root = -99 ! restart iotasks, root, stride sets pes for pio + restart_stride = -99 ! restart iotasks, root, stride sets pes for pio + restart_iotasks = -99 ! restart iotasks, root, stride sets pes for pio + restart_rearranger = 'default' ! restart rearranger for pio + restart_deflate = 0 ! compression level for netcdf4 + restart_chunksize(:) = 0 ! chunksize for netcdf4 + lcdf64 = .false. ! 64 bit offset for netCDF + ice_ic = 'default' ! latitude and sst-dependent + grid_format = 'bin' ! file format ('bin'=binary or 'nc'=netcdf) + grid_type = 'rectangular'! define rectangular grid internally + grid_file = 'unknown_grid_file' + grid_ice = 'B' ! underlying grid system + grid_atm = 'A' ! underlying atm forcing/coupling grid + grid_ocn = 'A' ! underlying atm forcing/coupling grid + gridcpl_file = 'unknown_gridcpl_file' + orca_halogrid = .false. ! orca haloed grid - deprecated + bathymetry_file = 'unknown_bathymetry_file' + bathymetry_format = 'default' + use_bathymetry = .false. + kmt_type = 'file' + kmt_file = 'unknown_kmt_file' + version_name = 'unknown_version_name' + ncat = 0 ! number of ice thickness categories + nfsd = 1 ! number of floe size categories (1 = default) + nilyr = 0 ! number of vertical ice layers + nslyr = 0 ! number of vertical snow layers + nblyr = 0 ! number of bio layers + + kitd = 1 ! type of itd conversions (0 = delta, 1 = linear) + kcatbound = 1 ! category boundary formula (0 = old, 1 = new, etc) + kdyn = 1 ! type of dynamics (-1, 0 = off, 1 = evp, 2 = eap, 3 = vp) + ndtd = 1 ! dynamic time steps per thermodynamic time step + ndte = 120 ! subcycles per dynamics timestep: ndte=dt_dyn/dte + evp_algorithm = 'standard_2d' ! EVP kernel (standard_2d=standard cice evp; shared_mem_1d=1d shared memory and no mpi + elasticDamp = 0.36_dbl_kind ! coefficient for calculating the parameter E + save_ghte_ghtn = .false. ! if true, save global hte and htn (global ext.) + brlx = 300.0_dbl_kind ! revised_evp values. Otherwise overwritten in ice_dyn_shared + arlx = 300.0_dbl_kind ! revised_evp values. Otherwise overwritten in ice_dyn_shared + revised_evp = .false. ! if true, use revised procedure for evp dynamics + yield_curve = 'ellipse' ! yield curve + kstrength = 1 ! 1 = Rothrock 75 strength, 0 = Hibler 79 + Pstar = 2.75e4_dbl_kind ! constant in Hibler strength formula (kstrength = 0) + Cstar = 20._dbl_kind ! constant in Hibler strength formula (kstrength = 0) + krdg_partic = 1 ! 1 = new participation, 0 = Thorndike et al 75 + krdg_redist = 1 ! 1 = new redistribution, 0 = Hibler 80 + mu_rdg = 3 ! e-folding scale of ridged ice, krdg_partic=1 (m^0.5) + Cf = 17.0_dbl_kind ! ratio of ridging work to PE change in ridging + ksno = 0.3_dbl_kind ! snow thermal conductivity + dxrect = 0.0_dbl_kind ! 
user defined grid spacing in cm in x direction + dyrect = 0.0_dbl_kind ! user defined grid spacing in cm in y direction + lonrefrect = -156.50_dbl_kind ! lower left corner lon for rectgrid + latrefrect = 71.35_dbl_kind ! lower left corner lat for rectgrid + scale_dxdy = .false. ! apply dxscale, dyscale to rectgrid + dxscale = 1.0_dbl_kind ! user defined rectgrid x-grid scale factor (e.g., 1.02) + dyscale = 1.0_dbl_kind ! user defined rectgrid y-grid scale factor (e.g., 1.02) + close_boundaries = .false. ! true = set land on edges of grid + seabed_stress= .false. ! if true, seabed stress for landfast is on + seabed_stress_method = 'LKD'! LKD = Lemieux et al 2015, probabilistic = Dupont et al. 2022 + k1 = 7.5_dbl_kind ! 1st free parameter for landfast parameterization + k2 = 15.0_dbl_kind ! 2nd free parameter (N/m^3) for landfast parametrization + alphab = 20.0_dbl_kind ! alphab=Cb factor in Lemieux et al 2015 + threshold_hw = 30.0_dbl_kind ! max water depth for grounding + Ktens = 0.0_dbl_kind ! T=Ktens*P (tensile strength: see Konig and Holland, 2010) + e_yieldcurve = 2.0_dbl_kind ! VP aspect ratio of elliptical yield curve + e_plasticpot = 2.0_dbl_kind ! VP aspect ratio of elliptical plastic potential + visc_method = 'avg_zeta' ! calc viscosities at U point: avg_strength, avg_zeta + deltaminEVP = 1e-11_dbl_kind ! minimum delta for viscosities (EVP, Hunke 2001) + deltaminVP = 2e-9_dbl_kind ! minimum delta for viscosities (VP, Hibler 1979) + capping_method = 'max' ! method for capping of viscosities (max=Hibler 1979,sum=Kreyscher2000) + maxits_nonlin = 10 ! max nb of iteration for nonlinear solver + precond = 'pgmres' ! preconditioner for fgmres: 'ident' (identity), 'diag' (diagonal), + ! 'pgmres' (Jacobi-preconditioned GMRES) + dim_fgmres = 50 ! size of fgmres Krylov subspace + dim_pgmres = 5 ! size of pgmres Krylov subspace + maxits_fgmres = 50 ! max nb of iteration for fgmres + maxits_pgmres = 5 ! max nb of iteration for pgmres + monitor_nonlin = .false. ! print nonlinear residual norm + monitor_fgmres = .false. ! print fgmres residual norm + monitor_pgmres = .false. ! print pgmres residual norm + ortho_type = 'mgs' ! orthogonalization procedure 'cgs' or 'mgs' + reltol_nonlin = 1e-8_dbl_kind ! nonlinear stopping criterion: reltol_nonlin*res(k=0) + reltol_fgmres = 1e-1_dbl_kind ! fgmres stopping criterion: reltol_fgmres*res(k) + reltol_pgmres = 1e-6_dbl_kind ! pgmres stopping criterion: reltol_pgmres*res(k) + algo_nonlin = 'picard' ! nonlinear algorithm: 'picard' (Picard iteration), 'anderson' (Anderson acceleration) + fpfunc_andacc = 1 ! fixed point function for Anderson acceleration: + ! 1: g(x) = FMGRES(A(x),b(x)), 2: g(x) = x - A(x)x + b(x) + dim_andacc = 5 ! size of Anderson minimization matrix (number of saved previous residuals) + reltol_andacc = 1e-6_dbl_kind ! relative tolerance for Anderson acceleration + damping_andacc = 0 ! damping factor for Anderson acceleration + start_andacc = 0 ! acceleration delay factor (acceleration starts at this iteration) + use_mean_vrel = .true. ! use mean of previous 2 iterates to compute vrel + advection = 'remap' ! incremental remapping transport scheme + conserv_check = .false. ! tracer conservation check + shortwave = 'ccsm3' ! 'ccsm3' or 'dEdd' (delta-Eddington) + snw_ssp_table = 'test' ! 'test' or 'snicar' dEdd_snicar_ad table data + albedo_type = 'ccsm3' ! 'ccsm3' or 'constant' + ktherm = 1 ! -1 = OFF, 1 = BL99, 2 = mushy thermo + conduct = 'bubbly' ! 'MU71' or 'bubbly' (Pringle et al 2007) + coriolis = 'latitude' ! 
latitude dependent, or 'constant' + ssh_stress = 'geostrophic' ! 'geostrophic' or 'coupled' + kridge = 1 ! -1 = off, 1 = on + ktransport = 1 ! -1 = off, 1 = on + calc_Tsfc = .true. ! calculate surface temperature + update_ocn_f = .false. ! include fresh water and salt fluxes for frazil + cpl_frazil = 'fresh_ice_correction' ! type of coupling for frazil ice + ustar_min = 0.005 ! minimum friction velocity for ocean heat flux (m/s) + hi_min = p01 ! minimum ice thickness allowed (m) + iceruf = 0.0005_dbl_kind ! ice surface roughness at atmosphere interface (m) + iceruf_ocn = 0.03_dbl_kind ! under-ice roughness (m) + calc_dragio = .false. ! compute dragio from iceruf_ocn and thickness of first ocean level + emissivity = 0.985 ! emissivity of snow and ice + l_mpond_fresh = .false. ! logical switch for including meltpond freshwater + ! flux feedback to ocean model + fbot_xfer_type = 'constant' ! transfer coefficient type for ocn heat flux + R_ice = 0.00_dbl_kind ! tuning parameter for sea ice + R_pnd = 0.00_dbl_kind ! tuning parameter for ponded sea ice + R_snw = 1.50_dbl_kind ! tuning parameter for snow over sea ice + dT_mlt = 1.5_dbl_kind ! change in temp to give non-melt to melt change + ! in snow grain radius + rsnw_mlt = 1500._dbl_kind ! maximum melting snow grain radius + kalg = 0.60_dbl_kind ! algae absorption coefficient for 0.5 m thick layer + ! 0.5 m path of 75 mg Chl a / m2 + hp1 = 0.01_dbl_kind ! critical pond lid thickness for topo ponds + hs0 = 0.03_dbl_kind ! snow depth for transition to bare sea ice (m) + hs1 = 0.03_dbl_kind ! snow depth for transition to bare pond ice (m) + dpscale = c1 ! alter e-folding time scale for flushing + frzpnd = 'cesm' ! melt pond refreezing parameterization + rfracmin = 0.15_dbl_kind ! minimum retained fraction of meltwater + rfracmax = 0.85_dbl_kind ! maximum retained fraction of meltwater + pndaspect = 0.8_dbl_kind ! ratio of pond depth to area fraction + snwredist = 'none' ! type of snow redistribution + snw_aging_table = 'test' ! snow aging lookup table + snw_filename = 'unknown' ! snowtable filename + snw_tau_fname = 'unknown' ! snowtable file tau fieldname + snw_kappa_fname = 'unknown' ! snowtable file kappa fieldname + snw_drdt0_fname = 'unknown' ! snowtable file drdt0 fieldname + snw_rhos_fname = 'unknown' ! snowtable file rhos fieldname + snw_Tgrd_fname = 'unknown' ! snowtable file Tgrd fieldname + snw_T_fname = 'unknown' ! snowtable file T fieldname + snwgrain = .false. ! snow metamorphosis + use_smliq_pnd = .false. ! use liquid in snow for ponds + rsnw_fall = 100.0_dbl_kind ! radius of new snow (10^-6 m) ! advanced snow physics: 54.526 x 10^-6 m + rsnw_tmax = 1500.0_dbl_kind ! maximum snow radius (10^-6 m) + rhosnew = 100.0_dbl_kind ! new snow density (kg/m^3) + rhosmin = 100.0_dbl_kind ! minimum snow density (kg/m^3) + rhosmax = 450.0_dbl_kind ! maximum snow density (kg/m^3) + windmin = 10.0_dbl_kind ! minimum wind speed to compact snow (m/s) + drhosdwind= 27.3_dbl_kind ! wind compaction factor for snow (kg s/m^4) + snwlvlfac = 0.3_dbl_kind ! fractional increase in snow depth for bulk redistribution + albicev = 0.78_dbl_kind ! visible ice albedo for h > ahmax + albicei = 0.36_dbl_kind ! near-ir ice albedo for h > ahmax + albsnowv = 0.98_dbl_kind ! cold snow albedo, visible + albsnowi = 0.70_dbl_kind ! cold snow albedo, near IR + ahmax = 0.3_dbl_kind ! thickness above which ice albedo is constant (m) + atmbndy = 'similarity' ! Atm boundary layer: 'similarity', 'constant' or 'mixed' + default_season = 'winter' ! 
default forcing data, if data is not read in + fyear_init = 1900 ! first year of forcing cycle + ycycle = 1 ! number of years in forcing cycle + atm_data_format = 'bin' ! file format ('bin'=binary or 'nc'=netcdf) + atm_data_type = 'default' + atm_data_dir = ' ' + atm_data_version = '_undef' ! date atm_data_file was generated. + rotate_wind = .true. ! rotate wind/stress composants to computational grid orientation + calc_strair = .true. ! calculate wind stress + formdrag = .false. ! calculate form drag + highfreq = .false. ! calculate high frequency RASM coupling + natmiter = 5 ! number of iterations for atm boundary layer calcs + atmiter_conv = c0 ! ustar convergence criteria + precip_units = 'mks' ! 'mm_per_month' or + ! 'mm_per_sec' = 'mks' = kg/m^2 s + congel_freeze = 'two-step'! congelation freezing method + tfrz_option = 'mushy' ! freezing temp formulation + saltflux_option = 'constant' ! saltflux calculation + ice_ref_salinity = 4.0_dbl_kind ! Ice reference salinity for coupling + oceanmixed_ice = .false. ! if true, use internal ocean mixed layer + wave_spec_type = 'none' ! type of wave spectrum forcing + nfreq = 25 ! number of wave frequencies + wave_spec_file = ' ' ! wave forcing file name + ocn_data_format = 'bin' ! file format ('bin'=binary or 'nc'=netcdf) + bgc_data_type = 'default' + fe_data_type = 'default' + ice_data_type = 'default' ! used by some tests to initialize ice state (overall type and mask) + ice_data_conc = 'default' ! used by some tests to initialize ice state (concentration) + ice_data_dist = 'default' ! used by some tests to initialize ice state (distribution) + bgc_data_dir = 'unknown_bgc_data_dir' + ocn_data_type = 'default' + ocn_data_dir = 'unknown_ocn_data_dir' + oceanmixed_file = 'unknown_oceanmixed_file' ! ocean forcing data + restore_ocn = .false. ! restore sst if true + trestore = 90 ! restoring timescale, days (0 instantaneous) + restore_ice = .false. ! restore ice state on grid edges if true + debug_forcing = .false. ! true writes diagnostics for input forcing + + latpnt(1) = 90._dbl_kind ! latitude of diagnostic point 1 (deg) + lonpnt(1) = 0._dbl_kind ! longitude of point 1 (deg) + latpnt(2) = -65._dbl_kind ! latitude of diagnostic point 2 (deg) + lonpnt(2) = -45._dbl_kind ! longitude of point 2 (deg) + +#ifndef CESMCOUPLED + runid = 'unknown' ! run ID used in CESM and for machine 'bering' + runtype = 'initial' ! run type: 'initial', 'continue' + restart = .false. ! if true, read ice state from restart file + use_restart_time = .false. ! if true, use time info written in file +#endif + + ! extra tracers + tr_iage = .false. ! ice age + restart_age = .false. ! ice age restart + tr_FY = .false. ! ice age + restart_FY = .false. ! ice age restart + tr_lvl = .false. ! level ice + restart_lvl = .false. ! level ice restart + tr_pond_lvl = .false. ! level-ice melt ponds + restart_pond_lvl = .false. ! melt ponds restart + tr_pond_topo = .false. ! explicit melt ponds (topographic) + restart_pond_topo = .false. ! melt ponds restart + tr_snow = .false. ! advanced snow physics + restart_snow = .false. ! advanced snow physics restart + tr_iso = .false. ! isotopes + restart_iso = .false. ! isotopes restart + tr_aero = .false. ! aerosols + restart_aero = .false. ! aerosols restart + tr_fsd = .false. ! floe size distribution + restart_fsd = .false. ! floe size distribution restart + + n_iso = 0 + n_aero = 0 + n_zaero = 0 + n_algae = 0 + n_doc = 0 + n_dic = 0 + n_don = 0 + n_fed = 0 + n_fep = 0 + + ! 
mushy layer gravity drainage physics + a_rapid_mode = 0.5e-3_dbl_kind ! channel radius for rapid drainage mode (m) + Rac_rapid_mode = 10.0_dbl_kind ! critical Rayleigh number + aspect_rapid_mode = 1.0_dbl_kind ! aspect ratio (larger is wider) + dSdt_slow_mode = -1.5e-7_dbl_kind ! slow mode drainage strength (m s-1 K-1) + phi_c_slow_mode = 0.05_dbl_kind ! critical liquid fraction porosity cutoff + phi_i_mushy = 0.85_dbl_kind ! liquid fraction of congelation ice + Tliquidus_max = 0.00_dbl_kind ! maximum liquidus temperature of mush (C) + + floediam = 300.0_dbl_kind ! min thickness of new frazil ice (m) + hfrazilmin = 0.05_dbl_kind ! effective floe diameter (m) + + ! shortwave redistribution in the thermodynamics + sw_redist = .false. + sw_frac = 0.9_dbl_kind + sw_dtemp = 0.02_dbl_kind + + !----------------------------------------------------------------- + ! read from input file + !----------------------------------------------------------------- + +#ifdef CESMCOUPLED + nml_filename = 'ice_in'//trim(inst_suffix) +#endif + + if (my_task == master_task) then + + ! open namelist file + call get_fileunit(nu_nml) + open (nu_nml, file=trim(nml_filename), status='old',iostat=nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: open file '// & + trim(nml_filename), & + file=__FILE__, line=__LINE__) + endif + + ! read setup_nml + nml_name = 'setup_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=setup_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif + end do + + ! read grid_nml + nml_name = 'grid_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=grid_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' //trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif + end do + + ! read tracer_nml + nml_name = 'tracer_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=tracer_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: ' //trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif + end do + + ! read thermo_nml + nml_name = 'thermo_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + ! 
goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=thermo_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif + end do + + ! read dynamics_nml + nml_name = 'dynamics_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=dynamics_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif + end do + + ! read shortwave_nml + nml_name = 'shortwave_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=shortwave_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '//& + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif + end do + + ! read ponds_nml + nml_name = 'ponds_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=ponds_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif + end do + + ! read snow_nml + nml_name = 'snow_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=snow_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '//trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif + end do + + ! read forcing_nml + nml_name = 'forcing_nml' + write(nu_diag,*) subname,' Reading ', trim(nml_name) + + ! 
goto namelist in file + call goto_nml(nu_nml,trim(nml_name),nml_error) + if (nml_error /= 0) then + call abort_ice(subname//'ERROR: searching for '// trim(nml_name), & + file=__FILE__, line=__LINE__) + endif + + ! read namelist + nml_error = 1 + do while (nml_error > 0) + read(nu_nml, nml=forcing_nml,iostat=nml_error) + ! check if error + if (nml_error /= 0) then + ! backspace and re-read erroneous line + backspace(nu_nml) + read(nu_nml,fmt='(A)') tmpstr2 + call abort_ice(subname//'ERROR: '// trim(nml_name)//' reading '// & + trim(tmpstr2), file=__FILE__, line=__LINE__) + endif + end do + + ! done reading namelist. + close(nu_nml) + call release_fileunit(nu_nml) + endif + + !----------------------------------------------------------------- + ! set up diagnostics output and resolve conflicts + !----------------------------------------------------------------- + +#ifdef CESMCOUPLED + ! Note in CESMCOUPLED mode diag_file is not utilized and + ! runid and runtype are obtained from the driver, not from the namelist + + if (my_task == master_task) then + history_file = trim(runid) // ".cice" // trim(inst_suffix) //".h" + restart_file = trim(runid) // ".cice" // trim(inst_suffix) //".r" + incond_file = trim(runid) // ".cice" // trim(inst_suffix) //".i" + ! Note by tcraig - this if test is needed because the nuopc cap sets + ! nu_diag before this routine is called. This creates a conflict. + ! In addition, in the nuopc cap, shr_file_setIO will fail if the + ! needed namelist is missing (which it is in the CIME nuopc implementation) + if (.not. nu_diag_set) then + inquire(file='ice_modelio.nml'//trim(inst_suffix),exist=exists) + if (exists) then + call get_fileUnit(nu_diag) + call shr_file_setIO('ice_modelio.nml'//trim(inst_suffix),nu_diag) + end if + endif + else + ! each task gets unique ice log filename when if test is true, for debugging + if (1 == 0) then + call get_fileUnit(nu_diag) + write(tmpstr2,'(a,i4.4)') "ice.log.task_",my_task + open(nu_diag,file=tmpstr2) + endif + end if + if (trim(ice_ic) /= 'default' .and. & + trim(ice_ic) /= 'none' .and. & + trim(ice_ic) /= 'internal') then + restart = .true. + end if +#else + if (trim(diag_type) == 'file') call get_fileunit(nu_diag) +#endif + + !----------------------------------------------------------------- + ! 
broadcast namelist settings + !----------------------------------------------------------------- + + call broadcast_scalar(numin, master_task) + call broadcast_scalar(numax, master_task) + call broadcast_scalar(days_per_year, master_task) + call broadcast_scalar(use_leap_years, master_task) + call broadcast_scalar(year_init, master_task) + call broadcast_scalar(month_init, master_task) + call broadcast_scalar(day_init, master_task) + call broadcast_scalar(sec_init, master_task) + call broadcast_scalar(istep0, master_task) + call broadcast_scalar(dt, master_task) + call broadcast_scalar(npt, master_task) + call broadcast_scalar(npt_unit, master_task) + call broadcast_scalar(diagfreq, master_task) + call broadcast_scalar(debug_model, master_task) + call broadcast_scalar(debug_model_step, master_task) + call broadcast_scalar(debug_model_i, master_task) + call broadcast_scalar(debug_model_j, master_task) + call broadcast_scalar(debug_model_iblk, master_task) + call broadcast_scalar(debug_model_task, master_task) + call broadcast_scalar(print_points, master_task) + call broadcast_scalar(print_global, master_task) + call broadcast_scalar(timer_stats, master_task) + call broadcast_scalar(memory_stats, master_task) + call broadcast_scalar(bfbflag, master_task) + call broadcast_scalar(diag_type, master_task) + call broadcast_scalar(diag_file, master_task) + do n = 1, max_nstrm + call broadcast_scalar(histfreq(n), master_task) + call broadcast_scalar(histfreq_base(n), master_task) + call broadcast_scalar(dumpfreq(n), master_task) + call broadcast_scalar(dumpfreq_base(n), master_task) + call broadcast_scalar(hist_suffix(n), master_task) + enddo + call broadcast_array(hist_avg, master_task) + call broadcast_array(histfreq_n, master_task) + call broadcast_array(dumpfreq_n, master_task) + call broadcast_scalar(history_dir, master_task) + call broadcast_scalar(history_file, master_task) + call broadcast_scalar(history_precision, master_task) + call broadcast_scalar(history_format, master_task) + call broadcast_scalar(history_iotasks, master_task) + call broadcast_scalar(history_root, master_task) + call broadcast_scalar(history_stride, master_task) + call broadcast_scalar(history_rearranger, master_task) + call broadcast_scalar(hist_time_axis, master_task) + call broadcast_scalar(history_deflate, master_task) + call broadcast_array(history_chunksize, master_task) + call broadcast_scalar(write_ic, master_task) + call broadcast_scalar(cpl_bgc, master_task) + call broadcast_scalar(incond_dir, master_task) + call broadcast_scalar(incond_file, master_task) + call broadcast_scalar(dump_last, master_task) + call broadcast_scalar(restart_file, master_task) + call broadcast_scalar(restart, master_task) + call broadcast_scalar(restart_dir, master_task) + call broadcast_scalar(restart_ext, master_task) + call broadcast_scalar(restart_coszen, master_task) + call broadcast_scalar(use_restart_time, master_task) + call broadcast_scalar(restart_format, master_task) + call broadcast_scalar(restart_iotasks, master_task) + call broadcast_scalar(restart_root, master_task) + call broadcast_scalar(restart_stride, master_task) + call broadcast_scalar(restart_rearranger, master_task) + call broadcast_scalar(restart_deflate, master_task) + call broadcast_array(restart_chunksize, master_task) + call broadcast_scalar(lcdf64, master_task) + call broadcast_scalar(pointer_file, master_task) + call broadcast_scalar(ice_ic, master_task) + call broadcast_scalar(grid_format, master_task) + call broadcast_scalar(dxrect, master_task) + 
call broadcast_scalar(dyrect, master_task) + call broadcast_scalar(scale_dxdy, master_task) + call broadcast_scalar(dxscale, master_task) + call broadcast_scalar(dyscale, master_task) + call broadcast_scalar(lonrefrect, master_task) + call broadcast_scalar(latrefrect, master_task) + call broadcast_scalar(close_boundaries, master_task) + call broadcast_scalar(grid_type, master_task) + call broadcast_scalar(grid_ice, master_task) + call broadcast_scalar(grid_ocn, master_task) + call broadcast_scalar(grid_atm, master_task) + call broadcast_scalar(grid_file, master_task) + call broadcast_scalar(gridcpl_file, master_task) + call broadcast_scalar(orca_halogrid, master_task) + call broadcast_scalar(bathymetry_file, master_task) + call broadcast_scalar(bathymetry_format, master_task) + call broadcast_scalar(use_bathymetry, master_task) + call broadcast_scalar(kmt_type, master_task) + call broadcast_scalar(kmt_file, master_task) + call broadcast_scalar(kitd, master_task) + call broadcast_scalar(kcatbound, master_task) + call broadcast_scalar(kdyn, master_task) + call broadcast_scalar(ndtd, master_task) + call broadcast_scalar(ndte, master_task) + call broadcast_scalar(evp_algorithm, master_task) + call broadcast_scalar(elasticDamp, master_task) + call broadcast_scalar(brlx, master_task) + call broadcast_scalar(arlx, master_task) + call broadcast_scalar(revised_evp, master_task) + call broadcast_scalar(yield_curve, master_task) + call broadcast_scalar(kstrength, master_task) + call broadcast_scalar(Pstar, master_task) + call broadcast_scalar(Cstar, master_task) + call broadcast_scalar(krdg_partic, master_task) + call broadcast_scalar(krdg_redist, master_task) + call broadcast_scalar(mu_rdg, master_task) + call broadcast_scalar(Cf, master_task) + call broadcast_scalar(ksno, master_task) + call broadcast_scalar(seabed_stress, master_task) + call broadcast_scalar(seabed_stress_method, master_task) + call broadcast_scalar(k1, master_task) + call broadcast_scalar(k2, master_task) + call broadcast_scalar(alphab, master_task) + call broadcast_scalar(threshold_hw, master_task) + call broadcast_scalar(Ktens, master_task) + call broadcast_scalar(e_yieldcurve, master_task) + call broadcast_scalar(e_plasticpot, master_task) + call broadcast_scalar(visc_method, master_task) + call broadcast_scalar(deltaminEVP, master_task) + call broadcast_scalar(deltaminVP, master_task) + call broadcast_scalar(capping_method, master_task) + call broadcast_scalar(advection, master_task) + call broadcast_scalar(conserv_check, master_task) + call broadcast_scalar(shortwave, master_task) + call broadcast_scalar(snw_ssp_table, master_task) + call broadcast_scalar(albedo_type, master_task) + call broadcast_scalar(ktherm, master_task) + call broadcast_scalar(coriolis, master_task) + call broadcast_scalar(ssh_stress, master_task) + call broadcast_scalar(kridge, master_task) + call broadcast_scalar(ktransport, master_task) + call broadcast_scalar(maxits_nonlin, master_task) + call broadcast_scalar(precond, master_task) + call broadcast_scalar(dim_fgmres, master_task) + call broadcast_scalar(dim_pgmres, master_task) + call broadcast_scalar(maxits_fgmres, master_task) + call broadcast_scalar(maxits_pgmres, master_task) + call broadcast_scalar(monitor_nonlin, master_task) + call broadcast_scalar(monitor_fgmres, master_task) + call broadcast_scalar(monitor_pgmres, master_task) + call broadcast_scalar(ortho_type, master_task) + call broadcast_scalar(reltol_nonlin, master_task) + call broadcast_scalar(reltol_fgmres, master_task) + call 
broadcast_scalar(reltol_pgmres, master_task) + call broadcast_scalar(algo_nonlin, master_task) + call broadcast_scalar(fpfunc_andacc, master_task) + call broadcast_scalar(dim_andacc, master_task) + call broadcast_scalar(reltol_andacc, master_task) + call broadcast_scalar(damping_andacc, master_task) + call broadcast_scalar(start_andacc, master_task) + call broadcast_scalar(use_mean_vrel, master_task) + call broadcast_scalar(conduct, master_task) + call broadcast_scalar(R_ice, master_task) + call broadcast_scalar(R_pnd, master_task) + call broadcast_scalar(R_snw, master_task) + call broadcast_scalar(dT_mlt, master_task) + call broadcast_scalar(rsnw_mlt, master_task) + call broadcast_scalar(kalg, master_task) + call broadcast_scalar(hp1, master_task) + call broadcast_scalar(hs0, master_task) + call broadcast_scalar(hs1, master_task) + call broadcast_scalar(dpscale, master_task) + call broadcast_scalar(frzpnd, master_task) + call broadcast_scalar(rfracmin, master_task) + call broadcast_scalar(rfracmax, master_task) + call broadcast_scalar(pndaspect, master_task) + call broadcast_scalar(snwredist, master_task) + call broadcast_scalar(snw_aging_table, master_task) + call broadcast_scalar(snw_filename, master_task) + call broadcast_scalar(snw_tau_fname, master_task) + call broadcast_scalar(snw_kappa_fname, master_task) + call broadcast_scalar(snw_drdt0_fname, master_task) + call broadcast_scalar(snw_rhos_fname, master_task) + call broadcast_scalar(snw_Tgrd_fname, master_task) + call broadcast_scalar(snw_T_fname, master_task) + call broadcast_scalar(snwgrain, master_task) + call broadcast_scalar(use_smliq_pnd, master_task) + call broadcast_scalar(rsnw_fall, master_task) + call broadcast_scalar(rsnw_tmax, master_task) + call broadcast_scalar(rhosnew, master_task) + call broadcast_scalar(rhosmin, master_task) + call broadcast_scalar(rhosmax, master_task) + call broadcast_scalar(windmin, master_task) + call broadcast_scalar(drhosdwind, master_task) + call broadcast_scalar(snwlvlfac, master_task) + call broadcast_scalar(albicev, master_task) + call broadcast_scalar(albicei, master_task) + call broadcast_scalar(albsnowv, master_task) + call broadcast_scalar(albsnowi, master_task) + call broadcast_scalar(ahmax, master_task) + call broadcast_scalar(atmbndy, master_task) + call broadcast_scalar(default_season, master_task) + call broadcast_scalar(fyear_init, master_task) + call broadcast_scalar(ycycle, master_task) + call broadcast_scalar(atm_data_format, master_task) + call broadcast_scalar(atm_data_type, master_task) + call broadcast_scalar(atm_data_dir, master_task) + call broadcast_scalar(atm_data_version, master_task) + call broadcast_scalar(rotate_wind, master_task) + call broadcast_scalar(calc_strair, master_task) + call broadcast_scalar(calc_Tsfc, master_task) + call broadcast_scalar(formdrag, master_task) + call broadcast_scalar(highfreq, master_task) + call broadcast_scalar(natmiter, master_task) + call broadcast_scalar(atmiter_conv, master_task) + call broadcast_scalar(update_ocn_f, master_task) + call broadcast_scalar(cpl_frazil, master_task) + call broadcast_scalar(l_mpond_fresh, master_task) + call broadcast_scalar(ustar_min, master_task) + call broadcast_scalar(hi_min, master_task) + call broadcast_scalar(iceruf, master_task) + call broadcast_scalar(iceruf_ocn, master_task) + call broadcast_scalar(calc_dragio, master_task) + call broadcast_scalar(emissivity, master_task) + call broadcast_scalar(fbot_xfer_type, master_task) + call broadcast_scalar(precip_units, master_task) + call 
broadcast_scalar(oceanmixed_ice, master_task) + call broadcast_scalar(wave_spec_type, master_task) + call broadcast_scalar(wave_spec_file, master_task) + call broadcast_scalar(nfreq, master_task) + call broadcast_scalar(congel_freeze, master_task) + call broadcast_scalar(tfrz_option, master_task) + call broadcast_scalar(saltflux_option, master_task) + call broadcast_scalar(ice_ref_salinity, master_task) + call broadcast_scalar(ocn_data_format, master_task) + call broadcast_scalar(bgc_data_type, master_task) + call broadcast_scalar(fe_data_type, master_task) + call broadcast_scalar(ice_data_type, master_task) + call broadcast_scalar(ice_data_conc, master_task) + call broadcast_scalar(ice_data_dist, master_task) + call broadcast_scalar(bgc_data_dir, master_task) + call broadcast_scalar(ocn_data_type, master_task) + call broadcast_scalar(ocn_data_dir, master_task) + call broadcast_scalar(oceanmixed_file, master_task) + call broadcast_scalar(restore_ocn, master_task) + call broadcast_scalar(trestore, master_task) + call broadcast_scalar(restore_ice, master_task) + call broadcast_scalar(debug_forcing, master_task) + call broadcast_array (latpnt(1:2), master_task) + call broadcast_array (lonpnt(1:2), master_task) + call broadcast_scalar(runid, master_task) + call broadcast_scalar(runtype, master_task) + !call broadcast_scalar(nu_diag, master_task) + + ! tracers + call broadcast_scalar(tr_iage, master_task) + call broadcast_scalar(restart_age, master_task) + call broadcast_scalar(tr_FY, master_task) + call broadcast_scalar(restart_FY, master_task) + call broadcast_scalar(tr_lvl, master_task) + call broadcast_scalar(restart_lvl, master_task) + call broadcast_scalar(tr_pond_lvl, master_task) + call broadcast_scalar(restart_pond_lvl, master_task) + call broadcast_scalar(tr_pond_topo, master_task) + call broadcast_scalar(restart_pond_topo, master_task) + call broadcast_scalar(tr_snow, master_task) + call broadcast_scalar(restart_snow, master_task) + call broadcast_scalar(tr_iso, master_task) + call broadcast_scalar(restart_iso, master_task) + call broadcast_scalar(tr_aero, master_task) + call broadcast_scalar(restart_aero, master_task) + call broadcast_scalar(tr_fsd, master_task) + call broadcast_scalar(restart_fsd, master_task) + call broadcast_scalar(ncat, master_task) + call broadcast_scalar(nfsd, master_task) + call broadcast_scalar(nilyr, master_task) + call broadcast_scalar(nslyr, master_task) + call broadcast_scalar(nblyr, master_task) + call broadcast_scalar(n_iso, master_task) + call broadcast_scalar(n_aero, master_task) + call broadcast_scalar(n_zaero, master_task) + call broadcast_scalar(n_algae, master_task) + call broadcast_scalar(n_doc, master_task) + call broadcast_scalar(n_dic, master_task) + call broadcast_scalar(n_don, master_task) + call broadcast_scalar(n_fed, master_task) + call broadcast_scalar(n_fep, master_task) + call broadcast_scalar(a_rapid_mode, master_task) + call broadcast_scalar(floediam, master_task) + call broadcast_scalar(hfrazilmin, master_task) + call broadcast_scalar(Rac_rapid_mode, master_task) + call broadcast_scalar(aspect_rapid_mode, master_task) + call broadcast_scalar(dSdt_slow_mode, master_task) + call broadcast_scalar(phi_c_slow_mode, master_task) + call broadcast_scalar(phi_i_mushy, master_task) + call broadcast_scalar(Tliquidus_max, master_task) + call broadcast_scalar(sw_redist, master_task) + call broadcast_scalar(sw_frac, master_task) + call broadcast_scalar(sw_dtemp, master_task) + + !----------------------------------------------------------------- 
+ ! update defaults + !----------------------------------------------------------------- + + if (trim(ice_ic) == 'default') ice_ic = 'internal' + if (trim(ice_data_conc) == 'default') ice_data_conc = 'parabolic' + if (trim(ice_data_dist) == 'default') ice_data_dist = 'uniform' + if (trim(ice_data_type) == 'default') ice_data_type = 'latsst' + + !----------------------------------------------------------------- + ! verify inputs + !----------------------------------------------------------------- + + if (my_task == master_task) then + if (trim(diag_type) == 'file') then + write(ice_stdout,*) 'Diagnostic output will be in file ',diag_file + open (nu_diag, file=diag_file, status='unknown') + endif + write(nu_diag,*) '--------------------------------' + write(nu_diag,*) ' ',subname + write(nu_diag,*) ' CICE model diagnostic output ' + write(nu_diag,*) '--------------------------------' + write(nu_diag,*) ' ' + endif + + if (trim(runtype) == 'continue') then + if (my_task == master_task) then + write(nu_diag,*) subname//'NOTE: runtype=continue, setting restart=.true.' + if (.not. use_restart_time) & + write(nu_diag,*) subname//'NOTE: runtype=continue, setting use_restart_time=.true.' + write(nu_diag,*) ' ' + endif + restart = .true. + use_restart_time = .true. + elseif (trim(runtype) == 'initial') then + if (ice_ic == 'none' .or. ice_ic == 'internal') then + if (my_task == master_task) then + write(nu_diag,*) subname//'NOTE: ice_ic = none or internal, setting restart flags to .false.' + if (.not. use_restart_time) & + write(nu_diag,*) subname//'NOTE: ice_ic = none or internal, setting use_restart_time=.false.' + write(nu_diag,*) ' ' + endif + use_restart_time = .false. + restart = .false. + restart_iso = .false. + restart_aero = .false. + restart_fsd = .false. + restart_age = .false. + restart_fy = .false. + restart_lvl = .false. + restart_pond_lvl = .false. + restart_pond_topo = .false. + restart_snow = .false. +! tcraig, OK to leave as true, needed for boxrestore case +! restart_ext = .false. + else + if (my_task == master_task) then + write(nu_diag,*) subname//'NOTE: ice_ic /= none or internal, setting restart=.true.' + write(nu_diag,*) ' ' + endif + restart = .true. + endif + else + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: runtype unknown = ',trim(runtype) + endif + abort_list = trim(abort_list)//":1" + endif + + if (history_format /= 'cdf1' .and. & + history_format /= 'cdf2' .and. & + history_format /= 'cdf5' .and. & + history_format /= 'hdf5' .and. & + history_format /= 'pnetcdf1' .and. & + history_format /= 'pnetcdf2' .and. & + history_format /= 'pnetcdf5' .and. & + history_format /= 'pio_netcdf' .and. & ! backwards compatibility + history_format /= 'pio_pnetcdf' .and. & ! backwards compatibility + history_format /= 'binary' .and. & + history_format /= 'default') then ! backwards compatibility + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: history_format unknown = ',trim(history_format) + endif + abort_list = trim(abort_list)//":50" + endif + + if (restart_format /= 'cdf1' .and. & + restart_format /= 'cdf2' .and. & + restart_format /= 'cdf5' .and. & + restart_format /= 'hdf5' .and. & + restart_format /= 'pnetcdf1' .and. & + restart_format /= 'pnetcdf2' .and. & + restart_format /= 'pnetcdf5' .and. & + restart_format /= 'pio_netcdf' .and. & ! backwards compatibility + restart_format /= 'pio_pnetcdf' .and. & ! backwards compatibility + restart_format /= 'binary' .and. & + restart_format /= 'default') then ! 
backwards compatibility + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: restart_format unknown = ',trim(restart_format) + endif + abort_list = trim(abort_list)//":51" + endif + + ! backwards compatibility for history and restart formats, lcdf64 + + if (history_format == 'pio_pnetcdf' .or. history_format == 'pio_netcdf') then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: history_format='//trim(history_format)// & + ' is deprecated, please update namelist settings' + endif + endif + if (restart_format == 'pio_pnetcdf' .or. restart_format == 'pio_netcdf') then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: restart_format='//trim(restart_format)// & + ' is deprecated, please update namelist settings' + endif + endif + + if (lcdf64) then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: lcdf64 is deprecated, please update namelist settings' + endif + + if (history_format == 'default' .or. history_format == 'pio_netcdf') then + history_format = 'cdf2' + elseif (history_format == 'pio_pnetcdf') then + history_format = 'pnetcdf2' + else + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: lcdf64 is T and history_format not supported for '//trim(history_format) + endif + abort_list = trim(abort_list)//":52" + endif + + if (restart_format == 'default' .or. restart_format == 'pio_netcdf') then + restart_format = 'cdf2' + elseif (restart_format == 'pio_pnetcdf') then + restart_format = 'pnetcdf2' + else + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: lcdf64 is T and restart_format not supported for '//trim(restart_format) + endif + abort_list = trim(abort_list)//":53" + endif + else + if (history_format == 'default' .or. history_format == 'pio_netcdf') then + history_format = 'cdf1' + elseif (history_format == 'pio_pnetcdf') then + history_format = 'pnetcdf1' + endif + + if (restart_format == 'default' .or. restart_format == 'pio_netcdf') then + restart_format = 'cdf1' + elseif (restart_format == 'pio_pnetcdf') then + restart_format = 'pnetcdf1' + endif + endif + + if (ktransport <= 0) then + advection = 'none' + endif + + if (ktransport > 0 .and. advection /= 'remap' .and. advection /= 'upwind') then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: invalid advection=',trim(advection) + abort_list = trim(abort_list)//":3" + endif + + if (ncat == 1 .and. kitd == 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: kitd incompatability: ncat=1 and kitd=1' + write(nu_diag,*) subname//' ERROR: Remapping the ITD is not allowed for ncat=1.' + write(nu_diag,*) subname//' ERROR: Use kitd = 0 (delta function ITD) with kcatbound = 0' + write(nu_diag,*) subname//' ERROR: or for column configurations use kcatbound = -1' + endif + abort_list = trim(abort_list)//":4" + endif + + if (ncat /= 1 .and. kcatbound == -1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: ITD required for ncat > 1' + write(nu_diag,*) subname//' ERROR: ncat=',ncat,' kcatbound=',kcatbound + write(nu_diag,*) subname//' ERROR: Please review user guide' + endif + abort_list = trim(abort_list)//":5" + endif + + if (kdyn == 1 .and. evp_algorithm == 'shared_mem_1d') then + save_ghte_ghtn = .true. + endif + + if (kdyn == 2 .and. 
revised_evp) then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: revised_evp = T with EAP dynamics' + write(nu_diag,*) subname//' WARNING: revised_evp is ignored' + endif + revised_evp = .false. + endif + + if (kdyn > 3) then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: kdyn out of range' + endif + abort_list = trim(abort_list)//":33" + endif + + if (seabed_stress) then + if (seabed_stress_method /= 'LKD' .and. seabed_stress_method /= 'probabilistic') then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: invalid seabed stress method' + write(nu_diag,*) subname//' ERROR: seabed_stress_method should be LKD or probabilistic' + endif + abort_list = trim(abort_list)//":48" + endif + endif + + if (grid_ice == 'CD') then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: grid_ice = CD not supported yet' + endif + abort_list = trim(abort_list)//":47" + elseif (grid_ice == 'C_override_D') then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: using grid_ice = CD, not supported' + endif + grid_ice = 'CD' + endif + + if (grid_ice == 'C' .or. grid_ice == 'CD') then + if (kdyn > 1 .or. (kdyn == 1 .and. evp_algorithm /= 'standard_2d')) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: grid_ice = C | CD only supported with kdyn=1 and evp_algorithm=standard_2d' + write(nu_diag,*) subname//' ERROR: kdyn and/or evp_algorithm and grid_ice inconsistency' + endif + abort_list = trim(abort_list)//":46" + endif + if (visc_method /= 'avg_zeta' .and. visc_method /= 'avg_strength') then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: invalid method for viscosities' + write(nu_diag,*) subname//' ERROR: visc_method should be avg_zeta or avg_strength' + endif + abort_list = trim(abort_list)//":44" + endif + endif + + if (evp_algorithm == 'shared_mem_1d' .and. & + grid_type == 'tripole') then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: evp_algorithm=shared_mem_1d is not tested for gridtype=tripole' + write(nu_diag,*) subname//' ERROR: change evp_algorithm to standard_2d' + endif + abort_list = trim(abort_list)//":49" + endif + + capping = -9.99e30 + if (kdyn == 1 .or. kdyn == 3) then + if (capping_method == 'max') then + capping = c1 + elseif (capping_method == 'sum') then + capping = c0 + else + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: invalid method for capping viscosities' + write(nu_diag,*) subname//' ERROR: capping_method should be equal to max or sum' + endif + abort_list = trim(abort_list)//":45" + endif + endif + + rplvl = 0 + rptopo = 0 + if (tr_pond_lvl ) rplvl = 1 + if (tr_pond_topo) rptopo = 1 + + tr_pond = .false. ! explicit melt ponds + if (rplvl + rptopo > 0) tr_pond = .true. + + if (rplvl + rptopo > 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: Must use only one melt pond scheme' + endif + abort_list = trim(abort_list)//":6" + endif + + if (tr_pond_lvl .and. .not. tr_lvl) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: tr_pond_lvl=T but tr_lvl=F' + endif + abort_list = trim(abort_list)//":30" + endif + +! tcraig - this was originally implemented by resetting hs0=0. EH says it might be OK +! to not reset it but extra calculations are done and it might not be bfb. In our +! testing, we should explicitly set hs0 to 0. when setting tr_pond_lvl=T, and otherwise +! 
this will abort (safest option until additional testing is done) + if (tr_pond_lvl .and. abs(hs0) > puny) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: tr_pond_lvl=T and hs0 /= 0' + endif + abort_list = trim(abort_list)//":7" + endif + + if (shortwave(1:4) /= 'dEdd' .and. tr_pond .and. calc_tsfc) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: tr_pond=T, calc_tsfc=T, invalid shortwave' + write(nu_diag,*) subname//' ERROR: Must use shortwave=dEdd or dEdd_snicar_ad' + endif + abort_list = trim(abort_list)//":8" + endif + + if (snwredist(1:3) == 'ITD' .and. .not. tr_snow) then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: snwredist on but tr_snow=F' + write (nu_diag,*) 'ERROR: Use tr_snow=T for snow redistribution' + endif + abort_list = trim(abort_list)//":37" + endif + if (snwredist(1:4) == 'bulk' .and. .not. tr_lvl) then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: snwredist=bulk but tr_lvl=F' + write (nu_diag,*) 'ERROR: Use tr_lvl=T for snow redistribution' + endif + abort_list = trim(abort_list)//":38" + endif + if (snwredist(1:6) == 'ITDrdg' .and. .not. tr_lvl) then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: snwredist=ITDrdg but tr_lvl=F' + write (nu_diag,*) 'ERROR: Use tr_lvl=T for snow redistribution' + endif + abort_list = trim(abort_list)//":39" + endif + if (use_smliq_pnd .and. .not. snwgrain) then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: use_smliq_pnd = T but' + write (nu_diag,*) 'ERROR: snow metamorphosis not used' + write (nu_diag,*) 'ERROR: Use snwgrain=T with smliq for ponds' + endif + abort_list = trim(abort_list)//":40" + endif + if (use_smliq_pnd .and. .not. tr_snow) then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: use_smliq_pnd = T but' + write (nu_diag,*) 'ERROR: snow tracers are not active' + write (nu_diag,*) 'ERROR: Use tr_snow=T with smliq for ponds' + endif + abort_list = trim(abort_list)//":41" + endif + if (snwgrain .and. .not. tr_snow) then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: snwgrain=T but tr_snow=F' + write (nu_diag,*) 'ERROR: Use tr_snow=T for snow metamorphosis' + endif + abort_list = trim(abort_list)//":42" + endif + if (trim(snw_aging_table) /= 'test' .and. & + trim(snw_aging_table) /= 'snicar' .and. & + trim(snw_aging_table) /= 'file') then + if (my_task == master_task) then + write (nu_diag,*) 'ERROR: unknown snw_aging_table = '//trim(snw_aging_table) + endif + abort_list = trim(abort_list)//":43" + endif + + if (tr_iso .and. n_iso==0) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: isotopes activated but' + write(nu_diag,*) subname//' ERROR: not allocated in tracer array.' + write(nu_diag,*) subname//' ERROR: if tr_iso, n_iso must be > 0.' + endif + abort_list = trim(abort_list)//":31" + endif + + if (tr_aero .and. n_aero==0) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: aerosols activated but' + write(nu_diag,*) subname//' ERROR: not allocated in tracer array.' + write(nu_diag,*) subname//' ERROR: if tr_aero, n_aero must be > 0.' 
+ endif + abort_list = trim(abort_list)//":9" + endif + + if (ncat < 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: ncat < 1' + endif + abort_list = trim(abort_list)//":32" + endif + + if (nilyr < 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: nilyr < 1' + endif + abort_list = trim(abort_list)//":2" + endif + + if (nslyr < 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: nslyr < 1' + endif + abort_list = trim(abort_list)//":34" + endif + + if (nblyr < 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: nblyr < 1' + write(nu_diag,*) subname//' ERROR: not allowed due to history implementation.' + endif + abort_list = trim(abort_list)//":35" + endif + + if (nfsd < 1) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: nfsd < 1' + write(nu_diag,*) subname//' ERROR: not allowed due to history implementation.' + endif + abort_list = trim(abort_list)//":36" + endif + + if (shortwave(1:4) /= 'dEdd' .and. tr_aero) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: tr_aero=T, invalid shortwave' + write(nu_diag,*) subname//' ERROR: Must use shortwave=dEdd or dEdd_snicar_ad' + endif + abort_list = trim(abort_list)//":10" + endif + + if (shortwave(1:4) /= 'dEdd' .and. snwgrain) then + if (my_task == master_task) then + write (nu_diag,*) subname//' ERROR: snow grain radius is activated' + write (nu_diag,*) subname//' ERROR: Must use shortwave=dEdd or dEdd_snicar_ad' + endif + abort_list = trim(abort_list)//":17" + endif + + if ((rfracmin < -puny .or. rfracmin > c1+puny) .or. & + (rfracmax < -puny .or. rfracmax > c1+puny) .or. & + (rfracmin > rfracmax)) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: rfracmin, rfracmax must be between 0 and 1' + write(nu_diag,*) subname//' ERROR: and rfracmax >= rfracmin' + endif + abort_list = trim(abort_list)//":11" + endif + rfracmin = min(max(rfracmin,c0),c1) + rfracmax = min(max(rfracmax,c0),c1) + + if (trim(atm_data_type) == 'monthly' .and. calc_strair) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: atm_data_type=monthly and calc_strair=T' + abort_list = trim(abort_list)//":12" + endif + + if (ktherm == 2 .and. .not. calc_Tsfc) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: ktherm = 2 and calc_Tsfc=F' + abort_list = trim(abort_list)//":13" + endif + +! ech: allow inconsistency for testing sensitivities. It's not recommended for science runs + if (ktherm == 1 .and. trim(tfrz_option) /= 'linear_salt') then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: ktherm = 1 and tfrz_option = ',trim(tfrz_option) + write(nu_diag,*) subname//' WARNING: For consistency, set tfrz_option = linear_salt' + endif + endif + if (ktherm == 2 .and. trim(tfrz_option) /= 'mushy') then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: ktherm = 2 and tfrz_option = ',trim(tfrz_option) + write(nu_diag,*) subname//' WARNING: For consistency, set tfrz_option = mushy' + endif + endif + if (ktherm == 1 .and. trim(saltflux_option) /= 'constant') then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: ktherm = 1 and saltflux_option = ',trim(saltflux_option) + write(nu_diag,*) subname//' WARNING: For consistency, set saltflux_option = constant' + endif + endif + if (ktherm == 1 .and. 
.not.sw_redist) then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: ktherm = 1 and sw_redist = ',sw_redist + write(nu_diag,*) subname//' WARNING: For consistency, set sw_redist = .true.' + endif + endif + + if (trim(atmbndy) == 'default') then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: atmbndy = default is deprecated' + write(nu_diag,*) subname//' WARNING: setting atmbndy = similarity' + endif + atmbndy = 'similarity' + endif + + if (formdrag) then + if (trim(atmbndy) == 'constant') then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=T and atmbndy=constant' + abort_list = trim(abort_list)//":14" + endif + + if (.not. calc_strair) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=T and calc_strair=F' + abort_list = trim(abort_list)//":15" + endif + + if (.not. tr_pond) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=T and tr_pond=F' + abort_list = trim(abort_list)//":16" + endif + + if (.not. tr_lvl) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=T and tr_lvl=F' + abort_list = trim(abort_list)//":18" + endif + endif + + if (trim(fbot_xfer_type) == 'Cdn_ocn' .and. .not. formdrag) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: formdrag=F and fbot_xfer_type=Cdn_ocn' + abort_list = trim(abort_list)//":19" + endif + + if (history_precision .ne. 4 .and. history_precision .ne. 8) then + write (nu_diag,*) subname//' ERROR: bad value for history_precision, allowed values: 4, 8' + abort_list = trim(abort_list)//":22" + endif + + do n = 1,max_nstrm + if (histfreq_base(n) /= 'init' .and. histfreq_base(n) /= 'zero') then + write (nu_diag,*) subname//' ERROR: bad value for histfreq_base, allowed values: init, zero: '//trim(histfreq_base(n)) + abort_list = trim(abort_list)//":24" + endif + + if (dumpfreq_base(n) /= 'init' .and. dumpfreq_base(n) /= 'zero') then + write (nu_diag,*) subname//' ERROR: bad value for dumpfreq_base, allowed values: init, zero: '//trim(dumpfreq_base(n)) + abort_list = trim(abort_list)//":25" + endif + + if (.not.(scan(dumpfreq(n)(1:1),'ymdhx1YMDHX') == 1 .and. (dumpfreq(n)(2:2) == '1' .or. dumpfreq(n)(2:2) == ' '))) then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: unrecognized dumpfreq=', trim(dumpfreq(n)) + write(nu_diag,*) subname//' WARNING: No restarts files will be written for this stream' + write(nu_diag,*) subname//' WARNING: Allowed values : y,m,d,h,x,1 followed by an optional 1' + endif + dumpfreq(n) = 'x' + endif + enddo + + if (trim(hist_time_axis) /= 'begin' .and. trim(hist_time_axis) /= 'middle' .and. trim(hist_time_axis) /= 'end') then + write (nu_diag,*) subname//' ERROR: hist_time_axis value not valid = '//trim(hist_time_axis) + abort_list = trim(abort_list)//":29" + endif + +#ifdef USE_PIO1 + if (history_deflate/=0 .or. restart_deflate/=0 .or. & + history_chunksize(1)/=0 .or. history_chunksize(2)/=0 .or. & + restart_chunksize(1)/=0 .or. restart_chunksize(2)/=0) then + if (my_task == master_task) write (nu_diag,*) subname//' ERROR: _deflate and _chunksize not compatible with PIO1' + abort_list = trim(abort_list)//":54" + endif +#else +#ifndef CESMCOUPLED + ! history_format not used by nuopc driver + if (history_format/='hdf5' .and. 
history_deflate/=0) then + if (my_task == master_task) then + write (nu_diag,*) subname//' WARNING: history_deflate not compatible with '//history_format + write (nu_diag,*) subname//' WARNING: netcdf compression only possible with history_type="hdf5" ' + endif + endif + + if (history_format/='hdf5' .and. (history_chunksize(1)/=0 .or. history_chunksize(2)/=0)) then + if (my_task == master_task) then + write (nu_diag,*) subname//' WARNING: history_chunksize not compatible with '//history_format + write (nu_diag,*) subname//' WARNING: netcdf chunking only possible with history_type="hdf5" ' + endif + endif + + if (restart_format/='hdf5' .and. restart_deflate/=0) then + if (my_task == master_task) then + write (nu_diag,*) subname//' WARNING: restart_deflate not compatible with '//restart_format + write (nu_diag,*) subname//' WARNING: netcdf compression only possible with restart_type="hdf5" ' + endif + endif + + if (restart_format/='hdf5' .and. (restart_chunksize(1)/=0 .or. restart_chunksize(2)/=0)) then + if (my_task == master_task) then + write (nu_diag,*) subname//' WARNING: restart_chunksize not compatible with '//restart_format + write (nu_diag,*) subname//' WARNING: netcdf chunking only possible with restart_type="hdf5" ' + endif + endif +#endif + + if (history_deflate<0 .or. history_deflate>9) then + if (my_task == master_task) write (nu_diag,*) subname//& + ' ERROR: history_deflate value not valid. Allowed range: integers from 0 to 9 ' + abort_list = trim(abort_list)//":55" + endif + + if (restart_deflate<0 .or. restart_deflate>9) then + if (my_task == master_task) write (nu_diag,*) subname//& + ' ERROR: restart_deflate value not valid. Allowed range: integers from 0 to 9 ' + abort_list = trim(abort_list)//":56" + endif +#endif + + ! Implicit solver input validation + if (kdyn == 3) then + if (.not. (trim(algo_nonlin) == 'picard' .or. trim(algo_nonlin) == 'anderson')) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: unknown algo_nonlin: '//algo_nonlin + write(nu_diag,*) subname//' ERROR: allowed values: ''picard'', ''anderson''' + endif + abort_list = trim(abort_list)//":60" + endif + + if (trim(algo_nonlin) == 'picard') then + ! Picard solver is implemented in the Anderson solver; reset number of saved residuals to zero + dim_andacc = 0 + endif + + if (.not. (trim(precond) == 'ident' .or. trim(precond) == 'diag' .or. trim(precond) == 'pgmres')) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: unknown precond: '//precond + write(nu_diag,*) subname//' ERROR: allowed values: ''ident'', ''diag'', ''pgmres''' + endif + abort_list = trim(abort_list)//":61" + endif + + if (.not. (trim(ortho_type) == 'cgs' .or. 
trim(ortho_type) == 'mgs')) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: unknown ortho_type: '//ortho_type + write(nu_diag,*) subname//' ERROR: allowed values: ''cgs'', ''mgs''' + endif + abort_list = trim(abort_list)//":62" + endif + endif + + if (orca_halogrid) then + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: orca_halogrid has been deprecated' + endif + abort_list = trim(abort_list)//":63" + endif + + if (trim(grid_type) == 'cpom_grid') then + if (my_task == master_task) then + write(nu_diag,*) subname//" ERROR: grid_type = 'cpom_grid' has been deprecated" + endif + abort_list = trim(abort_list)//":64" + endif + + ice_IOUnitsMinUnit = numin + ice_IOUnitsMaxUnit = numax + + call icepack_init_parameters(Cf_in=Cf) + call icepack_init_parameters(ksno_in=ksno) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname//'Icepack Abort1', & + file=__FILE__, line=__LINE__) + + wave_spec = .false. + if (tr_fsd .and. (trim(wave_spec_type) /= 'none')) wave_spec = .true. + if (tr_fsd .and. (trim(wave_spec_type) == 'none')) then + if (my_task == master_task) then + write(nu_diag,*) subname//' WARNING: tr_fsd=T but wave_spec=F - not recommended' + endif + end if + + ! compute grid locations for thermo, u and v fields + + grid_ice_thrm = 'T' + if (grid_ice == 'A') then + grid_ice_dynu = 'T' + grid_ice_dynv = 'T' + elseif (grid_ice == 'B') then + grid_ice_dynu = 'U' + grid_ice_dynv = 'U' + elseif (grid_ice == 'C') then + grid_ice_dynu = 'E' + grid_ice_dynv = 'N' + elseif (grid_ice == 'CD') then + grid_ice_dynu = 'NE' + grid_ice_dynv = 'NE' + else + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: unknown grid_ice: '//trim(grid_ice) + endif + abort_list = trim(abort_list)//":64" + endif + + grid_atm_thrm = 'T' + if (grid_atm == 'A') then + grid_atm_dynu = 'T' + grid_atm_dynv = 'T' + elseif (grid_atm == 'B') then + grid_atm_dynu = 'U' + grid_atm_dynv = 'U' + elseif (grid_atm == 'C') then + grid_atm_dynu = 'E' + grid_atm_dynv = 'N' + elseif (grid_atm == 'CD') then + grid_atm_dynu = 'NE' + grid_atm_dynv = 'NE' + else + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: unknown grid_atm: '//trim(grid_atm) + endif + abort_list = trim(abort_list)//":65" + endif + + grid_ocn_thrm = 'T' + if (grid_ocn == 'A') then + grid_ocn_dynu = 'T' + grid_ocn_dynv = 'T' + elseif (grid_ocn == 'B') then + grid_ocn_dynu = 'U' + grid_ocn_dynv = 'U' + elseif (grid_ocn == 'C') then + grid_ocn_dynu = 'E' + grid_ocn_dynv = 'N' + elseif (grid_ocn == 'CD') then + grid_ocn_dynu = 'NE' + grid_ocn_dynv = 'NE' + else + if (my_task == master_task) then + write(nu_diag,*) subname//' ERROR: unknown grid_ocn: '//trim(grid_ocn) + endif + abort_list = trim(abort_list)//":66" + endif + + !----------------------------------------------------------------- + ! spew + !----------------------------------------------------------------- + + if (my_task == master_task) then + + write(nu_diag,*) ' Overview of model configuration with relevant parameters' + write(nu_diag,*) '=========================================================' + write(nu_diag,*) 'For details, compare namelist output below with the' + write(nu_diag,*) 'Case Settings section in the model documentation.' 
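+ ! the model configuration is echoed group by group below: calendar; grid and
+ ! discretization; horizontal dynamics; ridging and ice strength; thermodynamics;
+ ! radiation; atmospheric and oceanic forcing/coupling; age tracers; melt ponds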
+ write(nu_diag,*) ' ' + write(nu_diag,*) ' Calendar' + write(nu_diag,*) '--------------------------------' + write(nu_diag,1020) ' days_per_year = ',days_per_year,' : number of days in a model year' + if (use_leap_years) then + tmpstr2 = ' : leap days are included' + else + tmpstr2 = ' : leap days are not included' + endif + write(nu_diag,1010) ' use_leap_years = ',use_leap_years,trim(tmpstr2) + write(nu_diag,1002) ' dt = ', dt, ' : model time step' + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Grid, Discretization' + write(nu_diag,*) '--------------------------------' + tmpstr2 = ' ' + if (trim(grid_type) == 'rectangular') tmpstr2 = ' : internally defined, rectangular grid' + if (trim(grid_type) == 'regional') tmpstr2 = ' : user-defined, regional grid' + if (trim(grid_type) == 'displaced_pole') tmpstr2 = ' : user-defined grid with rotated north pole' + if (trim(grid_type) == 'tripole') tmpstr2 = ' : user-defined grid with northern hemisphere zipper' + write(nu_diag,1030) ' grid_type = ',trim(grid_type),trim(tmpstr2) + write(nu_diag,1030) ' grid_ice = ',trim(grid_ice) + write(nu_diag,1030) ' grid_ice_thrm = ',trim(grid_ice_thrm) + write(nu_diag,1030) ' grid_ice_dynu = ',trim(grid_ice_dynu) + write(nu_diag,1030) ' grid_ice_dynv = ',trim(grid_ice_dynv) + write(nu_diag,1030) ' grid_atm = ',trim(grid_atm) + write(nu_diag,1030) ' grid_atm_thrm = ',trim(grid_atm_thrm) + write(nu_diag,1030) ' grid_atm_dynu = ',trim(grid_atm_dynu) + write(nu_diag,1030) ' grid_atm_dynv = ',trim(grid_atm_dynv) + write(nu_diag,1030) ' grid_ocn = ',trim(grid_ocn) + write(nu_diag,1030) ' grid_ocn_thrm = ',trim(grid_ocn_thrm) + write(nu_diag,1030) ' grid_ocn_dynu = ',trim(grid_ocn_dynu) + write(nu_diag,1030) ' grid_ocn_dynv = ',trim(grid_ocn_dynv) + write(nu_diag,1030) ' kmt_type = ',trim(kmt_type) + if (trim(grid_type) /= 'rectangular') then + if (use_bathymetry) then + tmpstr2 = ' : bathymetric input data is used' + else + tmpstr2 = ' : bathymetric input data is not used' + endif + write(nu_diag,1010) ' use_bathymetry = ', use_bathymetry,trim(tmpstr2) + write(nu_diag,1030) ' bathymetry_format= ', trim(bathymetry_format) + endif + write(nu_diag,1020) ' nilyr = ', nilyr, ' : number of ice layers (equal thickness)' + write(nu_diag,1020) ' nslyr = ', nslyr, ' : number of snow layers (equal thickness)' + write(nu_diag,1020) ' nblyr = ', nblyr, ' : number of bio layers (equal thickness)' + if (shortwave(1:4) == 'dEdd') & + write(nu_diag,*) 'dEdd interior and sfc scattering layers are used in both ice, snow (unequal)' + write(nu_diag,1020) ' ncat = ', ncat, ' : number of ice categories' + if (kcatbound == 0) then + tmpstr2 = ' : original ITD category bounds' + elseif (kcatbound == 1) then + tmpstr2 = ' : round-number category bounds' + elseif (kcatbound == 2) then + tmpstr2 = ' : WMO standard ITD categories' + elseif (kcatbound == -1) then + tmpstr2 = ' : one thickness category' + else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1020) ' kcatbound = ', kcatbound,trim(tmpstr2) + if (kitd==0) then + tmpstr2 = ' : delta function ITD approx' + else + tmpstr2 = ' : linear remapping ITD approx' + endif + write(nu_diag,1020) ' kitd = ', kitd,trim(tmpstr2) + + if (tr_fsd) then + tmpstr2 = ' : floe size distribution is enabled' + else + tmpstr2 = ' : floe size distribution is disabled' + endif + write(nu_diag,1010) ' tr_fsd = ', tr_fsd,trim(tmpstr2) + write(nu_diag,1020) ' nfsd = ', nfsd, ' : number of floe size categories' + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Horizontal Dynamics' + write(nu_diag,*) 
'--------------------------------' + if (kdyn == 1) then + tmpstr2 = ' : elastic-viscous-plastic dynamics' + elseif (kdyn == 2) then + tmpstr2 = ' : elastic-anisotropic-plastic dynamics' + elseif (kdyn == 3) then + tmpstr2 = ' : viscous-plastic dynamics' + elseif (kdyn < 1) then + tmpstr2 = ' : dynamics disabled' + else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1020) ' kdyn = ', kdyn,trim(tmpstr2) + if (kdyn >= 1) then + if (kdyn == 1 .or. kdyn == 2) then + if (revised_evp) then + tmpstr2 = ' : revised EVP formulation used' + write(nu_diag,1002) ' arlx = ', arlx, ' : stress equation factor alpha' + write(nu_diag,1002) ' brlx = ', brlx, ' : stress equation factor beta' + else + tmpstr2 = ' : revised EVP formulation not used' + endif + write(nu_diag,1010) ' revised_evp = ', revised_evp,trim(tmpstr2) + + if (evp_algorithm == 'standard_2d') then + tmpstr2 = ' : standard 2d EVP solver' + elseif (evp_algorithm == 'shared_mem_1d') then + tmpstr2 = ' : vectorized 1d EVP solver' + else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1031) ' evp_algorithm = ', trim(evp_algorithm),trim(tmpstr2) + write(nu_diag,1020) ' ndtd = ', ndtd, ' : number of dynamics/advection/ridging steps per thermo timestep' + write(nu_diag,1020) ' ndte = ', ndte, ' : number of EVP or EAP subcycles' + endif + + if (kdyn == 1 .or. kdyn == 3) then + write(nu_diag,1030) ' yield_curve = ', trim(yield_curve), ' : yield curve' + if (trim(yield_curve) == 'ellipse') & + write(nu_diag,1002) ' e_yieldcurve = ', e_yieldcurve, ' : aspect ratio of yield curve' + write(nu_diag,1002) ' e_plasticpot = ', e_plasticpot, ' : aspect ratio of plastic potential' + endif + + if (kdyn == 1) then + write(nu_diag,1003) ' deltamin = ', deltaminEVP, ' : minimum delta for viscosities' + write(nu_diag,1030) ' capping_meth = ', trim(capping_method), ' : capping method for viscosities' + elseif (kdyn == 3) then + write(nu_diag,1003) ' deltamin = ', deltaminVP, ' : minimum delta for viscosities' + write(nu_diag,1030) ' capping_meth = ', trim(capping_method), ' : capping method for viscosities' + endif + !write(nu_diag,1002) ' capping = ', capping, ' : capping value for viscosities' + + write(nu_diag,1002) ' elasticDamp = ', elasticDamp, ' : coefficient for calculating the parameter E' + + if (trim(coriolis) == 'latitude') then + tmpstr2 = ' : latitude-dependent Coriolis parameter' + elseif (trim(coriolis) == 'constant') then + tmpstr2 = ' : = 1.46e-4/s' + elseif (trim(coriolis) == 'zero') then + tmpstr2 = ' : = 0.0' + else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1030) ' coriolis = ',trim(coriolis),trim(tmpstr2) + + if (trim(ssh_stress) == 'geostrophic') then + tmpstr2 = ' : from ocean velocity' + elseif (trim(ssh_stress) == 'coupled') then + tmpstr2 = ' : from coupled sea surface height gradients' + else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1030) ' ssh_stress = ',trim(ssh_stress),trim(tmpstr2) + + if (trim(advection) == 'remap') then + tmpstr2 = ' : linear remapping advection' + elseif (trim(advection) == 'upwind') then + tmpstr2 = ' : donor cell (upwind) advection' + elseif (trim(advection) == 'none') then + tmpstr2 = ' : advection disabled by ktransport namelist' + else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1030) ' advection = ', trim(advection),trim(tmpstr2) + + if (seabed_stress) then + tmpstr2 = ' : use seabed stress parameterization for landfast ice' + else + tmpstr2 = ' : no seabed stress parameterization' + endif + write(nu_diag,1010) ' seabed_stress = ', seabed_stress,trim(tmpstr2)
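+ ! when seabed_stress is enabled, echo the chosen method and its parameters
+ ! (k1, k2, alphab, threshold_hw for LKD; alphab for probabilistic)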
+ if (seabed_stress) then + write(nu_diag,1030) ' seabed method = ',trim(seabed_stress_method) + if (seabed_stress_method == 'LKD') then + write(nu_diag,1002) ' k1 = ', k1, ' : free parameter for landfast ice' + write(nu_diag,1002) ' k2 = ', k2, ' : free parameter for landfast ice' + write(nu_diag,1002) ' alphab = ', alphab, ' : factor for landfast ice' + write(nu_diag,1002) ' threshold_hw = ', threshold_hw, ' : max water depth for grounding ice' + elseif (seabed_stress_method == 'probabilistic') then + write(nu_diag,1002) ' alphab = ', alphab, ' : factor for landfast ice' + endif + endif + if (grid_ice == 'C' .or. grid_ice == 'CD') then + write(nu_diag,1030) ' visc_method= ', trim(visc_method),' : viscosities method (U point)' + endif + + write(nu_diag,1002) ' Ktens = ', Ktens, ' : tensile strength factor' + + if (kdyn == 3) then + write(nu_diag,1020) ' maxits_nonlin = ', maxits_nonlin,' : max nb of iteration for nonlinear solver' + write(nu_diag,1030) ' precond = ', trim(precond),' : preconditioner for FGMRES' + write(nu_diag,1020) ' dim_fgmres = ', dim_fgmres,' : size of FGMRES Krylov subspace' + write(nu_diag,1020) ' dim_pgmres = ', dim_pgmres,' : size of PGMRES Krylov subspace' + write(nu_diag,1020) ' maxits_fgmres = ', maxits_fgmres,' : max nb of iteration for FGMRES' + write(nu_diag,1020) ' maxits_pgmres = ', maxits_pgmres,' : max nb of iteration for PGMRES' + write(nu_diag,1010) ' monitor_nonlin = ', monitor_nonlin,' : print nonlinear residual norm' + write(nu_diag,1010) ' monitor_fgmres = ', monitor_fgmres,' : print FGMRES residual norm' + write(nu_diag,1010) ' monitor_pgmres = ', monitor_pgmres,' : print PGMRES residual norm' + write(nu_diag,1030) ' ortho_type = ', trim(ortho_type),' : type of orthogonalization for FGMRES' + write(nu_diag,1009) ' reltol_nonlin = ', reltol_nonlin,' : nonlinear stopping criterion' + write(nu_diag,1009) ' reltol_fgmres = ', reltol_fgmres,' : FGMRES stopping criterion' + write(nu_diag,1009) ' reltol_pgmres = ', reltol_pgmres,' : PGMRES stopping criterion' + write(nu_diag,1030) ' algo_nonlin = ', trim(algo_nonlin),' : nonlinear algorithm' + write(nu_diag,1010) ' use_mean_vrel = ', use_mean_vrel,' : use mean of previous 2 iterates to compute vrel' + if (algo_nonlin == 'anderson') then + write(nu_diag,1020) ' fpfunc_andacc = ', fpfunc_andacc,' : fixed point function for Anderson acceleration' + write(nu_diag,1020) ' dim_andacc = ', dim_andacc,' : size of Anderson minimization matrix' + write(nu_diag,1009) ' reltol_andacc = ', reltol_andacc,' : relative tolerance for Anderson acceleration' + write(nu_diag,1000) ' damping_andacc = ', damping_andacc,' : damping factor for Anderson acceleration' + write(nu_diag,1020) ' start_andacc = ', start_andacc,' : nonlinear iteration at which acceleration starts' + endif + endif + + endif ! 
kdyn enabled + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Mechanical Deformation (Ridging) and Ice Strength' + write(nu_diag,*) '--------------------------------------------------' + if (kridge == 1) then + tmpstr2 = ' : ridging enabled' + else + tmpstr2 = ' : ridging disabled' + endif + write(nu_diag,1010) ' tr_lvl = ', tr_lvl,' : ridging related tracers' + write(nu_diag,1020) ' kridge = ', kridge,trim(tmpstr2) + if (kridge == 1) then + if (krdg_partic == 1) then + tmpstr2 = ' : new participation function' + else + tmpstr2 = ' : old participation function' + endif + write(nu_diag,1020) ' krdg_partic = ', krdg_partic,trim(tmpstr2) + if (krdg_partic == 1) & + write(nu_diag,1002) ' mu_rdg = ', mu_rdg,' : e-folding scale of ridged ice' + if (krdg_redist == 1) then + tmpstr2 = ' : new redistribution function' + else + tmpstr2 = ' : old redistribution function' + endif + write(nu_diag,1020) ' krdg_redist = ', krdg_redist,trim(tmpstr2) + endif + + if (kstrength == 0) then + tmpstr2 = ' : Hibler (1979)' + elseif (kstrength == 1) then + tmpstr2 = ' : Rothrock (1975)' + else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1020) ' kstrength = ', kstrength,trim(tmpstr2) + if (kstrength == 0) then + write(nu_diag,1009) ' Pstar = ', Pstar, ' : P* strength factor' + write(nu_diag,1002) ' Cstar = ', Cstar, ' : C* strength exponent factor' + elseif (kstrength == 1) then + write(nu_diag,1002) ' Cf = ', Cf, ' : ratio of ridging work to PE change' + endif + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Thermodynamics' + write(nu_diag,*) '--------------------------------' + + if (ktherm == 1) then + tmpstr2 = ' : Bitz and Lipscomb 1999 thermo' + elseif (ktherm == 2) then + tmpstr2 = ' : mushy-layer thermo' + elseif (ktherm < 0) then + tmpstr2 = ' : Thermodynamics disabled' + else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1020) ' ktherm = ', ktherm,trim(tmpstr2) + if (ktherm >= 0) then + write(nu_diag,1002) ' dt = ', dt, ' : thermodynamic time step' + write(nu_diag,1002) ' ksno = ', ksno,' : snow thermal conductivity' + if (ktherm == 1) & + write(nu_diag,1030) ' conduct = ', trim(conduct),' : ice thermal conductivity' + if (ktherm == 2) then + write(nu_diag,1002) ' a_rapid_mode = ', a_rapid_mode,' : brine channel diameter' + write(nu_diag,1002) ' Rac_rapid_mode = ', Rac_rapid_mode,' : critical Rayleigh number' + write(nu_diag,1002) ' aspect_rapid_mode= ', aspect_rapid_mode,' : brine convection aspect ratio' + write(nu_diag,1009) ' dSdt_slow_mode = ', dSdt_slow_mode,' : drainage strength parameter' + write(nu_diag,1002) ' phi_c_slow_mode = ', phi_c_slow_mode,' : critical liquid fraction' + write(nu_diag,1002) ' phi_i_mushy = ', phi_i_mushy,' : solid fraction at lower boundary' + write(nu_diag,1002) ' Tliquidus_max = ', Tliquidus_max,' : max mush liquidus temperature' + endif + write(nu_diag,1002) ' hfrazilmin = ', hfrazilmin,' : minimum new frazil ice thickness' + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Radiation' + write(nu_diag,*) '--------------------------------' + if (trim(shortwave) == 'dEdd') then + tmpstr2 = ' : delta-Eddington multiple-scattering method' + elseif (trim(shortwave) == 'dEdd_snicar_ad') then + tmpstr2 = ' : delta-Eddington multiple-scattering method with SNICAR AD' + elseif (trim(shortwave) == 'ccsm3') then + tmpstr2 = ' : NCAR CCSM3 distribution method' + else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1030) ' shortwave = ', trim(shortwave),trim(tmpstr2) + if (shortwave(1:4) == 'dEdd') then + write(nu_diag,1002) ' R_ice = ', R_ice,' : tuning parameter 
for sea ice albedo' + write(nu_diag,1002) ' R_pnd = ', R_pnd,' : tuning parameter for ponded sea ice albedo' + write(nu_diag,1002) ' R_snw = ', R_snw,' : tuning parameter for snow broadband albedo' + write(nu_diag,1002) ' dT_mlt = ', dT_mlt,' : change in temperature per change in snow grain radius' + write(nu_diag,1002) ' rsnw_mlt = ', rsnw_mlt,' : maximum melting snow grain radius' + write(nu_diag,1002) ' kalg = ', kalg,' : absorption coefficient for algae' + if (trim(shortwave) == 'dEdd_snicar_ad') then + write(nu_diag,1030) ' snw_ssp_table = ', trim(snw_ssp_table) + endif + else + if (trim(albedo_type) == 'ccsm3') then + tmpstr2 = ' : NCAR CCSM3 albedos' + elseif (trim(albedo_type) == 'constant') then + tmpstr2 = ' : four constant albedos' + else + tmpstr2 = ' : unknown value' + abort_list = trim(abort_list)//":23" + endif + write(nu_diag,1030) ' albedo_type = ', trim(albedo_type),trim(tmpstr2) + if (trim(albedo_type) == 'ccsm3') then + write(nu_diag,1002) ' albicev = ', albicev,' : visible ice albedo for thicker ice' + write(nu_diag,1002) ' albicei = ', albicei,' : near infrared ice albedo for thicker ice' + write(nu_diag,1002) ' albsnowv = ', albsnowv,' : visible, cold snow albedo' + write(nu_diag,1002) ' albsnowi = ', albsnowi,' : near infrared, cold snow albedo' + write(nu_diag,1002) ' ahmax = ', ahmax,' : albedo is constant above this thickness' + endif + endif + write(nu_diag,1000) ' emissivity = ', emissivity,' : emissivity of snow and ice' + write(nu_diag,1010) ' sw_redist = ', sw_redist,' : redistribute internal shortwave to surface' + if (sw_redist) then + write(nu_diag,1002) ' sw_frac = ', sw_frac,' : fraction redistributed' + write(nu_diag,1002) ' sw_dtemp = ', sw_dtemp,' : temperature difference from freezing to redistribute' + endif + endif + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Atmospheric Forcing / Coupling' + write(nu_diag,*) '--------------------------------' + write(nu_diag,1010) ' calc_Tsfc = ', calc_Tsfc,' : calculate surface temperature as part of thermo' + write(nu_diag,1010) ' calc_strair = ', calc_strair,' : calculate wind stress and speed' + write(nu_diag,1010) ' rotate_wind = ', rotate_wind,' : rotate wind/stress to computational grid' + write(nu_diag,1010) ' formdrag = ', formdrag,' : use form drag parameterization' + write(nu_diag,1000) ' iceruf = ', iceruf, ' : ice surface roughness at atmosphere interface (m)' + if (trim(atmbndy) == 'constant') then + tmpstr2 = ' : constant-based boundary layer' + elseif (trim(atmbndy) == 'similarity' .or.
& + trim(atmbndy) == 'mixed') then + write(nu_diag,1010) ' highfreq = ', highfreq,' : high-frequency atmospheric coupling' + write(nu_diag,1020) ' natmiter = ', natmiter,' : number of atmo boundary layer iterations' + write(nu_diag,1002) ' atmiter_conv = ', atmiter_conv,' : convergence criterion for ustar' + if (trim(atmbndy) == 'similarity') then + tmpstr2 = ' : stability-based boundary layer' + else + tmpstr2 = ' : stability-based boundary layer for wind stress, constant-based for sensible+latent heat fluxes' + endif + else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1030) ' atmbndy = ', trim(atmbndy),trim(tmpstr2) + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Oceanic Forcing / Coupling' + write(nu_diag,*) '--------------------------------' + if (oceanmixed_ice) then + tmpstr2 = ' : ocean mixed layer calculation (SST) enabled' + else + tmpstr2 = ' : ocean mixed layer calculation (SST) disabled' + endif + write(nu_diag,1010) ' oceanmixed_ice = ', oceanmixed_ice,trim(tmpstr2) + if (oceanmixed_ice) then + write(nu_diag,*) ' WARNING: ocean mixed layer ON' + write(nu_diag,*) ' WARNING: will impact ocean forcing interaction' + write(nu_diag,*) ' WARNING: coupled forcing will be modified by mixed layer routine' + endif + write(nu_diag,1030) ' saltflux_option = ', trim(saltflux_option) + if (trim(saltflux_option) == 'constant') then + write(nu_diag,1002) ' ice_ref_salinity = ',ice_ref_salinity + endif + if (trim(tfrz_option) == 'constant') then + tmpstr2 = ' : constant ocean freezing temperature (Tocnfrz)' + elseif (trim(tfrz_option) == 'minus1p8') then + tmpstr2 = ' : constant ocean freezing temperature (-1.8C) (to be deprecated)' + elseif (trim(tfrz_option) == 'linear_salt') then + tmpstr2 = ' : linear function of salinity (use with ktherm=1)' + elseif (trim(tfrz_option) == 'mushy') then + tmpstr2 = ' : Assur (1958) as in mushy-layer thermo (ktherm=2)' + else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1030) ' tfrz_option = ', trim(tfrz_option),trim(tmpstr2) + if (trim(tfrz_option) == 'constant') then + write(nu_diag,1002) ' Tocnfrz = ', Tocnfrz + endif + write(nu_diag,1030) ' congel_freeze = ', trim(congel_freeze) + if (update_ocn_f) then + tmpstr2 = ' : frazil water/salt fluxes included in ocean fluxes' + else + tmpstr2 = ' : frazil water/salt fluxes not included in ocean fluxes' + endif + write(nu_diag,1010) ' update_ocn_f = ', update_ocn_f,trim(tmpstr2) + write(nu_diag,1030) ' cpl_frazil = ', trim(cpl_frazil) + if (l_mpond_fresh .and. tr_pond_topo) then + tmpstr2 = ' : retain (topo) pond water until ponds drain' + else + tmpstr2 = ' : pond water not retained on ice (virtual only)' + endif + write(nu_diag,1010) ' l_mpond_fresh = ', l_mpond_fresh,trim(tmpstr2) + if (trim(fbot_xfer_type) == 'constant') then + tmpstr2 = ' : ocean heat transfer coefficient is constant' + elseif (trim(fbot_xfer_type) == 'Cdn_ocn') then + tmpstr2 = ' : variable ocean heat transfer coefficient' ! only used with form_drag=T? 
+ else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1030) ' fbot_xfer_type = ', trim(fbot_xfer_type),trim(tmpstr2) + write(nu_diag,1000) ' ustar_min = ', ustar_min,' : minimum value of ocean friction velocity' + write(nu_diag,1000) ' hi_min = ', hi_min,' : minimum ice thickness allowed (m)' + if (calc_dragio) then + tmpstr2 = ' : dragio computed from iceruf_ocn' + else + tmpstr2 = ' : dragio hard-coded' + endif + write(nu_diag,1010) ' calc_dragio = ', calc_dragio,trim(tmpstr2) + if (calc_dragio) then + write(nu_diag,1002) ' iceruf_ocn = ', iceruf_ocn,' : under-ice roughness length' + endif + + if (tr_fsd) then + write(nu_diag,1002) ' floediam = ', floediam, ' constant floe diameter' + if (wave_spec) then + tmpstr2 = ' : use wave spectrum for floe size distribution' + else + tmpstr2 = 'WARNING : floe size distribution does not use wave spectrum' + endif + write(nu_diag,1010) ' wave_spec = ', wave_spec,trim(tmpstr2) + if (wave_spec) then + if (trim(wave_spec_type) == 'none') then + tmpstr2 = ' : no wave data provided, no wave-ice interactions' + elseif (trim(wave_spec_type) == 'profile') then + tmpstr2 = ' : use fixed dummy wave spectrum for testing, sea surface height generated '// & + 'using constant phase (1 iteration of wave fracture)' + elseif (trim(wave_spec_type) == 'constant') then + tmpstr2 = ' : wave spectrum data file provided, sea surface height generated '// & + 'using constant phase (1 iteration of wave fracture)' + elseif (trim(wave_spec_type) == 'random') then + tmpstr2 = ' : wave spectrum data file provided, sea surface height generated using '// & + 'random number (multiple iterations of wave fracture to convergence)' + else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1030) ' wave_spec_type = ', trim(wave_spec_type),trim(tmpstr2) + endif + write(nu_diag,1020) ' nfreq = ', nfreq,' : number of wave spectral forcing frequencies' + endif + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Age related tracers' + write(nu_diag,*) '--------------------------------' + write(nu_diag,1010) ' tr_iage = ', tr_iage,' : chronological ice age' + write(nu_diag,1010) ' tr_FY = ', tr_FY,' : first-year ice area' + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Melt ponds' + write(nu_diag,*) '--------------------------------' + if (tr_pond_lvl) then + write(nu_diag,1010) ' tr_pond_lvl = ', tr_pond_lvl,' : level-ice pond formulation' + write(nu_diag,1002) ' pndaspect = ', pndaspect,' : ratio of pond depth to area fraction' + write(nu_diag,1000) ' dpscale = ', dpscale,' : time scale for flushing in permeable ice' + if (trim(frzpnd) == 'hlid') then + tmpstr2 = ' : Stefan refreezing with pond ice thickness' + elseif (trim(frzpnd) == 'cesm') then + tmpstr2 = ' : CESM refreezing empirical formula' + else + tmpstr2 = ' : unknown value' + endif + write(nu_diag,1030) ' frzpnd = ', trim(frzpnd),trim(tmpstr2) + write(nu_diag,1002) ' hs1 = ', hs1,' : snow depth of transition to pond ice' + elseif (tr_pond_topo) then + write(nu_diag,1010) ' tr_pond_topo = ', tr_pond_topo,' : topo pond formulation' + write(nu_diag,1002) ' hp1 = ', hp1,' : critical ice lid thickness for topo ponds' + elseif (trim(shortwave) == 'ccsm3') then + write(nu_diag,*) 'Pond effects on radiation are treated implicitly in the ccsm3 shortwave scheme' + else + write(nu_diag,*) 'Using default dEdd melt pond scheme for testing only' + endif + + if (shortwave(1:4) == 'dEdd') then + write(nu_diag,1002) ' hs0 = ', hs0,' : snow depth of transition to bare sea ice' + endif + + write(nu_diag,1002) ' rfracmin = ', rfracmin,' : minimum 
fraction of melt water added to ponds' + write(nu_diag,1002) ' rfracmax = ', rfracmax,' : maximum fraction of melt water added to ponds' + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Snow redistribution/metamorphism tracers' + write(nu_diag,*) '-----------------------------------------' + if (tr_snow) then + write(nu_diag,1010) ' tr_snow = ', tr_snow, & + ' : advanced snow physics' + if (snwredist(1:4) == 'none') then + write(nu_diag,1030) ' snwredist = ', trim(snwredist), & + ' : Snow redistribution scheme turned off' + else + if (snwredist(1:4) == 'bulk') then + write(nu_diag,1030) ' snwredist = ', trim(snwredist), & + ' : Using bulk snow redistribution scheme' + elseif (snwredist(1:6) == 'ITDrdg') then + write(nu_diag,1030) ' snwredist = ', trim(snwredist), & + ' : Using ridging based snow redistribution scheme' + write(nu_diag,1002) ' rhosnew = ', rhosnew, & + ' : new snow density (kg/m^3)' + write(nu_diag,1002) ' rhosmin = ', rhosmin, & + ' : minimum snow density (kg/m^3)' + write(nu_diag,1002) ' rhosmax = ', rhosmax, & + ' : maximum snow density (kg/m^3)' + write(nu_diag,1002) ' windmin = ', windmin, & + ' : minimum wind speed to compact snow (m/s)' + write(nu_diag,1002) ' drhosdwind = ', drhosdwind, & + ' : wind compaction factor (kg s/m^4)' + endif + write(nu_diag,1002) ' snwlvlfac = ', snwlvlfac, & + ' : fractional increase in snow depth for redistribution on ridges' + endif + if (.not. snwgrain) then + write(nu_diag,1010) ' snwgrain = ', snwgrain, & + ' : Snow metamorphosis turned off' + else + write(nu_diag,1010) ' snwgrain = ', snwgrain, & + ' : Using snow metamorphosis scheme' + write(nu_diag,1002) ' rsnw_tmax = ', rsnw_tmax, & + ' : maximum snow radius (10^-6 m)' + endif + write(nu_diag,1002) ' rsnw_fall = ', rsnw_fall, & + ' : radius of new snow (10^-6 m)' + if (snwgrain) then + if (use_smliq_pnd) then + tmpstr2 = ' : Using liquid water in snow for melt ponds' + else + tmpstr2 = ' : NOT using liquid water in snow for melt ponds' + endif + write(nu_diag,1010) ' use_smliq_pnd = ', use_smliq_pnd, trim(tmpstr2) + if (snw_aging_table == 'test') then + tmpstr2 = ' : Using 5x5x1 test matrix of internally defined snow aging parameters' + write(nu_diag,1030) ' snw_aging_table = ', trim(snw_aging_table),trim(tmpstr2) + elseif (snw_aging_table == 'snicar') then + tmpstr2 = ' : Reading 3D snow aging parameters from SNICAR file' + write(nu_diag,1030) ' snw_aging_table = ', trim(snw_aging_table),trim(tmpstr2) + write(nu_diag,1031) ' snw_filename = ',trim(snw_filename) + write(nu_diag,1031) ' snw_tau_fname = ',trim(snw_tau_fname) + write(nu_diag,1031) ' snw_kappa_fname = ',trim(snw_kappa_fname) + write(nu_diag,1031) ' snw_drdt0_fname = ',trim(snw_drdt0_fname) + elseif (snw_aging_table == 'file') then + tmpstr2 = ' : Reading 1D and 3D snow aging dimensions and parameters from external file' + write(nu_diag,1030) ' snw_aging_table = ', trim(snw_aging_table),trim(tmpstr2) + write(nu_diag,1031) ' snw_filename = ',trim(snw_filename) + write(nu_diag,1031) ' snw_rhos_fname = ',trim(snw_rhos_fname) + write(nu_diag,1031) ' snw_Tgrd_fname = ',trim(snw_Tgrd_fname) + write(nu_diag,1031) ' snw_T_fname = ',trim(snw_T_fname) + write(nu_diag,1031) ' snw_tau_fname = ',trim(snw_tau_fname) + write(nu_diag,1031) ' snw_kappa_fname = ',trim(snw_kappa_fname) + write(nu_diag,1031) ' snw_drdt0_fname = ',trim(snw_drdt0_fname) + endif + endif + endif + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Primary state variables, tracers' + write(nu_diag,*) ' (excluding biogeochemistry)' + write(nu_diag,*)
'---------------------------------' + write(nu_diag,*) 'Conserved properties (all tracers are conserved):' + write(nu_diag,*) 'ice concentration, volume and enthalpy' + write(nu_diag,*) 'snow volume and enthalpy' + if (ktherm == 2) write(nu_diag,1030) ' ice salinity' + if (tr_fsd) write(nu_diag,1010) ' tr_fsd = ', tr_fsd,' : floe size distribution' + if (tr_lvl) write(nu_diag,1010) ' tr_lvl = ', tr_lvl,' : ridging related tracers' + if (tr_pond_lvl) write(nu_diag,1010) ' tr_pond_lvl = ', tr_pond_lvl,' : level-ice pond formulation' + if (tr_pond_topo) write(nu_diag,1010) ' tr_pond_topo = ', tr_pond_topo,' : topo pond formulation' + if (tr_snow) write(nu_diag,1010) ' tr_snow = ', tr_snow,' : advanced snow physics' + if (tr_iage) write(nu_diag,1010) ' tr_iage = ', tr_iage,' : chronological ice age' + if (tr_FY) write(nu_diag,1010) ' tr_FY = ', tr_FY,' : first-year ice area' + if (tr_iso) write(nu_diag,1010) ' tr_iso = ', tr_iso,' : diagnostic isotope tracers' + if (tr_aero) write(nu_diag,1010) ' tr_aero = ', tr_aero,' : CESM aerosol tracers' + write(nu_diag,*) 'Non-conserved properties:' + write(nu_diag,*) 'ice surface temperature' + write(nu_diag,*) 'ice velocity components and internal stress' + + write(nu_diag,*) ' ' + write(nu_diag,*) ' Other ice_in namelist parameters:' + write(nu_diag,*) '===================================== ' + if (trim(runid) /= 'unknown') & + write(nu_diag,1031) ' runid = ', trim(runid) + write(nu_diag,1031) ' runtype = ', trim(runtype) + write(nu_diag,1021) ' year_init = ', year_init + write(nu_diag,1021) ' month_init = ', month_init + write(nu_diag,1021) ' day_init = ', day_init + write(nu_diag,1021) ' sec_init = ', sec_init + write(nu_diag,1021) ' istep0 = ', istep0 + write(nu_diag,1031) ' npt_unit = ', trim(npt_unit) + write(nu_diag,1021) ' npt = ', npt + write(nu_diag,1021) ' diagfreq = ', diagfreq + write(nu_diag,1011) ' print_global = ', print_global + write(nu_diag,1011) ' print_points = ', print_points + write(nu_diag,1011) ' debug_model = ', debug_model + write(nu_diag,1022) ' debug_model_step = ', debug_model_step + write(nu_diag,1021) ' debug_model_i = ', debug_model_i + write(nu_diag,1021) ' debug_model_j = ', debug_model_j + write(nu_diag,1021) ' debug_model_iblk = ', debug_model_iblk + write(nu_diag,1021) ' debug_model_task = ', debug_model_task + write(nu_diag,1011) ' timer_stats = ', timer_stats + write(nu_diag,1011) ' memory_stats = ', memory_stats + write(nu_diag,1031) ' bfbflag = ', trim(bfbflag) + write(nu_diag,1021) ' numin = ', numin + write(nu_diag,1021) ' numax = ', numax + write(nu_diag,1033) ' histfreq = ', histfreq(:) + write(nu_diag,1023) ' histfreq_n = ', histfreq_n(:) + write(nu_diag,1033) ' histfreq_base = ', histfreq_base(:) + write(nu_diag,1013) ' hist_avg = ', hist_avg(:) + write(nu_diag,1033) ' hist_suffix = ', hist_suffix(:) + write(nu_diag,1031) ' history_dir = ', trim(history_dir) + write(nu_diag,1031) ' history_file = ', trim(history_file) + write(nu_diag,1021) ' history_precision= ', history_precision + write(nu_diag,1031) ' history_format = ', trim(history_format) + write(nu_diag,1031) ' history_rearranger = ', trim(history_rearranger) + write(nu_diag,1021) ' history_iotasks = ', history_iotasks + write(nu_diag,1021) ' history_root = ', history_root + write(nu_diag,1021) ' history_stride = ', history_stride + write(nu_diag,1031) ' hist_time_axis = ', trim(hist_time_axis) + write(nu_diag,1021) ' history_deflate = ', history_deflate + write(nu_diag,1023) ' history_chunksize= ', history_chunksize + if (write_ic) then +
write(nu_diag,1039) ' Initial condition will be written in ', & + trim(incond_dir) + endif + write(nu_diag,1033) ' dumpfreq = ', dumpfreq(:) + write(nu_diag,1023) ' dumpfreq_n = ', dumpfreq_n(:) + write(nu_diag,1033) ' dumpfreq_base = ', dumpfreq_base(:) + write(nu_diag,1011) ' dump_last = ', dump_last + write(nu_diag,1011) ' restart = ', restart + write(nu_diag,1031) ' restart_dir = ', trim(restart_dir) + write(nu_diag,1011) ' restart_ext = ', restart_ext + write(nu_diag,1011) ' restart_coszen = ', restart_coszen + write(nu_diag,1031) ' restart_format = ', trim(restart_format) + write(nu_diag,1021) ' restart_deflate = ', restart_deflate + write(nu_diag,1023) ' restart_chunksize= ', restart_chunksize +! write(nu_diag,1011) ' lcdf64 = ', lcdf64 ! deprecated + write(nu_diag,1031) ' restart_rearranger = ', trim(restart_rearranger) + write(nu_diag,1021) ' restart_iotasks = ', restart_iotasks + write(nu_diag,1021) ' restart_root = ', restart_root + write(nu_diag,1021) ' restart_stride = ', restart_stride + write(nu_diag,1031) ' restart_file = ', trim(restart_file) + write(nu_diag,1031) ' pointer_file = ', trim(pointer_file) + write(nu_diag,1011) ' use_restart_time = ', use_restart_time + write(nu_diag,1031) ' ice_ic = ', trim(ice_ic) + if (trim(grid_type) /= 'rectangular' .or. & + trim(grid_type) /= 'column') then + write(nu_diag,1031) ' grid_file = ', trim(grid_file) + write(nu_diag,1031) ' gridcpl_file = ', trim(gridcpl_file) + write(nu_diag,1031) ' bathymetry_file = ', trim(bathymetry_file) + if (trim(kmt_type) == 'file') & + write(nu_diag,1031) ' kmt_file = ', trim(kmt_file) + endif + + write(nu_diag,1011) ' conserv_check = ', conserv_check + + write(nu_diag,1021) ' fyear_init = ', fyear_init + write(nu_diag,1021) ' ycycle = ', ycycle + write(nu_diag,1031) ' atm_data_type = ', trim(atm_data_type) + write(nu_diag,1031) ' atm_data_version = ', trim(atm_data_version) + + if (trim(atm_data_type) /= 'default') then + write(nu_diag,1031) ' atm_data_dir = ', trim(atm_data_dir) + write(nu_diag,1031) ' precip_units = ', trim(precip_units) + elseif (trim(atm_data_type) == 'default') then + write(nu_diag,1031) ' default_season = ', trim(default_season) + endif + + if (wave_spec) then + write(nu_diag,1031) ' wave_spec_file = ', trim(wave_spec_file) + endif + if (trim(bgc_data_type) == 'ncar' .or. & + trim(ocn_data_type) == 'ncar') then + write(nu_diag,1031) ' oceanmixed_file = ', trim(oceanmixed_file) + endif + if (cpl_bgc) then + write(nu_diag,*) 'BGC coupling is switched ON' + else + write(nu_diag,*) 'BGC coupling is switched OFF' + endif + write(nu_diag,1031) ' bgc_data_type = ', trim(bgc_data_type) + write(nu_diag,1031) ' fe_data_type = ', trim(fe_data_type) + write(nu_diag,1031) ' ice_data_type = ', trim(ice_data_type) + write(nu_diag,1031) ' ice_data_conc = ', trim(ice_data_conc) + write(nu_diag,1031) ' ice_data_dist = ', trim(ice_data_dist) + write(nu_diag,1031) ' bgc_data_dir = ', trim(bgc_data_dir) + write(nu_diag,1031) ' ocn_data_type = ', trim(ocn_data_type) + if (trim(bgc_data_type) /= 'default' .or. & + trim(ocn_data_type) /= 'default') then + write(nu_diag,1031) ' ocn_data_dir = ', trim(ocn_data_dir) + write(nu_diag,1011) ' restore_ocn = ', restore_ocn + endif + write(nu_diag,1011) ' restore_ice = ', restore_ice + if (restore_ice .or. 
restore_ocn) & + write(nu_diag,1021) ' trestore = ', trestore + + write(nu_diag,*) ' ' + write(nu_diag,'(a31,2f8.2)') 'Diagnostic point 1: lat, lon =', & + latpnt(1), lonpnt(1) + write(nu_diag,'(a31,2f8.2)') 'Diagnostic point 2: lat, lon =', & + latpnt(2), lonpnt(2) + write(nu_diag,*) ' ' + + ! tracer restarts + write(nu_diag,1011) ' restart_age = ', restart_age + write(nu_diag,1011) ' restart_FY = ', restart_FY + write(nu_diag,1011) ' restart_lvl = ', restart_lvl + write(nu_diag,1011) ' restart_pond_lvl = ', restart_pond_lvl + write(nu_diag,1011) ' restart_pond_topo= ', restart_pond_topo + write(nu_diag,1011) ' restart_snow = ', restart_snow + write(nu_diag,1011) ' restart_iso = ', restart_iso + write(nu_diag,1011) ' restart_aero = ', restart_aero + write(nu_diag,1011) ' restart_fsd = ', restart_fsd + + write(nu_diag,1021) ' n_iso = ', n_iso + write(nu_diag,1021) ' n_aero = ', n_aero + write(nu_diag,1021) ' n_zaero = ', n_zaero + write(nu_diag,1021) ' n_algae = ', n_algae + write(nu_diag,1021) ' n_doc = ', n_doc + write(nu_diag,1021) ' n_dic = ', n_dic + write(nu_diag,1021) ' n_don = ', n_don + write(nu_diag,1021) ' n_fed = ', n_fed + write(nu_diag,1021) ' n_fep = ', n_fep + write(nu_diag,*) ' ' + + endif ! my_task = master_task + + if (grid_type /= 'displaced_pole' .and. & + grid_type /= 'tripole' .and. & + grid_type /= 'column' .and. & + grid_type /= 'rectangular' .and. & + grid_type /= 'cpom_grid' .and. & + grid_type /= 'regional' .and. & + grid_type /= 'latlon') then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: unknown grid_type=',trim(grid_type) + abort_list = trim(abort_list)//":20" + endif + + if (grid_ice /= 'B' .and. & + grid_ice /= 'C' .and. & + grid_ice /= 'CD' ) then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: unknown grid_ice=',trim(grid_ice) + abort_list = trim(abort_list)//":26" + endif + + if (kmt_type /= 'file' .and. & + kmt_type /= 'channel' .and. & + kmt_type /= 'channel_oneeast' .and. & + kmt_type /= 'channel_onenorth' .and. & + kmt_type /= 'wall' .and. & + kmt_type /= 'default' .and. & + kmt_type /= 'boxislands') then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: unknown kmt_type=',trim(kmt_type) + abort_list = trim(abort_list)//":27" + endif + + if (grid_type /= 'column' .and. & + grid_type /= 'rectangular' .and. & + kmt_type /= 'file') then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: need kmt file, kmt_type=',trim(kmt_type) + abort_list = trim(abort_list)//":28" + endif + + if (kdyn == 1 .and. & + evp_algorithm /= 'standard_2d' .and. 
& + evp_algorithm /= 'shared_mem_1d') then + if (my_task == master_task) write(nu_diag,*) subname//' ERROR: unknown evp_algorithm=',trim(evp_algorithm) + abort_list = trim(abort_list)//":21" + endif + + if (abort_list /= "") then + call flush_fileunit(nu_diag) + endif + call ice_barrier() + if (abort_list /= "") then + write(nu_diag,*) subname,' ERROR: abort_list = ',trim(abort_list) + call abort_ice (subname//' ABORTING on input ERRORS', & + file=__FILE__, line=__LINE__) + endif + + call icepack_init_parameters(ustar_min_in=ustar_min, albicev_in=albicev, albicei_in=albicei, & + albsnowv_in=albsnowv, albsnowi_in=albsnowi, natmiter_in=natmiter, atmiter_conv_in=atmiter_conv, & + emissivity_in=emissivity, snw_ssp_table_in=snw_ssp_table, hi_min_in=hi_min, & + ahmax_in=ahmax, shortwave_in=shortwave, albedo_type_in=albedo_type, R_ice_in=R_ice, R_pnd_in=R_pnd, & + R_snw_in=R_snw, dT_mlt_in=dT_mlt, rsnw_mlt_in=rsnw_mlt, & + kstrength_in=kstrength, krdg_partic_in=krdg_partic, krdg_redist_in=krdg_redist, mu_rdg_in=mu_rdg, & + atmbndy_in=atmbndy, calc_strair_in=calc_strair, formdrag_in=formdrag, highfreq_in=highfreq, & + kitd_in=kitd, kcatbound_in=kcatbound, hs0_in=hs0, dpscale_in=dpscale, frzpnd_in=frzpnd, & + rfracmin_in=rfracmin, rfracmax_in=rfracmax, pndaspect_in=pndaspect, hs1_in=hs1, hp1_in=hp1, & + ktherm_in=ktherm, calc_Tsfc_in=calc_Tsfc, conduct_in=conduct, & + a_rapid_mode_in=a_rapid_mode, Rac_rapid_mode_in=Rac_rapid_mode, & + floediam_in=floediam, hfrazilmin_in=hfrazilmin, Tliquidus_max_in=Tliquidus_max, & + aspect_rapid_mode_in=aspect_rapid_mode, dSdt_slow_mode_in=dSdt_slow_mode, & + phi_c_slow_mode_in=phi_c_slow_mode, phi_i_mushy_in=phi_i_mushy, conserv_check_in=conserv_check, & + wave_spec_type_in = wave_spec_type, wave_spec_in=wave_spec, nfreq_in=nfreq, & + update_ocn_f_in=update_ocn_f, cpl_frazil_in=cpl_frazil, congel_freeze_in=congel_freeze, & + tfrz_option_in=tfrz_option, kalg_in=kalg, fbot_xfer_type_in=fbot_xfer_type, & + saltflux_option_in=saltflux_option, ice_ref_salinity_in=ice_ref_salinity, & + Pstar_in=Pstar, Cstar_in=Cstar, iceruf_in=iceruf, iceruf_ocn_in=iceruf_ocn, calc_dragio_in=calc_dragio, & + windmin_in=windmin, drhosdwind_in=drhosdwind, & + rsnw_fall_in=rsnw_fall, rsnw_tmax_in=rsnw_tmax, rhosnew_in=rhosnew, & + snwlvlfac_in=snwlvlfac, rhosmin_in=rhosmin, rhosmax_in=rhosmax, & + snwredist_in=snwredist, snwgrain_in=snwgrain, snw_aging_table_in=trim(snw_aging_table), & + sw_redist_in=sw_redist, sw_frac_in=sw_frac, sw_dtemp_in=sw_dtemp) + call icepack_init_tracer_flags(tr_iage_in=tr_iage, tr_FY_in=tr_FY, & + tr_lvl_in=tr_lvl, tr_iso_in=tr_iso, tr_aero_in=tr_aero, & + tr_fsd_in=tr_fsd, tr_snow_in=tr_snow, tr_pond_in=tr_pond, & + tr_pond_lvl_in=tr_pond_lvl, tr_pond_topo_in=tr_pond_topo) + call icepack_init_tracer_sizes(ncat_in=ncat, nilyr_in=nilyr, nslyr_in=nslyr, nblyr_in=nblyr, & + nfsd_in=nfsd, n_algae_in=n_algae, n_iso_in=n_iso, n_aero_in=n_aero, & + n_DOC_in=n_DOC, n_DON_in=n_DON, & + n_DIC_in=n_DIC, n_fed_in=n_fed, n_fep_in=n_fep, n_zaero_in=n_zaero) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + 1000 format (a20,1x,f13.6,1x,a) ! float + 1002 format (a20,5x,f9.2,1x,a) + 1003 format (a20,1x,G13.4,1x,a) + 1009 format (a20,1x,d13.6,1x,a) + 1010 format (a20,8x,l6,1x,a) ! logical + 1011 format (a20,1x,l6) + 1013 format (a20,1x,6l3) + 1020 format (a20,8x,i6,1x,a) ! 
integer + 1021 format (a20,1x,i6) + 1022 format (a20,1x,i12) + 1023 format (a20,1x,6i6) + 1030 format (a20,a14,1x,a) ! character + 1031 format (a20,1x,a,a) + 1033 format (a20,1x,6a6) + 1039 format (a,1x,a,1x,a,1x,a) + + end subroutine input_data + +!======================================================================= + +! Initialize state for the itd model +! +! authors: C. M. Bitz, UW +! William H. Lipscomb, LANL + + subroutine init_state + + use ice_blocks, only: block, get_block, nx_block, ny_block + use ice_domain, only: nblocks, blocks_ice, halo_info + use ice_domain_size, only: ncat, nilyr, nslyr, n_iso, n_aero, nfsd + use ice_flux, only: sst, Tf, Tair, salinz, Tmltz + use ice_grid, only: tmask, umask, ULON, TLAT, grid_ice, grid_average_X2Y + use ice_boundary, only: ice_HaloUpdate + use ice_constants, only: field_loc_Nface, field_loc_Eface, field_type_scalar + use ice_state, only: trcr_depend, aicen, trcrn, vicen, vsnon, & + aice0, aice, vice, vsno, trcr, aice_init, bound_state, & + n_trcr_strata, nt_strata, trcr_base, uvel, vvel, & + uvelN, vvelN, uvelE, vvelE + + integer (kind=int_kind) :: & + ilo, ihi , & ! physical domain indices + jlo, jhi , & ! physical domain indices + iglob(nx_block), & ! global indices + jglob(ny_block), & ! global indices + i, j , & ! horizontal indices + k , & ! vertical index + it , & ! tracer index + iblk ! block index + + + integer (kind=int_kind) :: ntrcr + logical (kind=log_kind) :: tr_iage, tr_FY, tr_lvl, tr_iso, tr_aero + logical (kind=log_kind) :: tr_pond_lvl, tr_pond_topo + logical (kind=log_kind) :: tr_snow, tr_fsd + integer (kind=int_kind) :: nt_Tsfc, nt_sice, nt_qice, nt_qsno, nt_iage, nt_FY + integer (kind=int_kind) :: nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd + integer (kind=int_kind) :: nt_smice, nt_smliq, nt_rhos, nt_rsnw + integer (kind=int_kind) :: nt_isosno, nt_isoice, nt_aero, nt_fsd + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname='(init_state)' + + !----------------------------------------------------------------- + + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_iso_out=tr_iso, tr_aero_out=tr_aero, & + tr_pond_lvl_out=tr_pond_lvl, tr_pond_topo_out=tr_pond_topo, & + tr_snow_out=tr_snow, tr_fsd_out=tr_fsd) + call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & + nt_qice_out=nt_qice, nt_qsno_out=nt_qsno, nt_iage_out=nt_iage, nt_fy_out=nt_fy, & + nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, & + nt_ipnd_out=nt_ipnd, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & + nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! Check number of layers in ice and snow. 
+ !----------------------------------------------------------------- + + if (my_task == master_task) then + + if (nilyr < 1) then + write(nu_diag,*) subname//' ERROR: Must have at least one ice layer' + write(nu_diag,*) subname//' ERROR: nilyr =', nilyr + call abort_ice (error_message=subname//' Not enough ice layers', & + file=__FILE__, line=__LINE__) + endif + + if (nslyr < 1) then + write(nu_diag,*) subname//' ERROR: Must have at least one snow layer' + write(nu_diag,*) subname//' ERROR: nslyr =', nslyr + call abort_ice(error_message=subname//' Not enough snow layers', & + file=__FILE__, line=__LINE__) + endif + + endif ! my_task + + !----------------------------------------------------------------- + ! Set tracer types + !----------------------------------------------------------------- + + trcr_depend(nt_Tsfc) = 0 ! ice/snow surface temperature + do k = 1, nilyr + trcr_depend(nt_sice + k - 1) = 1 ! volume-weighted ice salinity + trcr_depend(nt_qice + k - 1) = 1 ! volume-weighted ice enthalpy + enddo + do k = 1, nslyr + trcr_depend(nt_qsno + k - 1) = 2 ! volume-weighted snow enthalpy + enddo + if (tr_iage) trcr_depend(nt_iage) = 1 ! volume-weighted ice age + if (tr_FY) trcr_depend(nt_FY) = 0 ! area-weighted first-year ice area + if (tr_lvl) trcr_depend(nt_alvl) = 0 ! level ice area + if (tr_lvl) trcr_depend(nt_vlvl) = 1 ! level ice volume + if (tr_pond_lvl) then + trcr_depend(nt_apnd) = 2+nt_alvl ! melt pond area + trcr_depend(nt_hpnd) = 2+nt_apnd ! melt pond depth + trcr_depend(nt_ipnd) = 2+nt_apnd ! refrozen pond lid + endif + if (tr_pond_topo) then + trcr_depend(nt_apnd) = 0 ! melt pond area + trcr_depend(nt_hpnd) = 2+nt_apnd ! melt pond depth + trcr_depend(nt_ipnd) = 2+nt_apnd ! refrozen pond lid + endif + if (tr_snow) then ! snow-volume-weighted snow tracers + do k = 1, nslyr + trcr_depend(nt_smice + k - 1) = 2 ! ice mass in snow + trcr_depend(nt_smliq + k - 1) = 2 ! liquid mass in snow + trcr_depend(nt_rhos + k - 1) = 2 ! effective snow density + trcr_depend(nt_rsnw + k - 1) = 2 ! snow radius + enddo + endif + if (tr_fsd) then + do it = 1, nfsd + trcr_depend(nt_fsd + it - 1) = 0 ! area-weighted floe size distribution + enddo + endif + if (tr_iso) then ! isotopes + do it = 1, n_iso + trcr_depend(nt_isosno+it-1) = 2 ! snow + trcr_depend(nt_isoice+it-1) = 1 ! ice + enddo + endif + if (tr_aero) then ! volume-weighted aerosols + do it = 1, n_aero + trcr_depend(nt_aero+(it-1)*4 ) = 2 ! snow + trcr_depend(nt_aero+(it-1)*4+1) = 2 ! snow + trcr_depend(nt_aero+(it-1)*4+2) = 1 ! ice + trcr_depend(nt_aero+(it-1)*4+3) = 1 ! ice + enddo + endif + + trcr_base = c0 + + do it = 1, ntrcr + ! mask for base quantity on which tracers are carried + if (trcr_depend(it) == 0) then ! area + trcr_base(it,1) = c1 + elseif (trcr_depend(it) == 1) then ! ice volume + trcr_base(it,2) = c1 + elseif (trcr_depend(it) == 2) then ! snow volume + trcr_base(it,3) = c1 + else + trcr_base(it,1) = c1 ! default: ice area + trcr_base(it,2) = c0 + trcr_base(it,3) = c0 + endif + + ! initialize number of underlying tracer layers + n_trcr_strata(it) = 0 + ! default indices of underlying tracer layers + nt_strata (it,1) = 0 + nt_strata (it,2) = 0 + enddo + + if (tr_pond_lvl) then + n_trcr_strata(nt_apnd) = 1 ! melt pond area + nt_strata (nt_apnd,1) = nt_alvl ! on level ice area + n_trcr_strata(nt_hpnd) = 2 ! melt pond depth + nt_strata (nt_hpnd,2) = nt_apnd ! on melt pond area + nt_strata (nt_hpnd,1) = nt_alvl ! on level ice area + n_trcr_strata(nt_ipnd) = 2 ! refrozen pond lid + nt_strata (nt_ipnd,2) = nt_apnd ! 
on melt pond area + nt_strata (nt_ipnd,1) = nt_alvl ! on level ice area + endif + if (tr_pond_topo) then + n_trcr_strata(nt_hpnd) = 1 ! melt pond depth + nt_strata (nt_hpnd,1) = nt_apnd ! on melt pond area + n_trcr_strata(nt_ipnd) = 1 ! refrozen pond lid + nt_strata (nt_ipnd,1) = nt_apnd ! on melt pond area + endif + + !----------------------------------------------------------------- + ! Set state variables + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk,ilo,ihi,jlo,jhi,this_block, & + !$OMP iglob,jglob) + do iblk = 1, nblocks + + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + iglob = this_block%i_glob + jglob = this_block%j_glob + + call set_state_var (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + iglob, jglob, & + ice_ic, tmask(:,:, iblk), & + umask(:,:, iblk), & + ULON (:,:, iblk), & + TLAT (:,:, iblk), & + Tair (:,:, iblk), sst (:,:, iblk), & + Tf (:,:, iblk), & + salinz(:,:,:, iblk), Tmltz(:,:,:, iblk), & + aicen(:,:, :,iblk), trcrn(:,:,:,:,iblk), & + vicen(:,:, :,iblk), vsnon(:,:, :,iblk), & + uvel (:,:, iblk), vvel (:,:, iblk)) + + enddo ! iblk + !$OMP END PARALLEL DO + + !----------------------------------------------------------------- + ! ghost cell updates + !----------------------------------------------------------------- + + call bound_state (aicen, & + vicen, vsnon, & + ntrcr, trcrn) + + if (grid_ice == 'CD' .or. grid_ice == 'C') then + + call grid_average_X2Y('A',uvel,'U',uvelN,'N') + call grid_average_X2Y('A',vvel,'U',vvelN,'N') + call grid_average_X2Y('A',uvel,'U',uvelE,'E') + call grid_average_X2Y('A',vvel,'U',vvelE,'E') + + ! Halo update on North, East faces + call ice_HaloUpdate(uvelN, halo_info, & + field_loc_Nface, field_type_scalar) + call ice_HaloUpdate(vvelN, halo_info, & + field_loc_Nface, field_type_scalar) + + call ice_HaloUpdate(uvelE, halo_info, & + field_loc_Eface, field_type_scalar) + call ice_HaloUpdate(vvelE, halo_info, & + field_loc_Eface, field_type_scalar) + + endif + + !----------------------------------------------------------------- + ! compute aggregate ice state and open water area + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk,it,i,j) + do iblk = 1, nblocks + + do j = 1, ny_block + do i = 1, nx_block + aice(i,j,iblk) = c0 + vice(i,j,iblk) = c0 + vsno(i,j,iblk) = c0 + do it = 1, ntrcr + trcr(i,j,it,iblk) = c0 + enddo + + if (tmask(i,j,iblk)) & + call icepack_aggregate(aicen = aicen(i,j,:,iblk), & + trcrn = trcrn(i,j,:,:,iblk), & + vicen = vicen(i,j,:,iblk), & + vsnon = vsnon(i,j,:,iblk), & + aice = aice (i,j, iblk), & + trcr = trcr (i,j,:,iblk), & + vice = vice (i,j, iblk), & + vsno = vsno (i,j, iblk), & + aice0 = aice0(i,j, iblk), & + trcr_depend = trcr_depend(:), & + trcr_base = trcr_base(:,:), & + n_trcr_strata = n_trcr_strata(:), & + nt_strata = nt_strata(:,:), & + Tf = Tf(i,j,iblk)) + + aice_init(i,j,iblk) = aice(i,j,iblk) + + enddo + enddo + + enddo ! iblk + !$OMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + end subroutine init_state + +!======================================================================= + +! Initialize state in each ice thickness category +! +! authors: C. M. Bitz +! William H. 
Lipscomb, LANL + + subroutine set_state_var (nx_block, ny_block, & + ilo, ihi, jlo, jhi, & + iglob, jglob, & + ice_ic, tmask, & + umask, & + ULON, & + TLAT, & + Tair, sst, & + Tf, & + salinz, Tmltz, & + aicen, trcrn, & + vicen, vsnon, & + uvel, vvel) + + + use ice_arrays_column, only: hin_max + use ice_domain_size, only: nilyr, nslyr, nx_global, ny_global, ncat + use ice_grid, only: dxrect, dyrect + use ice_forcing, only: ice_data_type, ice_data_conc, ice_data_dist + + integer (kind=int_kind), intent(in) :: & + nx_block, ny_block, & ! block dimensions + ilo, ihi , & ! physical domain indices + jlo, jhi , & ! + iglob(nx_block) , & ! global indices + jglob(ny_block) ! + + character(len=char_len_long), intent(in) :: & + ice_ic ! method of ice cover initialization + + logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: & + tmask , & ! true for ice/ocean cells + umask ! for U points + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + ULON , & ! longitude of velocity pts (radians) + TLAT ! latitude of temperature pts (radians) + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(in) :: & + Tair , & ! air temperature (K) + Tf , & ! freezing temperature (C) + sst ! sea surface temperature (C) + + real (kind=dbl_kind), dimension (nx_block,ny_block,nilyr), intent(in) :: & + salinz , & ! initial salinity profile + Tmltz ! initial melting temperature profile + + real (kind=dbl_kind), dimension (nx_block,ny_block,ncat), intent(out) :: & + aicen , & ! concentration of ice + vicen , & ! volume per unit area of ice (m) + vsnon ! volume per unit area of snow (m) + + real (kind=dbl_kind), intent(out), dimension (:,:,:,:) :: & ! (nx_block,ny_block,ntrcr,ncat) + trcrn ! ice tracers + ! 1: surface temperature of ice/snow (C) + + real (kind=dbl_kind), dimension (nx_block,ny_block), intent(out) :: & + uvel , & ! ice velocity B grid + vvel ! + + ! local variables + integer (kind=int_kind) :: & + i, j , & ! horizontal indices + ij , & ! horizontal index, combines i and j loops + k , & ! ice layer index + n , & ! thickness category index + it , & ! tracer index + iedge , & ! edge around big block + jedge , & ! edge around big block + icells ! number of cells initialized with ice + + logical (kind=log_kind) :: & + in_slot, in_cyl ! boxslotcyl flags + + real (kind=dbl_kind) :: & ! boxslotcyl parameters + diam , & ! cylinder diameter + radius , & ! cylinder radius + center_x, & ! cylinder center + center_y, & + width , & ! slot width + length ! slot height + + integer (kind=int_kind), dimension(nx_block*ny_block) :: & + indxi, indxj ! compressed indices for cells with aicen > puny + + real (kind=dbl_kind) :: & + Tsfc, asum, hbar, abar, puny, rhos, Lfresh, rad_to_deg, rsnw_fall, dist_ratio, Tffresh + + real (kind=dbl_kind), dimension(ncat) :: & + ainit, hinit ! initial area, thickness + + real (kind=dbl_kind), dimension(nilyr) :: & + qin ! ice enthalpy (J/m3) + + real (kind=dbl_kind), dimension(nslyr) :: & + qsn ! snow enthalpy (J/m3) + + real (kind=dbl_kind), parameter :: & + hsno_init = 0.20_dbl_kind , & ! initial snow thickness (m) + edge_init_nh = 70._dbl_kind, & ! initial ice edge, N.Hem. (deg) + edge_init_sh = -60._dbl_kind ! initial ice edge, S.Hem. (deg) + + real (kind=dbl_kind) :: & ! boxslotcyl + pi , & ! pi + secday , & ! seconds per day + max_vel , & ! max velocity + domain_length , & ! physical domain length + period ! 
rotational period + + logical (kind=log_kind) :: tr_brine, tr_lvl, tr_snow + integer (kind=int_kind) :: ntrcr + integer (kind=int_kind) :: nt_Tsfc, nt_qice, nt_qsno, nt_sice + integer (kind=int_kind) :: nt_fbri, nt_alvl, nt_vlvl + integer (kind=int_kind) :: nt_smice, nt_smliq, nt_rhos, nt_rsnw + + character(len=*), parameter :: subname='(set_state_var)' + + !----------------------------------------------------------------- + + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) + call icepack_query_tracer_flags(tr_brine_out=tr_brine, tr_lvl_out=tr_lvl, & + tr_snow_out=tr_snow) + call icepack_query_tracer_indices( nt_Tsfc_out=nt_Tsfc, nt_qice_out=nt_qice, & + nt_qsno_out=nt_qsno, nt_sice_out=nt_sice, & + nt_fbri_out=nt_fbri, nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & + nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & + nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw) + call icepack_query_parameters(rhos_out=rhos, Lfresh_out=Lfresh, puny_out=puny, & + rad_to_deg_out=rad_to_deg, rsnw_fall_out=rsnw_fall, Tffresh_out=Tffresh) + call icepack_query_parameters(secday_out=secday, pi_out=pi) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + indxi(:) = 0 + indxj(:) = 0 + + ! Initialize state variables. + ! If restarting, these values are overwritten. + + do n = 1, ncat + do j = 1, ny_block + do i = 1, nx_block + aicen(i,j,n) = c0 + vicen(i,j,n) = c0 + vsnon(i,j,n) = c0 + if (tmask(i,j)) then + trcrn(i,j,nt_Tsfc,n) = Tf(i,j) ! surface temperature + else + trcrn(i,j,nt_Tsfc,n) = c0 ! at land grid cells (for clean history/restart files) + endif + if (ntrcr >= 2) then + do it = 2, ntrcr + trcrn(i,j,it,n) = c0 + enddo + endif + if (tr_lvl) trcrn(i,j,nt_alvl,n) = c1 + if (tr_lvl) trcrn(i,j,nt_vlvl,n) = c1 + if (tr_brine) trcrn(i,j,nt_fbri,n) = c1 + do k = 1, nilyr + trcrn(i,j,nt_sice+k-1,n) = salinz(i,j,k) + enddo + do k = 1, nslyr + trcrn(i,j,nt_qsno+k-1,n) = -rhos * Lfresh + enddo + if (tr_snow) then + do k = 1, nslyr + trcrn(i,j,nt_rsnw +k-1,n) = rsnw_fall + trcrn(i,j,nt_rhos +k-1,n) = rhos + trcrn(i,j,nt_smice+k-1,n) = rhos + trcrn(i,j,nt_smliq+k-1,n) = c0 + enddo ! nslyr + endif + enddo + enddo + enddo + + if (trim(ice_ic) == 'internal') then + + !--------------------------------------------------------- + ! ice concentration/thickness + !--------------------------------------------------------- + + if (trim(ice_data_conc) == 'p5' .or. & + trim(ice_data_conc) == 'p8' .or. & + trim(ice_data_conc) == 'p9' .or. & + trim(ice_data_conc) == 'c1' .or. & + trim(ice_data_conc) == 'box2001') then + + if (trim(ice_data_conc) == 'p5') then + hbar = c2 ! initial ice thickness + abar = p5 ! initial ice concentration + elseif (trim(ice_data_conc) == 'p8') then + hbar = c1 ! initial ice thickness + abar = 0.8_dbl_kind ! initial ice concentration + elseif (trim(ice_data_conc) == 'p9') then + hbar = c1 ! initial ice thickness + abar = 0.9_dbl_kind ! initial ice concentration + elseif (trim(ice_data_conc) == 'c1') then + hbar = c1 ! initial ice thickness + abar = c1 ! initial ice concentration + elseif (trim(ice_data_conc) == 'box2001') then + hbar = c2 ! initial ice thickness + abar = p5 ! initial ice concentration + endif + + do n = 1, ncat + hinit(n) = c0 + ainit(n) = c0 + if (hbar > hin_max(n-1) .and. hbar <= hin_max(n)) then + hinit(n) = hbar + ainit(n) = abar + endif + enddo + + elseif (trim(ice_data_conc) == 'parabolic') then + + ! initial category areas in cells with ice + hbar = c3 ! 
initial ice thickness with greatest area + ! Note: the resulting average ice thickness + ! tends to be less than hbar due to the + ! nonlinear distribution of ice thicknesses + asum = c0 + do n = 1, ncat + if (n < ncat) then + hinit(n) = p5*(hin_max(n-1) + hin_max(n)) ! m + else ! n=ncat + hinit(n) = (hin_max(n-1) + c1) ! m + endif + ! parabola, max at h=hbar, zero at h=0, 2*hbar + ainit(n) = max(c0, (c2*hbar*hinit(n) - hinit(n)**2)) + asum = asum + ainit(n) + enddo + do n = 1, ncat + ainit(n) = ainit(n) / (asum + puny/ncat) ! normalize + enddo + + else + + call abort_ice(subname//'ERROR: ice_data_conc setting = '//trim(ice_data_conc), & + file=__FILE__, line=__LINE__) + + endif ! ice_data_conc + + !--------------------------------------------------------- + ! location of ice + !--------------------------------------------------------- + + if (trim(ice_data_type) == 'box2001') then + + ! place ice on left side of domain + icells = 0 + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j)) then + if (ULON(i,j) < -50./rad_to_deg) then + icells = icells + 1 + indxi(icells) = i + indxj(icells) = j + endif ! ULON + endif ! tmask + enddo ! i + enddo ! j + + elseif (trim(ice_data_type) == 'boxslotcyl') then + + ! Geometric configuration of the slotted cylinder + diam = p3 *dxrect*(nx_global-1) + center_x = p5 *dxrect*(nx_global-1) + center_y = p75*dyrect*(ny_global-1) + radius = p5*diam + width = p166*diam + length = c5*p166*diam + + icells = 0 + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j)) then + ! check if grid point is inside slotted cylinder + in_slot = (dxrect*real(iglob(i)-1, kind=dbl_kind) >= center_x - width/c2) .and. & + (dxrect*real(iglob(i)-1, kind=dbl_kind) <= center_x + width/c2) .and. & + (dyrect*real(jglob(j)-1, kind=dbl_kind) >= center_y - radius) .and. & + (dyrect*real(jglob(j)-1, kind=dbl_kind) <= center_y + (length - radius)) + + in_cyl = sqrt((dxrect*real(iglob(i)-1, kind=dbl_kind) - center_x)**c2 + & + (dyrect*real(jglob(j)-1, kind=dbl_kind) - center_y)**c2) <= radius + + if (in_cyl .and. .not. in_slot) then + icells = icells + 1 + indxi(icells) = i + indxj(icells) = j + endif + endif + enddo + enddo + + elseif (trim(ice_data_type) == 'uniform') then + ! all cells not land mask are ice + icells = 0 + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j)) then + icells = icells + 1 + indxi(icells) = i + indxj(icells) = j + endif + enddo + enddo + + elseif (ice_data_type(1:7) == 'channel') then + ! channel ice in center of domain in i direction + icells = 0 + do j = jlo, jhi + do i = ilo, ihi + if (jglob(j) > ny_global/4 .and. jglob(j) < 3*nx_global/4) then + icells = icells + 1 + indxi(icells) = i + indxj(icells) = j + endif + enddo + enddo + + elseif (trim(ice_data_type) == 'block') then + ! ice in 50% of domain, not at edges + icells = 0 + iedge = int(real(nx_global,kind=dbl_kind) * 0.25) + 1 + jedge = int(real(ny_global,kind=dbl_kind) * 0.25) + 1 + do j = jlo, jhi + do i = ilo, ihi + if ((iglob(i) > iedge .and. iglob(i) < nx_global-iedge+1) .and. & + (jglob(j) > jedge .and. jglob(j) < ny_global-jedge+1)) then + icells = icells + 1 + indxi(icells) = i + indxj(icells) = j + endif + enddo + enddo + + elseif (trim(ice_data_type) == 'eastblock') then + ! block on east half of domain in center of domain + icells = 0 + do j = jlo, jhi + do i = ilo, ihi + if (jglob(j) > ny_global/4 .and. jglob(j) < 3*nx_global/4 .and. 
& + iglob(i) >= nx_global/2) then + icells = icells + 1 + indxi(icells) = i + indxj(icells) = j + endif + enddo + enddo + + elseif (trim(ice_data_type) == 'latsst') then + + !----------------------------------------------------------------- + ! Place ice where ocean surface is cold. + ! Note: If SST is not read from a file, then the ocean is assumed + ! to be at its freezing point everywhere, and ice will + ! extend to the prescribed edges. + !----------------------------------------------------------------- + + icells = 0 + do j = jlo, jhi + do i = ilo, ihi + if (tmask(i,j)) then + ! place ice in high latitudes where ocean sfc is cold +#ifdef CESMCOUPLED + ! Option to use Tair instead. + if ( (Tair (i,j) <= Tffresh) .and. & +#else + if ( (sst (i,j) <= Tf(i,j)+p2) .and. & +#endif + (TLAT(i,j) < edge_init_sh/rad_to_deg .or. & + TLAT(i,j) > edge_init_nh/rad_to_deg) ) then + icells = icells + 1 + indxi(icells) = i + indxj(icells) = j + endif ! cold surface + endif ! tmask + enddo ! i + enddo ! j + + else + + call abort_ice(subname//'ERROR: ice_data_type setting = '//trim(ice_data_type), & + file=__FILE__, line=__LINE__) + + endif ! ice_data_type + + !--------------------------------------------------------- + ! ice distribution + !--------------------------------------------------------- + + do n = 1, ncat + + ! ice volume, snow volume + do ij = 1, icells + i = indxi(ij) + j = indxj(ij) + + aicen(i,j,n) = ainit(n) + + if (trim(ice_data_dist) == 'box2001') then + if (hinit(n) > c0) then +! ! varies linearly from 0 to 1 in x direction + aicen(i,j,n) = (real(iglob(i), kind=dbl_kind)-p5) & + / (real(nx_global,kind=dbl_kind)) +! ! constant slope from 0 to 0.5 in x direction +! aicen(i,j,n) = (real(iglob(i), kind=dbl_kind)-p5) & +! / (real(nx_global,kind=dbl_kind)) * p5 +! ! quadratic +! aicen(i,j,n) = max(c0,(real(iglob(i), kind=dbl_kind)-p5) & +! / (real(nx_global,kind=dbl_kind)) & +! * (real(jglob(j), kind=dbl_kind)-p5) & +! / (real(ny_global,kind=dbl_kind)) * p5) +! aicen(i,j,n) = max(c0,(real(nx_global, kind=dbl_kind) & +! - real(iglob(i), kind=dbl_kind)-p5) & +! / (real(nx_global,kind=dbl_kind)) & +! * (real(ny_global, kind=dbl_kind) & +! - real(jglob(j), kind=dbl_kind)-p5) & +! / (real(ny_global,kind=dbl_kind)) * p5) + endif + + elseif (trim(ice_data_dist) == 'gauss') then + if (hinit(n) > c0) then + dist_ratio = 8._dbl_kind * & + sqrt((real(iglob(i),kind=dbl_kind)-real(nx_global+1,kind=dbl_kind)/c2)**2 + & + (real(jglob(j),kind=dbl_kind)-real(ny_global+1,kind=dbl_kind)/c2)**2) / & + sqrt((real(nx_global,kind=dbl_kind))**2 + & + (real(ny_global,kind=dbl_kind))**2) + aicen(i,j,n) = ainit(n) * exp(-dist_ratio) + endif + + elseif (trim(ice_data_dist) == 'uniform') then + + ! nothing extra to do + + else + + call abort_ice(subname//'ERROR: ice_data_dist setting = '//trim(ice_data_dist), & + file=__FILE__, line=__LINE__) + + endif ! ice_data_dist + + vicen(i,j,n) = hinit(n) * aicen(i,j,n) ! m + vsnon(i,j,n) = min(aicen(i,j,n)*hsno_init,p2*vicen(i,j,n)) + + call icepack_init_trcr(Tair = Tair(i,j), Tf = Tf(i,j), & + Sprofile = salinz(i,j,:), & + Tprofile = Tmltz(i,j,:), & + Tsfc = Tsfc, & + qin = qin(:), qsn = qsn(:)) + + ! surface temperature + trcrn(i,j,nt_Tsfc,n) = Tsfc ! deg C + ! ice enthalpy, salinity + do k = 1, nilyr + trcrn(i,j,nt_qice+k-1,n) = qin(k) + trcrn(i,j,nt_sice+k-1,n) = salinz(i,j,k) + enddo + ! snow enthalpy + do k = 1, nslyr + trcrn(i,j,nt_qsno+k-1,n) = qsn(k) + enddo ! nslyr + ! brine fraction + if (tr_brine) trcrn(i,j,nt_fbri,n) = c1 + + enddo ! ij + enddo ! 
ncat + + !--------------------------------------------------------- + ! ice velocity + ! these velocities are defined on B-grid + !--------------------------------------------------------- + + if (trim(ice_data_type) == 'boxslotcyl') then + domain_length = dxrect*cm_to_m*nx_global + period = c12*secday ! 12 days rotational period + max_vel = pi*domain_length/period + + do j = 1, ny_block + do i = 1, nx_block + + if (umask(i,j)) then + uvel(i,j) = c2*max_vel*(real(jglob(j), kind=dbl_kind) - p5) & + / real(ny_global - 1, kind=dbl_kind) - max_vel + vvel(i,j) = -c2*max_vel*(real(iglob(i), kind=dbl_kind) - p5) & + / real(nx_global - 1, kind=dbl_kind) + max_vel + else + uvel(i,j) = c0 + vvel(i,j) = c0 + endif + enddo ! j + enddo ! i + else + uvel = c0 + vvel = c0 + endif + + endif ! ice_ic + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + end subroutine set_state_var + +!======================================================================= + + end module ice_init + +!======================================================================= diff --git a/cicecore/cicedyn/general/ice_state.F90 b/cicecore/cicedyn/general/ice_state.F90 index 82b03f2cb..92066d038 100644 --- a/cicecore/cicedyn/general/ice_state.F90 +++ b/cicecore/cicedyn/general/ice_state.F90 @@ -172,6 +172,32 @@ subroutine alloc_state stat=ierr) if (ierr/=0) call abort_ice('(alloc_state): Out of memory1') + aice = c0 + aiU = c0 + vice = c0 + vsno = c0 + aice0 = c0 + uvel = c0 + vvel = c0 + uvelE = c0 + vvelE = c0 + uvelN = c0 + vvelN = c0 + divu = c0 + shear = c0 + vort = c0 + strength = c0 + aice_init = c0 + aicen = c0 + vicen = c0 + vsnon = c0 + aicen_init = c0 + vicen_init = c0 + vsnon_init = c0 + Tsfcn_init = c0 + trcr = c0 + trcrn = c0 + allocate ( & trcr_depend(ntrcr) , & ! n_trcr_strata(ntrcr) , & ! number of underlying tracer layers @@ -184,12 +210,6 @@ subroutine alloc_state n_trcr_strata = 0 nt_strata = 0 trcr_base = c0 - aicen = c0 - aicen_init = c0 - vicen = c0 - vicen_init = c0 - vsnon = c0 - vsnon_init = c0 end subroutine alloc_state diff --git a/cicecore/cicedyn/general/ice_step_mod.F90 b/cicecore/cicedyn/general/ice_step_mod.F90 index b7aff0779..26faa27df 100644 --- a/cicecore/cicedyn/general/ice_step_mod.F90 +++ b/cicecore/cicedyn/general/ice_step_mod.F90 @@ -673,12 +673,16 @@ subroutine step_therm2 (dt, iblk) tr_fsd, & ! floe size distribution tracers z_tracers ! vertical biogeochemistry + character (len=char_len) :: & + wave_height_type ! type of significant wave height forcing + type (block) :: & this_block ! block information for current block character(len=*), parameter :: subname = '(step_therm2)' - call icepack_query_parameters(z_tracers_out=z_tracers) + call icepack_query_parameters(z_tracers_out=z_tracers, & + wave_height_type_out=wave_height_type) call icepack_query_tracer_sizes(ntrcr_out=ntrcr, nbtrcr_out=nbtrcr) call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) @@ -703,9 +707,11 @@ subroutine step_therm2 (dt, iblk) if (tmask(i,j,iblk) .or. opmask(i,j,iblk)) then - ! significant wave height for FSD - if (tr_fsd) & - wave_sig_ht(i,j,iblk) = c4*SQRT(SUM(wave_spectrum(i,j,:,iblk)*dwavefreq(:))) + ! significant wave height + if (tr_fsd .and. trim(wave_height_type) == 'internal') then + wave_sig_ht(i,j,iblk) = c4*SQRT(SUM(wave_spectrum(i,j,:,iblk)*dwavefreq(:))) + !
else wave_sig_ht = 0 unless provided by coupler or other external data + endif call icepack_step_therm2(dt=dt, & hin_max = hin_max (:), & @@ -749,7 +755,6 @@ subroutine step_therm2 (dt, iblk) wave_spectrum = & wave_spectrum(i,j,:,iblk), & wavefreq = wavefreq (:), & - dwavefreq = dwavefreq (:), & d_afsd_latg = d_afsd_latg(i,j,:,iblk), & d_afsd_newi = d_afsd_newi(i,j,:,iblk), & d_afsd_latm = d_afsd_latm(i,j,:,iblk), & @@ -890,7 +895,7 @@ end subroutine update_state subroutine step_dyn_wave (dt) - use ice_arrays_column, only: wave_spectrum, & + use ice_arrays_column, only: wave_spectrum, wave_sig_ht, & d_afsd_wave, wavefreq, dwavefreq use ice_domain_size, only: ncat, nfsd, nfreq use ice_state, only: trcrn, aicen, aice, vice @@ -910,14 +915,11 @@ subroutine step_dyn_wave (dt) iblk, & ! block index i, j ! horizontal indices - character (len=char_len) :: wave_spec_type - character(len=*), parameter :: subname = '(step_dyn_wave)' call ice_timer_start(timer_column) call ice_timer_start(timer_fsd) - call icepack_query_parameters(wave_spec_type_out=wave_spec_type) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -934,8 +936,7 @@ subroutine step_dyn_wave (dt) do j = jlo, jhi do i = ilo, ihi d_afsd_wave(i,j,:,iblk) = c0 - call icepack_step_wavefracture(wave_spec_type = wave_spec_type, & - dt = dt, nfreq = nfreq, & + call icepack_step_wavefracture(dt = dt, nfreq = nfreq, & aice = aice (i,j, iblk), & vice = vice (i,j, iblk), & aicen = aicen (i,j,:, iblk), & @@ -943,7 +944,8 @@ subroutine step_dyn_wave (dt) wavefreq = wavefreq (:), & dwavefreq = dwavefreq (:), & trcrn = trcrn (i,j,:,:,iblk), & - d_afsd_wave = d_afsd_wave (i,j,:, iblk)) + d_afsd_wave = d_afsd_wave (i,j,:, iblk), & + wave_height = wave_sig_ht (i,j, iblk)) end do ! i end do ! j end do ! iblk diff --git a/cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 index 11cd0d2e1..5a690d490 100644 --- a/cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 +++ b/cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 @@ -64,7 +64,7 @@ module ice_boundary use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use ice_blocks, only: nx_block, ny_block, nghost, & - nblocks_tot, ice_blocksNorth, & + nblocks_tot, nblocks_x, nblocks_y, ice_blocksNorth, & ice_blocksSouth, ice_blocksEast, ice_blocksWest, & ice_blocksEast2, ice_blocksWest2, & ice_blocksNorthEast, ice_blocksNorthWest, & @@ -106,6 +106,10 @@ module ice_boundary sendAddr, &! src addresses for each sent message recvAddr ! dst addresses for each recvd message + character (char_len) :: & + nsBoundaryType, &! type of boundary to use in logical ns dir + ewBoundaryType ! type of boundary to use in logical ew dir + end type public :: ice_HaloCreate, & @@ -252,6 +256,8 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & if (my_task >= numProcs) return halo%communicator = communicator + halo%ewBoundaryType = ewBoundaryType + halo%nsBoundaryType = nsBoundaryType blockSizeX = nx_block - 2*nghost blockSizeY = ny_block - 2*nghost @@ -1239,6 +1245,7 @@ subroutine ice_HaloUpdate2DR8(array, halo, & integer (int_kind) :: & i,j,n,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) iSrc,jSrc, &! 
source addresses for message @@ -1261,6 +1268,8 @@ subroutine ice_HaloUpdate2DR8(array, halo, & x1,x2,xavg ! scalars for enforcing symmetry at U pts logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter, &! fill outer boundary ns ltripoleOnly ! local tripoleOnly value integer (int_kind) :: len ! length of messages @@ -1290,8 +1299,20 @@ subroutine ice_HaloUpdate2DR8(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0.0_dbl_kind endif @@ -1367,29 +1388,77 @@ subroutine ice_HaloUpdate2DR8(array, halo, & !----------------------------------------------------------------------- ! -! while messages are being communicated, fill out halo region +! While messages are being communicated, fill out halo region ! needed for masked halos to ensure halo values are filled for -! halo grid cells that are not updated +! halo grid cells that are not updated except in cases where +! you don't want to overwrite those halos ! !----------------------------------------------------------------------- - if (ltripoleOnly) then - ! skip fill, not needed since tripole seam always exists if running - ! on tripole grid and set tripoleOnly flag - else + if (.not. ltripoleOnly) then + ! tripoleOnly skip fill, do not overwrite any values in interior as they may + ! already be set and filling tripole is not necessary + + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,iblk) = fill - array(1:nx_block, jhi+j,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,iblk) = fill - array(ihi+i, 1:ny_block,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! 
northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, iblk) = fill + enddo + enddo + endif + enddo ! iblk endif !----------------------------------------------------------------------- @@ -1683,6 +1752,7 @@ subroutine ice_HaloUpdate2DR4(array, halo, & integer (int_kind) :: & i,j,n,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) iSrc,jSrc, &! source addresses for message @@ -1704,6 +1774,10 @@ subroutine ice_HaloUpdate2DR4(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + integer (int_kind) :: len ! length of messages character(len=*), parameter :: subname = '(ice_HaloUpdate2DR4)' @@ -1731,8 +1805,20 @@ subroutine ice_HaloUpdate2DR4(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0.0_real_kind endif @@ -1804,23 +1890,71 @@ subroutine ice_HaloUpdate2DR4(array, halo, & ! ! while messages are being communicated, fill out halo region ! needed for masked halos to ensure halo values are filled for -! halo grid cells that are not updated +! halo grid cells that are not updated except in cases where +! you don't want to overwrite those halos ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,iblk) = fill - array(1:nx_block, jhi+j,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,iblk) = fill - array(ihi+i, 1:ny_block,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! 
southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -2098,6 +2232,7 @@ subroutine ice_HaloUpdate2DI4(array, halo, & integer (int_kind) :: & i,j,n,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) iSrc,jSrc, &! source addresses for message @@ -2119,6 +2254,10 @@ subroutine ice_HaloUpdate2DI4(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + integer (int_kind) :: len ! length of messages character(len=*), parameter :: subname = '(ice_HaloUpdate2DI4)' @@ -2146,8 +2285,20 @@ subroutine ice_HaloUpdate2DI4(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0_int_kind endif @@ -2219,23 +2370,71 @@ subroutine ice_HaloUpdate2DI4(array, halo, & ! ! while messages are being communicated, fill out halo region ! needed for masked halos to ensure halo values are filled for -! halo grid cells that are not updated +! halo grid cells that are not updated except in cases where +! you don't want to overwrite those halos ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,iblk) = fill - array(1:nx_block, jhi+j,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,iblk) = fill - array(ihi+i, 1:ny_block,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. 
jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -2593,6 +2792,7 @@ subroutine ice_HaloUpdate3DR8(array, halo, & integer (int_kind) :: & i,j,k,n,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) nz, &! size of array in 3rd dimension @@ -2615,6 +2815,10 @@ subroutine ice_HaloUpdate3DR8(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + real (dbl_kind), dimension(:,:), allocatable :: & bufSend, bufRecv ! 3d send,recv buffers @@ -2648,8 +2852,20 @@ subroutine ice_HaloUpdate3DR8(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0.0_dbl_kind endif @@ -2742,23 +2958,71 @@ subroutine ice_HaloUpdate3DR8(array, halo, & ! ! while messages are being communicated, fill out halo region ! needed for masked halos to ensure halo values are filled for -! halo grid cells that are not updated +! halo grid cells that are not updated except in cases where +! you don't want to overwrite those halos ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,:,iblk) = fill - array(1:nx_block, jhi+j,:,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,:,iblk) = fill - array(ihi+i, 1:ny_block,:,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, :, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, :, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. 
jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, :, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -3067,6 +3331,7 @@ subroutine ice_HaloUpdate3DR4(array, halo, & integer (int_kind) :: & i,j,k,n,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) nz, &! size of array in 3rd dimension @@ -3089,6 +3354,10 @@ subroutine ice_HaloUpdate3DR4(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + real (real_kind), dimension(:,:), allocatable :: & bufSend, bufRecv ! 3d send,recv buffers @@ -3122,8 +3391,20 @@ subroutine ice_HaloUpdate3DR4(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0.0_real_kind endif @@ -3216,23 +3497,71 @@ subroutine ice_HaloUpdate3DR4(array, halo, & ! ! while messages are being communicated, fill out halo region ! needed for masked halos to ensure halo values are filled for -! halo grid cells that are not updated +! halo grid cells that are not updated except in cases where +! you don't want to overwrite those halos ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,:,iblk) = fill - array(1:nx_block, jhi+j,:,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,:,iblk) = fill - array(ihi+i, 1:ny_block,:,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, :, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, :, iblk) = fill + enddo + endif + if (nsfillouter .or. 
jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, :, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, :, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -3541,6 +3870,7 @@ subroutine ice_HaloUpdate3DI4(array, halo, & integer (int_kind) :: & i,j,k,n,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) nz, &! size of array in 3rd dimension @@ -3563,6 +3893,10 @@ subroutine ice_HaloUpdate3DI4(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + integer (int_kind), dimension(:,:), allocatable :: & bufSend, bufRecv ! 3d send,recv buffers @@ -3596,8 +3930,20 @@ subroutine ice_HaloUpdate3DI4(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0_int_kind endif @@ -3690,23 +4036,71 @@ subroutine ice_HaloUpdate3DI4(array, halo, & ! ! while messages are being communicated, fill out halo region ! needed for masked halos to ensure halo values are filled for -! halo grid cells that are not updated +! halo grid cells that are not updated except in cases where +! you don't want to overwrite those halos ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,:,iblk) = fill - array(1:nx_block, jhi+j,:,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,:,iblk) = fill - array(ihi+i, 1:ny_block,:,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, :, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! 
east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, :, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, :, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -4015,6 +4409,7 @@ subroutine ice_HaloUpdate4DR8(array, halo, & integer (int_kind) :: & i,j,k,l,n,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) nz, nt, &! size of array in 3rd,4th dimensions @@ -4037,6 +4432,10 @@ subroutine ice_HaloUpdate4DR8(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + real (dbl_kind), dimension(:,:), allocatable :: & bufSend, bufRecv ! 4d send,recv buffers @@ -4070,8 +4469,20 @@ subroutine ice_HaloUpdate4DR8(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0.0_dbl_kind endif @@ -4168,23 +4579,71 @@ subroutine ice_HaloUpdate4DR8(array, halo, & ! ! while messages are being communicated, fill out halo region ! needed for masked halos to ensure halo values are filled for -! halo grid cells that are not updated +! halo grid cells that are not updated except in cases where +! you don't want to overwrite those halos ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! 
only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,:,:,iblk) = fill - array(1:nx_block, jhi+j,:,:,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,:,:,iblk) = fill - array(ihi+i, 1:ny_block,:,:,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, :, :, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, :, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, :, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, :, :, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, :, :, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -4513,6 +4972,7 @@ subroutine ice_HaloUpdate4DR4(array, halo, & integer (int_kind) :: & i,j,k,l,n,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) nz, nt, &! size of array in 3rd,4th dimensions @@ -4535,6 +4995,10 @@ subroutine ice_HaloUpdate4DR4(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + real (real_kind), dimension(:,:), allocatable :: & bufSend, bufRecv ! 4d send,recv buffers @@ -4568,8 +5032,20 @@ subroutine ice_HaloUpdate4DR4(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0.0_real_kind endif @@ -4666,23 +5142,71 @@ subroutine ice_HaloUpdate4DR4(array, halo, & ! ! while messages are being communicated, fill out halo region ! needed for masked halos to ensure halo values are filled for -! 
halo grid cells that are not updated +! halo grid cells that are not updated except in cases where +! you don't want to overwrite those halos ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,:,:,iblk) = fill - array(1:nx_block, jhi+j,:,:,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,:,:,iblk) = fill - array(ihi+i, 1:ny_block,:,:,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, :, :, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, :, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, :, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, :, :, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, :, :, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -5011,6 +5535,7 @@ subroutine ice_HaloUpdate4DI4(array, halo, & integer (int_kind) :: & i,j,k,l,n,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices ierr, &! error or status flag for MPI,alloc nxGlobal, &! global domain size in x (tripole) nz, nt, &! size of array in 3rd,4th dimensions @@ -5033,6 +5558,10 @@ subroutine ice_HaloUpdate4DI4(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + integer (int_kind), dimension(:,:), allocatable :: & bufSend, bufRecv ! 4d send,recv buffers @@ -5066,8 +5595,20 @@ subroutine ice_HaloUpdate4DI4(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. 
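Reviewer note (not part of the patch): the hunks above and below all apply the same edge/corner fill decision, so a minimal standalone sketch of that logic may help when reading them. It assumes the same names as the patch (ewfillouter, nsfillouter, iblock, jblock, nblocks_x, nblocks_y) but is illustration only, not CICE code.

! Editorial sketch, assuming the naming conventions of the patch above.
! Shows which halo edges/corners of one block would be filled: an edge is
! filled when a neighbor block exists in that direction (interior edge) or
! when the outer boundary is safe to overwrite (ewfillouter/nsfillouter);
! a corner is filled only when both adjacent edges are filled.
program fill_decision_demo
  implicit none
  logical :: ewfillouter, nsfillouter
  logical :: fill_w, fill_e, fill_s, fill_n
  integer :: iblock, jblock, nblocks_x, nblocks_y

  ! hypothetical example: block on the global western edge of a 4x3 block
  ! grid, closed (non-cyclic) ew boundary, tripole/cyclic ns boundary
  nblocks_x = 4 ; nblocks_y = 3
  iblock = 1    ; jblock = 2
  ewfillouter = .false.   ! do not overwrite the open/closed ew outer halo
  nsfillouter = .true.    ! tripole/cyclic ns outer halo is safe to fill

  fill_w = ewfillouter .or. iblock > 1          ! west edge
  fill_e = ewfillouter .or. iblock < nblocks_x  ! east edge
  fill_s = nsfillouter .or. jblock > 1          ! south edge
  fill_n = nsfillouter .or. jblock < nblocks_y  ! north edge

  print *, 'fill west/east/south/north :', fill_w, fill_e, fill_s, fill_n
  print *, 'fill northwest corner      :', fill_w .and. fill_n
  print *, 'fill southeast corner      :', fill_e .and. fill_s
end program fill_decision_demo

For the example block above, only the west edge (and the two western corners) would be left untouched, which is the intended behavior when the ew boundary is open/closed and existing halo data should not be overwritten.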
else fill = 0_int_kind endif @@ -5164,23 +5705,71 @@ subroutine ice_HaloUpdate4DI4(array, halo, & ! ! while messages are being communicated, fill out halo region ! needed for masked halos to ensure halo values are filled for -! halo grid cells that are not updated +! halo grid cells that are not updated except in cases where +! you don't want to overwrite those halos ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,:,:,iblk) = fill - array(1:nx_block, jhi+j,:,:,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,:,:,iblk) = fill - array(ihi+i, 1:ny_block,:,:,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, :, :, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, :, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, :, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, :, :, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, :, :, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -7027,15 +7616,15 @@ end subroutine ice_HaloMsgCreate subroutine ice_HaloExtrapolate2DR8(ARRAY,dist,ew_bndy_type,ns_bndy_type) -! This subroutine extrapolates ARRAY values into the first row or column -! of ghost cells, and is intended for grid variables whose ghost cells +! This subroutine extrapolates ARRAY values into the ghost cells, +! and is intended for grid variables whose ghost cells ! would otherwise be set using the default boundary conditions (Dirichlet ! or Neumann). -! Note: This routine will need to be modified for nghost > 1. -! We assume padding occurs only on east and north edges. ! ! This is the specific interface for double precision arrays ! corresponding to the generic interface ice_HaloExtrapolate +! +! 
T.Craig, Oct 2025 - extend to nghost > 1 use ice_blocks, only: block, nblocks_x, nblocks_y, get_block use ice_constants, only: c2 @@ -7058,8 +7647,9 @@ subroutine ice_HaloExtrapolate2DR8(ARRAY,dist,ew_bndy_type,ns_bndy_type) !----------------------------------------------------------------------- integer (int_kind) :: & - i,j,iblk, &! dummy loop indices - numBlocks, &! number of local blocks + i,j,n,iblk,ii,jj, &! dummy loop indices + ilo,ihi,jlo,jhi, &! active block indices + numBlocks, &! number of local blocks blockID, &! block location ibc ! ghost cell column or row @@ -7067,6 +7657,7 @@ subroutine ice_HaloExtrapolate2DR8(ARRAY,dist,ew_bndy_type,ns_bndy_type) this_block ! block info for current block character(len=*), parameter :: subname = '(ice_HaloExtrapolate2DR8)' + !----------------------------------------------------------------------- ! ! Linear extrapolation @@ -7079,32 +7670,40 @@ subroutine ice_HaloExtrapolate2DR8(ARRAY,dist,ew_bndy_type,ns_bndy_type) do iblk = 1, numBlocks call ice_distributionGetBlockID(dist, iblk, blockID) this_block = get_block(blockID, blockID) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi if (this_block%iblock == 1) then ! west edge if (trim(ew_bndy_type) /= 'cyclic') then + do n = 1, nghost + ii = ilo - n ! gridcell to extrapolate to do j = 1, ny_block - ARRAY(1,j,iblk) = c2*ARRAY(2,j,iblk) - ARRAY(3,j,iblk) + ARRAY(ii,j,iblk) = c2*ARRAY(ii+1,j,iblk) - ARRAY(ii+2,j,iblk) + enddo enddo endif endif if (this_block%iblock == nblocks_x) then ! east edge if (trim(ew_bndy_type) /= 'cyclic') then - ! locate ghost cell column (avoid padding) - ibc = nx_block - do i = nx_block, nghost + 1, -1 - if (this_block%i_glob(i) == 0) ibc = ibc - 1 - enddo + do n = 1, nghost + ii = ihi + n ! gridcell to extrapolate to do j = 1, ny_block - ARRAY(ibc,j,iblk) = c2*ARRAY(ibc-1,j,iblk) - ARRAY(ibc-2,j,iblk) + ARRAY(ii,j,iblk) = c2*ARRAY(ii-1,j,iblk) - ARRAY(ii-2,j,iblk) + enddo enddo endif endif if (this_block%jblock == 1) then ! south edge if (trim(ns_bndy_type) /= 'cyclic') then + do n = 1, nghost + jj = jlo - n ! gridcell to extrapolate to do i = 1, nx_block - ARRAY(i,1,iblk) = c2*ARRAY(i,2,iblk) - ARRAY(i,3,iblk) + ARRAY(i,jj,iblk) = c2*ARRAY(i,jj+1,iblk) - ARRAY(i,jj+2,iblk) + enddo enddo endif endif @@ -7113,13 +7712,11 @@ subroutine ice_HaloExtrapolate2DR8(ARRAY,dist,ew_bndy_type,ns_bndy_type) if (trim(ns_bndy_type) /= 'cyclic' .and. & trim(ns_bndy_type) /= 'tripole' .and. & trim(ns_bndy_type) /= 'tripoleT' ) then - ! locate ghost cell column (avoid padding) - ibc = ny_block - do j = ny_block, nghost + 1, -1 - if (this_block%j_glob(j) == 0) ibc = ibc - 1 - enddo + do n = 1, nghost + jj = jhi + n ! gridcell to extrapolate to do i = 1, nx_block - ARRAY(i,ibc,iblk) = c2*ARRAY(i,ibc-1,iblk) - ARRAY(i,ibc-2,iblk) + ARRAY(i,jj,iblk) = c2*ARRAY(i,jj-1,iblk) - ARRAY(i,jj-2,iblk) + enddo enddo endif endif diff --git a/cicecore/cicedyn/infrastructure/comm/mpi/ice_gather_scatter.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_gather_scatter.F90 index cfb98befe..1c0191aee 100644 --- a/cicecore/cicedyn/infrastructure/comm/mpi/ice_gather_scatter.F90 +++ b/cicecore/cicedyn/infrastructure/comm/mpi/ice_gather_scatter.F90 @@ -1643,74 +1643,35 @@ subroutine scatter_global_dbl(ARRAY, ARRAY_G, src_task, dst_dist, & msg_buffer = c0 this_block = get_block(n,n) - !*** if this is an interior block, then there is no - !*** padding or update checking required - - if (this_block%iblock > 1 .and. & - this_block%iblock < nblocks_x .and. 
& - this_block%jblock > 1 .and. & - this_block%jblock < nblocks_y) then - - do j=1,ny_block - do i=1,nx_block - msg_buffer(i,j) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - end do - end do - - !*** if this is an edge block but not a northern edge - !*** we only need to check for closed boundaries and - !*** padding (global index = 0) - - else if (this_block%jblock /= nblocks_y) then - - do j=1,ny_block - if (this_block%j_glob(j) /= 0) then - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - msg_buffer(i,j) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - endif - end do - - !*** if this is a northern edge block, we need to check - !*** for and properly deal with tripole boundaries - - else - - do j=1,ny_block - if (this_block%j_glob(j) > 0) then ! normal boundary - - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - msg_buffer(i,j) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - - else if (this_block%j_glob(j) < 0) then ! tripole - - ! for yoffset=0 or 1, yoffset2=0,0 - ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid - do yoffset2=0,max(yoffset,0)-yoffset - jsrc = ny_global + yoffset + yoffset2 + & - (this_block%j_glob(j) + ny_global) - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - isrc = nx_global + xoffset - this_block%i_glob(i) - if (isrc < 1) isrc = isrc + nx_global - if (isrc > nx_global) isrc = isrc - nx_global - msg_buffer(i,j-yoffset2) = isign * ARRAY_G(isrc,jsrc) - endif - end do - end do - - endif - end do - - endif + do j=1,ny_block + if (this_block%jblock == nblocks_y .and. this_block%j_glob(j) < 0) then + ! tripole is top block with j_glob < 0 + ! for yoffset=0 or 1, yoffset2=0,0 + ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid + do yoffset2=0,max(yoffset,0)-yoffset + jsrc = ny_global + yoffset + yoffset2 + & + (this_block%j_glob(j) + ny_global) + do i=1,nx_block + if (this_block%i_glob(i) /= 0) then + isrc = nx_global + xoffset - this_block%i_glob(i) + if (isrc < 1) isrc = isrc + nx_global + if (isrc > nx_global) isrc = isrc - nx_global + msg_buffer(i,j-yoffset2) & + = isign * ARRAY_G(isrc,jsrc) + endif + end do + end do + else + ! normal block + do i=1,nx_block + isrc = this_block%i_glob(i) + jsrc = this_block%j_glob(j) + if (isrc >=1 .and. isrc <= nx_global .and. & + jsrc >=1 .and. jsrc <= ny_global) & + msg_buffer(i,j) = ARRAY_G(isrc,jsrc) + end do + endif + end do call MPI_SEND(msg_buffer, nx_block*ny_block, & mpiR8, dst_dist%blockLocation(n)-1, 3*mpitag_gs+n, & @@ -1728,75 +1689,35 @@ subroutine scatter_global_dbl(ARRAY, ARRAY_G, src_task, dst_dist, & dst_block = dst_dist%blockLocalID(n) this_block = get_block(n,n) - !*** if this is an interior block, then there is no - !*** padding or update checking required - - if (this_block%iblock > 1 .and. & - this_block%iblock < nblocks_x .and. & - this_block%jblock > 1 .and. 
& - this_block%jblock < nblocks_y) then - - do j=1,ny_block - do i=1,nx_block - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - end do - end do - - !*** if this is an edge block but not a northern edge - !*** we only need to check for closed boundaries and - !*** padding (global index = 0) - - else if (this_block%jblock /= nblocks_y) then - - do j=1,ny_block - if (this_block%j_glob(j) /= 0) then - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - endif - end do - - !*** if this is a northern edge block, we need to check - !*** for and properly deal with tripole boundaries - - else - - do j=1,ny_block - if (this_block%j_glob(j) > 0) then ! normal boundary - - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - - else if (this_block%j_glob(j) < 0) then ! tripole - - ! for yoffset=0 or 1, yoffset2=0,0 - ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid - do yoffset2=0,max(yoffset,0)-yoffset - jsrc = ny_global + yoffset + yoffset2 + & - (this_block%j_glob(j) + ny_global) - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - isrc = nx_global + xoffset - this_block%i_glob(i) - if (isrc < 1) isrc = isrc + nx_global - if (isrc > nx_global) isrc = isrc - nx_global - ARRAY(i,j-yoffset2,dst_block) & - = isign * ARRAY_G(isrc,jsrc) - endif - end do - end do - - endif - end do - - endif + do j=1,ny_block + if (this_block%jblock == nblocks_y .and. this_block%j_glob(j) < 0) then + ! tripole is top block with j_glob < 0 + ! for yoffset=0 or 1, yoffset2=0,0 + ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid + do yoffset2=0,max(yoffset,0)-yoffset + jsrc = ny_global + yoffset + yoffset2 + & + (this_block%j_glob(j) + ny_global) + do i=1,nx_block + if (this_block%i_glob(i) /= 0) then + isrc = nx_global + xoffset - this_block%i_glob(i) + if (isrc < 1) isrc = isrc + nx_global + if (isrc > nx_global) isrc = isrc - nx_global + ARRAY(i,j-yoffset2,dst_block) & + = isign * ARRAY_G(isrc,jsrc) + endif + end do + end do + else + ! normal block + do i=1,nx_block + isrc = this_block%i_glob(i) + jsrc = this_block%j_glob(j) + if (isrc >=1 .and. isrc <= nx_global .and. & + jsrc >=1 .and. jsrc <= ny_global) & + ARRAY(i,j,dst_block) = ARRAY_G(isrc,jsrc) + end do + endif + end do endif end do @@ -1832,7 +1753,7 @@ subroutine scatter_global_dbl(ARRAY, ARRAY_G, src_task, dst_dist, & endif !----------------------------------------------------------------- - ! Ensure unused ghost cell values are 0 + ! Set ghost cell values to 0 for noupdate !----------------------------------------------------------------- if (field_loc == field_loc_noupdate) then @@ -2029,74 +1950,35 @@ subroutine scatter_global_real(ARRAY, ARRAY_G, src_task, dst_dist, & msg_buffer = 0._real_kind this_block = get_block(n,n) - !*** if this is an interior block, then there is no - !*** padding or update checking required - - if (this_block%iblock > 1 .and. & - this_block%iblock < nblocks_x .and. & - this_block%jblock > 1 .and. 
& - this_block%jblock < nblocks_y) then - - do j=1,ny_block - do i=1,nx_block - msg_buffer(i,j) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - end do - end do - - !*** if this is an edge block but not a northern edge - !*** we only need to check for closed boundaries and - !*** padding (global index = 0) - - else if (this_block%jblock /= nblocks_y) then - - do j=1,ny_block - if (this_block%j_glob(j) /= 0) then - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - msg_buffer(i,j) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - endif - end do - - !*** if this is a northern edge block, we need to check - !*** for and properly deal with tripole boundaries - - else - - do j=1,ny_block - if (this_block%j_glob(j) > 0) then ! normal boundary - - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - msg_buffer(i,j) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - - else if (this_block%j_glob(j) < 0) then ! tripole - - ! for yoffset=0 or 1, yoffset2=0,0 - ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid - do yoffset2=0,max(yoffset,0)-yoffset - jsrc = ny_global + yoffset + yoffset2 + & - (this_block%j_glob(j) + ny_global) - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - isrc = nx_global + xoffset - this_block%i_glob(i) - if (isrc < 1) isrc = isrc + nx_global - if (isrc > nx_global) isrc = isrc - nx_global - msg_buffer(i,j-yoffset2) = isign * ARRAY_G(isrc,jsrc) - endif - end do - end do - - endif - end do - - endif + do j=1,ny_block + if (this_block%jblock == nblocks_y .and. this_block%j_glob(j) < 0) then + ! tripole is top block with j_glob < 0 + ! for yoffset=0 or 1, yoffset2=0,0 + ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid + do yoffset2=0,max(yoffset,0)-yoffset + jsrc = ny_global + yoffset + yoffset2 + & + (this_block%j_glob(j) + ny_global) + do i=1,nx_block + if (this_block%i_glob(i) /= 0) then + isrc = nx_global + xoffset - this_block%i_glob(i) + if (isrc < 1) isrc = isrc + nx_global + if (isrc > nx_global) isrc = isrc - nx_global + msg_buffer(i,j-yoffset2) & + = isign * ARRAY_G(isrc,jsrc) + endif + end do + end do + else + ! normal block + do i=1,nx_block + isrc = this_block%i_glob(i) + jsrc = this_block%j_glob(j) + if (isrc >=1 .and. isrc <= nx_global .and. & + jsrc >=1 .and. jsrc <= ny_global) & + msg_buffer(i,j) = ARRAY_G(isrc,jsrc) + end do + endif + end do call MPI_SEND(msg_buffer, nx_block*ny_block, & mpiR4, dst_dist%blockLocation(n)-1, 3*mpitag_gs+n, & @@ -2114,75 +1996,35 @@ subroutine scatter_global_real(ARRAY, ARRAY_G, src_task, dst_dist, & dst_block = dst_dist%blockLocalID(n) this_block = get_block(n,n) - !*** if this is an interior block, then there is no - !*** padding or update checking required - - if (this_block%iblock > 1 .and. & - this_block%iblock < nblocks_x .and. & - this_block%jblock > 1 .and. 
& - this_block%jblock < nblocks_y) then - - do j=1,ny_block - do i=1,nx_block - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - end do - end do - - !*** if this is an edge block but not a northern edge - !*** we only need to check for closed boundaries and - !*** padding (global index = 0) - - else if (this_block%jblock /= nblocks_y) then - - do j=1,ny_block - if (this_block%j_glob(j) /= 0) then - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - endif - end do - - !*** if this is a northern edge block, we need to check - !*** for and properly deal with tripole boundaries - - else - - do j=1,ny_block - if (this_block%j_glob(j) > 0) then ! normal boundary - - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - - else if (this_block%j_glob(j) < 0) then ! tripole - - ! for yoffset=0 or 1, yoffset2=0,0 - ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid - do yoffset2=0,max(yoffset,0)-yoffset - jsrc = ny_global + yoffset + yoffset2 + & - (this_block%j_glob(j) + ny_global) - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - isrc = nx_global + xoffset - this_block%i_glob(i) - if (isrc < 1) isrc = isrc + nx_global - if (isrc > nx_global) isrc = isrc - nx_global - ARRAY(i,j-yoffset2,dst_block) & - = isign * ARRAY_G(isrc,jsrc) - endif - end do - end do - - endif - end do - - endif + do j=1,ny_block + if (this_block%jblock == nblocks_y .and. this_block%j_glob(j) < 0) then + ! tripole is top block with j_glob < 0 + ! for yoffset=0 or 1, yoffset2=0,0 + ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid + do yoffset2=0,max(yoffset,0)-yoffset + jsrc = ny_global + yoffset + yoffset2 + & + (this_block%j_glob(j) + ny_global) + do i=1,nx_block + if (this_block%i_glob(i) /= 0) then + isrc = nx_global + xoffset - this_block%i_glob(i) + if (isrc < 1) isrc = isrc + nx_global + if (isrc > nx_global) isrc = isrc - nx_global + ARRAY(i,j-yoffset2,dst_block) & + = isign * ARRAY_G(isrc,jsrc) + endif + end do + end do + else + ! normal block + do i=1,nx_block + isrc = this_block%i_glob(i) + jsrc = this_block%j_glob(j) + if (isrc >=1 .and. isrc <= nx_global .and. & + jsrc >=1 .and. jsrc <= ny_global) & + ARRAY(i,j,dst_block) = ARRAY_G(isrc,jsrc) + end do + endif + end do endif end do @@ -2218,7 +2060,7 @@ subroutine scatter_global_real(ARRAY, ARRAY_G, src_task, dst_dist, & endif !----------------------------------------------------------------- - ! Ensure unused ghost cell values are 0 + ! Set ghost cell values to 0 for noupdate !----------------------------------------------------------------- if (field_loc == field_loc_noupdate) then @@ -2415,74 +2257,35 @@ subroutine scatter_global_int(ARRAY, ARRAY_G, src_task, dst_dist, & msg_buffer = 0 this_block = get_block(n,n) - !*** if this is an interior block, then there is no - !*** padding or update checking required - - if (this_block%iblock > 1 .and. & - this_block%iblock < nblocks_x .and. & - this_block%jblock > 1 .and. 
& - this_block%jblock < nblocks_y) then - - do j=1,ny_block - do i=1,nx_block - msg_buffer(i,j) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - end do - end do - - !*** if this is an edge block but not a northern edge - !*** we only need to check for closed boundaries and - !*** padding (global index = 0) - - else if (this_block%jblock /= nblocks_y) then - - do j=1,ny_block - if (this_block%j_glob(j) /= 0) then - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - msg_buffer(i,j) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - endif - end do - - !*** if this is a northern edge block, we need to check - !*** for and properly deal with tripole boundaries - - else - - do j=1,ny_block - if (this_block%j_glob(j) > 0) then ! normal boundary - - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - msg_buffer(i,j) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - - else if (this_block%j_glob(j) < 0) then ! tripole - - ! for yoffset=0 or 1, yoffset2=0,0 - ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid - do yoffset2=0,max(yoffset,0)-yoffset - jsrc = ny_global + yoffset + yoffset2 + & - (this_block%j_glob(j) + ny_global) - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - isrc = nx_global + xoffset - this_block%i_glob(i) - if (isrc < 1) isrc = isrc + nx_global - if (isrc > nx_global) isrc = isrc - nx_global - msg_buffer(i,j-yoffset2) = isign * ARRAY_G(isrc,jsrc) - endif - end do - end do - - endif - end do - - endif + do j=1,ny_block + if (this_block%jblock == nblocks_y .and. this_block%j_glob(j) < 0) then + ! tripole is top block with j_glob < 0 + ! for yoffset=0 or 1, yoffset2=0,0 + ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid + do yoffset2=0,max(yoffset,0)-yoffset + jsrc = ny_global + yoffset + yoffset2 + & + (this_block%j_glob(j) + ny_global) + do i=1,nx_block + if (this_block%i_glob(i) /= 0) then + isrc = nx_global + xoffset - this_block%i_glob(i) + if (isrc < 1) isrc = isrc + nx_global + if (isrc > nx_global) isrc = isrc - nx_global + msg_buffer(i,j-yoffset2) & + = isign * ARRAY_G(isrc,jsrc) + endif + end do + end do + else + ! normal block + do i=1,nx_block + isrc = this_block%i_glob(i) + jsrc = this_block%j_glob(j) + if (isrc >=1 .and. isrc <= nx_global .and. & + jsrc >=1 .and. jsrc <= ny_global) & + msg_buffer(i,j) = ARRAY_G(isrc,jsrc) + end do + endif + end do call MPI_SEND(msg_buffer, nx_block*ny_block, & mpi_integer, dst_dist%blockLocation(n)-1, 3*mpitag_gs+n, & @@ -2500,75 +2303,35 @@ subroutine scatter_global_int(ARRAY, ARRAY_G, src_task, dst_dist, & dst_block = dst_dist%blockLocalID(n) this_block = get_block(n,n) - !*** if this is an interior block, then there is no - !*** padding or update checking required - - if (this_block%iblock > 1 .and. & - this_block%iblock < nblocks_x .and. & - this_block%jblock > 1 .and. 
& - this_block%jblock < nblocks_y) then - - do j=1,ny_block - do i=1,nx_block - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - end do - end do - - !*** if this is an edge block but not a northern edge - !*** we only need to check for closed boundaries and - !*** padding (global index = 0) - - else if (this_block%jblock /= nblocks_y) then - - do j=1,ny_block - if (this_block%j_glob(j) /= 0) then - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - endif - end do - - !*** if this is a northern edge block, we need to check - !*** for and properly deal with tripole boundaries - - else - - do j=1,ny_block - if (this_block%j_glob(j) > 0) then ! normal boundary - - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - - else if (this_block%j_glob(j) < 0) then ! tripole - - ! for yoffset=0 or 1, yoffset2=0,0 - ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid - do yoffset2=0,max(yoffset,0)-yoffset - jsrc = ny_global + yoffset + yoffset2 + & - (this_block%j_glob(j) + ny_global) - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - isrc = nx_global + xoffset - this_block%i_glob(i) - if (isrc < 1) isrc = isrc + nx_global - if (isrc > nx_global) isrc = isrc - nx_global - ARRAY(i,j-yoffset2,dst_block) & - = isign * ARRAY_G(isrc,jsrc) - endif - end do - end do - - endif - end do - - endif + do j=1,ny_block + if (this_block%jblock == nblocks_y .and. this_block%j_glob(j) < 0) then + ! tripole is top block with j_glob < 0 + ! for yoffset=0 or 1, yoffset2=0,0 + ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid + do yoffset2=0,max(yoffset,0)-yoffset + jsrc = ny_global + yoffset + yoffset2 + & + (this_block%j_glob(j) + ny_global) + do i=1,nx_block + if (this_block%i_glob(i) /= 0) then + isrc = nx_global + xoffset - this_block%i_glob(i) + if (isrc < 1) isrc = isrc + nx_global + if (isrc > nx_global) isrc = isrc - nx_global + ARRAY(i,j-yoffset2,dst_block) & + = isign * ARRAY_G(isrc,jsrc) + endif + end do + end do + else + ! normal block + do i=1,nx_block + isrc = this_block%i_glob(i) + jsrc = this_block%j_glob(j) + if (isrc >=1 .and. isrc <= nx_global .and. & + jsrc >=1 .and. jsrc <= ny_global) & + ARRAY(i,j,dst_block) = ARRAY_G(isrc,jsrc) + end do + endif + end do endif end do @@ -2604,7 +2367,7 @@ subroutine scatter_global_int(ARRAY, ARRAY_G, src_task, dst_dist, & endif !----------------------------------------------------------------- - ! Ensure unused ghost cell values are 0 + ! Set ghost cell values to 0 for noupdate !----------------------------------------------------------------- if (field_loc == field_loc_noupdate) then @@ -2681,7 +2444,7 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) integer (int_kind) :: & i,j,n, &! dummy loop indices iblk, jblk, &! block indices - iglb, jglb, &! global indices + isrc, jsrc, &! global indices nrecvs, &! actual number of messages received dst_block, &! location of block in dst array ierr ! MPI error flag @@ -2748,13 +2511,13 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) ! southwest corner iblk = i jblk = j - iglb = this_block%i_glob(this_block%ilo)+i-1 - jglb = j - msg_buffer(iblk,jblk) = ARRAY_G(iglb,jglb) + isrc = this_block%i_glob(this_block%ilo)+i-1 + jsrc = j + msg_buffer(iblk,jblk) = ARRAY_G(isrc,jsrc) ! 
southeast corner iblk = this_block%ihi+i - iglb = this_block%i_glob(this_block%ihi)+nghost+i - msg_buffer(iblk,jblk) = ARRAY_G(iglb,jglb) + isrc = this_block%i_glob(this_block%ihi)+nghost+i + msg_buffer(iblk,jblk) = ARRAY_G(isrc,jsrc) enddo enddo endif @@ -2769,13 +2532,13 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) ! northwest corner iblk = i jblk = this_block%jhi+j - iglb = this_block%i_glob(this_block%ilo)+i-1 - jglb = ny_global+nghost+j - msg_buffer(iblk,jblk) = ARRAY_G(iglb,jglb) + isrc = this_block%i_glob(this_block%ilo)+i-1 + jsrc = ny_global+nghost+j + msg_buffer(iblk,jblk) = ARRAY_G(isrc,jsrc) ! northeast corner iblk = this_block%ihi+i - iglb = this_block%i_glob(this_block%ihi)+nghost+i - msg_buffer(iblk,jblk) = ARRAY_G(iglb,jglb) + isrc = this_block%i_glob(this_block%ihi)+nghost+i + msg_buffer(iblk,jblk) = ARRAY_G(isrc,jsrc) enddo enddo endif @@ -2791,13 +2554,13 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) ! northwest corner iblk = i jblk = this_block%jhi+j - iglb = i - jglb = this_block%j_glob(this_block%jhi)+nghost+j - msg_buffer(iblk,jblk) = ARRAY_G(iglb,jglb) + isrc = i + jsrc = this_block%j_glob(this_block%jhi)+nghost+j + msg_buffer(iblk,jblk) = ARRAY_G(isrc,jsrc) ! southwest corner jblk = j - jglb = this_block%j_glob(this_block%jlo)+j-1 - msg_buffer(iblk,jblk) = ARRAY_G(iglb,jglb) + jsrc = this_block%j_glob(this_block%jlo)+j-1 + msg_buffer(iblk,jblk) = ARRAY_G(isrc,jsrc) enddo enddo endif @@ -2814,13 +2577,13 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) ! northeast corner iblk = this_block%ihi+i jblk = this_block%jhi+j - iglb = nx_global+nghost+i - jglb = this_block%j_glob(this_block%jhi)+nghost+j - msg_buffer(iblk,jblk) = ARRAY_G(iglb,jglb) + isrc = nx_global+nghost+i + jsrc = this_block%j_glob(this_block%jhi)+nghost+j + msg_buffer(iblk,jblk) = ARRAY_G(isrc,jsrc) ! southeast corner jblk = j - jglb = this_block%j_glob(this_block%jlo)+j-1 - msg_buffer(iblk,jblk) = ARRAY_G(iglb,jglb) + jsrc = this_block%j_glob(this_block%jlo)+j-1 + msg_buffer(iblk,jblk) = ARRAY_G(isrc,jsrc) enddo enddo endif @@ -2861,13 +2624,13 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) ! southwest corner iblk = i jblk = j - iglb = this_block%i_glob(this_block%ilo)+i-1 - jglb = j - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + isrc = this_block%i_glob(this_block%ilo)+i-1 + jsrc = j + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) ! southeast corner iblk = this_block%ihi+i - iglb = this_block%i_glob(this_block%ihi)+nghost+i - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + isrc = this_block%i_glob(this_block%ihi)+nghost+i + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) enddo enddo endif @@ -2882,13 +2645,13 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) ! northwest corner iblk = i jblk = this_block%jhi+j - iglb = this_block%i_glob(this_block%ilo)+i-1 - jglb = ny_global+nghost+j - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + isrc = this_block%i_glob(this_block%ilo)+i-1 + jsrc = ny_global+nghost+j + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) ! northeast corner iblk = this_block%ihi+i - iglb = this_block%i_glob(this_block%ihi)+nghost+i - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + isrc = this_block%i_glob(this_block%ihi)+nghost+i + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) enddo enddo endif @@ -2904,13 +2667,13 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) ! 
northwest corner iblk = i jblk = this_block%jhi+j - iglb = i - jglb = this_block%j_glob(this_block%jhi)+nghost+j - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + isrc = i + jsrc = this_block%j_glob(this_block%jhi)+nghost+j + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) ! southwest corner jblk = j - jglb = this_block%j_glob(this_block%jlo)+j-1 - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + jsrc = this_block%j_glob(this_block%jlo)+j-1 + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) enddo enddo endif @@ -2927,17 +2690,16 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) ! northeast corner iblk = this_block%ihi+i jblk = this_block%jhi+j - iglb = nx_global+nghost+i - jglb = this_block%j_glob(this_block%jhi)+nghost+j - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + isrc = nx_global+nghost+i + jsrc = this_block%j_glob(this_block%jhi)+nghost+j + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) ! southeast corner jblk = j - jglb = this_block%j_glob(this_block%jlo)+j-1 - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + jsrc = this_block%j_glob(this_block%jlo)+j-1 + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) enddo enddo endif - endif end do @@ -3071,70 +2833,30 @@ subroutine scatter_global_stress(ARRAY, ARRAY_G1, ARRAY_G2, & msg_buffer = c0 this_block = get_block(n,n) - !*** if this is an interior block, then there is no - !*** padding or update checking required - - if (this_block%iblock > 1 .and. & - this_block%iblock < nblocks_x .and. & - this_block%jblock > 1 .and. & - this_block%jblock < nblocks_y) then - - do j=1,ny_block - do i=1,nx_block - msg_buffer(i,j) = ARRAY_G1(this_block%i_glob(i),& - this_block%j_glob(j)) - end do - end do - - !*** if this is an edge block but not a northern edge - !*** we only need to check for closed boundaries and - !*** padding (global index = 0) - - else if (this_block%jblock /= nblocks_y) then - - do j=1,ny_block - if (this_block%j_glob(j) /= 0) then - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - msg_buffer(i,j) = ARRAY_G1(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - endif - end do - - !*** if this is a northern edge block, we need to check - !*** for and properly deal with tripole boundaries - - else - - do j=1,ny_block - if (this_block%j_glob(j) > 0) then ! normal boundary - - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - msg_buffer(i,j) = ARRAY_G1(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - - else if (this_block%j_glob(j) < 0) then ! tripole - - jsrc = ny_global + yoffset + & - (this_block%j_glob(j) + ny_global) - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - isrc = nx_global + xoffset - this_block%i_glob(i) - if (isrc < 1) isrc = isrc + nx_global - if (isrc > nx_global) isrc = isrc - nx_global - msg_buffer(i,j) = isign * ARRAY_G2(isrc,jsrc) - endif - end do - - endif - end do - - endif + do j=1,ny_block + if (this_block%jblock == nblocks_y .and. this_block%j_glob(j) < 0) then + ! tripole is top block with j_glob < 0 + jsrc = ny_global + yoffset + & + (this_block%j_glob(j) + ny_global) + do i=1,nx_block + if (this_block%i_glob(i) /= 0) then + isrc = nx_global + xoffset - this_block%i_glob(i) + if (isrc < 1) isrc = isrc + nx_global + if (isrc > nx_global) isrc = isrc - nx_global + msg_buffer(i,j) = isign * ARRAY_G2(isrc,jsrc) + endif + end do + else + ! normal block + do i=1,nx_block + isrc = this_block%i_glob(i) + jsrc = this_block%j_glob(j) + if (isrc >=1 .and. isrc <= nx_global .and. & + jsrc >=1 .and. 
jsrc <= ny_global) & + msg_buffer(i,j) = ARRAY_G1(isrc,jsrc) + end do + endif + end do call MPI_SEND(msg_buffer, nx_block*ny_block, & mpiR8, dst_dist%blockLocation(n)-1, 3*mpitag_gs+n, & @@ -3152,75 +2874,35 @@ subroutine scatter_global_stress(ARRAY, ARRAY_G1, ARRAY_G2, & dst_block = dst_dist%blockLocalID(n) this_block = get_block(n,n) - !*** if this is an interior block, then there is no - !*** padding or update checking required - - if (this_block%iblock > 1 .and. & - this_block%iblock < nblocks_x .and. & - this_block%jblock > 1 .and. & - this_block%jblock < nblocks_y) then - - do j=1,ny_block - do i=1,nx_block - ARRAY(i,j,dst_block) = ARRAY_G1(this_block%i_glob(i),& - this_block%j_glob(j)) - end do - end do - - !*** if this is an edge block but not a northern edge - !*** we only need to check for closed boundaries and - !*** padding (global index = 0) - - else if (this_block%jblock /= nblocks_y) then - - do j=1,ny_block - if (this_block%j_glob(j) /= 0) then - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G1(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - endif - end do - - !*** if this is a northern edge block, we need to check - !*** for and properly deal with tripole boundaries - - else - - do j=1,ny_block - if (this_block%j_glob(j) > 0) then ! normal boundary - - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G1(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - - else if (this_block%j_glob(j) < 0) then ! tripole - - ! for yoffset=0 or 1, yoffset2=0,0 - ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid - do yoffset2=0,max(yoffset,0)-yoffset - jsrc = ny_global + yoffset + yoffset2 + & - (this_block%j_glob(j) + ny_global) - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - isrc = nx_global + xoffset - this_block%i_glob(i) - if (isrc < 1) isrc = isrc + nx_global - if (isrc > nx_global) isrc = isrc - nx_global - ARRAY(i,j-yoffset2,dst_block) & - = isign * ARRAY_G2(isrc,jsrc) - endif - end do - end do - - endif - end do - - endif + do j=1,ny_block + if (this_block%jblock == nblocks_y .and. this_block%j_glob(j) < 0) then + ! tripole is top block with j_glob < 0 + ! for yoffset=0 or 1, yoffset2=0,0 + ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid + do yoffset2=0,max(yoffset,0)-yoffset + jsrc = ny_global + yoffset + yoffset2 + & + (this_block%j_glob(j) + ny_global) + do i=1,nx_block + if (this_block%i_glob(i) /= 0) then + isrc = nx_global + xoffset - this_block%i_glob(i) + if (isrc < 1) isrc = isrc + nx_global + if (isrc > nx_global) isrc = isrc - nx_global + ARRAY(i,j-yoffset2,dst_block) & + = isign * ARRAY_G2(isrc,jsrc) + endif + end do + end do + else + ! normal block + do i=1,nx_block + isrc = this_block%i_glob(i) + jsrc = this_block%j_glob(j) + if (isrc >=1 .and. isrc <= nx_global .and. & + jsrc >=1 .and. 
jsrc <= ny_global) & + ARRAY(i,j,dst_block) = ARRAY_G1(isrc,jsrc) + end do + endif + end do endif end do diff --git a/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 index b9ac8fe33..205f2150b 100644 --- a/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 +++ b/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 @@ -31,7 +31,7 @@ module ice_boundary use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use ice_blocks, only: nx_block, ny_block, nghost, & - nblocks_tot, ice_blocksNorth, & + nblocks_tot, ice_blocksNorth, nblocks_x, nblocks_y, & ice_blocksSouth, ice_blocksEast, ice_blocksWest, & ice_blocksEast2, ice_blocksWest2, & ice_blocksNorthEast, ice_blocksNorthWest, & @@ -61,6 +61,10 @@ module ice_boundary srcLocalAddr, &! src addresses for each local copy dstLocalAddr ! dst addresses for each local copy + character (char_len) :: & + nsBoundaryType, &! type of boundary to use in logical ns dir + ewBoundaryType ! type of boundary to use in logical ew dir + end type public :: ice_HaloCreate, & @@ -177,6 +181,8 @@ function ice_HaloCreate(dist, nsBoundaryType, ewBoundaryType, & if (my_task >= numProcs) return halo%communicator = communicator + halo%ewBoundaryType = ewBoundaryType + halo%nsBoundaryType = nsBoundaryType blockSizeX = nx_block - 2*nghost blockSizeY = ny_block - 2*nghost @@ -659,7 +665,7 @@ subroutine ice_HaloUpdate2DR8(array, halo, & ! This routine updates ghost cells for an input array and is a ! member of a group of routines under the generic interface -! POP\_HaloUpdate. This routine is the specific interface +! ice\_HaloUpdate. This routine is the specific interface ! for 2d horizontal arrays of double precision. type (ice_halo), intent(in) :: & @@ -690,9 +696,10 @@ subroutine ice_HaloUpdate2DR8(array, halo, & ! !----------------------------------------------------------------------- - integer (int_kind) :: & + integer (int_kind) :: & i,j,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices nxGlobal, &! global domain size in x (tripole) iSrc,jSrc, &! source addresses for message iDst,jDst, &! dest addresses for message @@ -706,6 +713,8 @@ subroutine ice_HaloUpdate2DR8(array, halo, & x1,x2,xavg ! scalars for enforcing symmetry at U pts logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter, &! fill outer boundary ns ltripoleOnly ! local tripoleOnly value character(len=*), parameter :: subname = '(ice_HaloUpdate2DR8)' @@ -733,8 +742,20 @@ subroutine ice_HaloUpdate2DR8(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic or tripole + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0.0_dbl_kind endif @@ -753,29 +774,78 @@ subroutine ice_HaloUpdate2DR8(array, halo, & !----------------------------------------------------------------------- ! -! fill out halo region -! needed for masked halos to ensure halo values are filled for +! Fill out halo region +! Needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated +! 
In general, do NOT fill outer boundary for open boundary conditions +! because do not want to overwrite existing data ! !----------------------------------------------------------------------- - if (ltripoleOnly) then - ! skip fill, not needed since tripole seam always exists if running - ! on tripole grid and set tripoleOnly flag - else + if (.not. ltripoleOnly) then + ! tripoleOnly skip fill, do not overwrite any values in interior as they may + ! already be set and filling tripole is not necessary + + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,iblk) = fill - array(1:nx_block, jhi+j,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,iblk) = fill - array(ihi+i, 1:ny_block,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, iblk) = fill + enddo + enddo + endif + enddo ! iblk endif !----------------------------------------------------------------------- @@ -994,7 +1064,7 @@ subroutine ice_HaloUpdate2DR4(array, halo, & ! This routine updates ghost cells for an input array and is a ! member of a group of routines under the generic interface -! POP\_HaloUpdate. This routine is the specific interface +! ice\_HaloUpdate. This routine is the specific interface ! for 2d horizontal arrays of single precision. type (ice_halo), intent(in) :: & @@ -1022,9 +1092,10 @@ subroutine ice_HaloUpdate2DR4(array, halo, & ! !----------------------------------------------------------------------- - integer (int_kind) :: & + integer (int_kind) :: & i,j,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices nxGlobal, &! global domain size in x (tripole) iSrc,jSrc, &! source addresses for message iDst,jDst, &! dest addresses for message @@ -1037,6 +1108,10 @@ subroutine ice_HaloUpdate2DR4(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! 
fill outer boundary ew + nsfillouter ! fill outer boundary ns + character(len=*), parameter :: subname = '(ice_HaloUpdate2DR4)' !----------------------------------------------------------------------- @@ -1062,8 +1137,20 @@ subroutine ice_HaloUpdate2DR4(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0.0_real_kind endif @@ -1076,25 +1163,74 @@ subroutine ice_HaloUpdate2DR4(array, halo, & !----------------------------------------------------------------------- ! -! fill out halo region -! needed for masked halos to ensure halo values are filled for +! Fill out halo region +! Needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated +! In general, do NOT fill outer boundary for open boundary conditions +! because do not want to overwrite existing data ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,iblk) = fill - array(1:nx_block, jhi+j,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,iblk) = fill - array(ihi+i, 1:ny_block,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -1303,7 +1439,7 @@ subroutine ice_HaloUpdate2DI4(array, halo, & ! This routine updates ghost cells for an input array and is a ! member of a group of routines under the generic interface -! POP\_HaloUpdate. 
This routine is the specific interface +! ice\_HaloUpdate. This routine is the specific interface ! for 2d horizontal integer arrays. type (ice_halo), intent(in) :: & @@ -1331,9 +1467,10 @@ subroutine ice_HaloUpdate2DI4(array, halo, & ! !----------------------------------------------------------------------- - integer (int_kind) :: & + integer (int_kind) :: & i,j,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices nxGlobal, &! global domain size in x (tripole) iSrc,jSrc, &! source addresses for message iDst,jDst, &! dest addresses for message @@ -1346,6 +1483,10 @@ subroutine ice_HaloUpdate2DI4(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + character(len=*), parameter :: subname = '(ice_HaloUpdate2DI4)' !----------------------------------------------------------------------- @@ -1371,8 +1512,20 @@ subroutine ice_HaloUpdate2DI4(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0_int_kind endif @@ -1385,25 +1538,74 @@ subroutine ice_HaloUpdate2DI4(array, halo, & !----------------------------------------------------------------------- ! -! fill out halo region -! needed for masked halos to ensure halo values are filled for +! Fill out halo region +! Needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated +! In general, do NOT fill outer boundary for open boundary conditions +! because do not want to overwrite existing data ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,iblk) = fill - array(1:nx_block, jhi+j,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,iblk) = fill - array(ihi+i, 1:ny_block,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. 
jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -1692,7 +1894,7 @@ subroutine ice_HaloUpdate3DR8(array, halo, & ! This routine updates ghost cells for an input array and is a ! member of a group of routines under the generic interface -! POP\_HaloUpdate. This routine is the specific interface +! ice\_HaloUpdate. This routine is the specific interface ! for 3d horizontal arrays of double precision. type (ice_halo), intent(in) :: & @@ -1720,9 +1922,10 @@ subroutine ice_HaloUpdate3DR8(array, halo, & ! !----------------------------------------------------------------------- - integer (int_kind) :: & + integer (int_kind) :: & i,j,k,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices nxGlobal, &! global domain size in x (tripole) nz, &! size of array in 3rd dimension iSrc,jSrc, &! source addresses for message @@ -1736,6 +1939,10 @@ subroutine ice_HaloUpdate3DR8(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + real (dbl_kind), dimension(:,:,:), allocatable :: & bufTripole ! 3d tripole buffer @@ -1764,8 +1971,20 @@ subroutine ice_HaloUpdate3DR8(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0.0_dbl_kind endif @@ -1781,25 +2000,74 @@ subroutine ice_HaloUpdate3DR8(array, halo, & !----------------------------------------------------------------------- ! -! fill out halo region -! needed for masked halos to ensure halo values are filled for +! Fill out halo region +! Needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated +! In general, do NOT fill outer boundary for open boundary conditions +! because do not want to overwrite existing data ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! 
only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,:,iblk) = fill - array(1:nx_block, jhi+j,:,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,:,iblk) = fill - array(ihi+i, 1:ny_block,:,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, :, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, :, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, :, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -2027,7 +2295,7 @@ subroutine ice_HaloUpdate3DR4(array, halo, & ! This routine updates ghost cells for an input array and is a ! member of a group of routines under the generic interface -! POP\_HaloUpdate. This routine is the specific interface +! ice\_HaloUpdate. This routine is the specific interface ! for 3d horizontal arrays of single precision. type (ice_halo), intent(in) :: & @@ -2055,9 +2323,10 @@ subroutine ice_HaloUpdate3DR4(array, halo, & ! !----------------------------------------------------------------------- - integer (int_kind) :: & + integer (int_kind) :: & i,j,k,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices nxGlobal, &! global domain size in x (tripole) nz, &! size of array in 3rd dimension iSrc,jSrc, &! source addresses for message @@ -2071,6 +2340,10 @@ subroutine ice_HaloUpdate3DR4(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + real (real_kind), dimension(:,:,:), allocatable :: & bufTripole ! 3d tripole buffer @@ -2099,8 +2372,20 @@ subroutine ice_HaloUpdate3DR4(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. 
& + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0.0_real_kind endif @@ -2116,25 +2401,74 @@ subroutine ice_HaloUpdate3DR4(array, halo, & !----------------------------------------------------------------------- ! -! fill out halo region -! needed for masked halos to ensure halo values are filled for +! Fill out halo region +! Needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated +! In general, do NOT fill outer boundary for open boundary conditions +! because do not want to overwrite existing data ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,:,iblk) = fill - array(1:nx_block, jhi+j,:,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,:,iblk) = fill - array(ihi+i, 1:ny_block,:,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, :, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, :, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, :, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -2362,7 +2696,7 @@ subroutine ice_HaloUpdate3DI4(array, halo, & ! This routine updates ghost cells for an input array and is a ! member of a group of routines under the generic interface -! POP\_HaloUpdate. This routine is the specific interface +! ice\_HaloUpdate. This routine is the specific interface ! for 3d horizontal arrays of double precision. type (ice_halo), intent(in) :: & @@ -2390,9 +2724,10 @@ subroutine ice_HaloUpdate3DI4(array, halo, & ! !----------------------------------------------------------------------- - integer (int_kind) :: & + integer (int_kind) :: & i,j,k,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices nxGlobal, &! 
global domain size in x (tripole) nz, &! size of array in 3rd dimension iSrc,jSrc, &! source addresses for message @@ -2406,6 +2741,10 @@ subroutine ice_HaloUpdate3DI4(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + integer (int_kind), dimension(:,:,:), allocatable :: & bufTripole ! 3d tripole buffer @@ -2434,8 +2773,20 @@ subroutine ice_HaloUpdate3DI4(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0_int_kind endif @@ -2451,25 +2802,74 @@ subroutine ice_HaloUpdate3DI4(array, halo, & !----------------------------------------------------------------------- ! -! fill out halo region -! needed for masked halos to ensure halo values are filled for +! Fill out halo region +! Needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated +! In general, do NOT fill outer boundary for open boundary conditions +! because do not want to overwrite existing data ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,:,iblk) = fill - array(1:nx_block, jhi+j,:,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,:,iblk) = fill - array(ihi+i, 1:ny_block,:,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, :, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, :, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. 
jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, :, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -2697,7 +3097,7 @@ subroutine ice_HaloUpdate4DR8(array, halo, & ! This routine updates ghost cells for an input array and is a ! member of a group of routines under the generic interface -! POP\_HaloUpdate. This routine is the specific interface +! ice\_HaloUpdate. This routine is the specific interface ! for 4d horizontal arrays of double precision. type (ice_halo), intent(in) :: & @@ -2725,9 +3125,10 @@ subroutine ice_HaloUpdate4DR8(array, halo, & ! !----------------------------------------------------------------------- - integer (int_kind) :: & + integer (int_kind) :: & i,j,k,l,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices nxGlobal, &! global domain size in x (tripole) nz, nt, &! size of array in 3rd,4th dimensions iSrc,jSrc, &! source addresses for message @@ -2741,6 +3142,10 @@ subroutine ice_HaloUpdate4DR8(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + real (dbl_kind), dimension(:,:,:,:), allocatable :: & bufTripole ! 4d tripole buffer @@ -2769,8 +3174,20 @@ subroutine ice_HaloUpdate4DR8(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0.0_dbl_kind endif @@ -2787,25 +3204,74 @@ subroutine ice_HaloUpdate4DR8(array, halo, & !----------------------------------------------------------------------- ! -! fill out halo region -! needed for masked halos to ensure halo values are filled for +! Fill out halo region +! Needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated +! In general, do NOT fill outer boundary for open boundary conditions +! because do not want to overwrite existing data ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,:,:,iblk) = fill - array(1:nx_block, jhi+j,:,:,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,:,:,iblk) = fill - array(ihi+i, 1:ny_block,:,:,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, :, :, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, :, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! 
south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, :, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, :, :, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, :, :, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -3049,7 +3515,7 @@ subroutine ice_HaloUpdate4DR4(array, halo, & ! This routine updates ghost cells for an input array and is a ! member of a group of routines under the generic interface -! POP\_HaloUpdate. This routine is the specific interface +! ice\_HaloUpdate. This routine is the specific interface ! for 4d horizontal arrays of single precision. type (ice_halo), intent(in) :: & @@ -3077,9 +3543,10 @@ subroutine ice_HaloUpdate4DR4(array, halo, & ! !----------------------------------------------------------------------- - integer (int_kind) :: & + integer (int_kind) :: & i,j,k,l,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices nxGlobal, &! global domain size in x (tripole) nz, nt, &! size of array in 3rd,4th dimensions iSrc,jSrc, &! source addresses for message @@ -3093,6 +3560,10 @@ subroutine ice_HaloUpdate4DR4(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + real (real_kind), dimension(:,:,:,:), allocatable :: & bufTripole ! 4d tripole buffer @@ -3121,8 +3592,20 @@ subroutine ice_HaloUpdate4DR4(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. + if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0.0_real_kind endif @@ -3139,25 +3622,74 @@ subroutine ice_HaloUpdate4DR4(array, halo, & !----------------------------------------------------------------------- ! -! fill out halo region -! needed for masked halos to ensure halo values are filled for +! Fill out halo region +! Needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated +! In general, do NOT fill outer boundary for open boundary conditions +! because do not want to overwrite existing data ! !----------------------------------------------------------------------- + ! 
fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,:,:,iblk) = fill - array(1:nx_block, jhi+j,:,:,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,:,:,iblk) = fill - array(ihi+i, 1:ny_block,:,:,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, :, :, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, :, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, :, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, :, :, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, :, :, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -3401,7 +3933,7 @@ subroutine ice_HaloUpdate4DI4(array, halo, & ! This routine updates ghost cells for an input array and is a ! member of a group of routines under the generic interface -! POP\_HaloUpdate. This routine is the specific interface +! ice\_HaloUpdate. This routine is the specific interface ! for 4d horizontal integer arrays. type (ice_halo), intent(in) :: & @@ -3429,9 +3961,10 @@ subroutine ice_HaloUpdate4DI4(array, halo, & ! !----------------------------------------------------------------------- - integer (int_kind) :: & + integer (int_kind) :: & i,j,k,l,nmsg, &! dummy loop indices iblk,ilo,ihi,jlo,jhi, &! block sizes for fill + iblock,jblock, &! global block indices nxGlobal, &! global domain size in x (tripole) nz, nt, &! size of array in 3rd,4th dimensions iSrc,jSrc, &! source addresses for message @@ -3445,6 +3978,10 @@ subroutine ice_HaloUpdate4DI4(array, halo, & fill, &! value to use for unknown points x1,x2,xavg ! scalars for enforcing symmetry at U pts + logical (log_kind) :: & + ewfillouter, &! fill outer boundary ew + nsfillouter ! fill outer boundary ns + integer (int_kind), dimension(:,:,:,:), allocatable :: & bufTripole ! 4d tripole buffer @@ -3473,8 +4010,20 @@ subroutine ice_HaloUpdate4DI4(array, halo, & ! !----------------------------------------------------------------------- + ewfillouter = .false. + nsfillouter = .false. + + ! fill outer boundary if cyclic + if (halo%ewBoundaryType == 'cyclic') ewfillouter=.true. 
+ if (halo%nsBoundaryType == 'tripole' .or. & + halo%nsBoundaryType == 'tripoleT' .or. & + halo%nsBoundaryType == 'cyclic') nsfillouter=.true. + if (present(fillValue)) then fill = fillValue + ! always fill outer boundary if fillValue is passed + ewfillouter = .true. + nsfillouter = .true. else fill = 0_int_kind endif @@ -3491,25 +4040,74 @@ subroutine ice_HaloUpdate4DI4(array, halo, & !----------------------------------------------------------------------- ! -! fill out halo region -! needed for masked halos to ensure halo values are filled for +! Fill out halo region +! Needed for masked halos to ensure halo values are filled for ! halo grid cells that are not updated +! In general, do NOT fill outer boundary for open boundary conditions +! because do not want to overwrite existing data ! !----------------------------------------------------------------------- + ! fill outer boundary as needed + ! only fill corners if both edges are being filled do iblk = 1, halo%numLocalBlocks - call get_block_parameter(halo%blockGlobalID(iblk), & - ilo=ilo, ihi=ihi, & - jlo=jlo, jhi=jhi) - do j = 1,nghost - array(1:nx_block, jlo-j,:,:,iblk) = fill - array(1:nx_block, jhi+j,:,:,iblk) = fill - enddo - do i = 1,nghost - array(ilo-i, 1:ny_block,:,:,iblk) = fill - array(ihi+i, 1:ny_block,:,:,iblk) = fill - enddo - enddo + call get_block_parameter(halo%blockGlobalID(iblk), & + ilo=ilo, ihi=ihi, & + jlo=jlo, jhi=jhi, & + iblock=iblock, jblock=jblock) + if (ewfillouter .or. iblock > 1) then ! west edge + do i = 1,nghost + array(ilo-i, jlo:jhi, :, :, iblk) = fill + enddo + endif + if (ewfillouter .or. iblock < nblocks_x) then ! east edge + do i = 1,nghost + array(ihi+i, jlo:jhi, :, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock > 1) then ! south edge + do j = 1,nghost + array(ilo:ihi, jlo-j, :, :, iblk) = fill + enddo + endif + if (nsfillouter .or. jblock < nblocks_y) then ! north edge + do j = 1,nghost + array(ilo:ihi, jhi+j, :, :, iblk) = fill + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! southwest corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jlo-j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock > 1) .and. & ! northwest corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ilo-i, jhi+j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! southeast corner + (nsfillouter .or. jblock > 1)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jlo-j, :, :, iblk) = fill + enddo + enddo + endif + if ((ewfillouter .or. iblock < nblocks_x) .and. & ! northeast corner + (nsfillouter .or. jblock < nblocks_y)) then + do j = 1,nghost + do i = 1,nghost + array(ihi+i, jhi+j, :, :, iblk) = fill + enddo + enddo + endif + enddo ! iblk !----------------------------------------------------------------------- ! @@ -4778,15 +5376,15 @@ end subroutine ice_HaloMsgCreate subroutine ice_HaloExtrapolate2DR8(ARRAY,dist,ew_bndy_type,ns_bndy_type) -! This subroutine extrapolates ARRAY values into the first row or column -! of ghost cells, and is intended for grid variables whose ghost cells +! This subroutine extrapolates ARRAY values into the ghost cells, +! and is intended for grid variables whose ghost cells ! would otherwise be set using the default boundary conditions (Dirichlet ! or Neumann). -! Note: This routine will need to be modified for nghost > 1. -! We assume padding occurs only on east and north edges. ! ! 
This is the specific interface for double precision arrays ! corresponding to the generic interface ice_HaloExtrapolate +! +! T.Craig, Oct 2025 - extend to nghost > 1 use ice_blocks, only: block, nblocks_x, nblocks_y, get_block use ice_constants, only: c2 @@ -4809,8 +5407,9 @@ subroutine ice_HaloExtrapolate2DR8(ARRAY,dist,ew_bndy_type,ns_bndy_type) !----------------------------------------------------------------------- integer (int_kind) :: & - i,j,iblk, &! dummy loop indices - numBlocks, &! number of local blocks + i,j,n,iblk,ii,jj, &! dummy loop indices + ilo,ihi,jlo,jhi, &! active block indices + numBlocks, &! number of local blocks blockID, &! block location ibc ! ghost cell column or row @@ -4831,32 +5430,40 @@ subroutine ice_HaloExtrapolate2DR8(ARRAY,dist,ew_bndy_type,ns_bndy_type) do iblk = 1, numBlocks call ice_distributionGetBlockID(dist, iblk, blockID) this_block = get_block(blockID, blockID) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi if (this_block%iblock == 1) then ! west edge if (trim(ew_bndy_type) /= 'cyclic') then + do n = 1, nghost + ii = ilo - n ! gridcell to extrapolate to do j = 1, ny_block - ARRAY(1,j,iblk) = c2*ARRAY(2,j,iblk) - ARRAY(3,j,iblk) + ARRAY(ii,j,iblk) = c2*ARRAY(ii+1,j,iblk) - ARRAY(ii+2,j,iblk) + enddo enddo endif endif if (this_block%iblock == nblocks_x) then ! east edge if (trim(ew_bndy_type) /= 'cyclic') then - ! locate ghost cell column (avoid padding) - ibc = nx_block - do i = nx_block, nghost + 1, -1 - if (this_block%i_glob(i) == 0) ibc = ibc - 1 - enddo + do n = 1, nghost + ii = ihi + n ! gridcell to extrapolate to do j = 1, ny_block - ARRAY(ibc,j,iblk) = c2*ARRAY(ibc-1,j,iblk) - ARRAY(ibc-2,j,iblk) + ARRAY(ii,j,iblk) = c2*ARRAY(ii-1,j,iblk) - ARRAY(ii-2,j,iblk) + enddo enddo endif endif if (this_block%jblock == 1) then ! south edge if (trim(ns_bndy_type) /= 'cyclic') then + do n = 1, nghost + jj = jlo - n ! gridcell to extrapolate to do i = 1, nx_block - ARRAY(i,1,iblk) = c2*ARRAY(i,2,iblk) - ARRAY(i,3,iblk) + ARRAY(i,jj,iblk) = c2*ARRAY(i,jj+1,iblk) - ARRAY(i,jj+2,iblk) + enddo enddo endif endif @@ -4865,13 +5472,11 @@ subroutine ice_HaloExtrapolate2DR8(ARRAY,dist,ew_bndy_type,ns_bndy_type) if (trim(ns_bndy_type) /= 'cyclic' .and. & trim(ns_bndy_type) /= 'tripole' .and. & trim(ns_bndy_type) /= 'tripoleT' ) then - ! locate ghost cell column (avoid padding) - ibc = ny_block - do j = ny_block, nghost + 1, -1 - if (this_block%j_glob(j) == 0) ibc = ibc - 1 - enddo + do n = 1, nghost + jj = jhi + n ! gridcell to extrapolate to do i = 1, nx_block - ARRAY(i,ibc,iblk) = c2*ARRAY(i,ibc-1,iblk) - ARRAY(i,ibc-2,iblk) + ARRAY(i,jj,iblk) = c2*ARRAY(i,jj-1,iblk) - ARRAY(i,jj-2,iblk) + enddo enddo endif endif diff --git a/cicecore/cicedyn/infrastructure/comm/serial/ice_gather_scatter.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_gather_scatter.F90 index 5f4938281..be1845e56 100644 --- a/cicecore/cicedyn/infrastructure/comm/serial/ice_gather_scatter.F90 +++ b/cicecore/cicedyn/infrastructure/comm/serial/ice_gather_scatter.F90 @@ -925,80 +925,40 @@ subroutine scatter_global_dbl(ARRAY, ARRAY_G, src_task, dst_dist, & this_block = get_block(n,n) dst_block = dst_dist%blockLocalID(n) - !*** if this is an interior block, then there is no - !*** padding or update checking required - - if (this_block%iblock > 1 .and. & - this_block%iblock < nblocks_x .and. & - this_block%jblock > 1 .and. 
& - this_block%jblock < nblocks_y) then - - do j=1,ny_block - do i=1,nx_block - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - end do - end do - - !*** if this is an edge block but not a northern edge - !*** we only need to check for closed boundaries and - !*** padding (global index = 0) - - else if (this_block%jblock /= nblocks_y) then - - do j=1,ny_block - if (this_block%j_glob(j) /= 0) then - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - endif - end do - - !*** if this is a northern edge block, we need to check - !*** for and properly deal with tripole boundaries - - else - - do j=1,ny_block - if (this_block%j_glob(j) > 0) then ! normal boundary - - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - - else if (this_block%j_glob(j) < 0) then ! tripole - - ! for yoffset=0 or 1, yoffset2=0,0 - ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid - do yoffset2=0,max(yoffset,0)-yoffset - jsrc = ny_global + yoffset + yoffset2 + & - (this_block%j_glob(j) + ny_global) - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - isrc = nx_global + xoffset - this_block%i_glob(i) - if (isrc < 1) isrc = isrc + nx_global - if (isrc > nx_global) isrc = isrc - nx_global - ARRAY(i,j-yoffset2,dst_block) & - = isign * ARRAY_G(isrc,jsrc) - endif - end do - end do - - endif - end do - - endif + do j=1,ny_block + if (this_block%jblock == nblocks_y .and. this_block%j_glob(j) < 0) then + ! tripole is top block with j_glob < 0 + ! for yoffset=0 or 1, yoffset2=0,0 + ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid + do yoffset2=0,max(yoffset,0)-yoffset + jsrc = ny_global + yoffset + yoffset2 + & + (this_block%j_glob(j) + ny_global) + do i=1,nx_block + if (this_block%i_glob(i) /= 0) then + isrc = nx_global + xoffset - this_block%i_glob(i) + if (isrc < 1) isrc = isrc + nx_global + if (isrc > nx_global) isrc = isrc - nx_global + ARRAY(i,j-yoffset2,dst_block) & + = isign * ARRAY_G(isrc,jsrc) + endif + end do + end do + else + ! normal block + do i=1,nx_block + isrc = this_block%i_glob(i) + jsrc = this_block%j_glob(j) + if (isrc >=1 .and. isrc <= nx_global .and. & + jsrc >=1 .and. jsrc <= ny_global) & + ARRAY(i,j,dst_block) = ARRAY_G(isrc,jsrc) + end do + endif + end do endif ! dst block not land end do ! block loop !----------------------------------------------------------------- - ! Ensure unused ghost cell values are 0 + ! Set ghost cell values to 0 for noupdate !----------------------------------------------------------------- if (field_loc == field_loc_noupdate) then @@ -1173,80 +1133,40 @@ subroutine scatter_global_real(ARRAY, ARRAY_G, src_task, dst_dist, & this_block = get_block(n,n) dst_block = dst_dist%blockLocalID(n) - !*** if this is an interior block, then there is no - !*** padding or update checking required - - if (this_block%iblock > 1 .and. & - this_block%iblock < nblocks_x .and. & - this_block%jblock > 1 .and. 
& - this_block%jblock < nblocks_y) then - - do j=1,ny_block - do i=1,nx_block - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - end do - end do - - !*** if this is an edge block but not a northern edge - !*** we only need to check for closed boundaries and - !*** padding (global index = 0) - - else if (this_block%jblock /= nblocks_y) then - - do j=1,ny_block - if (this_block%j_glob(j) /= 0) then - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - endif - end do - - !*** if this is a northern edge block, we need to check - !*** for and properly deal with tripole boundaries - - else - - do j=1,ny_block - if (this_block%j_glob(j) > 0) then ! normal boundary - - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - - else if (this_block%j_glob(j) < 0) then ! tripole - - ! for yoffset=0 or 1, yoffset2=0,0 - ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid - do yoffset2=0,max(yoffset,0)-yoffset - jsrc = ny_global + yoffset + yoffset2 + & - (this_block%j_glob(j) + ny_global) - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - isrc = nx_global + xoffset - this_block%i_glob(i) - if (isrc < 1) isrc = isrc + nx_global - if (isrc > nx_global) isrc = isrc - nx_global - ARRAY(i,j-yoffset2,dst_block) & - = isign * ARRAY_G(isrc,jsrc) - endif - end do - end do - - endif - end do - - endif + do j=1,ny_block + if (this_block%jblock == nblocks_y .and. this_block%j_glob(j) < 0) then + ! tripole is top block with j_glob < 0 + ! for yoffset=0 or 1, yoffset2=0,0 + ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid + do yoffset2=0,max(yoffset,0)-yoffset + jsrc = ny_global + yoffset + yoffset2 + & + (this_block%j_glob(j) + ny_global) + do i=1,nx_block + if (this_block%i_glob(i) /= 0) then + isrc = nx_global + xoffset - this_block%i_glob(i) + if (isrc < 1) isrc = isrc + nx_global + if (isrc > nx_global) isrc = isrc - nx_global + ARRAY(i,j-yoffset2,dst_block) & + = isign * ARRAY_G(isrc,jsrc) + endif + end do + end do + else + ! normal block + do i=1,nx_block + isrc = this_block%i_glob(i) + jsrc = this_block%j_glob(j) + if (isrc >=1 .and. isrc <= nx_global .and. & + jsrc >=1 .and. jsrc <= ny_global) & + ARRAY(i,j,dst_block) = ARRAY_G(isrc,jsrc) + end do + endif + end do endif ! dst block not land end do ! block loop !----------------------------------------------------------------- - ! Ensure unused ghost cell values are 0 + ! Set ghost cell values to 0 for noupdate !----------------------------------------------------------------- if (field_loc == field_loc_noupdate) then @@ -1421,80 +1341,40 @@ subroutine scatter_global_int(ARRAY, ARRAY_G, src_task, dst_dist, & this_block = get_block(n,n) dst_block = dst_dist%blockLocalID(n) - !*** if this is an interior block, then there is no - !*** padding or update checking required - - if (this_block%iblock > 1 .and. & - this_block%iblock < nblocks_x .and. & - this_block%jblock > 1 .and. 
& - this_block%jblock < nblocks_y) then - - do j=1,ny_block - do i=1,nx_block - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - end do - end do - - !*** if this is an edge block but not a northern edge - !*** we only need to check for closed boundaries and - !*** padding (global index = 0) - - else if (this_block%jblock /= nblocks_y) then - - do j=1,ny_block - if (this_block%j_glob(j) /= 0) then - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - endif - end do - - !*** if this is a northern edge block, we need to check - !*** for and properly deal with tripole boundaries - - else - - do j=1,ny_block - if (this_block%j_glob(j) > 0) then ! normal boundary - - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - - else if (this_block%j_glob(j) < 0) then ! tripole - - ! for yoffset=0 or 1, yoffset2=0,0 - ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid - do yoffset2=0,max(yoffset,0)-yoffset - jsrc = ny_global + yoffset + yoffset2 + & - (this_block%j_glob(j) + ny_global) - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - isrc = nx_global + xoffset - this_block%i_glob(i) - if (isrc < 1) isrc = isrc + nx_global - if (isrc > nx_global) isrc = isrc - nx_global - ARRAY(i,j-yoffset2,dst_block) & - = isign * ARRAY_G(isrc,jsrc) - endif - end do - end do - - endif - end do - - endif + do j=1,ny_block + if (this_block%jblock == nblocks_y .and. this_block%j_glob(j) < 0) then + ! tripole is top block with j_glob < 0 + ! for yoffset=0 or 1, yoffset2=0,0 + ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid + do yoffset2=0,max(yoffset,0)-yoffset + jsrc = ny_global + yoffset + yoffset2 + & + (this_block%j_glob(j) + ny_global) + do i=1,nx_block + if (this_block%i_glob(i) /= 0) then + isrc = nx_global + xoffset - this_block%i_glob(i) + if (isrc < 1) isrc = isrc + nx_global + if (isrc > nx_global) isrc = isrc - nx_global + ARRAY(i,j-yoffset2,dst_block) & + = isign * ARRAY_G(isrc,jsrc) + endif + end do + end do + else + ! normal block + do i=1,nx_block + isrc = this_block%i_glob(i) + jsrc = this_block%j_glob(j) + if (isrc >=1 .and. isrc <= nx_global .and. & + jsrc >=1 .and. jsrc <= ny_global) & + ARRAY(i,j,dst_block) = ARRAY_G(isrc,jsrc) + end do + endif + end do endif ! dst block not land end do ! block loop !----------------------------------------------------------------- - ! Ensure unused ghost cell values are 0 + ! Set ghost cell values to 0 for noupdate !----------------------------------------------------------------- if (field_loc == field_loc_noupdate) then @@ -1570,8 +1450,8 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) integer (int_kind) :: & i,j,n, &! dummy loop indices + isrc, jsrc, &! source addresses iblk, jblk, &! source addresses - iglb, jglb, &! global indices dst_block ! local block index in dest distribution type (block) :: & @@ -1618,13 +1498,13 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) ! southwest corner iblk = i jblk = j - iglb = this_block%i_glob(this_block%ilo)+i-1 - jglb = j - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + isrc = this_block%i_glob(this_block%ilo)+i-1 + jsrc = j + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) ! 
southeast corner iblk = this_block%ihi+i - iglb = this_block%i_glob(this_block%ihi)+nghost+i - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + isrc = this_block%i_glob(this_block%ihi)+nghost+i + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) enddo enddo endif @@ -1639,13 +1519,13 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) ! northwest corner iblk = i jblk = this_block%jhi+j - iglb = this_block%i_glob(this_block%ilo)+i-1 - jglb = ny_global+nghost+j - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + isrc = this_block%i_glob(this_block%ilo)+i-1 + jsrc = ny_global+nghost+j + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) ! northeast corner iblk = this_block%ihi+i - iglb = this_block%i_glob(this_block%ihi)+nghost+i - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + isrc = this_block%i_glob(this_block%ihi)+nghost+i + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) enddo enddo endif @@ -1661,13 +1541,13 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) ! northwest corner iblk = i jblk = this_block%jhi+j - iglb = i - jglb = this_block%j_glob(this_block%jhi)+nghost+j - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + isrc = i + jsrc = this_block%j_glob(this_block%jhi)+nghost+j + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) ! southwest corner jblk = j - jglb = this_block%j_glob(this_block%jlo)+j-1 - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + jsrc = this_block%j_glob(this_block%jlo)+j-1 + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) enddo enddo endif @@ -1684,17 +1564,16 @@ subroutine scatter_global_ext(ARRAY, ARRAY_G, src_task, dst_dist) ! northeast corner iblk = this_block%ihi+i jblk = this_block%jhi+j - iglb = nx_global+nghost+i - jglb = this_block%j_glob(this_block%jhi)+nghost+j - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + isrc = nx_global+nghost+i + jsrc = this_block%j_glob(this_block%jhi)+nghost+j + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) ! southeast corner jblk = j - jglb = this_block%j_glob(this_block%jlo)+j-1 - ARRAY(iblk,jblk,dst_block) = ARRAY_G(iglb,jglb) + jsrc = this_block%j_glob(this_block%jlo)+j-1 + ARRAY(iblk,jblk,dst_block) = ARRAY_G(isrc,jsrc) enddo enddo endif - endif ! dst block not land end do ! block loop @@ -1775,75 +1654,35 @@ subroutine scatter_global_stress(ARRAY, ARRAY_G1, ARRAY_G2, & this_block = get_block(n,n) dst_block = dst_dist%blockLocalID(n) - !*** if this is an interior block, then there is no - !*** padding or update checking required - - if (this_block%iblock > 1 .and. & - this_block%iblock < nblocks_x .and. & - this_block%jblock > 1 .and. & - this_block%jblock < nblocks_y) then - - do j=1,ny_block - do i=1,nx_block - ARRAY(i,j,dst_block) = ARRAY_G1(this_block%i_glob(i),& - this_block%j_glob(j)) - end do - end do - - !*** if this is an edge block but not a northern edge - !*** we only need to check for closed boundaries and - !*** padding (global index = 0) - - else if (this_block%jblock /= nblocks_y) then - - do j=1,ny_block - if (this_block%j_glob(j) /= 0) then - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G1(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - endif - end do - - !*** if this is a northern edge block, we need to check - !*** for and properly deal with tripole boundaries - - else - - do j=1,ny_block - if (this_block%j_glob(j) > 0) then ! 
normal boundary - - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - ARRAY(i,j,dst_block) = ARRAY_G1(this_block%i_glob(i),& - this_block%j_glob(j)) - endif - end do - - else if (this_block%j_glob(j) < 0) then ! tripole - - ! for yoffset=0 or 1, yoffset2=0,0 - ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid - do yoffset2=0,max(yoffset,0)-yoffset - jsrc = ny_global + yoffset + yoffset2 + & - (this_block%j_glob(j) + ny_global) - do i=1,nx_block - if (this_block%i_glob(i) /= 0) then - isrc = nx_global + xoffset - this_block%i_glob(i) - if (isrc < 1) isrc = isrc + nx_global - if (isrc > nx_global) isrc = isrc - nx_global - ARRAY(i,j-yoffset2,dst_block) & - = isign * ARRAY_G2(isrc,jsrc) - endif - end do - end do - - endif - end do - - endif + do j=1,ny_block + if (this_block%jblock == nblocks_y .and. this_block%j_glob(j) < 0) then + ! tripole is top block with j_glob < 0 + ! for yoffset=0 or 1, yoffset2=0,0 + ! for yoffset=-1, yoffset2=0,1, for u-rows on T-fold grid + do yoffset2=0,max(yoffset,0)-yoffset + jsrc = ny_global + yoffset + yoffset2 + & + (this_block%j_glob(j) + ny_global) + do i=1,nx_block + if (this_block%i_glob(i) /= 0) then + isrc = nx_global + xoffset - this_block%i_glob(i) + if (isrc < 1) isrc = isrc + nx_global + if (isrc > nx_global) isrc = isrc - nx_global + ARRAY(i,j-yoffset2,dst_block) & + = isign * ARRAY_G2(isrc,jsrc) + endif + end do + end do + else + ! normal block + do i=1,nx_block + isrc = this_block%i_glob(i) + jsrc = this_block%j_glob(j) + if (isrc >=1 .and. isrc <= nx_global .and. & + jsrc >=1 .and. jsrc <= ny_global) & + ARRAY(i,j,dst_block) = ARRAY_G1(isrc,jsrc) + end do + endif + end do endif ! dst block not land end do ! block loop diff --git a/cicecore/cicedyn/infrastructure/ice_blocks.F90 b/cicecore/cicedyn/infrastructure/ice_blocks.F90 index ccaf23999..245f77bc7 100644 --- a/cicecore/cicedyn/infrastructure/ice_blocks.F90 +++ b/cicecore/cicedyn/infrastructure/ice_blocks.F90 @@ -31,7 +31,13 @@ module ice_blocks tripoleTFlag ! tripole boundary is a T-fold integer (int_kind), dimension(:), pointer :: & - i_glob, j_glob ! global domain location for each point + i_glob, j_glob ! global domain location for each point. + ! valid values between 1:nx_global, 1:ny_global. + ! outside that range may occur in the halo with + ! open or closed bcs or on the tripole. + ! by definition, tripole is only on the north + ! boundary and in that case, the j_glob values + ! will be valid j_glob values with minus sign. end type public :: create_blocks ,& @@ -140,9 +146,23 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, & !---------------------------------------------------------------------- ! -! compute number of blocks and cartesian decomposition -! if the requested block size does not divide the global domain -! size evenly, add additional block space to accomodate padding +! Compute number of blocks and cartesian decomposition. +! If the requested block size does not divide the global domain +! size evenly, add additional block space to accomodate padding. +! +! Compute the global indices for each block including on the halo. +! The global indices go from 1:nx_global and 1:ny_global for +! most of the domain including the halo that's in the internal part +! of the domain. On the outer boundaries, the global indices will +! be wrapped around for the 'cyclic' option and will be given a +! negative value on the north tripole. Padded gridcells will be +! given a global index of zero (0). All other cases will extrapolate +! 
the global index outside of 1:nx_global, 1:ny_global. That means +! the global index will go from -nghost+1:0 on the lower boundary +! and n*_global+1:n*_global+nghost on the upper boundary and the +! haloUpdate and scatter, for instance, will not fill those values +! in those cases. Other boundary condition methods will fill the +! outer halo values in cases where ice exists on those boundaries. ! !---------------------------------------------------------------------- @@ -206,7 +226,7 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, & all_blocks_ij(iblock,jblock) = n do j=1,ny_block - j_global(j,n) = js - nghost + j - 1 + j_global(j,n) = js - nghost + j - 1 ! simple lower to upper counting !*** southern ghost cells @@ -215,13 +235,13 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, & case ('cyclic') j_global(j,n) = j_global(j,n) + ny_global case ('open') - j_global(j,n) = nghost - j + 1 + ! lower to upper case ('closed') - j_global(j,n) = 0 + ! lower to upper case ('tripole') - j_global(j,n) = nghost - j + 1 ! open + ! lower to upper case ('tripoleT') - j_global(j,n) = -j_global(j,n) + 1 ! open + ! lower to upper case default call abort_ice(subname//' ERROR: unknown n-s bndy type') end select @@ -239,13 +259,13 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, & case ('cyclic') j_global(j,n) = j_global(j,n) - ny_global case ('open') - j_global(j,n) = 2*ny_global - j_global(j,n) + 1 + ! lower to upper case ('closed') - j_global(j,n) = 0 + ! lower to upper case ('tripole') - j_global(j,n) = -j_global(j,n) + j_global(j,n) = -j_global(j,n) ! negative case ('tripoleT') - j_global(j,n) = -j_global(j,n) + j_global(j,n) = -j_global(j,n) ! negative case default call abort_ice(subname//' ERROR: unknown n-s bndy type') end select @@ -262,7 +282,7 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, & all_blocks(n)%j_glob => j_global(:,n) do i=1,nx_block - i_global(i,n) = is - nghost + i - 1 + i_global(i,n) = is - nghost + i - 1 ! left to right counting !*** western ghost cells @@ -271,9 +291,9 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, & case ('cyclic') i_global(i,n) = i_global(i,n) + nx_global case ('open') - i_global(i,n) = nghost - i + 1 + ! left to right case ('closed') - i_global(i,n) = 0 + ! left to right case default call abort_ice(subname//' ERROR: unknown e-w bndy type') end select @@ -291,9 +311,9 @@ subroutine create_blocks(nx_global, ny_global, ew_boundary_type, & case ('cyclic') i_global(i,n) = i_global(i,n) - nx_global case ('open') - i_global(i,n) = 2*nx_global - i_global(i,n) + 1 + ! left to right case ('closed') - i_global(i,n) = 0 + ! left to right case default call abort_ice(subname//' ERROR: unknown e-w bndy type') end select diff --git a/cicecore/cicedyn/infrastructure/ice_domain.F90 b/cicecore/cicedyn/infrastructure/ice_domain.F90 index 86d6a1939..f112517a3 100644 --- a/cicecore/cicedyn/infrastructure/ice_domain.F90 +++ b/cicecore/cicedyn/infrastructure/ice_domain.F90 @@ -44,7 +44,7 @@ module ice_domain nblocks ! actual number of blocks on this processor logical (kind=log_kind), public :: & - close_boundaries + close_boundaries ! deprecated Nov, 2025 integer (int_kind), dimension(:), pointer, public :: & blocks_ice => null() ! block ids for local blocks @@ -371,100 +371,11 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) character(len=*), parameter :: subname = '(init_domain_distribution)' -!---------------------------------------------------------------------- -! -! 
check that there are at least nghost+1 rows or columns of land cells -! for closed boundary conditions (otherwise grid lengths are zero in -! cells neighboring ocean points). -! -!---------------------------------------------------------------------- - call icepack_query_parameters(puny_out=puny, rad_to_deg_out=rad_to_deg) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (trim(ns_boundary_type) == 'closed') then - call abort_ice(subname//' ERROR: ns_boundary_type = closed not supported', file=__FILE__, line=__LINE__) - allocate(nocn(nblocks_tot)) - nocn = 0 - do n=1,nblocks_tot - this_block = get_block(n,n) - if (this_block%jblock == nblocks_y) then ! north edge - do j = this_block%jhi-1, this_block%jhi - if (this_block%j_glob(j) > 0) then - do i = 1, nx_block - if (this_block%i_glob(i) > 0) then - ig = this_block%i_glob(i) - jg = this_block%j_glob(j) - if (KMTG(ig,jg) > puny) nocn(n) = nocn(n) + 1 - endif - enddo - endif - enddo - endif - if (this_block%jblock == 1) then ! south edge - do j = this_block%jlo, this_block%jlo+1 - if (this_block%j_glob(j) > 0) then - do i = 1, nx_block - if (this_block%i_glob(i) > 0) then - ig = this_block%i_glob(i) - jg = this_block%j_glob(j) - if (KMTG(ig,jg) > puny) nocn(n) = nocn(n) + 1 - endif - enddo - endif - enddo - endif - if (nocn(n) > 0) then - write(nu_diag,*) subname,'ns closed, Not enough land cells along ns edge' - call abort_ice(subname//' ERROR: Not enough land cells along ns edge for ns closed', & - file=__FILE__, line=__LINE__) - endif - enddo - deallocate(nocn) - endif - if (trim(ew_boundary_type) == 'closed') then - call abort_ice(subname//' ERROR: ew_boundary_type = closed not supported', file=__FILE__, line=__LINE__) - allocate(nocn(nblocks_tot)) - nocn = 0 - do n=1,nblocks_tot - this_block = get_block(n,n) - if (this_block%iblock == nblocks_x) then ! east edge - do j = 1, ny_block - if (this_block%j_glob(j) > 0) then - do i = this_block%ihi-1, this_block%ihi - if (this_block%i_glob(i) > 0) then - ig = this_block%i_glob(i) - jg = this_block%j_glob(j) - if (KMTG(ig,jg) > puny) nocn(n) = nocn(n) + 1 - endif - enddo - endif - enddo - endif - if (this_block%iblock == 1) then ! west edge - do j = 1, ny_block - if (this_block%j_glob(j) > 0) then - do i = this_block%ilo, this_block%ilo+1 - if (this_block%i_glob(i) > 0) then - ig = this_block%i_glob(i) - jg = this_block%j_glob(j) - if (KMTG(ig,jg) > puny) nocn(n) = nocn(n) + 1 - endif - enddo - endif - enddo - endif - if (nocn(n) > 0) then - write(nu_diag,*) subname,'ew closed, Not enough land cells along ew edge' - call abort_ice(subname//' ERROR: Not enough land cells along ew edge for ew closed', & - file=__FILE__, line=__LINE__) - endif - enddo - deallocate(nocn) - endif - !---------------------------------------------------------------------- ! ! estimate the amount of work per processor using the topography @@ -519,11 +430,11 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) do n=1,nblocks_tot this_block = get_block(n,n) do j=this_block%jlo,this_block%jhi - if (this_block%j_glob(j) > 0) then + jg = this_block%j_glob(j) + if (jg > 0) then do i=this_block%ilo,this_block%ihi - if (this_block%i_glob(i) > 0) then - ig = this_block%i_glob(i) - jg = this_block%j_glob(j) + ig = this_block%i_glob(i) + if (ig > 0) then ! if (KMTG(ig,jg) > puny) & ! 
nocn(n) = max(nocn(n),nint(wght(ig,jg)+1.0_dbl_kind)) if (KMTG(ig,jg) > puny) then @@ -544,11 +455,11 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) do n=1,nblocks_tot this_block = get_block(n,n) do j=this_block%jlo,this_block%jhi - if (this_block%j_glob(j) > 0) then + jg = this_block%j_glob(j) + if (jg > 0) then do i=this_block%ilo,this_block%ihi - if (this_block%i_glob(i) > 0) then - ig = this_block%i_glob(i) - jg = this_block%j_glob(j) + ig = this_block%i_glob(i) + if (ig > 0) then if (grid_ice == 'C' .or. grid_ice == 'CD') then ! Have to be careful about block elimination with C/CD ! Use a bigger stencil @@ -606,9 +517,9 @@ subroutine init_domain_distribution(KMTG,ULATG,grid_ice) work_per_block = 0 end where if (my_task == master_task) then - write(nu_diag,'(2a,4i9)') subname,' work_unit = ',work_unit, max_work_unit - write(nu_diag,'(2a,4i9)') subname,' nocn = ',minval(nocn),maxval(nocn),sum(nocn) - write(nu_diag,'(2a,4i9)') subname,' work_per_block = ',minval(work_per_block),maxval(work_per_block),sum(work_per_block) + write(nu_diag,'(2a,2i9)') subname,' work_unit = ',work_unit, max_work_unit + write(nu_diag,'(2a,3i16)') subname,' nocn = ',minval(nocn),maxval(nocn),sum(nocn) + write(nu_diag,'(2a,3i9)') subname,' work_per_block = ',minval(work_per_block),maxval(work_per_block),sum(work_per_block) endif deallocate(nocn) diff --git a/cicecore/cicedyn/infrastructure/ice_grid.F90 b/cicecore/cicedyn/infrastructure/ice_grid.F90 index eb2d22411..8b9cbc7d4 100644 --- a/cicecore/cicedyn/infrastructure/ice_grid.F90 +++ b/cicecore/cicedyn/infrastructure/ice_grid.F90 @@ -33,11 +33,10 @@ module ice_grid use ice_blocks, only: block, get_block, nx_block, ny_block, nghost use ice_domain_size, only: nx_global, ny_global, max_blocks use ice_domain, only: blocks_ice, nblocks, halo_info, distrb_info, & - ew_boundary_type, ns_boundary_type, init_domain_distribution, & - close_boundaries + ew_boundary_type, ns_boundary_type, init_domain_distribution use ice_fileunits, only: nu_diag, nu_grid, nu_kmt, & get_fileunit, release_fileunit, flush_fileunit - use ice_gather_scatter, only: gather_global, scatter_global + use ice_gather_scatter, only: gather_global, scatter_global, gather_global_ext, scatter_global_ext use ice_read_write, only: ice_read, ice_read_nc, ice_read_global, & ice_read_global_nc, ice_open, ice_open_nc, ice_close_nc, ice_check_nc use ice_timers, only: timer_bound, ice_timer_start, ice_timer_stop @@ -253,7 +252,62 @@ subroutine alloc_grid stat=ierr) if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory1', file=__FILE__, line=__LINE__) + dxT = c0 + dyT = c0 + dxU = c0 + dyU = c0 + dxN = c0 + dyN = c0 + dxE = c0 + dyE = c0 + HTE = c0 + HTN = c0 + tarea = c0 + uarea = c0 + narea = c0 + earea = c0 + tarear = c0 + uarear = c0 + narear = c0 + earear = c0 + tarean = c0 + tareas = c0 + ULON = c0 + ULAT = c0 + TLON = c0 + TLAT = c0 + NLON = c0 + NLAT = c0 + ELON = c0 + ELAT = c0 + ANGLE = c0 + ANGLET = c0 + bathymetry = c0 ocn_gridcell_frac(:,:,:) = -c1 ! special value to start, will be ignored unless set elsewhere + hm = c0 + bm = c0 + uvm = c0 + npm = c0 + epm = c0 + kmt = c0 + tmask = .false. + umask = .false. + umaskCD = .false. + nmask = .false. + emask = .false. + opmask = .false. + lmask_n = .false. + lmask_s = .false. 
+ rndex_global = c0 + lont_bounds = c0 + latt_bounds = c0 + lonu_bounds = c0 + latu_bounds = c0 + lonn_bounds = c0 + latn_bounds = c0 + lone_bounds = c0 + late_bounds = c0 + if (save_ghte_ghtn) then if (my_task == master_task) then @@ -268,6 +322,8 @@ subroutine alloc_grid stat=ierr) endif if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory3', file=__FILE__, line=__LINE__) + G_HTE = c0 + G_HTN = c0 endif end subroutine alloc_grid @@ -310,7 +366,7 @@ subroutine init_grid1 fieldname ! field name in netCDF file real (kind=dbl_kind), dimension(:,:), allocatable :: & - work_g1, work_g2, work_mom + work_g1, work_g2, work_g1x, work_mom integer (kind=int_kind) :: & max_blocks_min, & ! min value of max_blocks across procs @@ -371,7 +427,7 @@ subroutine init_grid1 allocate(work_mom(nx_global*2+1, ny_global*2+1), stat=ierr) if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory', file=__FILE__, line=__LINE__) - fieldname='y' ! use mom y field to fill cice ULAT + fieldname='y' ! use mom y field to fill cice ULAT call ice_open_nc(grid_file,fid_grid) call ice_read_global_nc(fid_grid,1,fieldname,work_mom,.true.) call ice_close_nc(fid_grid) @@ -382,7 +438,7 @@ subroutine init_grid1 work_g1(i,j) = work_mom(im, jm) jm = jm + 2 enddo - im = im + 2 + im = im + 2 enddo deallocate(work_mom, stat=ierr) @@ -397,6 +453,17 @@ subroutine init_grid1 call ice_read_global_nc(fid_grid,1,fieldname,work_g1,.true.) call ice_close_nc(fid_grid) + case('pop_nc_ext') + + fieldname='ulat' + call ice_open_nc(grid_file,fid_grid) + allocate(work_g1x(nx_global+2*nghost, ny_global+2*nghost), stat=ierr) + if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory', file=__FILE__, line=__LINE__) + call ice_read_global_nc(fid_grid,1,fieldname,work_g1x,.true.) + work_g1(1:nx_global,1:ny_global) = work_g1x(1+nghost:nx_global+nghost,1+nghost:ny_global+nghost) + deallocate(work_g1x) + call ice_close_nc(fid_grid) + case default call ice_open(nu_grid,grid_file,64) @@ -412,7 +479,7 @@ subroutine init_grid1 ! Fill kmt if (trim(kmt_type) =='file') then select case(trim(grid_format)) - case ('mom_nc', 'pop_nc', 'geosnc') + case ('mom_nc', 'pop_nc', 'pop_nc_ext', 'geosnc') ! mask variable name might be kmt or mask, check both call ice_open_nc(kmt_file,fid_kmt) @@ -431,7 +498,15 @@ subroutine init_grid1 #endif call broadcast_scalar(mask_fieldname, master_task) - call ice_read_global_nc(fid_kmt,1,mask_fieldname,work_g2,.true.) + if (trim(grid_format) == 'pop_nc_ext') then + allocate(work_g1x(nx_global+2*nghost, ny_global+2*nghost), stat=ierr) + if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory', file=__FILE__, line=__LINE__) + call ice_read_global_nc(fid_kmt,1,mask_fieldname,work_g1x,.true.) + work_g2(1:nx_global,1:ny_global) = work_g1x(1+nghost:nx_global+nghost,1+nghost:ny_global+nghost) + deallocate(work_g1x) + else + call ice_read_global_nc(fid_kmt,1,mask_fieldname,work_g2,.true.) + endif call ice_close_nc(fid_kmt) case default @@ -537,6 +612,8 @@ subroutine init_grid2 call mom_grid ! derive cice grid from MOM supergrid nc file case ('pop_nc') call popgrid_nc ! read POP grid lengths from nc file + case ('pop_nc_ext') + call popgrid_nc_ext ! read POP extended grid lengths from nc file case ('geosnc') call geosgrid_nc ! 
read GEOS MOM grid used from nc file case default @@ -558,6 +635,8 @@ subroutine init_grid2 select case (trim(grid_format)) case('mom_nc', 'pop_nc' ,'geosnc') call kmtmask('nc') + case('pop_nc_ext') + call kmtmask('nc_ext') case default call kmtmask('bin') end select @@ -595,14 +674,8 @@ subroutine init_grid2 !----------------------------------------------------------------- if (trim(grid_format) /= 'mom_nc') then - !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi - do j = 1,ny_block do i = 1,nx_block tarea(i,j,iblk) = dxT(i,j,iblk)*dyT(i,j,iblk) @@ -615,13 +688,8 @@ subroutine init_grid2 !$OMP END PARALLEL DO endif - !$OMP PARALLEL DO PRIVATE(iblk,i,j,ilo,ihi,jlo,jhi,this_block) + !$OMP PARALLEL DO PRIVATE(iblk,i,j) do iblk = 1, nblocks - this_block = get_block(blocks_ice(iblk),iblk) - ilo = this_block%ilo - ihi = this_block%ihi - jlo = this_block%jlo - jhi = this_block%jhi do j = 1,ny_block do i = 1,nx_block @@ -877,8 +945,14 @@ subroutine kmtmask(filetype) elseif (filetype == 'nc') then call ice_open_nc(kmt_file,fid_kmt) call ice_read_nc(fid_kmt,1,mask_fieldname,kmt,diag, & - field_loc=field_loc_center, & - field_type=field_type_scalar) + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_close_nc(fid_kmt) + elseif (filetype == 'nc_ext') then + call ice_open_nc(kmt_file,fid_kmt) + call ice_read_nc(fid_kmt,1,mask_fieldname,kmt,diag,restart_ext=.true., & + field_loc=field_loc_center, & + field_type=field_type_scalar) call ice_close_nc(fid_kmt) else call abort_ice(subname//' ERROR: invalid filetype='//trim(filetype), file=__FILE__, line=__LINE__) @@ -891,7 +965,6 @@ subroutine kmtmask(filetype) ihi = this_block%ihi jlo = this_block%jlo jhi = this_block%jhi - do j = jlo, jhi do i = ilo, ihi ! force grid cells to land if ocn_gridcell_frac is defined @@ -899,6 +972,10 @@ subroutine kmtmask(filetype) ocn_gridcell_frac(i,j,iblk) < puny) then kmt(i,j,iblk) = c0 endif + enddo + enddo + do j = 1,ny_block + do i = 1,nx_block if (kmt(i,j,iblk) >= p5) hm(i,j,iblk) = c1 enddo enddo @@ -943,22 +1020,24 @@ subroutine popgrid if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory', file=__FILE__, line=__LINE__) call ice_read_global(nu_grid,1,work_g1,'rda8',.true.) ! ULAT - call gridbox_verts(work_g1,latt_bounds) call scatter_global(ULAT, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULAT, distrb_info, & ew_boundary_type, ns_boundary_type) + call gridbox_verts(ULAT,latt_bounds) call ice_read_global(nu_grid,2,work_g1,'rda8',.true.) ! ULON - call gridbox_verts(work_g1,lont_bounds) call scatter_global(ULON, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULON, distrb_info, & ew_boundary_type, ns_boundary_type) + call gridbox_verts(ULON,lont_bounds) call ice_read_global(nu_grid,7,work_g1,'rda8',.true.) ! ANGLE call scatter_global(ANGLE, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_angle) + call ice_HaloExtrapolate(ANGLE, distrb_info, & + ew_boundary_type, ns_boundary_type) !----------------------------------------------------------------- ! cell dimensions @@ -1043,24 +1122,26 @@ subroutine popgrid_nc fieldname='ulat' call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) ! 
ULAT - call gridbox_verts(work_g1,latt_bounds) call scatter_global(ULAT, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULAT, distrb_info, & ew_boundary_type, ns_boundary_type) + call gridbox_verts(ULAT,latt_bounds) fieldname='ulon' call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) ! ULON - call gridbox_verts(work_g1,lont_bounds) call scatter_global(ULON, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULON, distrb_info, & ew_boundary_type, ns_boundary_type) + call gridbox_verts(ULON,lont_bounds) fieldname='angle' call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) ! ANGLE call scatter_global(ANGLE, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_angle) + call ice_HaloExtrapolate(ANGLE, distrb_info, & + ew_boundary_type, ns_boundary_type) ! fix ANGLE: roundoff error due to single precision where (ANGLE > pi) ANGLE = pi where (ANGLE < -pi) ANGLE = -pi @@ -1081,16 +1162,22 @@ subroutine popgrid_nc call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) call scatter_global(ANGLET, work_g1, master_task, distrb_info, & field_loc_center, field_type_angle) + call ice_HaloExtrapolate(ANGLET, distrb_info, & + ew_boundary_type, ns_boundary_type) where (ANGLET > pi) ANGLET = pi where (ANGLET < -pi) ANGLET = -pi fieldname="tlon" call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) call scatter_global(TLON, work_g1, master_task, distrb_info, & field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(TLON, distrb_info, & + ew_boundary_type, ns_boundary_type) fieldname="tlat" call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) call scatter_global(TLAT, work_g1, master_task, distrb_info, & field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(TLAT, distrb_info, & + ew_boundary_type, ns_boundary_type) endif !----------------------------------------------------------------- ! cell dimensions @@ -1117,6 +1204,210 @@ subroutine popgrid_nc end subroutine popgrid_nc +!======================================================================= +! POP extended displaced pole grid and land mask. +! Grid record number, field and units are: \\ +! (1) ULAT (radians) \\ +! (2) ULON (radians) \\ +! (3) HTN (cm) \\ +! (4) HTE (cm) \\ +! (5) HUS (cm) \\ +! (6) HUW (cm) \\ +! (7) ANGLE (radians) +! +! author: T. Craig +! Revised for netcdf input: Ann Keen, Met Office, May 2007 + + subroutine popgrid_nc_ext + +#ifdef USE_NETCDF + use netcdf, only : nf90_inq_varid , nf90_inq_dimid, & + nf90_inquire_dimension, nf90_get_var, nf90_noerr +#endif + + integer (kind=int_kind) :: & + i, j, iblk, & + ilo,ihi,jlo,jhi, & ! beginning and end of physical domain + fid_grid , & ! file id for netCDF grid file + ierr + + logical (kind=log_kind) :: diag + + character (char_len) :: & + fieldname ! field name in netCDF file + + real (kind=dbl_kind) :: & + pi + + real (kind=dbl_kind), dimension(:,:), allocatable :: & + work_g1x ! temporary global extended array + + integer(kind=int_kind) :: & + varid, status + + type (block) :: & + this_block ! block information for current block + + character(len=*), parameter :: subname = '(popgrid_nc_ext)' + +#ifdef USE_NETCDF + call icepack_query_parameters(pi_out=pi) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call ice_open_nc(grid_file,fid_grid) + + diag = .true. ! 
write diagnostic info + + !----------------------------------------------------------------- + ! lat, lon, angle + !----------------------------------------------------------------- + + allocate(work_g1x(nx_global+2*nghost,ny_global+2*nghost), stat=ierr) + if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory', file=__FILE__, line=__LINE__) + work_g1x = c0 + + fieldname='ulat' + call ice_read_global_nc(fid_grid,1,fieldname,work_g1x,diag) ! ULAT + call scatter_global_ext(ULAT, work_g1x, master_task, distrb_info) + call gridbox_verts(ULAT,latt_bounds) + + fieldname='ulon' + call ice_read_global_nc(fid_grid,1,fieldname,work_g1x,diag) ! ULON + call scatter_global_ext(ULON, work_g1x, master_task, distrb_info) + call gridbox_verts(ULON,lont_bounds) + + fieldname='angle' + call ice_read_global_nc(fid_grid,1,fieldname,work_g1x,diag) ! ANGLE + call scatter_global_ext(ANGLE, work_g1x, master_task, distrb_info) + ! fix ANGLE: roundoff error due to single precision + where (ANGLE > pi) ANGLE = pi + where (ANGLE < -pi) ANGLE = -pi + + ! if grid file includes anglet then read instead + fieldname='anglet' + if (my_task == master_task) then + status = nf90_inq_varid(fid_grid, fieldname , varid) + if (status /= nf90_noerr) then + write(nu_diag,*) subname//' CICE will calculate angleT, TLON and TLAT' + else + write(nu_diag,*) subname//' angleT, TLON and TLAT is read from grid file' + l_readCenter = .true. + endif + endif + call broadcast_scalar(l_readCenter,master_task) + if (l_readCenter) then + call ice_read_global_nc(fid_grid,1,fieldname,work_g1x,diag) + call scatter_global_ext(ANGLET, work_g1x, master_task, distrb_info) + where (ANGLET > pi) ANGLET = pi + where (ANGLET < -pi) ANGLET = -pi + + fieldname="tlon" + call ice_read_global_nc(fid_grid,1,fieldname,work_g1x,diag) + call scatter_global_ext(TLON, work_g1x, master_task, distrb_info) + + fieldname="tlat" + call ice_read_global_nc(fid_grid,1,fieldname,work_g1x,diag) + call scatter_global_ext(TLAT, work_g1x, master_task, distrb_info) + endif + !----------------------------------------------------------------- + ! cell dimensions + ! calculate derived quantities from global arrays to preserve + ! information on boundaries + !----------------------------------------------------------------- + + fieldname='htn' + call ice_read_global_nc(fid_grid,1,fieldname,work_g1x,diag) ! HTN + if (my_task == master_task) then + work_g1x(:,:) = work_g1x(:,:) * cm_to_m ! HTN + endif +! call primary_grid_lengths_HTN(work_g1) ! 
dxU, dxT, dxN, dxE + if (save_ghte_ghtn) then + if (my_task == master_task) then + G_HTN = work_g1x + endif + endif + call scatter_global_ext(HTN, work_g1x, master_task, distrb_info) + + dxN(:,:,:) = HTN(:,:,:) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + dxU(i,j,iblk) = p5*(HTN(i,j,iblk)+HTN(i+1,j,iblk)) + dxT(i,j,iblk) = p5*(HTN(i,j,iblk)+HTN(i,j-1,iblk)) + dxE(i,j,iblk) = p25*(HTN(i,j,iblk)+HTN(i+1,j,iblk)+HTN(i,j-1,iblk)+HTN(i+1,j-1,iblk)) + enddo + enddo + enddo + call ice_HaloUpdate (dxU, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(dxU, distrb_info, & + ew_boundary_type, ns_boundary_type) + call ice_HaloUpdate (dxT, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(dxT, distrb_info, & + ew_boundary_type, ns_boundary_type) + call ice_HaloUpdate (dxE, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(dxE, distrb_info, & + ew_boundary_type, ns_boundary_type) + + fieldname='hte' + call ice_read_global_nc(fid_grid,1,fieldname,work_g1x,diag) ! HTE + if (my_task == master_task) then + work_g1x(:,:) = work_g1x(:,:) * cm_to_m ! HTN + endif +! call primary_grid_lengths_HTE(work_g1) ! dyU, dyT, dyN, dyE + if (save_ghte_ghtn) then + G_HTE = work_g1x + endif + call scatter_global_ext(HTE, work_g1x, master_task, distrb_info) + dyE(:,:,:) = HTE(:,:,:) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + dyU(i,j,iblk) = p5*(HTE(i,j,iblk)+HTE(i,j+1,iblk)) + dyT(i,j,iblk) = p5*(HTE(i,j,iblk)+HTE(i-1,j,iblk)) + dyN(i,j,iblk) = p25*(HTE(i,j,iblk)+HTE(i-1,j,iblk)+HTE(i,j+1,iblk)+HTE(i-1,j+1,iblk)) + enddo + enddo + enddo + call ice_HaloUpdate (dyU, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(dyU, distrb_info, & + ew_boundary_type, ns_boundary_type) + call ice_HaloUpdate (dyT, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(dyT, distrb_info, & + ew_boundary_type, ns_boundary_type) + call ice_HaloUpdate (dyN, halo_info, & + field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(dyN, distrb_info, & + ew_boundary_type, ns_boundary_type) + + deallocate(work_g1x, stat=ierr) + if (ierr/=0) call abort_ice(subname//' ERROR: Dealloc error', file=__FILE__, line=__LINE__) + + call ice_close_nc(fid_grid) + +#else + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', & + file=__FILE__, line=__LINE__) +#endif + + end subroutine popgrid_nc_ext + #ifdef CESMCOUPLED !======================================================================= ! Read in kmt file that matches CAM lat-lon grid and has single column @@ -1487,9 +1778,13 @@ subroutine mom_grid call mom_grid_rotation_angle(G_ULON, G_ULAT, G_TLON(1:nx_global,1:ny_global), work_g1) ! anglet call scatter_global(ANGLET, work_g1, master_task, distrb_info, & field_loc_center, field_type_angle) + call ice_HaloExtrapolate(ANGLET, distrb_info, & + ew_boundary_type, ns_boundary_type) call mom_grid_rotation_angle(G_TLON, G_TLAT, G_ULON(2:nx_global+1,2:ny_global+1), work_g1) ! 
angle call scatter_global(ANGLE, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_angle) + call ice_HaloExtrapolate(ANGLE, distrb_info, & + ew_boundary_type, ns_boundary_type) deallocate(work_g1, G_ULAT, G_TLAT, G_TLON, G_ULON, stat=ierr) if (ierr/=0) call abort_ice(subname//' ERROR: Dealloc error', file=__FILE__, line=__LINE__) @@ -1616,8 +1911,8 @@ subroutine mom_corners_global(work_mom, G_U, G_T, G_E, G_N) select case (trim(ns_boundary_type)) case ('tripole') do i = 1, nx_global+1 - G_T(i,ny_global+1) = G_T(nx_global+1-i, ny_global) - G_E(i,ny_global+1) = G_E(nx_global+1-i, ny_global) + G_T(i,ny_global+1) = G_T(nx_global+2-i, ny_global) + G_E(i,ny_global+1) = G_E(nx_global+2-i, ny_global) enddo case ('cyclic') G_T(:,ny_global+1) = G_T(:,1) @@ -1792,26 +2087,28 @@ subroutine mom_dx(work_mom) jm1 = jm1 + 2 ; jm2 = jm2 + 2 enddo endif - - if (save_ghte_ghtn) then - do j = 1, ny_global - do i = 1, nx_global - G_HTN(i+nghost,j+nghost) = G_dxN(i,j) - enddo - enddo - call global_ext_halo(G_HTN) - endif endif call scatter_global(dxT, G_dxT, master_task, distrb_info, & field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(dxT, distrb_info, & + ew_boundary_type, ns_boundary_type) call scatter_global(HTN, G_dxN, master_task, distrb_info, & field_loc_Nface, field_type_scalar) + call ice_HaloExtrapolate(HTN, distrb_info, & + ew_boundary_type, ns_boundary_type) + if (save_ghte_ghtn) then + call gather_global_ext(G_HTN, HTN, master_task, distrb_info) + endif dxN(:,:,:) = HTN(:,:,:) call scatter_global(dxE, G_dxE, master_task, distrb_info, & field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(dxE, distrb_info, & + ew_boundary_type, ns_boundary_type) call scatter_global(dxU, G_dxU, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) + call ice_HaloExtrapolate(dxU, distrb_info, & + ew_boundary_type, ns_boundary_type) deallocate(G_dxT, G_dxE, G_dxU, G_dxN, stat=ierr) if (ierr/=0) call abort_ice(subname//' ERROR: Dealloc error', file=__FILE__, line=__LINE__) @@ -1890,26 +2187,28 @@ subroutine mom_dy(work_mom) im1 = im1 + 2 ; im2 = im2 + 2 enddo endif - - if (save_ghte_ghtn) then - do j = 1, ny_global - do i = 1, nx_global - G_HTE(i+nghost,j+nghost) = G_dyE(i,j) - enddo - enddo - call global_ext_halo(G_HTE) - endif endif call scatter_global(dyT, G_dyT, master_task, distrb_info, & - field_loc_center, field_type_scalar) + field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(dyT, distrb_info, & + ew_boundary_type, ns_boundary_type) call scatter_global(dyN, G_dyN, master_task, distrb_info, & field_loc_Nface, field_type_scalar) + call ice_HaloExtrapolate(dyN, distrb_info, & + ew_boundary_type, ns_boundary_type) call scatter_global(HTE, G_dyE, master_task, distrb_info, & field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(HTE, distrb_info, & + ew_boundary_type, ns_boundary_type) + if (save_ghte_ghtn) then + call gather_global_ext(G_HTE, HTE, master_task, distrb_info) + endif dyE(:,:,:) = HTE(:,:,:) call scatter_global(dyU, G_dyU, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) + call ice_HaloExtrapolate(dyU, distrb_info, & + ew_boundary_type, ns_boundary_type) deallocate(G_dyT, G_dyN, G_dyE, G_dyU) if (ierr/=0) call abort_ice(subname//' ERROR: Dealloc error', file=__FILE__, line=__LINE__) @@ -2171,24 +2470,26 @@ subroutine geosgrid_nc fieldname='ulat' call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) ! 
ULAT - call gridbox_verts(work_g1,latt_bounds) call scatter_global(ULAT, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULAT, distrb_info, & ew_boundary_type, ns_boundary_type) + call gridbox_verts(ULAT,latt_bounds) fieldname='ulon' call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) ! ULON - call gridbox_verts(work_g1,lont_bounds) call scatter_global(ULON, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) call ice_HaloExtrapolate(ULON, distrb_info, & ew_boundary_type, ns_boundary_type) + call gridbox_verts(ULON,lont_bounds) fieldname='angle' call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) ! ANGLE call scatter_global(ANGLE, work_g1, master_task, distrb_info, & field_loc_NEcorner, field_type_angle) + call ice_HaloExtrapolate(ANGLE, distrb_info, & + ew_boundary_type, ns_boundary_type) ! fix ANGLE: roundoff error due to single precision where (ANGLE > pi) ANGLE = pi where (ANGLE < -pi) ANGLE = -pi @@ -2209,16 +2510,22 @@ subroutine geosgrid_nc call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) call scatter_global(ANGLET, work_g1, master_task, distrb_info, & field_loc_center, field_type_angle) + call ice_HaloExtrapolate(ANGLET, distrb_info, & + ew_boundary_type, ns_boundary_type) where (ANGLET > pi) ANGLET = pi where (ANGLET < -pi) ANGLET = -pi fieldname="tlon" call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) call scatter_global(TLON, work_g1, master_task, distrb_info, & field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(TLON, distrb_info, & + ew_boundary_type, ns_boundary_type) fieldname="tlat" call ice_read_global_nc(fid_grid,1,fieldname,work_g1,diag) call scatter_global(TLAT, work_g1, master_task, distrb_info, & field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(TLAT, distrb_info, & + ew_boundary_type, ns_boundary_type) endif !----------------------------------------------------------------- ! cell dimensions @@ -2358,6 +2665,10 @@ subroutine rectgrid call grid_boxislands_kmt(work_g1) + elseif (trim(kmt_type) == 'none') then + + work_g1(:,:) = c1 ! initialize hm as ocean + elseif (trim(kmt_type) == 'channel') then do j = 3,ny_global-2 ! closed top and bottom @@ -2426,12 +2737,14 @@ subroutine rectgrid endif ! kmt_type - if (close_boundaries) then - work_g1(:, 1:2) = c0 - work_g1(:, ny_global-1:ny_global) = c0 + if (ew_boundary_type == 'closed') then work_g1(1:2, :) = c0 work_g1(nx_global-1:nx_global, :) = c0 endif + if (ns_boundary_type == 'closed') then + work_g1(:, 1:2) = c0 + work_g1(:, ny_global-1:ny_global) = c0 + endif endif @@ -2597,7 +2910,6 @@ subroutine rectgrid_scale_dxdy call ice_HaloExtrapolate(ULAT, distrb_info, & ew_boundary_type, ns_boundary_type) - deallocate(work_g1) end subroutine rectgrid_scale_dxdy @@ -2769,25 +3081,24 @@ subroutine primary_grid_lengths_HTN(work_g) enddo do j = 1, ny_global do i = 1, nx_global - ! assume cyclic; noncyclic will be handled during scatter + ! assume cyclic; noncyclic will be handled during extrapolate ip1 = i+1 if (i == nx_global) ip1 = 1 work_g2(i,j) = p5*(work_g(i,j) + work_g(ip1,j)) ! 
dxU enddo enddo - if (save_ghte_ghtn) then - do j = 1, ny_global - do i = 1,nx_global - G_HTN(i+nghost,j+nghost) = work_g(i,j) - enddo - enddo - call global_ext_halo(G_HTN) - endif endif call scatter_global(HTN, work_g, master_task, distrb_info, & field_loc_Nface, field_type_scalar) + call ice_HaloExtrapolate(HTN, distrb_info, & + ew_boundary_type, ns_boundary_type) + if (save_ghte_ghtn) then + call gather_global_ext(G_HTN, HTN, master_task, distrb_info) + endif call scatter_global(dxU, work_g2, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) + call ice_HaloExtrapolate(dxU, distrb_info, & + ew_boundary_type, ns_boundary_type) ! dxT = average of 2 neighbor HTNs in j @@ -2804,6 +3115,8 @@ subroutine primary_grid_lengths_HTN(work_g) endif call scatter_global(dxT, work_g2, master_task, distrb_info, & field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(dxT, distrb_info, & + ew_boundary_type, ns_boundary_type) ! dxN = HTN @@ -2814,7 +3127,7 @@ subroutine primary_grid_lengths_HTN(work_g) if (my_task == master_task) then do j = 2, ny_global do i = 1, nx_global - ! assume cyclic; noncyclic will be handled during scatter + ! assume cyclic; noncyclic will be handled during extrapolate ip1 = i+1 if (i == nx_global) ip1 = 1 work_g2(i,j) = p25*(work_g(i,j)+work_g(ip1,j)+work_g(i,j-1)+work_g(ip1,j-1)) ! dxE @@ -2822,7 +3135,7 @@ subroutine primary_grid_lengths_HTN(work_g) enddo ! extrapolate to obtain dxT along j=1 do i = 1, nx_global - ! assume cyclic; noncyclic will be handled during scatter + ! assume cyclic; noncyclic will be handled during extrapolate ip1 = i+1 if (i == nx_global) ip1 = 1 work_g2(i,1) = p5*(c2*work_g(i ,2) - work_g(i ,3) + & @@ -2831,6 +3144,8 @@ subroutine primary_grid_lengths_HTN(work_g) endif call scatter_global(dxE, work_g2, master_task, distrb_info, & field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(dxE, distrb_info, & + ew_boundary_type, ns_boundary_type) deallocate(work_g2, stat=ierr) if (ierr/=0) call abort_ice(subname//' ERROR: Dealloc error', file=__FILE__, line=__LINE__) @@ -2886,26 +3201,25 @@ subroutine primary_grid_lengths_HTE(work_g) work_g2(i,ny_global) = c2*work_g(i,ny_global-1) - work_g(i,ny_global-2) ! dyU enddo endif - if (save_ghte_ghtn) then - do j = 1, ny_global - do i = 1, nx_global - G_HTE(i+nghost,j+nghost) = work_g(i,j) - enddo - enddo - call global_ext_halo(G_HTE) - endif endif call scatter_global(HTE, work_g, master_task, distrb_info, & field_loc_Eface, field_type_scalar) + call ice_HaloExtrapolate(HTE, distrb_info, & + ew_boundary_type, ns_boundary_type) + if (save_ghte_ghtn) then + call gather_global_ext(G_HTE, HTE, master_task, distrb_info) + endif call scatter_global(dyU, work_g2, master_task, distrb_info, & field_loc_NEcorner, field_type_scalar) + call ice_HaloExtrapolate(dyU, distrb_info, & + ew_boundary_type, ns_boundary_type) ! dyT = average of 2 neighbor HTE in i if (my_task == master_task) then do j = 1, ny_global do i = 1, nx_global - ! assume cyclic; noncyclic will be handled during scatter + ! assume cyclic; noncyclic will be handled during extrapolate im1 = i-1 if (i == 1) im1 = nx_global work_g2(i,j) = p5*(work_g(i,j) + work_g(im1,j)) ! dyT @@ -2914,13 +3228,15 @@ subroutine primary_grid_lengths_HTE(work_g) endif call scatter_global(dyT, work_g2, master_task, distrb_info, & field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(dyT, distrb_info, & + ew_boundary_type, ns_boundary_type) ! 
dyN = average of 4 neighbor HTEs if (my_task == master_task) then do j = 1, ny_global-1 do i = 1, nx_global - ! assume cyclic; noncyclic will be handled during scatter + ! assume cyclic; noncyclic will be handled during extrapolate im1 = i-1 if (i == 1) im1 = nx_global work_g2(i,j) = p25*(work_g(i,j) + work_g(im1,j) + work_g(i,j+1) + work_g(im1,j+1)) ! dyN @@ -2929,7 +3245,7 @@ subroutine primary_grid_lengths_HTE(work_g) ! extrapolate to obtain dyN along j=ny_global if (ny_global > 1) then do i = 1, nx_global - ! assume cyclic; noncyclic will be handled during scatter + ! assume cyclic; noncyclic will be handled during extrapolate im1 = i-1 if (i == 1) im1 = nx_global work_g2(i,ny_global) = p5*(c2*work_g(i ,ny_global-1) - work_g(i ,ny_global-2) + & @@ -2939,6 +3255,8 @@ subroutine primary_grid_lengths_HTE(work_g) endif call scatter_global(dyN, work_g2, master_task, distrb_info, & field_loc_center, field_type_scalar) + call ice_HaloExtrapolate(dyN, distrb_info, & + ew_boundary_type, ns_boundary_type) ! dyE = HTE @@ -5185,27 +5503,25 @@ end subroutine gridbox_edges ! ! authors: A. McLaren, Met Office ! E. Hunke, LANL +! T. Craig - subroutine gridbox_verts(work_g,vbounds) + subroutine gridbox_verts(work,vbounds) - real (kind=dbl_kind), dimension(:,:), intent(in) :: & - work_g + real (kind=dbl_kind), dimension(:,:,:), intent(in) :: & + work real (kind=dbl_kind), dimension(4,nx_block,ny_block,max_blocks), intent(out) :: & - vbounds + vbounds integer (kind=int_kind) :: & - i,j , & ! index counters + iblk,i,j,ilo,ihi,jlo,jhi, & ! index counters ierr - real (kind=dbl_kind) :: & - rad_to_deg - - real (kind=dbl_kind), dimension(:,:), allocatable :: & - work_g2 + type (block) :: & + this_block ! block information for current block - real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & - work1 + real (kind=dbl_kind) :: & + rad_to_deg character(len=*), parameter :: subname = '(gridbox_verts)' @@ -5214,87 +5530,22 @@ subroutine gridbox_verts(work_g,vbounds) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) - if (my_task == master_task) then - allocate(work_g2(nx_global,ny_global), stat=ierr) - else - allocate(work_g2(1,1), stat=ierr) - endif - if (ierr/=0) call abort_ice(subname//' ERROR: Out of memory', file=__FILE__, line=__LINE__) - - !------------------------------------------------------------- - ! Get coordinates of grid boxes for each block as follows: - ! (1) SW corner, (2) SE corner, (3) NE corner, (4) NW corner - !------------------------------------------------------------- - - work_g2(:,:) = c0 - if (my_task == master_task) then - do j = 2, ny_global - do i = 2, nx_global - work_g2(i,j) = work_g(i-1,j-1) * rad_to_deg - enddo - enddo - ! extrapolate - do j = 1, ny_global - work_g2(1,j) = c2*work_g2(2,j) - work_g2(3,j) - enddo - do i = 1, nx_global - work_g2(i,1) = c2*work_g2(i,2) - work_g2(i,3) - enddo - endif - call scatter_global(work1, work_g2, & - master_task, distrb_info, & - field_loc_NEcorner, field_type_scalar) - vbounds(1,:,:,:) = work1(:,:,:) - - work_g2(:,:) = c0 - if (my_task == master_task) then - do j = 2, ny_global - do i = 1, nx_global - work_g2(i,j) = work_g(i,j-1) * rad_to_deg - enddo - enddo - ! 
extrapolate - do i = 1, nx_global - work_g2(i,1) = (c2*work_g2(i,2) - work_g2(i,3)) - enddo - endif - call scatter_global(work1, work_g2, & - master_task, distrb_info, & - field_loc_NEcorner, field_type_scalar) - vbounds(2,:,:,:) = work1(:,:,:) - - work_g2(:,:) = c0 - if (my_task == master_task) then - do j = 1, ny_global - do i = 1, nx_global - work_g2(i,j) = work_g(i,j) * rad_to_deg - enddo - enddo - endif - call scatter_global(work1, work_g2, & - master_task, distrb_info, & - field_loc_NEcorner, field_type_scalar) - vbounds(3,:,:,:) = work1(:,:,:) - - work_g2(:,:) = c0 - if (my_task == master_task) then - do j = 1, ny_global - do i = 2, nx_global - work_g2(i,j) = work_g(i-1,j ) * rad_to_deg - enddo + vbounds = c0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + vbounds(1,i,j,iblk) = work(i-1,j-1,iblk)*rad_to_deg + vbounds(2,i,j,iblk) = work(i ,j-1,iblk)*rad_to_deg + vbounds(3,i,j,iblk) = work(i ,j ,iblk)*rad_to_deg + vbounds(4,i,j,iblk) = work(i-1,j ,iblk)*rad_to_deg enddo - ! extrapolate - do j = 1, ny_global - work_g2(1,j) = c2*work_g2(2,j) - work_g2(3,j) enddo - endif - call scatter_global(work1, work_g2, & - master_task, distrb_info, & - field_loc_NEcorner, field_type_scalar) - vbounds(4,:,:,:) = work1(:,:,:) - - deallocate (work_g2, stat=ierr) - if (ierr/=0) call abort_ice(subname//' ERROR: Dealloc error', file=__FILE__, line=__LINE__) + enddo end subroutine gridbox_verts diff --git a/cicecore/cicedyn/infrastructure/ice_read_write.F90 b/cicecore/cicedyn/infrastructure/ice_read_write.F90 index 1858d9221..e726220d9 100644 --- a/cicecore/cicedyn/infrastructure/ice_read_write.F90 +++ b/cicecore/cicedyn/infrastructure/ice_read_write.F90 @@ -13,6 +13,7 @@ module ice_read_write + use,intrinsic :: ieee_arithmetic use ice_kinds_mod use ice_constants, only: c0, spval_dbl, & field_loc_noupdate, field_type_noupdate @@ -1139,22 +1140,29 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & real (kind=dbl_kind), dimension(:,:), allocatable :: & work_g1 + logical, dimension(:,:), allocatable :: mask + integer (kind=int_kind) :: nx, ny integer (kind=int_kind) :: lnrec ! local value of nrec - lnrec = nrec + logical (kind=log_kind) :: lrestart_ext ! local value of restart_ext - nx = nx_global - ny = ny_global + lnrec = nrec work = c0 ! to satisfy intent(out) attribute + lrestart_ext = .false. if (present(restart_ext)) then - if (restart_ext) then - nx = nx_global + 2*nghost - ny = ny_global + 2*nghost - endif + lrestart_ext = restart_ext + endif + + if (lrestart_ext) then + nx = nx_global + 2*nghost + ny = ny_global + 2*nghost + else + nx = nx_global + ny = ny_global endif if (my_task == master_task) then @@ -1227,10 +1235,17 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & missingvalue = spval_dbl endif ! 
write(nu_diag,*) subname,' missingvalue= ',missingvalue - amin = minval(work_g1) - amax = maxval(work_g1, mask = work_g1 /= missingvalue) - asum = sum (work_g1, mask = work_g1 /= missingvalue) + allocate(mask(nx,ny)) + if ( ieee_is_nan(missingvalue) ) then + mask = ieee_is_nan(work_g1) + else + mask = work_g1 /= missingvalue + endif + amin = minval(work_g1, mask = mask ) + amax = maxval(work_g1, mask = mask ) + asum = sum (work_g1, mask = mask ) write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum, trim(varname) + deallocate(mask) endif !------------------------------------------------------------------- @@ -1238,10 +1253,8 @@ subroutine ice_read_nc_xy(fid, nrec, varname, work, diag, & ! NOTE: Ghost cells are not updated unless field_loc is present. !------------------------------------------------------------------- - if (present(restart_ext)) then - if (restart_ext) then - call scatter_global_ext(work, work_g1, master_task, distrb_info) - endif + if (lrestart_ext) then + call scatter_global_ext(work, work_g1, master_task, distrb_info) else if (present(field_loc)) then call scatter_global(work, work_g1, master_task, distrb_info, & @@ -1325,20 +1338,27 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g1 + logical, dimension(:,:), allocatable :: mask + integer (kind=int_kind) :: nx, ny integer (kind=int_kind) :: lnrec ! local value of nrec - lnrec = nrec + logical (kind=log_kind) :: lrestart_ext ! local value of restart_ext - nx = nx_global - ny = ny_global + lnrec = nrec + lrestart_ext = .false. if (present(restart_ext)) then - if (restart_ext) then - nx = nx_global + 2*nghost - ny = ny_global + 2*nghost - endif + lrestart_ext = restart_ext + endif + + if (lrestart_ext) then + nx = nx_global + 2*nghost + ny = ny_global + 2*nghost + else + nx = nx_global + ny = ny_global endif if (my_task == master_task) then @@ -1405,17 +1425,19 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & status = nf90_get_att(fid, varid, "_FillValue", missingvalue) ! call ice_check_nc(status, subname//' ERROR: Missing _FillValue', & ! file=__FILE__, line=__LINE__) - if (isnan(missingvalue)) then - ! ' _FillValue not defined, resetting to spval_dbl - ! this is only needed for the logging statement below' - missingvalue = spval_dbl - endif + allocate(mask(nx,ny)) do n=1,ncat - amin = minval(work_g1(:,:,n)) - amax = maxval(work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) - asum = sum (work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) + if ( ieee_is_nan(missingvalue) ) then + mask = ieee_is_nan(work_g1(:,:,n)) + else + mask = work_g1(:,:,n) /= missingvalue + endif + amin = minval(work_g1(:,:,n), mask = mask ) + amax = maxval(work_g1(:,:,n), mask = mask ) + asum = sum (work_g1(:,:,n), mask = mask ) write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum, trim(varname) enddo + deallocate(mask) endif !------------------------------------------------------------------- @@ -1423,13 +1445,11 @@ subroutine ice_read_nc_xyz(fid, nrec, varname, work, diag, & ! NOTE: Ghost cells are not updated unless field_loc is present. 
!------------------------------------------------------------------- - if (present(restart_ext)) then - if (restart_ext) then - do n=1,ncat - call scatter_global_ext(work(:,:,n,:), work_g1(:,:,n), & - master_task, distrb_info) - enddo - endif + if (lrestart_ext) then + do n=1,ncat + call scatter_global_ext(work(:,:,n,:), work_g1(:,:,n), & + master_task, distrb_info) + enddo else if (present(field_loc)) then do n=1,ncat @@ -1517,24 +1537,31 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & real (kind=dbl_kind), dimension(:,:,:), allocatable :: & work_g1 + logical, dimension(:,:), allocatable :: mask + integer (kind=int_kind) :: nx, ny integer (kind=int_kind) :: lnrec ! local value of nrec + logical (kind=log_kind) :: lrestart_ext ! local value of restart_ext + character(len=*), parameter :: subname = '(ice_read_nc_xyf)' #ifdef USE_NETCDF lnrec = nrec - nx = nx_global - ny = ny_global - + lrestart_ext = .false. if (present(restart_ext)) then - if (restart_ext) then - nx = nx_global + 2*nghost - ny = ny_global + 2*nghost - endif + lrestart_ext = restart_ext + endif + + if (lrestart_ext) then + nx = nx_global + 2*nghost + ny = ny_global + 2*nghost + else + nx = nx_global + ny = ny_global endif if (my_task == master_task) then @@ -1601,17 +1628,19 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & status = nf90_get_att(fid, varid, "_FillValue", missingvalue) ! call ice_check_nc(status, subname//' ERROR: Missing _FillValue', & ! file=__FILE__, line=__LINE__) - if (isnan(missingvalue)) then - ! ' _FillValue not defined, resetting to spval_dbl - ! this is only needed for the logging statement below' - missingvalue = spval_dbl - endif - do n = 1, nfreq - amin = minval(work_g1(:,:,n)) - amax = maxval(work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) - asum = sum (work_g1(:,:,n), mask = work_g1(:,:,n) /= missingvalue) + allocate(mask(nx,ny)) + do n=1,ncat + if ( ieee_is_nan(missingvalue) ) then + mask = ieee_is_nan(work_g1(:,:,n)) + else + mask = work_g1(:,:,n) /= missingvalue + endif + amin = minval(work_g1(:,:,n), mask = mask ) + amax = maxval(work_g1(:,:,n), mask = mask ) + asum = sum (work_g1(:,:,n), mask = mask ) write(nu_diag,*) subname,' min, max, sum =', amin, amax, asum, trim(varname) enddo + deallocate(mask) endif !------------------------------------------------------------------- @@ -1619,13 +1648,11 @@ subroutine ice_read_nc_xyf(fid, nrec, varname, work, diag, & ! NOTE: Ghost cells are not updated unless field_loc is present. !------------------------------------------------------------------- - if (present(restart_ext)) then - if (restart_ext) then - do n = 1, nfreq - call scatter_global_ext(work(:,:,n,1,:), work_g1(:,:,n), & - master_task, distrb_info) - enddo - endif + if (lrestart_ext) then + do n = 1, nfreq + call scatter_global_ext(work(:,:,n,1,:), work_g1(:,:,n), & + master_task, distrb_info) + enddo else if (present(field_loc)) then do n = 1, nfreq @@ -2201,14 +2228,19 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & integer (kind=int_kind) :: nx, ny - nx = nx_global - ny = ny_global + logical (kind=log_kind) :: lrestart_ext ! local value of restart_ext + lrestart_ext = .false. 
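The ice_read_nc_* diagnostics above now build an explicit logical mask before taking min/max/sum because NaN compares unequal to everything, itself included: when the file's _FillValue is NaN, the old `work_g1 /= missingvalue` test keeps every point and the fill values contaminate the statistics. The standalone sketch below (an editor's illustration, not CICE source) shows the idea with the intrinsic ieee_arithmetic module; note that the valid-data mask in the NaN case is the negation, `.not. ieee_is_nan(field)`, mirroring the `/=` test used in the non-NaN branch.

```fortran
! Editor's sketch of NaN-aware masking for field statistics.
program masked_stats
   use, intrinsic :: ieee_arithmetic
   implicit none
   integer, parameter :: dbl_kind = selected_real_kind(13)
   real (kind=dbl_kind) :: work_g1(4) = (/ 1.0_dbl_kind, 2.0_dbl_kind, &
                                           3.0_dbl_kind, 4.0_dbl_kind /)
   real (kind=dbl_kind) :: missingvalue, amin, amax, asum
   logical :: mask(4)

   missingvalue = ieee_value(missingvalue, ieee_quiet_nan)
   work_g1(3) = missingvalue                  ! pretend one point carries the fill value

   if (ieee_is_nan(missingvalue)) then
      mask = .not. ieee_is_nan(work_g1)       ! NaN /= NaN is true, so test NaN explicitly
   else
      mask = work_g1 /= missingvalue
   endif

   amin = minval(work_g1, mask=mask)
   amax = maxval(work_g1, mask=mask)
   asum = sum   (work_g1, mask=mask)
   write(*,*) 'min, max, sum =', amin, amax, asum   ! 1.0  4.0  7.0
end program masked_stats
```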
if (present(restart_ext)) then - if (restart_ext) then - nx = nx_global + 2*nghost - ny = ny_global + 2*nghost - endif + lrestart_ext = restart_ext + endif + + if (lrestart_ext) then + nx = nx_global + 2*nghost + ny = ny_global + 2*nghost + else + nx = nx_global + ny = ny_global endif if (present(varname)) then @@ -2223,10 +2255,8 @@ subroutine ice_write_nc_xy(fid, nrec, varid, work, diag, & allocate(work_g1(1,1)) ! to save memory endif - if (present(restart_ext)) then - if (restart_ext) then - call gather_global_ext(work_g1, work, master_task, distrb_info, spc_val=c0) - endif + if (lrestart_ext) then + call gather_global_ext(work_g1, work, master_task, distrb_info, spc_val=c0) else call gather_global(work_g1, work, master_task, distrb_info, spc_val=c0) endif @@ -2325,14 +2355,19 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & integer (kind=int_kind) :: nx, ny - nx = nx_global - ny = ny_global + logical (kind=log_kind) :: lrestart_ext ! local value of restart_ext + lrestart_ext = .false. if (present(restart_ext)) then - if (restart_ext) then - nx = nx_global + 2*nghost - ny = ny_global + 2*nghost - endif + lrestart_ext = restart_ext + endif + + if (lrestart_ext) then + nx = nx_global + 2*nghost + ny = ny_global + 2*nghost + else + nx = nx_global + ny = ny_global endif if (my_task == master_task) then @@ -2341,13 +2376,11 @@ subroutine ice_write_nc_xyz(fid, nrec, varid, work, diag, & allocate(work_g1(1,1,ncat)) ! to save memory endif - if (present(restart_ext)) then - if (restart_ext) then - do n=1,ncat - call gather_global_ext(work_g1(:,:,n), work(:,:,n,:), & - master_task, distrb_info, spc_val=c0) - enddo - endif + if (lrestart_ext) then + do n=1,ncat + call gather_global_ext(work_g1(:,:,n), work(:,:,n,:), & + master_task, distrb_info, spc_val=c0) + enddo else do n=1,ncat call gather_global(work_g1(:,:,n), work(:,:,n,:), & @@ -2651,14 +2684,19 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & integer (kind=int_kind) :: nx, ny - nx = nx_global - ny = ny_global + logical (kind=log_kind) :: lrestart_ext ! local value of restart_ext + lrestart_ext = .false. if (present(restart_ext)) then - if (restart_ext) then - nx = nx_global + 2*nghost - ny = ny_global + 2*nghost - endif + lrestart_ext = restart_ext + endif + + if (lrestart_ext) then + nx = nx_global + 2*nghost + ny = ny_global + 2*nghost + else + nx = nx_global + ny = ny_global endif if (my_task == master_task) then @@ -2704,10 +2742,8 @@ subroutine ice_read_nc_uv(fid, nrec, nzlev, varname, work, diag, & ! NOTE: Ghost cells are not updated unless field_loc is present. 
!------------------------------------------------------------------- - if (present(restart_ext)) then - if (restart_ext) then - call scatter_global_ext(work, work_g1, master_task, distrb_info) - endif + if (lrestart_ext) then + call scatter_global_ext(work, work_g1, master_task, distrb_info) else if (present(field_loc)) then call scatter_global(work, work_g1, master_task, distrb_info, & diff --git a/cicecore/cicedyn/infrastructure/ice_restart_driver.F90 b/cicecore/cicedyn/infrastructure/ice_restart_driver.F90 index 8f11f7f5e..1f6f00591 100644 --- a/cicecore/cicedyn/infrastructure/ice_restart_driver.F90 +++ b/cicecore/cicedyn/infrastructure/ice_restart_driver.F90 @@ -25,7 +25,7 @@ module ice_restart_driver field_loc_Eface, field_loc_Nface, & field_type_scalar, field_type_vector use ice_restart_shared, only: restart_dir, pointer_file, & - runid, use_restart_time, lenstr, restart_coszen + runid, use_restart_time, lenstr, restart_coszen, restart_mod use ice_restart use ice_exit, only: abort_ice use ice_fileunits, only: nu_diag, nu_rst_pointer, nu_restart, nu_dump @@ -33,6 +33,8 @@ module ice_restart_driver use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted use icepack_intfc, only: icepack_aggregate use icepack_intfc, only: icepack_query_tracer_indices, icepack_query_tracer_sizes + use icepack_intfc, only: icepack_query_parameters + use icepack_intfc, only: icepack_query_tracer_flags implicit none private @@ -297,6 +299,11 @@ subroutine restartfile (ice_ic) aice0, aicen, vicen, vsnon, trcrn, aice_init, uvel, vvel, & uvelE, vvelE, uvelN, vvelN, & trcr_base, nt_strata, n_trcr_strata + use icepack_itd, only: cleanup_itd !for restart_mod + use ice_arrays_column, only: first_ice, hin_max + use ice_flux, only: fpond, fresh, fsalt, fhocn + use ice_flux_bgc, only: faero_ocn, fiso_ocn, flux_bio + use ice_calendar, only: dt character (*), optional :: ice_ic @@ -308,7 +315,8 @@ subroutine restartfile (ice_ic) nt_Tsfc, nt_sice, nt_qice, nt_qsno logical (kind=log_kind) :: & - diag + diag, & + tr_aero, tr_pond_topo real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & work1 @@ -324,6 +332,7 @@ subroutine restartfile (ice_ic) call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, & nt_qice_out=nt_qice, nt_qsno_out=nt_qsno) + call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_pond_topo_out=tr_pond_topo) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -717,6 +726,76 @@ subroutine restartfile (ice_ic) npt = npt - istep0 endif + !----------------------------------------------------------------- + ! update concentration from a file + !----------------------------------------------------------------- + if (restart_mod /= "none") then + + select case (trim(restart_mod)) + + case('adjust_aice') + call direct_adjust_aice + + case('adjust_aice_test') + call direct_adjust_aice(test=.true.) + + case default + call abort_ice(subname//'ERROR: unsupported restart_mod='//trim(restart_mod), & + file=__FILE__, line=__LINE__) + + end select + + !----------------------------------------------------------------- + ! 
Ensure ice is binned in correct categories + !----------------------------------------------------------------- + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (tmask(i,j,iblk)) then + + call cleanup_itd(dt, hin_max, & + aicen(i,j,:,iblk), trcrn(i,j,:,:,iblk), & + vicen(i,j,:,iblk), vsnon(i,j,:, iblk), & + aice0(i,j, iblk), aice(i,j, iblk), & + tr_aero, tr_pond_topo, & + first_ice(i,j,:,iblk), & + trcr_depend, trcr_base, & + n_trcr_strata, nt_strata, & + fpond = fpond(i,j, iblk), & + fresh = fresh(i,j, iblk), & + fsalt = fsalt(i,j, iblk), & + fhocn = fhocn(i,j, iblk), & + faero_ocn = faero_ocn(i,j,:,iblk), & + fiso_ocn = fiso_ocn(i,j,:,iblk), & + flux_bio = flux_bio(i,j,:,iblk), & + Tf = Tf(i,j, iblk), & + limit_aice = .true. ) + + call icepack_aggregate( & + aicen = aicen(i,j,:,iblk), & + trcrn = trcrn(i,j,:,:,iblk), & + vicen = vicen(i,j,:,iblk), & + vsnon = vsnon(i,j,:,iblk), & + aice = aice (i,j, iblk), & + trcr = trcr (i,j,:,iblk), & + vice = vice (i,j, iblk), & + vsno = vsno (i,j, iblk), & + aice0 = aice0(i,j, iblk), & + trcr_depend = trcr_depend, & + trcr_base = trcr_base, & + n_trcr_strata = n_trcr_strata, & + nt_strata = nt_strata, & + Tf = Tf(i,j,iblk)) + + aice_init(i,j,iblk) = aice(i,j,iblk) + + endif ! tmask + enddo ! i + enddo ! j + enddo ! iblk + + endif !restart_mod + end subroutine restartfile !======================================================================= @@ -1091,6 +1170,310 @@ subroutine restartfile_v4 (ice_ic) end subroutine restartfile_v4 +!======================================================================= + +!======================================================================= +! Direct insertion of ice concentration read from file. +! +! Posey, et. al. 2015: Improving Arctic sea ice edge forecasts by +! assimilating high horizontal resolution sea ice concentration +! data into the US Navy's ice forecast systems. +! The Cryosphere. doi:10.5194/tc-9-1735-2015 +! +! Alan J. Wallcraft, COAPS/FSU, Nov 2024 + + subroutine direct_adjust_aice(test) + + use ice_blocks, only: nghost, nx_block, ny_block + use ice_domain, only: nblocks + use ice_domain_size, only: nilyr, nslyr, ncat, max_blocks + use ice_grid, only: tmask + use ice_communicate, only: my_task, master_task + use ice_constants, only: c0, c1, c4, c20, c100, & + p5, p2, p1, p01, p001, & + field_loc_center, field_loc_NEcorner, & + field_type_scalar, field_type_vector + use ice_fileunits, only: nu_diag + use ice_flux, only: & + Tair, Tf, salinz, Tmltz, sst, & + stressp_1, stressp_2, stressp_3, stressp_4, & + stressm_1, stressm_2, stressm_3, stressm_4, & + stress12_1, stress12_2, stress12_3, stress12_4 + use ice_state, only: & + aice, aicen, vicen, vsnon, trcrn + use ice_read_write, only: ice_check_nc, ice_read_nc, & + ice_open_nc, ice_close_nc + use ice_arrays_column, only: hin_max + ! use icepack_mushy_physics, only: enthalpy_mush + use icepack_intfc, only: icepack_init_trcr + + logical(kind=log_kind), optional, intent(in) :: & + test ! use internally generated aice + + ! --- local variables + real(kind=dbl_kind) :: & + q , & ! scale factor + aice_m, & ! model aice + aice_o, & ! observation aice + aice_t, & ! target aice + aice_i, & ! insert ice + slope, & ! used to compute surf Temp + Ti, & ! target surface temperature + edge_om, & ! nominal ice edge zone + diff_om, & ! allowed model vs obs difference + hin_om, & ! new ice thickness + aicen_old, & ! old value of aice to check when adding ice + vsnon_old, & ! old value of snow volume to check when adding ice + Tsfc ! 
surface temp. + integer (kind=int_kind) :: & + fid ! file id for netCDF file + integer (kind=int_kind) :: & + i, j, k, n, iblk ! counting indices + logical (kind=log_kind) :: & + diag, & ! diagnostic output + ltest ! local value of test argument + real (kind=dbl_kind), dimension (nx_block,ny_block,max_blocks) :: & + work1 + real (kind=dbl_kind), dimension(nilyr) :: & + qin ! ice enthalpy (J/m3) + real (kind=dbl_kind), dimension(nslyr) :: & + qsn ! snow enthalpy (J/m3) + character(len=char_len_long) :: & + aice_filename, &! filename to read in + aice_fldname ! fieldname to read in + + ! parameters from icepack + real (kind=dbl_kind) :: & + puny, Tffresh, Tsmelt, Lfresh, cp_ice, cp_ocn, & + rhos, rhoi + integer (kind=int_kind) :: & + nt_Tsfc, nt_sice, nt_qice, nt_qsno, & + ktherm + character(len=*), parameter :: subname = '(direct_adjust_aice)' + + diag = .true. + ltest = .false. + if (present(test)) then + ltest = test + endif + aice_filename = trim(restart_dir)//'/sic.nc' + aice_fldname = 'sic' + + ! get parameters from icepack + call icepack_query_parameters( & + puny_out=puny, & + Tffresh_out=Tffresh, & + Tsmelt_out=Tsmelt, & + Lfresh_out=Lfresh, & + cp_ice_out=cp_ice, & + cp_ocn_out=cp_ocn, & + rhos_out=rhos, & + rhoi_out=rhoi, & + ktherm_out=ktherm ) + + call icepack_query_tracer_indices( & + nt_Tsfc_out=nt_Tsfc, & + nt_sice_out=nt_sice, & + nt_qice_out=nt_qice, & + nt_qsno_out=nt_qsno ) + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (ltest) then + if (my_task == master_task) then + write(nu_diag,*) subname//" direct_adjust_aice rounding to nearest 1/20th" + endif + work1 = nint(aice*c20)/c20 ! round to nearest 5/100th + else + if (my_task == master_task) then + write(nu_diag,*) subname//" direct_adjust_aice from "//trim(aice_filename) + endif + + call ice_open_nc(trim(aice_filename), fid) + call ice_read_nc(fid,1,trim(aice_fldname),work1,diag, & + field_loc=field_loc_center, & + field_type=field_type_scalar) + call ice_close_nc(fid) + endif + + edge_om = p2 ! nominal ice edge zone + diff_om = p1 ! allowed model vs obs difference + hin_om = hin_max(1)*0.9_dbl_kind !new ice thickness + + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + + aice_o = work1(i,j,iblk) ! obs. ice concentration + aice_m = aice(i,j,iblk) ! model ice concentration + + if (.not.tmask(i,j,iblk)) then + ! land - do nothing + elseif (aice_o.gt.p01 .and. & + abs(aice_o-aice_m).le.p01) then + ! model and obs are very close - do nothing + elseif (min(aice_o,aice_m).ge.edge_om .and. & + abs(aice_o-aice_m).le.diff_om) then + ! model and obs are close enough - do nothing + elseif (aice_o.eq.aice_m) then + elseif (aice_o.lt.aice_m) then + if (aice_o.lt.p01)then + ! --- remove all ice --- + ! warm sst so the ice won't grow immediately + sst(i,j,iblk) = sst(i,j,iblk) + p2 + do n=1,ncat + aicen(i,j,n,iblk) = c0 + vicen(i,j,n,iblk) = c0 + vsnon(i,j,n,iblk) = c0 + call icepack_init_trcr( & + Tair = Tair(i,j, iblk), & + Tf = Tf(i,j, iblk), & + Sprofile = salinz(i,j,:,iblk), & + Tprofile = Tmltz(i,j,:,iblk), & + Tsfc = Tsfc, & + qin = qin(:), & + qsn = qsn(:) ) + ! surface temperature + trcrn(i,j,nt_Tsfc,n,iblk) = Tsfc ! deg C + ! ice enthalpy, salinity + do k = 1, nilyr + trcrn(i,j,nt_qice+k-1,n,iblk) = qin(k) + trcrn(i,j,nt_sice+k-1,n,iblk) = salinz(i,j,k,iblk) + enddo ! nilyr + ! snow enthalpy + do k = 1, nslyr + trcrn(i,j,nt_qsno+k-1,n,iblk) = qsn(k) + enddo ! 
nslyr + enddo !n + else !aice_o.ge.p01 + if (aice_o.lt.edge_om) then + ! --- target ice conc. is obs. + aice_t = aice_o + else !aice_m-aice_o.gt.diff_om + ! --- target ice conc. is obs.+diff_om + aice_t = aice_o + diff_om + endif + ! --- reduce ice to the target concentration, + ! completely exhausting ice categories in order --- + aice_i = aice_m - aice_t !>=0.0 + do n=1,ncat + if (aice_i.le.p001) then + exit + elseif (aice_i.ge.aicen(i,j,n,iblk)) then + ! --- remove all of this category + aice_i = aice_i - aicen(i,j,n,iblk) + aicen(i,j,n,iblk) = c0 + vicen(i,j,n,iblk) = c0 + vsnon(i,j,n,iblk) = c0 + call icepack_init_trcr( & + Tair = Tair(i,j, iblk), & + Tf = Tf(i,j, iblk), & + Sprofile = salinz(i,j,:,iblk), & + Tprofile = Tmltz(i,j,:,iblk), & + Tsfc = Tsfc, & + qin = qin(:), & + qsn = qsn(:) ) + ! surface temperature + trcrn(i,j,nt_Tsfc,n,iblk) = Tsfc ! deg C + ! ice enthalpy, salinity + do k = 1, nilyr + trcrn(i,j,nt_qice+k-1,n,iblk) = qin(k) + trcrn(i,j,nt_sice+k-1,n,iblk) = salinz(i,j,k,iblk) + enddo ! nilyr + ! snow enthalpy + do k = 1, nslyr + trcrn(i,j,nt_qsno+k-1,n,iblk) = qsn(k) + enddo ! nslyr + else !aice_i.lt.aicen(i,j,n,iblk) + ! --- remove part of this category + q = (aicen(i,j,n,iblk) - aice_i) & + /aicen(i,j,n,iblk) !<1 + aice_i = c0 + + ! reduce aicen, vicen, vsnon by q + ! do not alter Tsfc since there is already + ! ice here. + aicen(i,j,n,iblk) = q*aicen(i,j,n,iblk) + vicen(i,j,n,iblk) = q*vicen(i,j,n,iblk) + vsnon(i,j,n,iblk) = q*vsnon(i,j,n,iblk) + endif ! aice_i.gt.p001 and aice_i.lt.aicen + enddo ! n + endif ! aice_o.lt.p01 + elseif (aice_o.gt.p01) then ! .and. aice_o.gt.aicen + if (aice_m.lt.edge_om) then + ! --- target ice conc. is obs. + aice_t = aice_o + else !aice_o-aice_m.gt.diff_om + ! --- target ice conc. is obs.-diff_om + aice_t = aice_o - diff_om + endif + q = (aice_t-aice_m) + ! --- add ice to the target concentration, + ! --- with all new ice in category 1 + ! --- cool sst so the ice won't melt immediately + sst( i,j, iblk) = sst( i,j, iblk) - q ! 0 <= q <= 1 + aicen_old = aicen(i,j,1,iblk) ! store to check for zero ice later + vsnon_old = vsnon(i,j,1,iblk) ! store to check for zero snow later + aicen(i,j,1,iblk) = aicen(i,j,1,iblk) + q + vicen(i,j,1,iblk) = vicen(i,j,1,iblk) + q*hin_om + vsnon(i,j,1,iblk) = vsnon(i,j,1,iblk) + q*hin_om*p2 + + ! ------------------------------------------------------ + ! check for zero snow in 1st category. + ! It is possible that there was ice + ! but no snow. This would skip the loop below and an + ! error in snow thermo would occur. If snow was zero + ! specify enthalpy here + ! ------------------------------------------------------ + if (vsnon_old < puny) then + do n=1,1 ! only do 1st category + ! --- snow layers + trcrn(i,j,nt_Tsfc,n,iblk) = & ! Tsfc + min(Tsmelt,Tair(i,j,iblk) - Tffresh) + Ti = min(c0,trcrn(i,j,nt_Tsfc,n,iblk)) + do k=1,nslyr + trcrn(i,j,nt_qsno+k-1,n,iblk) = -rhos*(Lfresh - cp_ice*Ti) + enddo ! k + enddo ! n = 1,1 + endif + + ! ------------------------------------------------------ + ! check for zero aice in 1st category. + ! if adding to an initially zero ice, we must define + ! qice, qsno, sice so thermo does not blow up. + ! ------------------------------------------------------ + if (aicen_old < puny) then + do n =1,1 ! only do 1st category + call icepack_init_trcr( & + Tair = Tair(i,j, iblk), & + Tf = Tf(i,j, iblk), & + Sprofile = salinz(i,j,:,iblk), & + Tprofile = Tmltz(i,j,:,iblk), & + Tsfc = Tsfc, & + qin = qin(:), & + qsn = qsn(:) ) + !
surface temperature + trcrn(i,j,nt_Tsfc,n,iblk) = Tsfc ! deg C + ! ice enthalpy, salinity + do k = 1, nilyr + trcrn(i,j,nt_qice+k-1,n,iblk) = qin(k) + trcrn(i,j,nt_sice+k-1,n,iblk) = salinz(i,j,k,iblk) + enddo + ! snow enthalpy + do k = 1, nslyr + trcrn(i,j,nt_qsno+k-1,n,iblk) = qsn(k) + enddo ! nslyr + enddo ! n + endif ! qice == c0 + endif ! aice_o vs aice_m or tmask + enddo ! j + enddo ! i + enddo ! iblk + + end subroutine direct_adjust_aice + !======================================================================= end module ice_restart_driver diff --git a/cicecore/cicedyn/infrastructure/ice_restoring.F90 b/cicecore/cicedyn/infrastructure/ice_restoring.F90 index b7f1b3971..71c236a8a 100644 --- a/cicecore/cicedyn/infrastructure/ice_restoring.F90 +++ b/cicecore/cicedyn/infrastructure/ice_restoring.F90 @@ -215,7 +215,7 @@ subroutine ice_HaloRestore_init vicen_rest(i,j,n,iblk) = vicen(i,jlo,n,iblk) vsnon_rest(i,j,n,iblk) = vsnon(i,jlo,n,iblk) do nt = 1, ntrcr - trcrn_rest(i,j,nt,n,iblk) = trcrn(ilo,j,nt,n,iblk) + trcrn_rest(i,j,nt,n,iblk) = trcrn(i,jlo,nt,n,iblk) enddo enddo enddo @@ -246,7 +246,7 @@ subroutine ice_HaloRestore_init vicen_rest(i,j,n,iblk) = vicen(i,jhi,n,iblk) vsnon_rest(i,j,n,iblk) = vsnon(i,jhi,n,iblk) do nt = 1, ntrcr - trcrn_rest(i,j,nt,n,iblk) = trcrn(ihi,j,nt,n,iblk) + trcrn_rest(i,j,nt,n,iblk) = trcrn(i,jhi,nt,n,iblk) enddo enddo enddo diff --git a/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 index af4a88007..234011f61 100644 --- a/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 @@ -18,13 +18,14 @@ module ice_history_write + use ice_communicate, only: my_task, master_task use ice_fileunits, only: nu_history, nu_hdr, nu_diag use ice_exit, only: abort_ice use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted implicit none private - public :: ice_write_hist + public :: ice_write_hist, ice_read_hist !======================================================================= @@ -44,7 +45,6 @@ subroutine ice_write_hist(ns) use ice_kinds_mod use ice_calendar, only: write_ic, dayyr, histfreq, use_leap_years - use ice_communicate, only: my_task, master_task use ice_constants, only: spval use ice_domain_size, only: nx_global, ny_global, max_nstrm use ice_read_write, only: ice_open, ice_write @@ -72,6 +72,14 @@ subroutine ice_write_hist(ns) character(len=*), parameter :: subname = '(ice_write_hist)' + ! not supported in binary IO + if (write_histrest_now) then + if (my_task == master_task) then + write(nu_diag,*) subname,' WARNING: history restarts not supported with binary IO' + endif + return + endif + diag = .false. ! single precision @@ -163,7 +171,8 @@ subroutine ice_write_hist(ns) .or. n==n_vort(ns) & ! snapshots .or. n==n_sig1(ns) .or. n==n_sig2(ns) & .or. n==n_sigP(ns) .or. n==n_trsig(ns) & - .or. n==n_sistreave(ns) .or. n==n_sistremax(ns) & + .or. n==n_sidivvel(ns) .or. n==n_sishearvel(ns) & + .or. n==n_sistressave(ns) .or. n==n_sistressmax(ns) & .or. n==n_mlt_onset(ns) .or. n==n_frz_onset(ns) & .or. n==n_hisnap(ns) .or. n==n_aisnap(ns)) then write (nu_hdr, 996) nrec,trim(avail_hist_fields(n)%vname), & @@ -396,6 +405,22 @@ subroutine ice_write_hist(ns) end subroutine ice_write_hist +!======================================================================= +! +! read history restarts, only called for history restarts +! NOT supported with Binary +! +! author: T. 
Craig, Nov 2025 + + subroutine ice_read_hist + + character(len=*), parameter :: subname = '(ice_read_hist)' + + if (my_task == master_task) then + write(nu_diag,*) subname,' WARNING: history restarts not supported with binary IO' + endif + + end subroutine ice_read_hist !======================================================================= end module ice_history_write diff --git a/cicecore/cicedyn/infrastructure/io/io_binary/ice_restart.F90 b/cicecore/cicedyn/infrastructure/io/io_binary/ice_restart.F90 index d8931866a..9ae19f1d9 100644 --- a/cicecore/cicedyn/infrastructure/io/io_binary/ice_restart.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_binary/ice_restart.F90 @@ -9,9 +9,7 @@ module ice_restart use ice_broadcast use ice_kinds_mod - use ice_restart_shared, only: & - restart, restart_ext, restart_dir, restart_file, pointer_file, & - runid, runtype, use_restart_time, lenstr + use ice_restart_shared use ice_communicate, only: my_task, master_task use ice_fileunits, only: nu_diag, nu_rst_pointer use ice_fileunits, only: nu_dump, nu_dump_eap, nu_dump_FY, nu_dump_age @@ -396,6 +394,7 @@ subroutine init_restart_write(filename_spec) nbtrcr ! number of bgc tracers character(len=char_len_long) :: filename + character(len=char_len_long) :: lpointer_file character(len=*), parameter :: subname = '(init_restart_write)' @@ -422,7 +421,13 @@ subroutine init_restart_write(filename_spec) ! write pointer (path/file) if (my_task == master_task) then - open(nu_rst_pointer,file=pointer_file) + lpointer_file = pointer_file + if (pointer_date) then + ! append date to pointer filename + write(lpointer_file,'(a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & + trim(lpointer_file)//'.',myear,'-',mmonth,'-',mday,'-',msec + end if + open(nu_rst_pointer,file=lpointer_file) write(nu_rst_pointer,'(a)') filename close(nu_rst_pointer) if (restart_ext) then diff --git a/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 index d7720cd1e..db3eef513 100644 --- a/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 @@ -18,6 +18,7 @@ ! 2009 D Bailey and ECH: Generalized for multiple frequency output ! 2010 Alison McLaren and ECH: Added 3D capability ! 2013 ECH split from ice_history.F90 +! 2025 T Craig: Add history restart capability module ice_history_write @@ -47,7 +48,7 @@ module ice_history_write character (len=20) :: coordinates END TYPE req_attributes - public :: ice_write_hist + public :: ice_write_hist, ice_read_hist integer (kind=int_kind) :: imtid,jmtid @@ -58,6 +59,7 @@ module ice_history_write !======================================================================= ! ! write average ice quantities or snapshots +! supports history output, write_ic, and history restarts ! ! author: Elizabeth C. 
Hunke, LANL @@ -73,6 +75,7 @@ subroutine ice_write_hist (ns) use ice_communicate, only: my_task, master_task use ice_domain, only: distrb_info use ice_domain_size, only: nx_global, ny_global, max_nstrm, max_blocks + use ice_flux, only: albcnt, snwcnt use ice_gather_scatter, only: gather_global use ice_grid, only: TLON, TLAT, ULON, ULAT, NLON, NLAT, ELON, ELAT, & hm, uvm, npm, epm, bm, tarea, uarea, narea, earea, & @@ -80,6 +83,7 @@ subroutine ice_write_hist (ns) lont_bounds, latt_bounds, lonu_bounds, latu_bounds, & lonn_bounds, latn_bounds, lone_bounds, late_bounds use ice_history_shared + use ice_restart_shared, only: restart_dir #ifdef CESMCOUPLED use ice_restart_shared, only: runid #endif @@ -93,8 +97,8 @@ subroutine ice_write_hist (ns) real (kind=dbl_kind), dimension(nx_block,ny_block,max_blocks) :: work1 integer (kind=int_kind) :: i,k,ic,n,nn, & - ncid,status,kmtidi,kmtids,kmtidb, cmtid,timid,varid, & - nvertexid,ivertex,kmtida,iflag, fmtid + ncid,status,kmtidi,kmtids,kmtidb,cmtid,timid,varid, & + nvertexid,ivertex,kmtida,iflag,fmtid,lhistprec integer (kind=int_kind), dimension(3) :: dimid integer (kind=int_kind), dimension(4) :: dimidz integer (kind=int_kind), dimension(5) :: dimidcz @@ -111,6 +115,7 @@ subroutine ice_write_hist (ns) character (char_len) :: start_time,current_date,current_time character (len=8) :: cdate + character (len=1) :: cns ! time coord TYPE(coord_attributes) :: time_coord @@ -137,16 +142,30 @@ subroutine ice_write_hist (ns) file=__FILE__, line=__LINE__) extvars = '' + write(cns,'(i1.1)') ns + + ! modify history restart output + lhistprec = history_precision + if (write_histrest_now) then + history_precision = 8 + endif + lprecision = nf90_float if (history_precision == 8) lprecision = nf90_double if (my_task == master_task) then - call construct_filename(ncfile,'nc',ns) + if (write_histrest_now) then + call construct_filename(ncfile,'nc',ns, option='histrest') + else + call construct_filename(ncfile,'nc',ns) + endif ! add local directory path name to ncfile if (write_ic) then ncfile = trim(incond_dir)//ncfile + elseif (write_histrest_now) then + ncfile = trim(restart_dir)//ncfile else ncfile = trim(history_dir)//ncfile endif @@ -270,14 +289,15 @@ subroutine ice_write_hist (ns) endif ! Define coord time_bounds if hist_avg is true - ! bounds inherit attributes if (hist_avg(ns) .and. .not. write_ic) then - time_coord = coord_attributes('time_bounds', 'undefined', 'undefined', 'undefined') + time_coord = coord_attributes('time_bounds', 'time interval endpoints', trim(cal_units), 'undefined') dimid(1) = boundid dimid(2) = timid call ice_hist_coord_def(ncid, time_coord, nf90_double, dimid(1:2), varid) + status = nf90_put_att(ncid,varid,'calendar',cal_att) !extra attribute + call ice_check_nc(status, subname//' ERROR: defining att calendar: '//cal_att,file=__FILE__,line=__LINE__) endif endif ! histfreq(ns)/='g' @@ -406,15 +426,22 @@ subroutine ice_write_hist (ns) ! bounds fields are required for CF compliance ! dimensions (nx,ny,nverts) - ! 
bounds inherit attributes - var_nverts(n_lont_bnds) = coord_attributes('lont_bounds','und','und','und') - var_nverts(n_latt_bnds) = coord_attributes('latt_bounds','und','und','und') - var_nverts(n_lonu_bnds) = coord_attributes('lonu_bounds','und','und','und') - var_nverts(n_latu_bnds) = coord_attributes('latu_bounds','und','und','und') - var_nverts(n_lonn_bnds) = coord_attributes('lonn_bounds','und','und','und') - var_nverts(n_latn_bnds) = coord_attributes('latn_bounds','und','und','und') - var_nverts(n_lone_bnds) = coord_attributes('lone_bounds','und','und','und') - var_nverts(n_late_bnds) = coord_attributes('late_bounds','und','und','und') + var_nverts(n_lont_bnds) = coord_attributes('lont_bounds', & + 'longitude of gridbox corners for T points','degrees_east','und') + var_nverts(n_latt_bnds) = coord_attributes('latt_bounds', & + 'latitude of gridbox corners for T points','degrees_north','und') + var_nverts(n_lonu_bnds) = coord_attributes('lonu_bounds', & + 'longitude of gridbox corners for U points','degrees_east','und') + var_nverts(n_latu_bnds) = coord_attributes('latu_bounds', & + 'latitude of gridbox corners for U points','degrees_north','und') + var_nverts(n_lonn_bnds) = coord_attributes('lonn_bounds', & + 'longitude of gridbox corners for N points','degrees_east','und') + var_nverts(n_latn_bnds) = coord_attributes('latn_bounds', & + 'latitude of gridbox corners for N points','degrees_north','und') + var_nverts(n_lone_bnds) = coord_attributes('lone_bounds', & + 'longitude of gridbox corners for E points','degrees_east','und') + var_nverts(n_late_bnds) = coord_attributes('late_bounds', & + 'latitude of gridbox corners for E points','degrees_north','und') !----------------------------------------------------------------- ! define attributes for time-invariant variables @@ -473,7 +500,6 @@ subroutine ice_write_hist (ns) enddo ! bounds fields with dimensions (nverts,nx,ny) - ! bounds inherits attributes dimid_nverts(1) = nvertexid dimid_nverts(2) = imtid dimid_nverts(3) = jmtid @@ -487,6 +513,13 @@ subroutine ice_write_hist (ns) ! define attributes for time-variant variables !----------------------------------------------------------------- + if (write_histrest_now) then + status = nf90_def_var(ncid, 'time_beg', lprecision, varid=varid) + status = nf90_def_var(ncid, 'avgct', lprecision, varid=varid) + status = nf90_def_var(ncid, 'albcnt'//cns, lprecision, dimid, varid) + status = nf90_def_var(ncid, 'snwcnt'//cns, lprecision, dimid, varid) + endif + do n=1,num_avail_hist_fields_2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then call ice_hist_field_def(ncid, avail_hist_fields(n),lprecision, dimid,ns) @@ -591,6 +624,7 @@ subroutine ice_write_hist (ns) !----------------------------------------------------------------- ! ... the user should change these to something useful ... !----------------------------------------------------------------- + #ifdef CESMCOUPLED status = nf90_put_att(ncid,nf90_global,'title',runid) call ice_check_nc(status, subname// ' ERROR: in global attribute title', & @@ -966,12 +1000,48 @@ subroutine ice_write_hist (ns) ! 
write variable data !----------------------------------------------------------------- + if (write_histrest_now) then + if (my_task == master_task) then + status = nf90_inq_varid(ncid,'time_beg',varid) + call ice_check_nc(status, subname// ' ERROR: getting varid for '//'time_beg', & + file=__FILE__, line=__LINE__) + status = nf90_put_var(ncid,varid,time_beg(ns)) + call ice_check_nc(status, subname// ' ERROR: writing variable '//'time_beg', & + file=__FILE__, line=__LINE__) + status = nf90_inq_varid(ncid,'avgct',varid) + call ice_check_nc(status, subname// ' ERROR: getting varid for '//'avgct', & + file=__FILE__, line=__LINE__) + status = nf90_put_var(ncid,varid,avgct(ns)) + call ice_check_nc(status, subname// ' ERROR: writing variable '//'avgct', & + file=__FILE__, line=__LINE__) + endif + call gather_global(work_g1, albcnt(:,:,:,ns), master_task, distrb_info) + if (my_task == master_task) then + status = nf90_inq_varid(ncid,'albcnt'//cns,varid) + call ice_check_nc(status, subname// ' ERROR: getting varid for '//'albcnt'//cns, & + file=__FILE__, line=__LINE__) + status = nf90_put_var(ncid,varid,work_g1, & + count=(/nx_global,ny_global/)) + call ice_check_nc(status, subname// ' ERROR: writing variable '//'albcnt'//cns, & + file=__FILE__, line=__LINE__) + endif + call gather_global(work_g1, snwcnt(:,:,:,ns), master_task, distrb_info) + if (my_task == master_task) then + status = nf90_inq_varid(ncid,'snwcnt'//cns,varid) + call ice_check_nc(status, subname// ' ERROR: getting varid for '//'snwcnt'//cns, & + file=__FILE__, line=__LINE__) + status = nf90_put_var(ncid,varid,work_g1, & + count=(/nx_global,ny_global/)) + call ice_check_nc(status, subname// ' ERROR: writing variable '//'snwcnt'//cns, & + file=__FILE__, line=__LINE__) + endif + endif + work_g1(:,:) = c0 do n=1,num_avail_hist_fields_2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. 
write_ic) then - call gather_global(work_g1, a2D(:,:,n,:), & - master_task, distrb_info) + call gather_global(work_g1, a2D(:,:,n,:), master_task, distrb_info) if (my_task == master_task) then status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) call ice_check_nc(status, subname// ' ERROR: getting varid for '//avail_hist_fields(n)%vname, & @@ -996,9 +1066,7 @@ subroutine ice_write_hist (ns) file=__FILE__, line=__LINE__) endif do k = 1, ncat_hist - call gather_global(work_g1, a3Dc(:,:,k,nn,:), & - master_task, distrb_info) - + call gather_global(work_g1, a3Dc(:,:,k,nn,:), master_task, distrb_info) if (my_task == master_task) then status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) call ice_check_nc(status, subname// ' ERROR: getting varid for '//avail_hist_fields(n)%vname, & @@ -1024,9 +1092,7 @@ subroutine ice_write_hist (ns) file=__FILE__, line=__LINE__) endif do k = 1, nzilyr - call gather_global(work_g1, a3Dz(:,:,k,nn,:), & - master_task, distrb_info) - + call gather_global(work_g1, a3Dz(:,:,k,nn,:), master_task, distrb_info) if (my_task == master_task) then status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k/), & @@ -1049,9 +1115,7 @@ subroutine ice_write_hist (ns) file=__FILE__, line=__LINE__) endif do k = 1, nzblyr - call gather_global(work_g1, a3Db(:,:,k,nn,:), & - master_task, distrb_info) - + call gather_global(work_g1, a3Db(:,:,k,nn,:), master_task, distrb_info) if (my_task == master_task) then status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k/), & @@ -1074,9 +1138,7 @@ subroutine ice_write_hist (ns) file=__FILE__, line=__LINE__) endif do k = 1, nzalyr - call gather_global(work_g1, a3Da(:,:,k,nn,:), & - master_task, distrb_info) - + call gather_global(work_g1, a3Da(:,:,k,nn,:), master_task, distrb_info) if (my_task == master_task) then status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k/), & @@ -1099,8 +1161,7 @@ subroutine ice_write_hist (ns) file=__FILE__, line=__LINE__) endif do k = 1, nfsd_hist - call gather_global(work_g1, a3Df(:,:,k,nn,:), & - master_task, distrb_info) + call gather_global(work_g1, a3Df(:,:,k,nn,:), master_task, distrb_info) if (my_task == master_task) then status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k/), & @@ -1124,8 +1185,7 @@ subroutine ice_write_hist (ns) endif do ic = 1, ncat_hist do k = 1, nzilyr - call gather_global(work_g1, a4Di(:,:,k,ic,nn,:), & - master_task, distrb_info) + call gather_global(work_g1, a4Di(:,:,k,ic,nn,:), master_task, distrb_info) if (my_task == master_task) then status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k,ic/), & @@ -1150,8 +1210,7 @@ subroutine ice_write_hist (ns) endif do ic = 1, ncat_hist do k = 1, nzslyr - call gather_global(work_g1, a4Ds(:,:,k,ic,nn,:), & - master_task, distrb_info) + call gather_global(work_g1, a4Ds(:,:,k,ic,nn,:), master_task, distrb_info) if (my_task == master_task) then status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k,ic/), & @@ -1174,8 +1233,7 @@ subroutine ice_write_hist (ns) endif do ic = 1, ncat_hist do k = 1, nfsd_hist - call gather_global(work_g1, a4Df(:,:,k,ic,nn,:), & - master_task, distrb_info) + call gather_global(work_g1, a4Df(:,:,k,ic,nn,:), master_task, distrb_info) if (my_task == master_task) then status = nf90_put_var(ncid,varid,work_g1, & start=(/ 1, 1,k,ic/), & @@ -1198,8 +1256,12 @@ subroutine ice_write_hist (ns) status = nf90_close(ncid) call ice_check_nc(status, subname// ' ERROR: closing netCDF history file', & file=__FILE__, line=__LINE__) - write(nu_diag,*) ' ' - write(nu_diag,*) 'Finished 
writing ',trim(ncfile) + write(nu_diag,*) subname,' Finished writing ',trim(ncfile) + endif + + ! reset history parameters + if (write_histrest_now) then + history_precision = lhistprec endif #else @@ -1208,9 +1270,424 @@ subroutine ice_write_hist (ns) end subroutine ice_write_hist +!======================================================================= +! +! read history restarts, only called for history restarts +! +! author: T. Craig Nov 2025 + + subroutine ice_read_hist + + use ice_kinds_mod + use ice_blocks, only: nx_block, ny_block + use ice_broadcast, only: broadcast_scalar + use ice_calendar, only: nstreams, histfreq + use ice_communicate, only: my_task, master_task + use ice_constants, only: field_loc_noupdate, field_type_noupdate + use ice_domain, only: distrb_info + use ice_domain_size, only: nx_global, ny_global, max_nstrm, max_blocks + use ice_flux, only: albcnt, snwcnt + use ice_gather_scatter, only: scatter_global + use ice_history_shared + use ice_restart_shared, only: restart_dir + use ice_timers, only: ice_timer_start, ice_timer_stop, timer_readwrite + + ! local variables + + real (kind=dbl_kind), dimension(:,:), allocatable :: work_g1 + + integer (kind=int_kind) :: k,ic,n,nn,ns,ncid,status,varid + character (char_len_long) :: ncfile + character (len=1) :: cns + character (len=32) :: readstr + character (len=*), parameter :: readstrT = ' read ok:' + character (len=*), parameter :: readstrF = ' DID NOT READ:' + + character(len=*), parameter :: subname = '(ice_read_hist)' + + call ice_timer_start(timer_readwrite) ! reading/writing + do ns = 1,nstreams + if (hist_avg(ns)) then + + write(cns,'(i1.1)') ns +#ifdef USE_NETCDF + if (my_task == master_task) then + + call construct_filename(ncfile,'nc',ns, option='histrest') + + ! add local directory path name to ncfile + ncfile = trim(restart_dir)//ncfile + + ! open file + status = nf90_open(ncfile, nf90_nowrite, ncid) + endif ! master_task + + call broadcast_scalar(status,master_task) + if (status /= nf90_noerr) then + if (my_task == master_task) then + write(nu_diag,*) subname,' file not found ',trim(ncfile) + endif + else + if (my_task == master_task) then + write(nu_diag,*) subname,' reading file ',trim(ncfile) + endif + + if (my_task==master_task) then + allocate(work_g1(nx_global,ny_global)) + else + allocate(work_g1(1,1)) + endif + + work_g1(:,:) = c0 + + !----------------------------------------------------------------- + ! 
read variable data + !----------------------------------------------------------------- + + if (my_task == master_task) then + readstr = readstrF + status = nf90_inq_varid(ncid,'time_beg',varid) + if (status == nf90_noerr) status = nf90_get_var(ncid,varid,time_beg(ns)) + if (status == nf90_noerr) readstr = readstrT + write(nu_diag,*) subname,trim(readstr),' time_beg' + + readstr = readstrF + status = nf90_inq_varid(ncid,'avgct',varid) + if (status == nf90_noerr) status = nf90_get_var(ncid,varid,avgct(ns)) + if (status == nf90_noerr) readstr = readstrT + write(nu_diag,*) subname,trim(readstr),' avgct' + endif + call broadcast_scalar(time_beg(ns),master_task) + call broadcast_scalar(avgct(ns),master_task) + + if (my_task == master_task) then + readstr = readstrF + status = nf90_inq_varid(ncid,'albcnt'//cns,varid) + if (status == nf90_noerr) then + status = nf90_get_var(ncid,varid,work_g1, & + count=(/nx_global,ny_global/)) + if (status == nf90_noerr) readstr = readstrT + endif + write(nu_diag,*) subname,trim(readstr),' albcnt'//cns + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + call scatter_global(albcnt(:,:,:,ns), work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + + if (my_task == master_task) then + readstr = readstrF + status = nf90_inq_varid(ncid,'snwcnt'//cns,varid) + if (status == nf90_noerr) then + status = nf90_get_var(ncid,varid,work_g1, & + count=(/nx_global,ny_global/)) + if (status == nf90_noerr) readstr = readstrT + endif + write(nu_diag,*) subname,trim(readstr),' snwcnt'//cns + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + call scatter_global(snwcnt(:,:,:,ns), work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + + do n=1,num_avail_hist_fields_2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + if (status == nf90_noerr) then + status = nf90_get_var(ncid,varid,work_g1, & + count=(/nx_global,ny_global/)) + if (status == nf90_noerr) readstr = readstrT + endif + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + call scatter_global(a2D(:,:,n,:), work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + endif + enddo ! num_avail_hist_fields_2D + + do n = n2D + 1, n3Dccum + nn = n - n2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + do k = 1, ncat_hist + if (my_task == master_task) then + status = nf90_get_var(ncid,varid,work_g1, & + start=(/ 1, 1,k/), & + count=(/nx_global,ny_global,1/)) + if (status == nf90_noerr) readstr = readstrT + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + call scatter_global(a3Dc(:,:,k,nn,:), work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + enddo ! k + endif ! varid + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + endif ! histfreq + enddo !
num_avail_hist_fields_3Dc + + work_g1(:,:) = c0 + + do n = n3Dccum+1, n3Dzcum + nn = n - n3Dccum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + do k = 1, nzilyr + if (my_task == master_task) then + status = nf90_get_var(ncid,varid,work_g1, & + start=(/ 1, 1,k/), & + count=(/nx_global,ny_global,1/)) + if (status == nf90_noerr) readstr = readstrT + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + call scatter_global(a3Dz(:,:,k,nn,:), work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + enddo ! k + endif ! varid + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + endif ! histfreq + enddo ! num_avail_hist_fields_3Dz + + work_g1(:,:) = c0 + + do n = n3Dzcum+1, n3Dbcum + nn = n - n3Dzcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + do k = 1, nzblyr + if (my_task == master_task) then + status = nf90_get_var(ncid,varid,work_g1, & + start=(/ 1, 1,k/), & + count=(/nx_global,ny_global,1/)) + if (status == nf90_noerr) readstr = readstrT + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + call scatter_global(a3Db(:,:,k,nn,:), work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + enddo ! k + endif ! varid + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + endif ! histfreq + enddo ! num_avail_hist_fields_3Db + + work_g1(:,:) = c0 + + do n = n3Dbcum+1, n3Dacum + nn = n - n3Dbcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + do k = 1, nzalyr + if (my_task == master_task) then + status = nf90_get_var(ncid,varid,work_g1, & + start=(/ 1, 1,k/), & + count=(/nx_global,ny_global,1/)) + if (status == nf90_noerr) readstr = readstrT + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + call scatter_global(a3Da(:,:,k,nn,:), work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + enddo ! k + endif ! varid + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + endif ! histfreq + enddo !
num_avail_hist_fields_3Da + + work_g1(:,:) = c0 + + do n = n3Dacum+1, n3Dfcum + nn = n - n3Dacum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + do k = 1, nfsd_hist + if (my_task == master_task) then + status = nf90_get_var(ncid,varid,work_g1, & + start=(/ 1, 1,k/), & + count=(/nx_global,ny_global,1/)) + if (status == nf90_noerr) readstr = readstrT + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + call scatter_global(a3Df(:,:,k,nn,:), work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + enddo ! k + endif ! varid + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + endif ! histfreq + enddo ! num_avail_hist_fields_3Df + + work_g1(:,:) = c0 + + do n = n3Dfcum+1, n4Dicum + nn = n - n3Dfcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + do ic = 1, ncat_hist + do k = 1, nzilyr + if (my_task == master_task) then + status = nf90_get_var(ncid,varid,work_g1, & + start=(/ 1, 1,k,ic/), & + count=(/nx_global,ny_global,1, 1/)) + if (status == nf90_noerr) readstr = readstrT + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + call scatter_global(a4Di(:,:,k,ic,nn,:), work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + enddo ! k + enddo ! ic + endif ! varid + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + endif ! histfreq + enddo ! num_avail_hist_fields_4Di + + work_g1(:,:) = c0 + + do n = n4Dicum+1, n4Dscum + nn = n - n4Dicum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + do ic = 1, ncat_hist + do k = 1, nzslyr + if (my_task == master_task) then + status = nf90_get_var(ncid,varid,work_g1, & + start=(/ 1, 1,k,ic/), & + count=(/nx_global,ny_global,1, 1/)) + if (status == nf90_noerr) readstr = readstrT + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + call scatter_global(a4Ds(:,:,k,ic,nn,:), work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + enddo ! k + enddo ! ic + endif ! varid + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + endif ! histfreq + enddo ! 
num_avail_hist_fields_4Ds + + do n = n4Dscum+1, n4Dfcum + nn = n - n4Dscum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + if (my_task == master_task) then + status = nf90_inq_varid(ncid,avail_hist_fields(n)%vname,varid) + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + do ic = 1, ncat_hist + do k = 1, nfsd_hist + if (my_task == master_task) then + status = nf90_get_var(ncid,varid,work_g1, & + start=(/ 1, 1,k,ic/), & + count=(/nx_global,ny_global,1, 1/)) + if (status == nf90_noerr) readstr = readstrT + endif + call broadcast_scalar(status,master_task) + if (status == nf90_noerr) then + call scatter_global(a4Df(:,:,k,ic,nn,:), work_g1, master_task, distrb_info, & + field_loc_noupdate, field_type_noupdate) + endif + enddo ! k + enddo ! ic + endif ! varid + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + endif ! histfreq + enddo ! num_avail_hist_fields_4Df + + deallocate(work_g1) + + !----------------------------------------------------------------- + ! close output dataset + !----------------------------------------------------------------- + + if (my_task == master_task) then + status = nf90_close(ncid) + call ice_check_nc(status, subname// ' ERROR: closing netCDF history file', & + file=__FILE__, line=__LINE__) + write(nu_diag,*) subname,' Finished reading ',trim(ncfile) + endif + + endif ! open file success + endif ! hist_avg + enddo ! nstreams + call ice_timer_stop(timer_readwrite) ! reading/writing + +#else + call abort_ice(subname//' ERROR: USE_NETCDF cpp not defined', file=__FILE__, line=__LINE__) +#endif + + end subroutine ice_read_hist + !======================================================================= ! Defines a (time-dependent) history var in the history file -! variables have short_name, long_name and units, coordiantes and cell_measures attributes, +! variables have short_name, long_name and units, coordinates and cell_measures attributes, ! and are compressed and chunked for 'hdf5' subroutine ice_hist_field_def(ncid, hfield, lprecision, dimids, ns) @@ -1284,12 +1761,28 @@ subroutine ice_hist_field_def(ncid, hfield, lprecision, dimids, ns) if (hist_avg(ns) .and. .not. 
write_ic) then if (TRIM(hfield%vname(1:4))/='sig1' & .and.TRIM(hfield%vname(1:4))/='sig2' & - .and.TRIM(hfield%vname(1:9))/='sistreave' & - .and.TRIM(hfield%vname(1:9))/='sistremax' & + .and.TRIM(hfield%vname(1:5))/='trsig' & + .and.TRIM(hfield%vname(1:4))/='divu' & + .and.TRIM(hfield%vname(1:5))/='shear' & + .and.TRIM(hfield%vname(1:4))/='vort' & + .and.TRIM(hfield%vname(1:9))/='frz_onset' & + .and.TRIM(hfield%vname(1:9))/='mlt_onset' & + .and.TRIM(hfield%vname(1:6))/='aisnap' & + .and.TRIM(hfield%vname(1:6))/='hisnap' & + .and.TRIM(hfield%vname(1:8))/='sidivvel' & + .and.TRIM(hfield%vname(1:10))/='sishearvel' & + .and.TRIM(hfield%vname(1:11))/='sistressave' & + .and.TRIM(hfield%vname(1:11))/='sistressmax' & .and.TRIM(hfield%vname(1:4))/='sigP') then - status = nf90_put_att(ncid,varid,'cell_methods','time: mean') - call ice_check_nc(status, subname// ' ERROR: defining cell methods for '//hfield%vname, & - file=__FILE__, line=__LINE__) + if (trim(hfield%avg_ice_present) /= 'none') then + status = nf90_put_att(ncid,varid,'cell_methods','area: time: mean where sea ice (mask=siconc)') + call ice_check_nc(status, subname// ' ERROR: defining cell methods for '//hfield%vname, & + file=__FILE__, line=__LINE__) + else + status = nf90_put_att(ncid,varid,'cell_methods','time: mean') + call ice_check_nc(status, subname// ' ERROR: defining cell methods for '//hfield%vname, & + file=__FILE__, line=__LINE__) + endif endif endif @@ -1303,8 +1796,10 @@ subroutine ice_hist_field_def(ncid, hfield, lprecision, dimids, ns) .or.TRIM(hfield%vname(1:4))=='sig2' & .or.TRIM(hfield%vname(1:4))=='sigP' & .or.TRIM(hfield%vname(1:5))=='trsig' & - .or.TRIM(hfield%vname(1:9))=='sistreave' & - .or.TRIM(hfield%vname(1:9))=='sistremax' & + .or.TRIM(hfield%vname(1:8))=='sidivvel' & + .or.TRIM(hfield%vname(1:10))=='sishearvel' & + .or.TRIM(hfield%vname(1:11))=='sistressave' & + .or.TRIM(hfield%vname(1:11))=='sistressmax' & .or.TRIM(hfield%vname(1:9))=='mlt_onset' & .or.TRIM(hfield%vname(1:9))=='frz_onset' & .or.TRIM(hfield%vname(1:6))=='hisnap' & diff --git a/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_restart.F90 b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_restart.F90 index 33fd0cd1f..1c25e3f30 100644 --- a/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_restart.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_restart.F90 @@ -10,16 +10,14 @@ module ice_restart use ice_broadcast + use ice_constants, only: c0 use ice_communicate, only: my_task, master_task use ice_kinds_mod #ifdef USE_NETCDF use netcdf #endif use ice_read_write, only: ice_check_nc - use ice_restart_shared, only: & - restart_ext, restart_dir, restart_file, pointer_file, & - runid, use_restart_time, lenstr, restart_coszen, restart_format, & - restart_chunksize, restart_deflate + use ice_restart_shared use ice_fileunits, only: nu_diag, nu_rst_pointer use ice_exit, only: abort_ice use icepack_intfc, only: icepack_query_parameters @@ -168,6 +166,7 @@ subroutine init_restart_write(filename_spec) nbtrcr ! number of bgc tracers character(len=char_len_long) :: filename + character(len=char_len_long) :: lpointer_file integer (kind=int_kind), allocatable :: dims(:) @@ -215,7 +214,13 @@ subroutine init_restart_write(filename_spec) ! write pointer (path/file) if (my_task == master_task) then filename = trim(filename) // '.nc' - open(nu_rst_pointer,file=pointer_file) + lpointer_file = pointer_file + if (pointer_date) then + ! 
append date to pointer filename + write(lpointer_file,'(a,i4.4,a,i2.2,a,i2.2,a,i5.5)') & + trim(lpointer_file)//'.',myear,'-',mmonth,'-',mday,'-',msec + end if + open(nu_rst_pointer,file=lpointer_file) write(nu_rst_pointer,'(a)') filename close(nu_rst_pointer) @@ -744,39 +749,25 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3, & character(len=*), parameter :: subname = '(read_restart_field)' + work (:,:,:,:) = c0 + work2(:,:,:) = c0 #ifdef USE_NETCDF if (present(field_loc)) then if (ndim3 == ncat) then - if (restart_ext) then - call ice_read_nc(ncid,1,vname,work,diag, & - field_loc=field_loc,field_type=field_type,restart_ext=restart_ext) - else - call ice_read_nc(ncid,1,vname,work,diag,field_loc,field_type) - endif + call ice_read_nc(ncid,1,vname,work,diag, & + field_loc=field_loc,field_type=field_type,restart_ext=restart_ext) elseif (ndim3 == 1) then - if (restart_ext) then - call ice_read_nc(ncid,1,vname,work2,diag, & - field_loc=field_loc,field_type=field_type,restart_ext=restart_ext) - else - call ice_read_nc(ncid,1,vname,work2,diag,field_loc,field_type) - endif + call ice_read_nc(ncid,1,vname,work2,diag, & + field_loc=field_loc,field_type=field_type,restart_ext=restart_ext) work(:,:,1,:) = work2(:,:,:) else write(nu_diag,*) 'ndim3 not supported ',ndim3 endif else if (ndim3 == ncat) then - if (restart_ext) then - call ice_read_nc(ncid, 1, vname, work, diag, restart_ext=restart_ext) - else - call ice_read_nc(ncid, 1, vname, work, diag) - endif + call ice_read_nc(ncid, 1, vname, work, diag, restart_ext=restart_ext) elseif (ndim3 == 1) then - if (restart_ext) then - call ice_read_nc(ncid, 1, vname, work2, diag, restart_ext=restart_ext) - else - call ice_read_nc(ncid, 1, vname, work2, diag) - endif + call ice_read_nc(ncid, 1, vname, work2, diag, restart_ext=restart_ext) work(:,:,1,:) = work2(:,:,:) else write(nu_diag,*) 'ndim3 not supported ',ndim3 @@ -837,18 +828,10 @@ subroutine write_restart_field(nu,nrec,work,atype,vname,ndim3,diag) call ice_check_nc(status, subname//' ERROR: inq varid '//trim(vname), file=__FILE__, line=__LINE__) endif if (ndim3 == ncat) then - if (restart_ext) then - call ice_write_nc(ncid, 1, varid, work, diag, restart_ext, varname=trim(vname)) - else - call ice_write_nc(ncid, 1, varid, work, diag, varname=trim(vname)) - endif + call ice_write_nc(ncid, 1, varid, work, diag, restart_ext, varname=trim(vname)) elseif (ndim3 == 1) then work2(:,:,:) = work(:,:,1,:) - if (restart_ext) then - call ice_write_nc(ncid, 1, varid, work2, diag, restart_ext, varname=trim(vname)) - else - call ice_write_nc(ncid, 1, varid, work2, diag, varname=trim(vname)) - endif + call ice_write_nc(ncid, 1, varid, work2, diag, restart_ext, varname=trim(vname)) else write(nu_diag,*) 'ndim3 not supported',ndim3 endif diff --git a/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 index d935f2577..acbd7d8de 100644 --- a/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 @@ -14,6 +14,7 @@ ! Added option for binary output instead of netCDF ! 2009 D Bailey and ECH: Generalized for multiple frequency output ! 2010 Alison McLaren and ECH: Added 3D capability +! 2025 T Craig: Add history restart capability ! 
module ice_history_write @@ -41,7 +42,7 @@ module ice_history_write character (len=20) :: coordinates END TYPE req_attributes - public :: ice_write_hist + public :: ice_write_hist, ice_read_hist integer (kind=int_kind) :: imtid,jmtid @@ -52,6 +53,7 @@ module ice_history_write !======================================================================= ! ! write average ice quantities or snapshots +! supports history output, write_ic, and history restarts ! ! author: Elizabeth C. Hunke, LANL @@ -64,7 +66,8 @@ subroutine ice_write_hist (ns) hh_init, mm_init, ss_init use ice_communicate, only: my_task, master_task use ice_domain, only: distrb_info, nblocks - use ice_domain_size, only: nx_global, ny_global, max_blocks, max_nstrm + use ice_domain_size, only: nx_global, ny_global, max_blocks + use ice_flux, only: albcnt, snwcnt use ice_gather_scatter, only: gather_global use ice_grid, only: TLON, TLAT, ULON, ULAT, NLON, NLAT, ELON, ELAT, & hm, bm, uvm, npm, epm, & @@ -74,7 +77,7 @@ subroutine ice_write_hist (ns) lonn_bounds, latn_bounds, lone_bounds, late_bounds use ice_history_shared use ice_arrays_column, only: hin_max, floe_rad_c - use ice_restart_shared, only: runid + use ice_restart_shared, only: runid, restart_dir use pio integer (kind=int_kind), intent(in) :: ns @@ -82,8 +85,8 @@ subroutine ice_write_hist (ns) ! local variables integer (kind=int_kind) :: i,j,k,ic,n,nn, & - ncid,status,kmtidi,kmtids,kmtidb, cmtid,timid, & - length,nvertexid,ivertex,kmtida,fmtid + ncid,status,kmtidi,kmtids,kmtidb,cmtid,timid, & + length,nvertexid,ivertex,kmtida,fmtid,lhistprec integer (kind=int_kind), dimension(2) :: dimid2 integer (kind=int_kind), dimension(3) :: dimid3 integer (kind=int_kind), dimension(4) :: dimidz @@ -92,6 +95,7 @@ subroutine ice_write_hist (ns) integer (kind=int_kind), dimension(6) :: dimidex real (kind= dbl_kind) :: ltime2 character (len=8) :: cdate + character (len=1) :: cns character (len=char_len_long) :: title, cal_units, cal_att character (len=char_len) :: time_period_freq = 'none' character (len=char_len_long) :: ncfile @@ -158,12 +162,27 @@ subroutine ice_write_hist (ns) file=__FILE__, line=__LINE__) extvars = '' + + write(cns,'(i1.1)') ns + + ! modify history restart output + lhistprec = history_precision + if (write_histrest_now) then + history_precision = 8 + endif + if (my_task == master_task) then - call construct_filename(ncfile,'nc',ns) + if (write_histrest_now) then + call construct_filename(ncfile,'nc',ns,option='histrest') + else + call construct_filename(ncfile,'nc',ns) + endif ! add local directory path name to ncfile if (write_ic) then ncfile = trim(incond_dir)//ncfile + elseif (write_histrest_now) then + ncfile = trim(restart_dir)//ncfile else ncfile = trim(history_dir)//ncfile endif @@ -268,14 +287,15 @@ subroutine ice_write_hist (ns) endif ! Define coord time_bounds if hist_avg is true - ! bounds inherit attributes if (hist_avg(ns) .and. .not. write_ic) then - time_coord = coord_attributes('time_bounds', 'undefined', 'undefined', 'undefined') + time_coord = coord_attributes('time_bounds', 'time interval bounds', trim(cal_units), 'undefined') dimid2(1) = boundid dimid2(2) = timid call ice_hist_coord_def(File, time_coord, pio_double, dimid2, varid) + call ice_pio_check(pio_put_att(File,varid,'calendar',cal_att), & + subname//' ERROR: defining att calendar: '//cal_att,file=__FILE__,line=__LINE__) endif endif ! histfreq(ns)/='g' @@ -404,15 +424,14 @@ subroutine ice_write_hist (ns) ! bounds fields are required for CF compliance ! dimensions (nx,ny,nverts) - ! 
bounds inherit attributes - var_nverts(n_lont_bnds) = coord_attributes('lont_bounds','und','und','und') - var_nverts(n_latt_bnds) = coord_attributes('latt_bounds','und','und','und') - var_nverts(n_lonu_bnds) = coord_attributes('lonu_bounds','und','und','und') - var_nverts(n_latu_bnds) = coord_attributes('latu_bounds','und','und','und') - var_nverts(n_lonn_bnds) = coord_attributes('lonn_bounds','und','und','und') - var_nverts(n_latn_bnds) = coord_attributes('latn_bounds','und','und','und') - var_nverts(n_lone_bnds) = coord_attributes('lone_bounds','und','und','und') - var_nverts(n_late_bnds) = coord_attributes('late_bounds','und','und','und') + var_nverts(n_lont_bnds) = coord_attributes('lont_bounds','longitude bounds (T-cell)','degrees_east','und') + var_nverts(n_latt_bnds) = coord_attributes('latt_bounds','latitude bounds (T-cell)','degrees_north','und') + var_nverts(n_lonu_bnds) = coord_attributes('lonu_bounds','longitude bounds (U-cell)','degrees_east','und') + var_nverts(n_latu_bnds) = coord_attributes('latu_bounds','latitude bounds (U-cell)','degrees_north','und') + var_nverts(n_lonn_bnds) = coord_attributes('lonn_bounds','longitude bounds (N-cell)','degrees_east','und') + var_nverts(n_latn_bnds) = coord_attributes('latn_bounds','latitude bounds (N-cell)','degrees_north','und') + var_nverts(n_lone_bnds) = coord_attributes('lone_bounds','longitude bounds (E-cell)','degrees_east','und') + var_nverts(n_late_bnds) = coord_attributes('late_bounds','latitude bounds (E-cell)','degrees_north','und') !----------------------------------------------------------------- ! define attributes for time-invariant variables @@ -467,7 +486,6 @@ subroutine ice_write_hist (ns) enddo ! bounds fields with dimensions (nverts,nx,ny) - ! bounds inherit attributes dimid_nverts(1) = nvertexid dimid_nverts(2) = imtid dimid_nverts(3) = jmtid @@ -486,6 +504,13 @@ subroutine ice_write_hist (ns) dimid3(2) = jmtid dimid3(3) = timid + if (write_histrest_now) then + status = pio_def_var(File, 'time_beg', lprecision, varid) + status = pio_def_var(File, 'avgct', lprecision, varid) + status = pio_def_var(File, 'albcnt'//cns, lprecision, dimid3, varid) + status = pio_def_var(File, 'snwcnt'//cns, lprecision, dimid3, varid) + endif + do n=1,num_avail_hist_fields_2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then call ice_hist_field_def(File, avail_hist_fields(n),lprecision, dimid3, ns) @@ -924,6 +949,79 @@ subroutine ice_write_hist (ns) ! 
write variable data !----------------------------------------------------------------- + if (write_histrest_now) then + call ice_pio_check(pio_inq_varid(File,'time_beg',varid), & + subname// ' ERROR: getting varid for '//'time_beg', & + file=__FILE__, line=__LINE__) + call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) +#ifdef CESM1_PIO + call pio_setframe(varid, int(1,kind=PIO_OFFSET)) +#else + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) +#endif + call pio_seterrorhandling(File, PIO_RETURN_ERROR) + call ice_pio_check(pio_put_var(File,varid,(/1/),time_beg(ns)), & + subname// ' ERROR: writing variable '//'time_beg', & + file=__FILE__, line=__LINE__) + + call ice_pio_check(pio_inq_varid(File,'avgct',varid), & + subname// ' ERROR: getting varid for '//'avgct', & + file=__FILE__, line=__LINE__) + call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) +#ifdef CESM1_PIO + call pio_setframe(varid, int(1,kind=PIO_OFFSET)) +#else + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) +#endif + call pio_seterrorhandling(File, PIO_RETURN_ERROR) + call ice_pio_check(pio_put_var(File,varid,(/1/),avgct(ns)), & + subname// ' ERROR: writing variable '//'avgct', & + file=__FILE__, line=__LINE__) + + call ice_pio_check(pio_inq_varid(File,'albcnt'//cns,varid), & + subname//' ERROR: getting varid for '//'albcnt'//cns,file=__FILE__,line=__LINE__) + workd2(:,:,:) = albcnt(:,:,1:nblocks,ns) + call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) +#ifdef CESM1_PIO + call pio_setframe(varid, int(1,kind=PIO_OFFSET)) +#else + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) +#endif + call pio_seterrorhandling(File, PIO_RETURN_ERROR) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc2d,& + workd2, status, fillval=spval_dbl) + else + workr2 = workd2 + call pio_write_darray(File, varid, iodesc2d,& + workr2, status, fillval=spval) + endif + call ice_pio_check(status,subname//' ERROR: writing '//'albcnt'//cns, & + file=__FILE__,line=__LINE__) + + call ice_pio_check(pio_inq_varid(File,'snwcnt'//cns,varid), & + subname//' ERROR: getting varid for '//'snwcnt'//cns,file=__FILE__,line=__LINE__) + workd2(:,:,:) = snwcnt(:,:,1:nblocks,ns) + call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) +#ifdef CESM1_PIO + call pio_setframe(varid, int(1,kind=PIO_OFFSET)) +#else + call pio_setframe(File, varid, int(1,kind=PIO_OFFSET_KIND)) +#endif + call pio_seterrorhandling(File, PIO_RETURN_ERROR) + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc2d,& + workd2, status, fillval=spval_dbl) + else + workr2 = workd2 + call pio_write_darray(File, varid, iodesc2d,& + workr2, status, fillval=spval) + endif + call ice_pio_check(status,subname//' ERROR: writing '//'snwcnt'//cns, & + file=__FILE__,line=__LINE__) + + endif + ! 2D do n=1,num_avail_hist_fields_2D if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then @@ -1136,7 +1234,7 @@ subroutine ice_write_hist (ns) allocate(workd4(nx_block,ny_block,nblocks,nzilyr,ncat_hist)) allocate(workr4(nx_block,ny_block,nblocks,nzilyr,ncat_hist)) - ! 4D (categories, fsd) + ! 4D (categories, vertical ice) do n = n3Dfcum+1, n4Dicum nn = n - n3Dfcum if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then @@ -1173,7 +1271,7 @@ subroutine ice_write_hist (ns) allocate(workd4(nx_block,ny_block,nblocks,nzslyr,ncat_hist)) allocate(workr4(nx_block,ny_block,nblocks,nzslyr,ncat_hist)) - ! 4D (categories, vertical ice) + ! 
4D (categories, vertical snow) do n = n4Dicum+1, n4Dscum nn = n - n4Dicum if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then @@ -1211,7 +1309,7 @@ subroutine ice_write_hist (ns) allocate(workd4(nx_block,ny_block,nblocks,nfsd_hist,ncat_hist)) allocate(workr4(nx_block,ny_block,nblocks,nfsd_hist,ncat_hist)) - ! 4D (categories, vertical ice) + ! 4D (categories, fsd) do n = n4Dscum+1, n4Dfcum nn = n - n4Dscum if (avail_hist_fields(n)%vhistfreq == histfreq(ns) .or. write_ic) then @@ -1270,8 +1368,7 @@ subroutine ice_write_hist (ns) call pio_closefile(File) if (my_task == master_task) then - write(nu_diag,*) ' ' - write(nu_diag,*) 'Finished writing ',trim(ncfile) + write(nu_diag,*) subname,' Finished writing ',trim(ncfile) endif !----------------------------------------------------------------- @@ -1280,11 +1377,430 @@ subroutine ice_write_hist (ns) call ice_pio_finalize() + ! reset history parameters + if (write_histrest_now) then + history_precision = lhistprec + endif + first_call = .false. end subroutine ice_write_hist +!======================================================================= +! +! read history restarts, only called for history restarts +! +! author: T. Craig Nov 2025 + + subroutine ice_read_hist + + use ice_kinds_mod + use ice_blocks, only: nx_block, ny_block + use ice_broadcast, only: broadcast_scalar + use ice_calendar, only: nstreams, histfreq + use ice_communicate, only: my_task, master_task + use ice_domain, only: nblocks + use ice_domain_size, only: max_blocks + use ice_flux, only: albcnt, snwcnt + use ice_history_shared + use ice_restart_shared, only: restart_dir + use ice_timers, only: ice_timer_start, ice_timer_stop, timer_readwrite + use pio + + ! local variables + + real (kind=dbl_kind), dimension(:,:,:), allocatable :: work2 + real (kind=dbl_kind), dimension(:,:,:,:), allocatable :: work3 + real (kind=dbl_kind), dimension(:,:,:,:,:), allocatable :: work4 + + integer (kind=int_kind) :: i,j,k,n,nn,ns,ncid,status + logical (kind=log_kind) :: exists + character (char_len_long) :: ncfile + character (len=1) :: cns + character (len=32) :: readstr + character (len=*), parameter :: readstrT = ' read ok:' + character (len=*), parameter :: readstrF = ' DID NOT READ:' + + integer (kind=int_kind), parameter :: histprec=8 ! hardwired to double + + type(file_desc_t) :: File + type(io_desc_t) :: iodesc2d, & + iodesc3dc, iodesc3di, iodesc3db, iodesc3da, iodesc3df, & + iodesc4di, iodesc4ds, iodesc4df + type(var_desc_t) :: varid + + logical (kind=log_kind), save :: first_call = .true. + + character(len=*), parameter :: subname = '(ice_read_hist)' + + call ice_timer_start(timer_readwrite) ! reading/writing + do ns = 1,nstreams + if (hist_avg(ns)) then + + write(cns,'(i1.1)') ns + + if (my_task == master_task) then + call construct_filename(ncfile,'nc',ns, option='histrest') + ncfile = trim(restart_dir)//ncfile + write(nu_diag,*) subname,' reading file ',trim(ncfile) + endif + call broadcast_scalar(ncfile, master_task) + + ! 
open file + inquire(file=trim(ncfile),exist=exists) + if (exists) then + File%fh=-1 + call ice_pio_init(mode='read', filename=trim(ncfile), File=File, & + fformat=trim(history_format), rearr=trim(history_rearranger), & + iotasks=history_iotasks, root=history_root, stride=history_stride, debug=first_call) + + call pio_seterrorhandling(File, PIO_RETURN_ERROR) + + call ice_pio_initdecomp(iodesc=iodesc2d, precision=histprec) + call ice_pio_initdecomp(ndim3=ncat_hist, iodesc=iodesc3dc, precision=histprec) + call ice_pio_initdecomp(ndim3=nzilyr, iodesc=iodesc3di, precision=histprec) + call ice_pio_initdecomp(ndim3=nzblyr, iodesc=iodesc3db, precision=histprec) + call ice_pio_initdecomp(ndim3=nzalyr, iodesc=iodesc3da, precision=histprec) + call ice_pio_initdecomp(ndim3=nfsd_hist, iodesc=iodesc3df, precision=histprec) + call ice_pio_initdecomp(ndim3=nzilyr, ndim4=ncat_hist, iodesc=iodesc4di, precision=histprec) + call ice_pio_initdecomp(ndim3=nzslyr, ndim4=ncat_hist, iodesc=iodesc4ds, precision=histprec) + call ice_pio_initdecomp(ndim3=nfsd_hist, ndim4=ncat_hist, iodesc=iodesc4df, precision=histprec) + + !----------------------------------------------------------------- + ! read variable data + !----------------------------------------------------------------- + + readstr = readstrF + status = pio_inq_varid(File, 'time_beg', varid) + if (status == PIO_NOERR) status = pio_get_var(File,varid,(/1/),time_beg(ns)) + if (status == PIO_NOERR) readstr = readstrT + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),' time_beg' + endif + + readstr = readstrF + status = pio_inq_varid(File, 'avgct', varid) + if (status == PIO_NOERR) status = pio_get_var(File,varid,(/1/),avgct(ns)) + if (status == PIO_NOERR) readstr = readstrT + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),' avgct' + endif + + allocate(work2(nx_block,ny_block,max_blocks)) + + work2(:,:,:) = c0 + readstr = readstrF + status = pio_inq_varid(File, 'albcnt'//cns, varid) + if (status == PIO_NOERR) call pio_read_darray(File, varid, iodesc2d, work2, status) + if (status == PIO_NOERR) then + readstr = readstrT + albcnt(:,:,:,ns) = work2(:,:,:) + endif + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),' albcnt'//cns + endif + + work2(:,:,:) = c0 + readstr = readstrF + status = pio_inq_varid(File, 'snwcnt'//cns, varid) + if (status == PIO_NOERR) call pio_read_darray(File, varid, iodesc2d, work2, status) + if (status == PIO_NOERR) then + readstr = readstrT + snwcnt(:,:,:,ns) = work2(:,:,:) + endif + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),' snwcnt'//cns + endif + + do n=1,num_avail_hist_fields_2D + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + work2(:,:,:) = c0 + if (status == PIO_NOERR) call pio_read_darray(File, varid, iodesc2d, work2, status) + if (status == PIO_NOERR) then + readstr = readstrT + a2D(:,:,n,:) = work2(:,:,:) + endif + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + endif + enddo + + deallocate(work2) + allocate(work3(nx_block,ny_block,max_blocks,ncat_hist)) + + ! 
3D (category)
+          do n = n2D + 1, n3Dccum
+             nn = n - n2D
+             if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then
+                readstr = readstrF
+                status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid)
+                work3(:,:,:,:) = c0
+                if (status == PIO_NOERR) call pio_read_darray(File, varid, iodesc3dc, work3, status)
+                if (status == PIO_NOERR) then
+                   readstr = readstrT
+                   do j = 1, nblocks
+                      do i = 1, ncat_hist
+                         a3Dc(:,:,i,nn,j) = work3(:,:,j,i)
+                      enddo
+                   enddo
+                endif
+                if (my_task == master_task) then
+                   write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname)
+                endif
+             endif
+          enddo
+
+          deallocate(work3)
+          allocate(work3(nx_block,ny_block,max_blocks,nzilyr))
+
+          ! 3D (vertical ice)
+          do n = n3Dccum+1, n3Dzcum
+             nn = n - n3Dccum
+             if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then
+                readstr = readstrF
+                status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid)
+                work3(:,:,:,:) = c0
+                if (status == PIO_NOERR) call pio_read_darray(File, varid, iodesc3di, work3, status)
+                if (status == PIO_NOERR) then
+                   readstr = readstrT
+                   do j = 1, nblocks
+                      do i = 1, nzilyr
+                         a3Dz(:,:,i,nn,j) = work3(:,:,j,i)
+                      enddo
+                   enddo
+                endif
+                if (my_task == master_task) then
+                   write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname)
+                endif
+             endif
+          enddo
+
+          deallocate(work3)
+          allocate(work3(nx_block,ny_block,max_blocks,nzblyr))
+
+          ! 3D (vertical ice biology)
+          do n = n3Dzcum+1, n3Dbcum
+             nn = n - n3Dzcum
+             if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then
+                readstr = readstrF
+                status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid)
+                work3(:,:,:,:) = c0
+                if (status == PIO_NOERR) call pio_read_darray(File, varid, iodesc3db, work3, status)
+                if (status == PIO_NOERR) then
+                   readstr = readstrT
+                   do j = 1, nblocks
+                      do i = 1, nzblyr
+                         a3Db(:,:,i,nn,j) = work3(:,:,j,i)
+                      enddo
+                   enddo
+                endif
+                if (my_task == master_task) then
+                   write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname)
+                endif
+             endif
+          enddo
+
+          deallocate(work3)
+          allocate(work3(nx_block,ny_block,max_blocks,nzalyr))
+
+          ! 3D (vertical snow biology)
+          do n = n3Dbcum+1, n3Dacum
+             nn = n - n3Dbcum
+             if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then
+                readstr = readstrF
+                status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid)
+                work3(:,:,:,:) = c0
+                if (status == PIO_NOERR) call pio_read_darray(File, varid, iodesc3da, work3, status)
+                if (status == PIO_NOERR) then
+                   readstr = readstrT
+                   do j = 1, nblocks
+                      do i = 1, nzalyr
+                         a3Da(:,:,i,nn,j) = work3(:,:,j,i)
+                      enddo
+                   enddo
+                endif
+                if (my_task == master_task) then
+                   write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname)
+                endif
+             endif
+          enddo
+
+          deallocate(work3)
+          allocate(work3(nx_block,ny_block,max_blocks,nfsd_hist))
+
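+          ! Note: every read block in this routine follows the same pattern --
+          ! look up the varid by history field name, read the decomposed array
+          ! for this stream (vhistfreq == histfreq(ns)), copy it into the
+          ! accumulation buffer with the block index reordered relative to the
+          ! category/layer index, and report per-field success on the master task.
+
+          ! 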
3D (fsd) + do n = n3Dacum+1, n3Dfcum + nn = n - n3Dacum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + work3(:,:,:,:) = c0 + if (status == PIO_NOERR) call pio_read_darray(File, varid, iodesc3df, work3, status) + if (status == PIO_NOERR) then + readstr = readstrT + do j = 1, nblocks + do i = 1, nfsd_hist + a3Df(:,:,i,nn,j) = work3(:,:,j,i) + enddo + enddo + endif + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + endif + enddo + + deallocate(work3) + allocate(work4(nx_block,ny_block,max_blocks,nzilyr,ncat_hist)) + + ! 4D (categories, vertical ice) + do n = n3Dfcum+1, n4Dicum + nn = n - n3Dfcum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + work4(:,:,:,:,:) = c0 + if (status == PIO_NOERR) call pio_read_darray(File, varid, iodesc4di, work4, status) + if (status == PIO_NOERR) then + readstr = readstrT + do j = 1, nblocks + do i = 1, ncat_hist + do k = 1, nzilyr + a4Di(:,:,k,i,nn,j) = work4(:,:,j,k,i) + enddo + enddo + enddo + endif + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + endif + enddo + + deallocate(work4) + allocate(work4(nx_block,ny_block,max_blocks,nzslyr,ncat_hist)) + + ! 4D (categories, vertical snow) + do n = n4Dicum+1, n4Dscum + nn = n - n4Dicum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + work4(:,:,:,:,:) = c0 + if (status == PIO_NOERR) call pio_read_darray(File, varid, iodesc4ds, work4, status) + if (status == PIO_NOERR) then + readstr = readstrT + do j = 1, nblocks + do i = 1, ncat_hist + do k = 1, nzslyr + a4Ds(:,:,k,i,nn,j) = work4(:,:,j,k,i) + enddo + enddo + enddo + endif + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + endif + enddo + + deallocate(work4) + allocate(work4(nx_block,ny_block,max_blocks,nfsd_hist,ncat_hist)) + + ! 4D (categories, fsd) + do n = n4Dscum+1, n4Dfcum + nn = n - n4Dscum + if (avail_hist_fields(n)%vhistfreq == histfreq(ns)) then + readstr = readstrF + status = pio_inq_varid(File,avail_hist_fields(n)%vname,varid) + work4(:,:,:,:,:) = c0 + if (status == PIO_NOERR) call pio_read_darray(File, varid, iodesc4df, work4, status) + if (status == PIO_NOERR) then + readstr = readstrT + do j = 1, nblocks + do i = 1, ncat_hist + do k = 1, nfsd_hist + a4Df(:,:,k,i,nn,j) = work4(:,:,j,k,i) + enddo + enddo + enddo + endif + if (my_task == master_task) then + write(nu_diag,*) subname,trim(readstr),trim(avail_hist_fields(n)%vname) + endif + endif + enddo + + deallocate(work4) + + !----------------------------------------------------------------- + ! clean-up PIO descriptors + !----------------------------------------------------------------- + call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) + + call pio_freedecomp(File,iodesc2d) + call pio_freedecomp(File,iodesc3dc) + call pio_freedecomp(File,iodesc3di) + call pio_freedecomp(File,iodesc3db) + call pio_freedecomp(File,iodesc3da) + call pio_freedecomp(File,iodesc3df) + call pio_freedecomp(File,iodesc4di) + call pio_freedecomp(File,iodesc4ds) + call pio_freedecomp(File,iodesc4df) + + !----------------------------------------------------------------- + ! 
close output dataset + !----------------------------------------------------------------- + + call pio_closefile(File) + if (my_task == master_task) then + write(nu_diag,*) subname,' Finished reading ',trim(ncfile) + endif + + !----------------------------------------------------------------- + ! clean up PIO + !----------------------------------------------------------------- + + call ice_pio_finalize() + + first_call = .false. + + endif ! open file success + endif ! hist_avg + enddo ! nstreams + call ice_timer_stop(timer_readwrite) ! reading/writing + + end subroutine ice_read_hist + !======================================================================= ! Defines a coordinate var in the history file ! coordinates have short_name, long_name and units attributes, @@ -1351,7 +1867,7 @@ end subroutine ice_hist_coord_def !======================================================================= ! Defines a (time-dependent) history var in the history file -! variables have short_name, long_name and units, coordiantes and cell_measures attributes, +! variables have short_name, long_name and units, coordinates and cell_measures attributes, ! and are compressed and chunked for 'hdf5' subroutine ice_hist_field_def(File, hfield,lprecision, dimids, ns) @@ -1425,11 +1941,27 @@ subroutine ice_hist_field_def(File, hfield,lprecision, dimids, ns) if (hist_avg(ns) .and. .not. write_ic) then if (TRIM(hfield%vname(1:4))/='sig1' & .and.TRIM(hfield%vname(1:4))/='sig2' & - .and.TRIM(hfield%vname(1:9))/='sistreave' & - .and.TRIM(hfield%vname(1:9))/='sistremax' & + .and.TRIM(hfield%vname(1:5))/='trsig' & + .and.TRIM(hfield%vname(1:4))/='divu' & + .and.TRIM(hfield%vname(1:5))/='shear' & + .and.TRIM(hfield%vname(1:4))/='vort' & + .and.TRIM(hfield%vname(1:9))/='frz_onset' & + .and.TRIM(hfield%vname(1:9))/='mlt_onset' & + .and.TRIM(hfield%vname(1:6))/='aisnap' & + .and.TRIM(hfield%vname(1:6))/='hisnap' & + .and.TRIM(hfield%vname(1:8))/='sidivvel' & + .and.TRIM(hfield%vname(1:10))/='sishearvel' & + .and.TRIM(hfield%vname(1:11))/='sistressave' & + .and.TRIM(hfield%vname(1:11))/='sistressmax' & .and.TRIM(hfield%vname(1:4))/='sigP') then - call ice_pio_check(pio_put_att(File,varid,'cell_methods','time: mean'), & - subname//' ERROR: defining att cell_methods',file=__FILE__,line=__LINE__) + if (trim(hfield%avg_ice_present) /= 'none') then + call ice_pio_check(pio_put_att(File,varid,'cell_methods', & + 'area: time: mean where sea ice (mask=siconc)'), & + subname//' ERROR: defining att cell_methods',file=__FILE__,line=__LINE__) + else + call ice_pio_check(pio_put_att(File,varid,'cell_methods','time: mean'), & + subname//' ERROR: defining att cell_methods',file=__FILE__,line=__LINE__) + endif endif endif @@ -1443,8 +1975,10 @@ subroutine ice_hist_field_def(File, hfield,lprecision, dimids, ns) .or.TRIM(hfield%vname(1:4))=='sig2' & .or.TRIM(hfield%vname(1:4))=='sigP' & .or.TRIM(hfield%vname(1:5))=='trsig' & - .or.TRIM(hfield%vname(1:9))=='sistreave' & - .or.TRIM(hfield%vname(1:9))=='sistremax' & + .or.TRIM(hfield%vname(1:8))=='sidivvel' & + .or.TRIM(hfield%vname(1:10))=='sishearvel' & + .or.TRIM(hfield%vname(1:11))=='sistressave' & + .or.TRIM(hfield%vname(1:11))=='sistressmax' & .or.TRIM(hfield%vname(1:9))=='mlt_onset' & .or.TRIM(hfield%vname(1:9))=='frz_onset' & .or.TRIM(hfield%vname(1:6))=='hisnap' & diff --git a/cicecore/cicedyn/infrastructure/io/io_pio2/ice_restart.F90 b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_restart.F90 index 6e60838e4..0186d73f2 100644 --- 
a/cicecore/cicedyn/infrastructure/io/io_pio2/ice_restart.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_restart.F90 @@ -220,11 +220,11 @@ subroutine init_restart_write(filename_spec) myear,'-',mmonth,'-',mday,'-',msec end if - if (restart_format(1:3) /= 'bin') filename = trim(filename) // '.nc' + filename = trim(filename) // '.nc' ! write pointer (path/file) if (my_task == master_task) then -#ifdef CESMCOUPLED +#ifdef CESMCOUPLED lpointer_file = 'rpointer.ice'//trim(inst_suffix) #else lpointer_file = pointer_file @@ -756,6 +756,7 @@ subroutine read_restart_field(nu,nrec,work,atype,vname,ndim3,diag, & subname// " ERROR: missing varndims "//trim(vname),file=__FILE__,line=__LINE__) call pio_seterrorhandling(File, PIO_INTERNAL_ERROR) + work (:,:,:,:) = c0 if (ndim3 == ncat .and. ndims == 3) then call pio_read_darray(File, vardesc, iodesc3d_ncat, work, status) #ifdef CESMCOUPLED diff --git a/cicecore/drivers/direct/hadgem3/CICE.F90 b/cicecore/drivers/direct/hadgem3/CICE.F90 index a7233fe39..cacf96c46 100644 --- a/cicecore/drivers/direct/hadgem3/CICE.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE.F90 @@ -1,5 +1,5 @@ !======================================================================= -! Copyright (c) 1998, 2017, Triad National Security, LLC +! Copyright 1998-2026, Triad National Security, LLC ! All rights reserved. ! ! This program was produced under U.S. Government contract 89233218CNA000001 diff --git a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 index cc3a4caff..cbafc3868 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_InitMod.F90 @@ -82,6 +82,7 @@ subroutine cice_init faero_data, faero_default, alloc_forcing_bgc use ice_grid, only: init_grid1, init_grid2, alloc_grid, dealloc_grid use ice_history, only: init_hist, accum_hist + use ice_history_write, only: ice_read_hist use ice_restart_shared, only: restart, runid, runtype use ice_init, only: input_data, init_state use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers @@ -214,6 +215,7 @@ subroutine cice_init call dealloc_grid ! deallocate temporary grid arrays if (write_ic) call accum_hist(dt) ! write initial conditions + call ice_read_hist ! 
read history restarts end subroutine cice_init diff --git a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 index ca0099680..874b382ea 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 @@ -358,7 +358,7 @@ subroutine coupling_prep (iblk) fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyt, & - fsens, flat, fswabs, flwout, evap, Tref, Qref, & + fsens, flat, fswabs, fsw, fswup, flwout, evap, Tref, Qref, & fsurfn_f, flatn_f, scale_fluxes, frzmlt_init, frzmlt use ice_flux_bgc, only: faero_ocn, flux_bio, flux_bio_ai use ice_grid, only: tmask @@ -507,6 +507,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fswup (i,j,iblk) = aice_init(i,j,iblk) & + * fsw (i,j,iblk) - fswabs(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr diff --git a/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 b/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 index 4921482a6..581510154 100644 --- a/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 +++ b/cicecore/drivers/direct/nemo_concepts/CICE_InitMod.F90 @@ -82,6 +82,7 @@ subroutine cice_init faero_data, faero_default, alloc_forcing_bgc use ice_grid, only: init_grid1, init_grid2, alloc_grid, dealloc_grid use ice_history, only: init_hist, accum_hist + use ice_history_write, only: ice_read_hist use ice_restart_shared, only: restart, runid, runtype use ice_init, only: input_data, init_state use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers @@ -183,6 +184,7 @@ subroutine cice_init call init_shortwave ! initialize radiative transfer if (write_ic) call accum_hist(dt) ! write initial conditions + call ice_read_hist ! read history restarts ! 
determine the time and date at the end of the first timestep call advance_timestep() diff --git a/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 b/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 index 23eb990a0..0fa89f984 100644 --- a/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 +++ b/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 @@ -358,7 +358,7 @@ subroutine coupling_prep (iblk) fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyt, & - fsens, flat, fswabs, flwout, evap, Tref, Qref, & + fsens, flat, fswabs, fsw, fswup, flwout, evap, Tref, Qref, & fsurfn_f, flatn_f, scale_fluxes, frzmlt_init, frzmlt use ice_flux_bgc, only: faero_ocn, flux_bio, flux_bio_ai use ice_grid, only: tmask @@ -507,6 +507,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fswup (i,j,iblk) = aice_init(i,j,iblk) & + * fsw (i,j,iblk) - fswabs(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr diff --git a/cicecore/drivers/mapl/geos/CICE_InitMod.F90 b/cicecore/drivers/mapl/geos/CICE_InitMod.F90 index f187b9eb7..5a4e796ce 100644 --- a/cicecore/drivers/mapl/geos/CICE_InitMod.F90 +++ b/cicecore/drivers/mapl/geos/CICE_InitMod.F90 @@ -145,6 +145,7 @@ subroutine cice_init2!(yr, mo, dy, hr, mn, sc) use ice_forcing_bgc , only: get_forcing_bgc, get_atm_bgc use ice_forcing_bgc , only: faero_default, alloc_forcing_bgc, fiso_default use ice_history , only: init_hist, accum_hist + use ice_history_write, only: ice_read_hist use ice_restart_shared , only: restart, runtype use ice_init , only: input_data, init_state use ice_init_column , only: init_thermo_vertical, init_shortwave, init_zbgc @@ -262,6 +263,7 @@ subroutine cice_init2!(yr, mo, dy, hr, mn, sc) if (write_ic) then call accum_hist(dt) ! write initial conditions end if + call ice_read_hist ! read history restarts call dealloc_grid ! deallocate temporary grid arrays diff --git a/cicecore/drivers/mapl/geos/CICE_RunMod.F90 b/cicecore/drivers/mapl/geos/CICE_RunMod.F90 index ca5cf8739..04c7fcc97 100644 --- a/cicecore/drivers/mapl/geos/CICE_RunMod.F90 +++ b/cicecore/drivers/mapl/geos/CICE_RunMod.F90 @@ -370,7 +370,7 @@ subroutine coupling_prep (iblk) fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & fswthru_uvrdr, fswthru_uvrdf, fswthru_pardr, fswthru_pardf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & - fsens, flat, fswabs, flwout, evap, Tref, Qref, & + fsens, flat, fswabs, fsw, fswup, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt, Uref, wind use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & flux_bio, flux_bio_ai, & @@ -556,6 +556,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fswup (i,j,iblk) = aice_init(i,j,iblk) & + * fsw (i,j,iblk) - fswabs(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr diff --git a/cicecore/drivers/mapl/geos/CICE_copyright.txt b/cicecore/drivers/mapl/geos/CICE_copyright.txt index 9ee3d2c60..bc8975d79 100644 --- a/cicecore/drivers/mapl/geos/CICE_copyright.txt +++ b/cicecore/drivers/mapl/geos/CICE_copyright.txt @@ -1,4 +1,4 @@ -! Copyright (c) 1998, 2017, Triad National Security, LLC +! Copyright 1998-2026, Triad National Security, LLC ! All rights reserved. ! ! This program was produced under U.S. 
Government contract 89233218CNA000001 diff --git a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 index 8af7704fb..e864372d8 100644 --- a/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_InitMod.F90 @@ -84,6 +84,7 @@ subroutine cice_init(mpicom_ice) faero_default, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid, dealloc_grid use ice_history, only: init_hist, accum_hist + use ice_history_write, only: ice_read_hist use ice_restart_shared, only: restart, runtype use ice_init, only: input_data, init_state use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers @@ -200,6 +201,7 @@ subroutine cice_init(mpicom_ice) call init_shortwave ! initialize radiative transfer if (write_ic) call accum_hist(dt) ! write initial conditions + call ice_read_hist ! read history restarts ! call advance_timestep() diff --git a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 index d22570ae1..f951e762d 100644 --- a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 @@ -429,7 +429,7 @@ subroutine coupling_prep (iblk) fswthru_ai, fhocn, scale_factor, snowfrac, & fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & - fsens, flat, fswabs, flwout, evap, Tref, Qref, & + fsens, flat, fswabs, fsw, fswup, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt, Uref, wind use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & flux_bio, flux_bio_ai, & @@ -585,6 +585,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fswup (i,j,iblk) = aice_init(i,j,iblk) & + * fsw (i,j,iblk) - fswabs(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr diff --git a/cicecore/drivers/mct/cesm1/CICE_copyright.txt b/cicecore/drivers/mct/cesm1/CICE_copyright.txt index 9ee3d2c60..bc8975d79 100644 --- a/cicecore/drivers/mct/cesm1/CICE_copyright.txt +++ b/cicecore/drivers/mct/cesm1/CICE_copyright.txt @@ -1,4 +1,4 @@ -! Copyright (c) 1998, 2017, Triad National Security, LLC +! Copyright 1998-2026, Triad National Security, LLC ! All rights reserved. ! ! This program was produced under U.S. Government contract 89233218CNA000001 diff --git a/cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 b/cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 index 454895410..f0e83612c 100644 --- a/cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 +++ b/cicecore/drivers/mct/cesm1/ice_comp_esmf.F90 @@ -147,6 +147,7 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) use CICE_InitMod use ice_restart_shared, only: runid, runtype, restart_dir, restart_format use ice_history, only: accum_hist + use ice_history_write, only: ice_read_hist use ice_history_shared, only: history_dir, history_file use icepack_intfc, only: tr_aero, tr_zaero ! @@ -384,6 +385,7 @@ subroutine ice_init_esmf(comp, import_state, export_state, EClock, rc) call calendar(time) ! update calendar info if (write_ic) call accum_hist(dt) ! write initial conditions + call ice_read_hist ! read history restarts !--------------------------------------------------------------------------- ! 
Initialize MCT attribute vectors and indices diff --git a/cicecore/drivers/mct/cesm1/ice_comp_mct.F90 b/cicecore/drivers/mct/cesm1/ice_comp_mct.F90 index a1d1a2ad1..f05a47888 100644 --- a/cicecore/drivers/mct/cesm1/ice_comp_mct.F90 +++ b/cicecore/drivers/mct/cesm1/ice_comp_mct.F90 @@ -122,6 +122,7 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) use CICE_InitMod use ice_restart_shared, only: runid, runtype, restart_dir, restart_format use ice_history, only: accum_hist + use ice_history_write, only: ice_read_hist use ice_history_shared, only: history_dir, history_file ! ! !ARGUMENTS: @@ -352,6 +353,7 @@ subroutine ice_init_mct( EClock, cdata_i, x2i_i, i2x_i, NLFilename ) call calendar ! update calendar info if (write_ic) call accum_hist(dt) ! write initial conditions + call ice_read_hist ! read history restarts !--------------------------------------------------------------------------- ! Initialize MCT attribute vectors and indices diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index 876f10512..9d487ea60 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -420,7 +420,7 @@ subroutine coupling_prep (iblk) fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, & fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & - fsens, flat, fswabs, flwout, evap, Tref, Qref, & + fsens, flat, fswabs, fsw, fswup, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt, Uref, wind use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & flux_bio, flux_bio_ai, fnit, fsil, famm, fdmsp, fdms, fhum, & @@ -577,6 +577,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fswup (i,j,iblk) = aice_init(i,j,iblk) & + * fsw (i,j,iblk) - fswabs(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr diff --git a/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt b/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt index 9ee3d2c60..bc8975d79 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt +++ b/cicecore/drivers/nuopc/cmeps/CICE_copyright.txt @@ -1,4 +1,4 @@ -! Copyright (c) 1998, 2017, Triad National Security, LLC +! Copyright 1998-2026, Triad National Security, LLC ! All rights reserved. ! ! This program was produced under U.S. Government contract 89233218CNA000001 diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index f2b2e2833..3db0b568b 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -33,6 +33,7 @@ module ice_comp_nuopc use ice_restart_shared , only : runid, runtype, restart, use_restart_time, restart_dir, restart_file, & restart_format, restart_chunksize, pointer_date use ice_history , only : accum_hist + use ice_history_write , only : ice_read_hist use ice_history_shared , only : history_format, history_chunksize use ice_exit , only : abort_ice use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted @@ -903,6 +904,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (write_ic) then call accum_hist(dt) ! write initial conditions end if + call ice_read_hist ! read history restarts !----------------------------------------------------------------- ! 
Prescribed ice initialization diff --git a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 index 412b2cce5..c252b05a1 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_import_export.F90 @@ -94,6 +94,7 @@ module ice_import_export type (fld_list_type) :: fldsToIce(fldsMax) type (fld_list_type) :: fldsFrIce(fldsMax) + logical :: flds_wave ! wave ice coupling integer , parameter :: io_dbug = 10 ! i/o debug messages character(*), parameter :: u_FILE_u = & __FILE__ @@ -116,7 +117,6 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam character(char_len) :: stdname character(char_len) :: cvalue logical :: flds_wiso ! use case - logical :: flds_wave ! use case logical :: isPresent, isSet character(len=*), parameter :: subname='(ice_import_export:ice_advertise_fields)' !------------------------------------------------------------------------------- @@ -266,10 +266,13 @@ subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_nam ! ice/ocn fluxes computed by ice call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_melth' ) call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_vdr' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_vdf' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_idr' ) - call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_idf' ) + + if (.not.prescribed_ice) then + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_vdr' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_vdf' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_idr' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_idf' ) + endif if (send_i2x_per_cat) then call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_ifrac_n', & @@ -926,7 +929,7 @@ subroutine ice_export( exportState, rc ) real (kind=dbl_kind) :: floethick(nx_block,ny_block,max_blocks) ! ice thickness logical (kind=log_kind) :: tr_fsd integer (kind=int_kind) :: nt_fsd - real (kind=dbl_kind) :: Tffresh + real (kind=dbl_kind) :: Tffresh, stefan_boltzmann real (kind=dbl_kind), allocatable :: tempfld(:,:,:) real (kind=dbl_kind), pointer :: dataptr_ifrac_n(:,:) real (kind=dbl_kind), pointer :: dataptr_swpen_n(:,:) @@ -938,6 +941,7 @@ subroutine ice_export( exportState, rc ) if (io_dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) call icepack_query_parameters(Tffresh_out=Tffresh) + call icepack_query_parameters(stefan_boltzmann_out=stefan_boltzmann) ! call icepack_query_parameters(tfrz_option_out=tfrz_option, & ! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & ! Tffresh_out=Tffresh) @@ -980,7 +984,7 @@ subroutine ice_export( exportState, rc ) ! surface temperature Tsrf(i,j,iblk) = Tffresh + trcr(i,j,1,iblk) !Kelvin (original ???) - if (tr_fsd) then + if (flds_wave) then ! floe thickness (m) if (aice(i,j,iblk) > puny) then floethick(i,j,iblk) = vice(i,j,iblk) / aice(i,j,iblk) @@ -988,17 +992,22 @@ subroutine ice_export( exportState, rc ) floethick(i,j,iblk) = c0 end if - ! floe diameter (m) - workx = c0 - worky = c0 - do n = 1, ncat - do k = 1, nfsd - workx = workx + floe_rad_c(k) * aicen_init(i,j,n,iblk) * trcrn(i,j,nt_fsd+k-1,n,iblk) - worky = worky + aicen_init(i,j,n,iblk) * trcrn(i,j,nt_fsd+k-1,n,iblk) + if (tr_fsd) then + ! 
floe diameter (m) + workx = c0 + worky = c0 + do n = 1, ncat + do k = 1, nfsd + workx = workx + floe_rad_c(k) * aicen_init(i,j,n,iblk) * trcrn(i,j,nt_fsd+k-1,n,iblk) + worky = worky + aicen_init(i,j,n,iblk) * trcrn(i,j,nt_fsd+k-1,n,iblk) + end do end do - end do - if (worky > c0) workx = c2*workx / worky - floediam(i,j,iblk) = MAX(c2*floe_rad_c(1),workx) + if (worky > c0) workx = c2*workx / worky + floediam(i,j,iblk) = MAX(c2*floe_rad_c(1),workx) + else ! with FSD off + ! floe diameter (m) + floediam(i,j,iblk) = 50.0_dbl_kind + endif endif ! wind stress (on POP T-grid: convert to lat-lon) @@ -1063,7 +1072,7 @@ subroutine ice_export( exportState, rc ) endif ! Create a temporary field - allocate(tempfld(nx_block,ny_block,nblocks)) + allocate(tempfld(nx_block,ny_block,max_blocks)) ! Fractions and mask call state_setexport(exportState, 'Si_ifrac', input=ailohi, rc=rc) @@ -1073,6 +1082,7 @@ subroutine ice_export( exportState, rc ) call state_setexport(exportState, 'Si_imask', input=ocn_gridcell_frac, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return else + tempfld(:,:,:) = c0 do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -1134,6 +1144,7 @@ subroutine ice_export( exportState, rc ) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! Snow height + tempfld(:,:,:) = c0 do iblk = 1, nblocks this_block = get_block(blocks_ice(iblk),iblk) ilo = this_block%ilo @@ -1191,11 +1202,29 @@ subroutine ice_export( exportState, rc ) areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! Fix outgoing longwave if aice_init = 0, but aice > 0. + tempfld(:,:,:) = flwout(:,:,:) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + if ( tmask(i,j,iblk) .and. ailohi(i,j,iblk) > c0 .and. flwout(i,j,iblk) > -puny) then + tempfld(i,j,iblk) = (-stefan_boltzmann *(Tf(i,j,iblk) + Tffresh)**4) / ailohi(i,j,iblk) + end if + end do + end do + end do ! longwave outgoing (upward), average over ice fraction only - call state_setexport(exportState, 'Faii_lwup' , input=flwout, lmask=tmask, ifrac=ailohi, & + call state_setexport(exportState, 'Faii_lwup' , input=tempfld, lmask=tmask, ifrac=ailohi, & areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + deallocate(tempfld) + ! Evaporative water flux (kg/m^2/s) call state_setexport(exportState, 'Faii_evap' , input=evap, lmask=tmask, ifrac=ailohi, & areacor=mod2med_areacor, rc=rc) @@ -1215,6 +1244,8 @@ subroutine ice_export( exportState, rc ) areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (.not.prescribed_ice) then + ! flux of vis dir shortwave through ice to ocean call state_setexport(exportState, 'Fioi_swpen_vdr' , input=fswthru_vdr, lmask=tmask, ifrac=ailohi, & areacor=mod2med_areacor, rc=rc) @@ -1235,6 +1266,8 @@ subroutine ice_export( exportState, rc ) areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + endif + ! flux of heat exchange with ocean call state_setexport(exportState, 'Fioi_melth' , input=fhocn, lmask=tmask, ifrac=ailohi, & areacor=mod2med_areacor, rc=rc) diff --git a/cicecore/drivers/nuopc/dmi/CICE.F90 b/cicecore/drivers/nuopc/dmi/CICE.F90 index c92f0ea24..a96ca4ce7 100644 --- a/cicecore/drivers/nuopc/dmi/CICE.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE.F90 @@ -1,5 +1,5 @@ !======================================================================= -! 
Copyright (c) 1998, 2017, Triad National Security, LLC +! Copyright 1998-2026, Triad National Security, LLC ! All rights reserved. ! ! This program was produced under U.S. Government contract 89233218CNA000001 diff --git a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 index 4f4449546..7cea84db3 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_InitMod.F90 @@ -89,6 +89,7 @@ subroutine cice_init(mpi_comm) faero_default, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid, dealloc_grid use ice_history, only: init_hist, accum_hist + use ice_history_write, only: ice_read_hist use ice_restart_shared, only: restart, runtype use ice_init, only: input_data, init_state use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers @@ -258,6 +259,7 @@ subroutine cice_init(mpi_comm) call init_flux_ocn ! initialize ocean fluxes sent to coupler if (write_ic) call accum_hist(dt) ! write initial conditions + call ice_read_hist ! read history restarts call dealloc_grid ! deallocate temporary grid arrays if (my_task == master_task) then diff --git a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 index 5f8fb52bc..2de15f3a4 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 @@ -442,14 +442,13 @@ subroutine coupling_prep (iblk) fswthru_ai, fhocn, scale_factor, snowfrac, & fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & - fsens, flat, fswabs, flwout, evap, Tref, Qref, & + fsens, flat, fswabs, fsw, fswup, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & flux_bio, flux_bio_ai use ice_grid, only: tmask - use ice_state, only: aicen, aice + use ice_state, only: aicen, aice, aice_init #ifdef CICE_IN_NEMO - use ice_state, only: aice_init use ice_flux, only: flatn_f, fsurfn_f #endif use ice_step_mod, only: ocean_mixed_layer @@ -596,6 +595,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fswup (i,j,iblk) = aice_init(i,j,iblk) & + * fsw (i,j,iblk) - fswabs(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr diff --git a/cicecore/drivers/standalone/cice/CICE.F90 b/cicecore/drivers/standalone/cice/CICE.F90 index c92f0ea24..a96ca4ce7 100644 --- a/cicecore/drivers/standalone/cice/CICE.F90 +++ b/cicecore/drivers/standalone/cice/CICE.F90 @@ -1,5 +1,5 @@ !======================================================================= -! Copyright (c) 1998, 2017, Triad National Security, LLC +! Copyright 1998-2026, Triad National Security, LLC ! All rights reserved. ! ! This program was produced under U.S. 
Government contract 89233218CNA000001 diff --git a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 index 66a5256e0..74589a064 100644 --- a/cicecore/drivers/standalone/cice/CICE_InitMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_InitMod.F90 @@ -84,6 +84,7 @@ subroutine cice_init faero_default, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid, dealloc_grid use ice_history, only: init_hist, accum_hist + use ice_history_write, only: ice_read_hist use ice_restart_shared, only: restart, runtype use ice_init, only: input_data, init_state use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers @@ -197,6 +198,7 @@ subroutine cice_init call init_shortwave ! initialize radiative transfer if (write_ic) call accum_hist(dt) ! write initial conditions + call ice_read_hist ! read history restarts ! tcraig, use advance_timestep here ! istep = istep + 1 ! update time step counters diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 index 53476776b..335fc1e50 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 @@ -435,14 +435,13 @@ subroutine coupling_prep (iblk) fswthru_ai, fhocn, scale_factor, snowfrac, & fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & - fsens, flat, fswabs, flwout, evap, Tref, Qref, & + fsens, flat, fswabs, fsw, fswup, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & flux_bio, flux_bio_ai use ice_grid, only: tmask - use ice_state, only: aicen, aice + use ice_state, only: aicen, aice, aice_init #ifdef CICE_IN_NEMO - use ice_state, only: aice_init use ice_flux, only: flatn_f, fsurfn_f #endif use ice_step_mod, only: ocean_mixed_layer @@ -585,10 +584,18 @@ subroutine coupling_prep (iblk) alidf_ai (i,j,iblk) = alidf (i,j,iblk) alvdr_ai (i,j,iblk) = alvdr (i,j,iblk) alidr_ai (i,j,iblk) = alidr (i,j,iblk) + + + !---------------------------------------------------------------- + ! Store fluxes before scaling by aice + !---------------------------------------------------------------- + fresh_ai (i,j,iblk) = fresh (i,j,iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fswup (i,j,iblk) = aice_init(i,j,iblk) & + * fsw (i,j,iblk) - fswabs(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr diff --git a/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 b/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 index 66a5256e0..74589a064 100644 --- a/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 +++ b/cicecore/drivers/unittest/gridavgchk/CICE_InitMod.F90 @@ -84,6 +84,7 @@ subroutine cice_init faero_default, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid, dealloc_grid use ice_history, only: init_hist, accum_hist + use ice_history_write, only: ice_read_hist use ice_restart_shared, only: restart, runtype use ice_init, only: input_data, init_state use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers @@ -197,6 +198,7 @@ subroutine cice_init call init_shortwave ! initialize radiative transfer if (write_ic) call accum_hist(dt) ! write initial conditions + call ice_read_hist ! read history restarts ! 
tcraig, use advance_timestep here ! istep = istep + 1 ! update time step counters diff --git a/cicecore/drivers/unittest/gridavgchk/gridavgchk.F90 b/cicecore/drivers/unittest/gridavgchk/gridavgchk.F90 index bd7ed3165..02e51fbba 100644 --- a/cicecore/drivers/unittest/gridavgchk/gridavgchk.F90 +++ b/cicecore/drivers/unittest/gridavgchk/gridavgchk.F90 @@ -16,7 +16,6 @@ program gridavgchk use CICE_InitMod use ice_kinds_mod, only: int_kind, dbl_kind use ice_blocks, only: block, get_block, nx_block, ny_block, nblocks_tot - use ice_boundary, only: ice_HaloUpdate use ice_constants, only: c0, c1, c2, p25, & field_loc_center, field_loc_NEcorner, & field_loc_Nface, field_loc_Eface, field_type_scalar diff --git a/cicecore/drivers/unittest/halochk/CICE_InitMod.F90 b/cicecore/drivers/unittest/halochk/CICE_InitMod.F90 index 66a5256e0..74589a064 100644 --- a/cicecore/drivers/unittest/halochk/CICE_InitMod.F90 +++ b/cicecore/drivers/unittest/halochk/CICE_InitMod.F90 @@ -84,6 +84,7 @@ subroutine cice_init faero_default, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid, dealloc_grid use ice_history, only: init_hist, accum_hist + use ice_history_write, only: ice_read_hist use ice_restart_shared, only: restart, runtype use ice_init, only: input_data, init_state use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers @@ -197,6 +198,7 @@ subroutine cice_init call init_shortwave ! initialize radiative transfer if (write_ic) call accum_hist(dt) ! write initial conditions + call ice_read_hist ! read history restarts ! tcraig, use advance_timestep here ! istep = istep + 1 ! update time step counters diff --git a/cicecore/drivers/unittest/halochk/halochk.F90 b/cicecore/drivers/unittest/halochk/halochk.F90 index 29eaa8150..6e7ff4173 100644 --- a/cicecore/drivers/unittest/halochk/halochk.F90 +++ b/cicecore/drivers/unittest/halochk/halochk.F90 @@ -40,11 +40,12 @@ program halochk implicit none - integer(int_kind) :: nn, nl, nt, i, j, k1, k2, n, ib, ie, jb, je + integer(int_kind) :: nn, nl, nt, nf, i, j, k1, k2, n, ib, ie, jb, je integer(int_kind) :: iblock, itrip, ioffset, joffset integer(int_kind) :: blockID, numBlocks, jtrip type (block) :: this_block + ! fields sent to the haloupdate real(dbl_kind) , allocatable :: darrayi1(:,:,:) , darrayj1(:,:,:) real(dbl_kind) , allocatable :: darrayi2(:,:,:,:) , darrayj2(:,:,:,:) real(dbl_kind) , allocatable :: darrayi3(:,:,:,:,:), darrayj3(:,:,:,:,:) @@ -58,25 +59,27 @@ program halochk real(dbl_kind) , allocatable :: darrayi1str(:,:,:) , darrayj1str(:,:,:) real(dbl_kind) , allocatable :: darrayi10(:,:,:) , darrayj10(:,:,:) - real(dbl_kind), allocatable :: cidata_bas(:,:,:,:,:),cjdata_bas(:,:,:,:,:) - real(dbl_kind), allocatable :: cidata_nup(:,:,:,:,:),cjdata_nup(:,:,:,:,:) - real(dbl_kind), allocatable :: cidata_std(:,:,:,:,:),cjdata_std(:,:,:,:,:) + ! expected results + real(dbl_kind), allocatable :: cidata_bas(:,:,:,:,:),cjdata_bas(:,:,:,:,:) ! 
baseline integer(int_kind), parameter :: maxtests = 11 integer(int_kind), parameter :: maxtypes = 4 integer(int_kind), parameter :: maxlocs = 5 + integer(int_kind), parameter :: maxfills = 2 integer(int_kind), parameter :: nz1 = 3 integer(int_kind), parameter :: nz2 = 4 real(dbl_kind) :: aichk,ajchk,cichk,cjchk,rival,rjval,rsign - character(len=16) :: locs_name(maxlocs), types_name(maxtypes) + real(dbl_kind) :: fillexpected + character(len=16) :: locs_name(maxlocs), types_name(maxtypes), fill_name(maxfills) integer(int_kind) :: field_loc(maxlocs), field_type(maxtypes) + logical :: halofill integer(int_kind) :: npes, ierr, ntask, testcnt, tottest, tpcnt, tfcnt integer(int_kind) :: errorflag0, gflag, k1m, k2m, ptcntsum, failcntsum integer(int_kind), allocatable :: errorflag(:) integer(int_kind), allocatable :: ptcnt(:), failcnt(:) character(len=128), allocatable :: teststring(:) character(len=32) :: halofld - logical :: tripole_average, tripole_pole, spvalL1 + logical :: tripole_average, tripole_pole logical :: first_call = .true. real(dbl_kind) , parameter :: fillval = -88888.0_dbl_kind @@ -94,6 +97,7 @@ program halochk locs_name (:) = 'unknown' types_name(:) = 'unknown' + fill_name (:) = 'unknown' field_type(:) = field_type_unknown field_loc (:) = field_loc_unknown @@ -110,7 +114,7 @@ program halochk locs_name (1) = 'center' field_loc (1) = field_loc_center - locs_name (2) = 'NEcorner' + locs_name (2) = 'NEcorn' field_loc (2) = field_loc_NEcorner locs_name (3) = 'Nface' field_loc (3) = field_loc_Nface @@ -121,7 +125,10 @@ program halochk ! locs_name (6) = 'unknown' ! field_loc (6) = field_loc_unknown ! aborts in CICE, as expected - tottest = maxtests * maxlocs * maxtypes + fill_name (1) = 'fill' + fill_name (2) = 'nofill' + + tottest = maxtests * maxlocs * maxtypes * maxfills allocate(errorflag(tottest)) allocate(teststring(tottest)) allocate(ptcnt(tottest)) @@ -187,10 +194,6 @@ program halochk allocate(cidata_bas(nx_block,ny_block,nz1,nz2,max_blocks)) allocate(cjdata_bas(nx_block,ny_block,nz1,nz2,max_blocks)) - allocate(cidata_std(nx_block,ny_block,nz1,nz2,max_blocks)) - allocate(cjdata_std(nx_block,ny_block,nz1,nz2,max_blocks)) - allocate(cidata_nup(nx_block,ny_block,nz1,nz2,max_blocks)) - allocate(cjdata_nup(nx_block,ny_block,nz1,nz2,max_blocks)) darrayi1 = fillval darrayj1 = fillval @@ -218,14 +221,14 @@ program halochk darrayj10 = fillval cidata_bas = fillval cjdata_bas = fillval - cidata_std = fillval - cjdata_std = fillval - cidata_nup = fillval - cjdata_nup = fillval call ice_distributionGet(distrb_info, numLocalBlocks = numBlocks) !--- baseline data --- + ! set to the global index + ! i/j valid everywhere for "cyclic" + ! i/j valid for "open" with extrapolation on outer boundary + ! 
i/j zero on outer boundary for "closed" do iblock = 1,numBlocks call ice_distributionGetBlockID(distrb_info, iblock, blockID) @@ -244,102 +247,32 @@ program halochk enddo enddo - !--- setup nup (noupdate) solution, set halo/pad will fillval --- - - cidata_nup(:,:,:,:,:) = cidata_bas(:,:,:,:,:) - cjdata_nup(:,:,:,:,:) = cjdata_bas(:,:,:,:,:) - - do iblock = 1,numBlocks - call ice_distributionGetBlockID(distrb_info, iblock, blockID) - this_block = get_block(blockID, blockID) - ib = this_block%ilo - ie = this_block%ihi - jb = this_block%jlo - je = this_block%jhi - cidata_nup(1:ib-1 ,: ,:,:,iblock) = fillval - cjdata_nup(1:ib-1 ,: ,:,:,iblock) = fillval - cidata_nup(ie+1:nx_block,: ,:,:,iblock) = fillval - cjdata_nup(ie+1:nx_block,: ,:,:,iblock) = fillval - cidata_nup(: ,1:jb-1 ,:,:,iblock) = fillval - cjdata_nup(: ,1:jb-1 ,:,:,iblock) = fillval - cidata_nup(: ,je+1:ny_block,:,:,iblock) = fillval - cjdata_nup(: ,je+1:ny_block,:,:,iblock) = fillval - enddo - - !--- setup std solution for cyclic, closed, open, tripole solution --- - - cidata_std(:,:,:,:,:) = cidata_bas(:,:,:,:,:) - cjdata_std(:,:,:,:,:) = cjdata_bas(:,:,:,:,:) - - !--- halo off on east and west boundary --- - if (ew_boundary_type == 'closed' .or. & - ew_boundary_type == 'open' ) then - do iblock = 1,numBlocks - call ice_distributionGetBlockID(distrb_info, iblock, blockID) - this_block = get_block(blockID, blockID) - ib = this_block%ilo - ie = this_block%ihi - jb = this_block%jlo - je = this_block%jhi - if (this_block%i_glob(ib) == 1) then - cidata_std(1:ib-1 ,:,:,:,iblock) = dhalofillval - cjdata_std(1:ib-1 ,:,:,:,iblock) = dhalofillval - endif - if (this_block%i_glob(ie) == nx_global) then - cidata_std(ie+1:nx_block,:,:,:,iblock) = dhalofillval - cjdata_std(ie+1:nx_block,:,:,:,iblock) = dhalofillval - endif - enddo - endif - - !--- halo off on south boundary --- - if (ns_boundary_type == 'closed' .or. & - ns_boundary_type == 'open' .or. & - ns_boundary_type == 'tripole' .or. & - ns_boundary_type == 'tripoleT' ) then - do iblock = 1,numBlocks - call ice_distributionGetBlockID(distrb_info, iblock, blockID) - this_block = get_block(blockID, blockID) - ib = this_block%ilo - ie = this_block%ihi - jb = this_block%jlo - je = this_block%jhi - if (this_block%j_glob(jb) == 1) then - cidata_std(:,1:jb-1,:,:,iblock) = dhalofillval - cjdata_std(:,1:jb-1,:,:,iblock) = dhalofillval - endif - enddo - endif - - !--- halo off on north boundary, tripole handled later --- - if (ns_boundary_type == 'closed' .or. & - ns_boundary_type == 'open' .or. & - ns_boundary_type == 'tripole' .or. & - ns_boundary_type == 'tripoleT' ) then - do iblock = 1,numBlocks - call ice_distributionGetBlockID(distrb_info, iblock, blockID) - this_block = get_block(blockID, blockID) - ib = this_block%ilo - ie = this_block%ihi - jb = this_block%jlo - je = this_block%jhi - if (this_block%j_glob(je) == ny_global) then - cidata_std(:,je+1:ny_block,:,:,iblock) = dhalofillval - cjdata_std(:,je+1:ny_block,:,:,iblock) = dhalofillval - endif - enddo - endif - !--------------------------------------------------------------- testcnt = 0 do nn = 1, maxtests do nl = 1, maxlocs do nt = 1, maxtypes + do nf = 1, maxfills !--- setup test --- first_call = .true. testcnt = testcnt + 1 + if (nf == 1) then + halofill = .true. + fillexpected = dhalofillval + elseif (nf == 2) then + halofill = .false. 
+ fillexpected = fillval + else + write(6,*) subname,' nf = ',nf + if (my_task == master_task) then + write(6,*) ' ' + write(6,*) 'HALOCHK FAILED' + write(6,*) ' ' + endif + call abort_ice(subname//' invalid value of nf',file=__FILE__,line=__LINE__) + endif if (testcnt > tottest) then if (my_task == master_task) then write(6,*) ' ' @@ -388,35 +321,54 @@ program halochk darrayi10 = darrayi1 darrayj10 = darrayj1 - !--- halo update --- if (nn == 1) then k1m = 1 k2m = 1 halofld = '2DR8' - call ice_haloUpdate(darrayi1, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) - call ice_haloUpdate(darrayj1, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) + if (halofill) then + call ice_haloUpdate(darrayi1, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) + call ice_haloUpdate(darrayj1, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) + else + call ice_haloUpdate(darrayi1, halo_info, field_loc(nl), field_type(nt)) + call ice_haloUpdate(darrayj1, halo_info, field_loc(nl), field_type(nt)) + endif elseif (nn == 2) then k1m = nz1 k2m = 1 halofld = '3DR8' - call ice_haloUpdate(darrayi2, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) - call ice_haloUpdate(darrayj2, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) + if (halofill) then + call ice_haloUpdate(darrayi2, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) + call ice_haloUpdate(darrayj2, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) + else + call ice_haloUpdate(darrayi2, halo_info, field_loc(nl), field_type(nt)) + call ice_haloUpdate(darrayj2, halo_info, field_loc(nl), field_type(nt)) + endif elseif (nn == 3) then k1m = nz1 k2m = nz2 halofld = '4DR8' - call ice_haloUpdate(darrayi3, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) - call ice_haloUpdate(darrayj3, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) + if (halofill) then + call ice_haloUpdate(darrayi3, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) + call ice_haloUpdate(darrayj3, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) + else + call ice_haloUpdate(darrayi3, halo_info, field_loc(nl), field_type(nt)) + call ice_haloUpdate(darrayj3, halo_info, field_loc(nl), field_type(nt)) + endif elseif (nn == 4) then k1m = 1 k2m = 1 halofld = '2DR4' rarrayi1 = real(darrayi1,kind=real_kind) rarrayj1 = real(darrayj1,kind=real_kind) - call ice_haloUpdate(rarrayi1, halo_info, field_loc(nl), field_type(nt), fillvalue=rhalofillval) - call ice_haloUpdate(rarrayj1, halo_info, field_loc(nl), field_type(nt), fillvalue=rhalofillval) + if (halofill) then + call ice_haloUpdate(rarrayi1, halo_info, field_loc(nl), field_type(nt), fillvalue=rhalofillval) + call ice_haloUpdate(rarrayj1, halo_info, field_loc(nl), field_type(nt), fillvalue=rhalofillval) + else + call ice_haloUpdate(rarrayi1, halo_info, field_loc(nl), field_type(nt)) + call ice_haloUpdate(rarrayj1, halo_info, field_loc(nl), field_type(nt)) + endif darrayi1 = real(rarrayi1,kind=dbl_kind) darrayj1 = real(rarrayj1,kind=dbl_kind) elseif (nn == 5) then @@ -425,8 +377,13 @@ program halochk halofld = '3DR4' rarrayi2 = real(darrayi2,kind=real_kind) rarrayj2 = real(darrayj2,kind=real_kind) - call ice_haloUpdate(rarrayi2, halo_info, field_loc(nl), field_type(nt), fillvalue=rhalofillval) - call ice_haloUpdate(rarrayj2, halo_info, field_loc(nl), field_type(nt), fillvalue=rhalofillval) + if (halofill) then + call ice_haloUpdate(rarrayi2, halo_info, 
field_loc(nl), field_type(nt), fillvalue=rhalofillval) + call ice_haloUpdate(rarrayj2, halo_info, field_loc(nl), field_type(nt), fillvalue=rhalofillval) + else + call ice_haloUpdate(rarrayi2, halo_info, field_loc(nl), field_type(nt)) + call ice_haloUpdate(rarrayj2, halo_info, field_loc(nl), field_type(nt)) + endif darrayi2 = real(rarrayi2,kind=dbl_kind) darrayj2 = real(rarrayj2,kind=dbl_kind) elseif (nn == 6) then @@ -435,8 +392,13 @@ program halochk halofld = '4DR4' rarrayi3 = real(darrayi3,kind=real_kind) rarrayj3 = real(darrayj3,kind=real_kind) - call ice_haloUpdate(rarrayi3, halo_info, field_loc(nl), field_type(nt), fillvalue=rhalofillval) - call ice_haloUpdate(rarrayj3, halo_info, field_loc(nl), field_type(nt), fillvalue=rhalofillval) + if (halofill) then + call ice_haloUpdate(rarrayi3, halo_info, field_loc(nl), field_type(nt), fillvalue=rhalofillval) + call ice_haloUpdate(rarrayj3, halo_info, field_loc(nl), field_type(nt), fillvalue=rhalofillval) + else + call ice_haloUpdate(rarrayi3, halo_info, field_loc(nl), field_type(nt)) + call ice_haloUpdate(rarrayj3, halo_info, field_loc(nl), field_type(nt)) + endif darrayi3 = real(rarrayi3,kind=dbl_kind) darrayj3 = real(rarrayj3,kind=dbl_kind) elseif (nn == 7) then @@ -445,8 +407,13 @@ program halochk halofld = '2DI4' iarrayi1 = nint(darrayi1) iarrayj1 = nint(darrayj1) - call ice_haloUpdate(iarrayi1, halo_info, field_loc(nl), field_type(nt), fillvalue=ihalofillval) - call ice_haloUpdate(iarrayj1, halo_info, field_loc(nl), field_type(nt), fillvalue=ihalofillval) + if (halofill) then + call ice_haloUpdate(iarrayi1, halo_info, field_loc(nl), field_type(nt), fillvalue=ihalofillval) + call ice_haloUpdate(iarrayj1, halo_info, field_loc(nl), field_type(nt), fillvalue=ihalofillval) + else + call ice_haloUpdate(iarrayi1, halo_info, field_loc(nl), field_type(nt)) + call ice_haloUpdate(iarrayj1, halo_info, field_loc(nl), field_type(nt)) + endif darrayi1 = real(iarrayi1,kind=dbl_kind) darrayj1 = real(iarrayj1,kind=dbl_kind) elseif (nn == 8) then @@ -455,8 +422,13 @@ program halochk halofld = '3DI4' iarrayi2 = nint(darrayi2) iarrayj2 = nint(darrayj2) - call ice_haloUpdate(iarrayi2, halo_info, field_loc(nl), field_type(nt), fillvalue=ihalofillval) - call ice_haloUpdate(iarrayj2, halo_info, field_loc(nl), field_type(nt), fillvalue=ihalofillval) + if (halofill) then + call ice_haloUpdate(iarrayi2, halo_info, field_loc(nl), field_type(nt), fillvalue=ihalofillval) + call ice_haloUpdate(iarrayj2, halo_info, field_loc(nl), field_type(nt), fillvalue=ihalofillval) + else + call ice_haloUpdate(iarrayi2, halo_info, field_loc(nl), field_type(nt)) + call ice_haloUpdate(iarrayj2, halo_info, field_loc(nl), field_type(nt)) + endif darrayi2 = real(iarrayi2,kind=dbl_kind) darrayj2 = real(iarrayj2,kind=dbl_kind) elseif (nn == 9) then @@ -465,20 +437,36 @@ program halochk halofld = '4DI4' iarrayi3 = nint(darrayi3) iarrayj3 = nint(darrayj3) - call ice_haloUpdate(iarrayi3, halo_info, field_loc(nl), field_type(nt), fillvalue=ihalofillval) - call ice_haloUpdate(iarrayj3, halo_info, field_loc(nl), field_type(nt), fillvalue=ihalofillval) + if (halofill) then + call ice_haloUpdate(iarrayi3, halo_info, field_loc(nl), field_type(nt), fillvalue=ihalofillval) + call ice_haloUpdate(iarrayj3, halo_info, field_loc(nl), field_type(nt), fillvalue=ihalofillval) + else + call ice_haloUpdate(iarrayi3, halo_info, field_loc(nl), field_type(nt)) + call ice_haloUpdate(iarrayj3, halo_info, field_loc(nl), field_type(nt)) + endif darrayi3 = real(iarrayi3,kind=dbl_kind) darrayj3 = 
real(iarrayj3,kind=dbl_kind) elseif (nn == 10) then k1m = 1 k2m = 1 halofld = '2DL1' - larrayi1 = .true. - where (darrayi1 == fillval) larrayi1 = .false. - larrayj1 = .false. - where (darrayj1 == fillval) larrayj1 = .true. - call ice_haloUpdate(larrayi1, halo_info, field_loc(nl), field_type(nt), fillvalue=0) - call ice_haloUpdate(larrayj1, halo_info, field_loc(nl), field_type(nt), fillvalue=1) + where (darrayi1 == fillval) + larrayi1 = .false. + elsewhere + larrayi1 = (mod(nint(darrayi1),2) == 1) + endwhere + where (darrayj1 == fillval) + larrayj1 = .true. + elsewhere + larrayj1 = (mod(nint(darrayj1),2) == 1) + endwhere + if (halofill) then + call ice_haloUpdate(larrayi1, halo_info, field_loc(nl), field_type(nt), fillvalue=0) + call ice_haloUpdate(larrayj1, halo_info, field_loc(nl), field_type(nt), fillvalue=1) + else + call ice_haloUpdate(larrayi1, halo_info, field_loc(nl), field_type(nt)) + call ice_haloUpdate(larrayj1, halo_info, field_loc(nl), field_type(nt)) + endif darrayi1 = c0 where (larrayi1) darrayi1 = c1 darrayj1 = c0 @@ -489,11 +477,16 @@ program halochk halofld = 'STRESS' darrayi1str = -darrayi1 ! flip sign for testing darrayj1str = -darrayj1 - call ice_haloUpdate_stress(darrayi1, darrayi1str, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) - call ice_haloUpdate_stress(darrayj1, darrayj1str, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) + if (halofill) then + call ice_haloUpdate_stress(darrayi1, darrayi1str, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) + call ice_haloUpdate_stress(darrayj1, darrayj1str, halo_info, field_loc(nl), field_type(nt), fillvalue=dhalofillval) + else + call ice_haloUpdate_stress(darrayi1, darrayi1str, halo_info, field_loc(nl), field_type(nt)) + call ice_haloUpdate_stress(darrayj1, darrayj1str, halo_info, field_loc(nl), field_type(nt)) + endif endif - write(teststring(testcnt),'(5a10)') trim(halofld),trim(locs_name(nl)),trim(types_name(nt)), & + write(teststring(testcnt),'(6a8)') trim(halofld),trim(locs_name(nl)),trim(types_name(nt)),trim(fill_name(nf)), & trim(ew_boundary_type),trim(ns_boundary_type) do iblock = 1,numBlocks @@ -504,15 +497,12 @@ program halochk jb = this_block%jlo je = this_block%jhi ! just check non-padded gridcells -! do j = 1,ny_block -! do i = 1,nx_block do j = jb-nghost, je+nghost do i = ib-nghost, ie+nghost do k1 = 1,k1m do k2 = 1,k2m tripole_average = .false. tripole_pole = .false. - spvalL1 = .false. if (index(halofld,'2D') > 0) then aichk = darrayi1(i,j,iblock) ajchk = darrayj1(i,j,iblock) @@ -534,14 +524,46 @@ program halochk call abort_ice(subname//' halofld not matched '//trim(halofld),file=__FILE__,line=__LINE__) endif + cichk = cidata_bas(i,j,k1,k2,iblock) + cjchk = cjdata_bas(i,j,k1,k2,iblock) + + ! halo special cases if (field_loc (nl) == field_loc_noupdate .or. & field_type(nt) == field_type_noupdate) then - cichk = cidata_nup(i,j,k1,k2,iblock) - cjchk = cjdata_nup(i,j,k1,k2,iblock) + if (i < ib .or. j < jb .or. i > ie .or. j > je) then + ! no halo update anywhere, doesn't even see fillvalue passed in + cichk = fillval + cjchk = fillval + endif + else - cichk = cidata_std(i,j,k1,k2,iblock) - cjchk = cjdata_std(i,j,k1,k2,iblock) + ! if ew_boundary_type is not cyclic we expect just fill values on outer boundary + if (ew_boundary_type /= 'cyclic' .and. & + ((this_block%i_glob(ib) == 1 .and. i < ib) .or. & ! west outer face + (this_block%i_glob(ie) == nx_global .and. i > ie))) then ! east outer face + cichk = fillexpected + cjchk = fillexpected + endif + + ! 
if ns_boundary_type is not cyclic we expect just fill values on outer boundary except + ! - tripole north edge will be haloed and is updated below, default to fill value for now + ! - tripole south edge will be set to the fillvalue or to haloupdate internal default (c0) + ! tripole basically assumes south edge is land or always ice free in CICE + if (ns_boundary_type /= 'cyclic' .and. & + ((this_block%j_glob(jb) == 1 .and. j < jb) .or. & ! south outer face + (this_block%j_glob(je) == ny_global .and. j > je))) then ! north outer face + ! ns_boundary_type is not cyclic and on outer boundary + if ((ns_boundary_type == 'tripole' .or. & + ns_boundary_type == 'tripoleT') .and. & + .not. halofill) then + cichk = c0 + cjchk = c0 + else + cichk = fillexpected + cjchk = fillexpected + endif + endif if (index(halofld,'STRESS') > 0) then ! only updates on tripole zipper for tripole grids @@ -560,11 +582,11 @@ program halochk (ns_boundary_type == 'tripoleT' .and. & (j >= je)))) then - ! flip sign for vector/angle - if (field_type(nt) == field_type_vector .or. field_type(nt) == field_type_angle ) then + ! flip sign for vector/angle except for logical halo updates + rsign = c1 + if ((field_type(nt) == field_type_vector .or. field_type(nt) == field_type_angle) .and. & + .not. (index(halofld,'L1') > 0)) then rsign = -c1 - else - rsign = c1 endif ! for tripole @@ -650,44 +672,40 @@ program halochk if (index(halofld,'STRESS') > 0) then ! only updates on tripole zipper for tripole grids, not tripoleT + ! note: L1 and STRESS never overlap so don't worry about L1 here if (tripole_pole) then ! flip sign due to sign of darrayi1str ! ends of tripole seam not averaged in CICE - cichk = -rsign * cidata_std(i,j,k1,k2,iblock) - cjchk = -rsign * cjdata_std(i,j,k1,k2,iblock) + cichk = -rsign * cidata_bas(i,j,k1,k2,iblock) + cjchk = -rsign * cjdata_bas(i,j,k1,k2,iblock) else cichk = -rsign * rival cjchk = -rsign * rjval endif - elseif (index(halofld,'L1') > 0 .and. j == je) then - ! force cichk and cjchk to match on tripole average index, calc not well defined - spvalL1 = .true. - cichk = aichk - cjchk = ajchk + elseif (tripole_pole) then ! ends of tripole seam not averaged in CICE - cichk = rsign * cidata_std(i,j,k1,k2,iblock) - cjchk = rsign * cjdata_std(i,j,k1,k2,iblock) + cichk = rsign * cidata_bas(i,j,k1,k2,iblock) + cjchk = rsign * cjdata_bas(i,j,k1,k2,iblock) + elseif (tripole_average) then - ! tripole average - cichk = p5 * (cidata_std(i,j,k1,k2,iblock) + rsign * rival) - cjchk = p5 * (cjdata_std(i,j,k1,k2,iblock) + rsign * rjval) + if (index(halofld,'L1') > 0) then + ! logical math doesn't work this way, force to correct answer + cichk = aichk ! p5 * (mod(nint(cidata_bas(i,j,k1,k2,iblock)),2) + rsign * mod(nint(rival),2)) + cjchk = ajchk ! p5 * (mod(nint(cidata_bas(i,j,k1,k2,iblock)),2) + rsign * mod(nint(rjval),2)) + else + cichk = p5 * (cidata_bas(i,j,k1,k2,iblock) + rsign * rival) + cjchk = p5 * (cjdata_bas(i,j,k1,k2,iblock) + rsign * rjval) + endif + else ! standard tripole fold cichk = rsign * rival cjchk = rsign * rjval endif -! if (testcnt == 6 .and. j == 61 .and. i < 3) then -! if (testcnt == 186 .and. j == 61 .and. i<4) then -! if (testcnt == 13 .and. j > 61 .and. (i < 3 .or. i > 89)) then -! if (testcnt == 5 .and. j >= 61 .and. (i < 3 .or. i > 90)) then -! write(100+my_task,'(a,5i6,2l3,f6.2,i6)') 'tcx1 ',i,j,iblock,itrip,jtrip, & -! tripole_average,tripole_pole,rsign,this_block%i_glob(i) -! write(100+my_task,'(a,4f12.2)') 'tcx2 ',cidata_std(i,j,k1,k2,iblock),rival,cichk,aichk -! 
write(100+my_task,'(a,4f12.2)') 'tcx3 ',cjdata_std(i,j,k1,k2,iblock),rjval,cjchk,ajchk -! endif endif ! tripole or tripoleT + endif if (index(halofld,'I4') > 0) then @@ -695,16 +713,16 @@ program halochk cjchk = real(nint(cjchk),kind=dbl_kind) endif - if (index(halofld,'L1') > 0 .and. .not.spvalL1) then + if (index(halofld,'L1') > 0) then if (cichk == dhalofillval .or. cichk == fillval) then cichk = c0 else - cichk = c1 + cichk = mod(nint(cichk),2) endif if (cjchk == dhalofillval .or. cjchk == fillval) then cjchk = c1 else - cjchk = c0 + cjchk = mod(nint(cjchk),2) endif endif @@ -719,6 +737,7 @@ program halochk enddo ! j enddo ! iblock + enddo ! maxfills enddo ! maxtypes enddo ! maxlocs enddo ! maxtests @@ -746,10 +765,10 @@ program halochk do n = 1,tottest if (errorflag(n) == passflag) then tpcnt = tpcnt + 1 - write(6,*) 'PASS ',trim(teststring(n)),ptcnt(n),failcnt(n) + write(6,'(2a,2i8)') 'PASS ',trim(teststring(n)),ptcnt(n),failcnt(n) else tfcnt = tfcnt + 1 - write(6,*) 'FAIL ',trim(teststring(n)),ptcnt(n),failcnt(n) + write(6,'(2a,2i8)') 'FAIL ',trim(teststring(n)),ptcnt(n),failcnt(n) endif enddo write(6,*) ' ' @@ -793,8 +812,10 @@ subroutine chkresults(a1,r1,errorflag,testcnt,failcnt,i,j,k1,k2,iblock,first_cal character(len=*) , parameter :: subname='(chkresults)' if (a1 /= r1 .or. print_always) then - errorflag = failflag - failcnt = failcnt + 1 + if (a1 /= r1) then + errorflag = failflag + failcnt = failcnt + 1 + endif if (first_call) then write(100+my_task,*) ' ' write(100+my_task,'(a,i4,2a)') '------- TEST = ',testcnt,' ',trim(teststring) diff --git a/cicecore/drivers/unittest/opticep/CICE.F90 b/cicecore/drivers/unittest/opticep/CICE.F90 index c92f0ea24..a96ca4ce7 100644 --- a/cicecore/drivers/unittest/opticep/CICE.F90 +++ b/cicecore/drivers/unittest/opticep/CICE.F90 @@ -1,5 +1,5 @@ !======================================================================= -! Copyright (c) 1998, 2017, Triad National Security, LLC +! Copyright 1998-2026, Triad National Security, LLC ! All rights reserved. ! ! This program was produced under U.S. Government contract 89233218CNA000001 diff --git a/cicecore/drivers/unittest/opticep/CICE_InitMod.F90 b/cicecore/drivers/unittest/opticep/CICE_InitMod.F90 index 66a5256e0..74589a064 100644 --- a/cicecore/drivers/unittest/opticep/CICE_InitMod.F90 +++ b/cicecore/drivers/unittest/opticep/CICE_InitMod.F90 @@ -84,6 +84,7 @@ subroutine cice_init faero_default, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid, dealloc_grid use ice_history, only: init_hist, accum_hist + use ice_history_write, only: ice_read_hist use ice_restart_shared, only: restart, runtype use ice_init, only: input_data, init_state use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers @@ -197,6 +198,7 @@ subroutine cice_init call init_shortwave ! initialize radiative transfer if (write_ic) call accum_hist(dt) ! write initial conditions + call ice_read_hist ! read history restarts ! tcraig, use advance_timestep here ! istep = istep + 1 ! 
update time step counters diff --git a/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 b/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 index 53476776b..4ee0e2bc7 100644 --- a/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 +++ b/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 @@ -435,14 +435,13 @@ subroutine coupling_prep (iblk) fswthru_ai, fhocn, scale_factor, snowfrac, & fswthru, fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, & swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, & - fsens, flat, fswabs, flwout, evap, Tref, Qref, & + fsens, flat, fswabs, fsw, fswup, flwout, evap, Tref, Qref, & scale_fluxes, frzmlt_init, frzmlt use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, & flux_bio, flux_bio_ai use ice_grid, only: tmask - use ice_state, only: aicen, aice + use ice_state, only: aicen, aice, aice_init #ifdef CICE_IN_NEMO - use ice_state, only: aice_init use ice_flux, only: flatn_f, fsurfn_f #endif use ice_step_mod, only: ocean_mixed_layer @@ -589,6 +588,8 @@ subroutine coupling_prep (iblk) fsalt_ai (i,j,iblk) = fsalt (i,j,iblk) fhocn_ai (i,j,iblk) = fhocn (i,j,iblk) fswthru_ai(i,j,iblk) = fswthru(i,j,iblk) + fswup (i,j,iblk) = aice_init(i,j,iblk) & + * fsw (i,j,iblk) - fswabs(i,j,iblk) if (nbtrcr > 0) then do k = 1, nbtrcr diff --git a/cicecore/drivers/unittest/opticep/README b/cicecore/drivers/unittest/opticep/README index b5f1bdf9c..958e592da 100644 --- a/cicecore/drivers/unittest/opticep/README +++ b/cicecore/drivers/unittest/opticep/README @@ -1,7 +1,7 @@ This unittest tests Icepack optional arguments. The idea is to have source code that is identical to the standard CICE source code except the significant optional arguments passed -into Icepack are removed from the CICE calls. Then to run a standard CICE case with optional +into Icepack are removed from the CICE calls. Then to run a standard CICE case with optional features (fsd, bgc, isotopes, etc) off in namelist. That results should be bit-for-bit identical with an equivalent run from the standard source code. @@ -18,7 +18,7 @@ today, that includes CICE_InitMod.F90 CICE_RunMod.F90 -Add +Add write(nu_diag, *) "OPTICEP TEST COMPLETED SUCCESSFULLY " to CICE_FinalMod.F90 @@ -26,5 +26,5 @@ Do not worry about the parameter/tracer query/init/write methods Interfaces to modify include ice_init_column.F90 (icepack_step_radiation, icepack_init_zbgc) - ice_step_mod.F90 (icepack_step_therm1, icepack_step_therm2, icepack_prep_radiation, + ice_step_mod.F90 (icepack_step_therm1, icepack_step_therm2, icepack_prep_radiation, icepack_step_radiation, icepack_step_ridge) diff --git a/cicecore/drivers/unittest/opticep/ice_step_mod.F90 b/cicecore/drivers/unittest/opticep/ice_step_mod.F90 index 6d8fff7f9..6e775a3c0 100644 --- a/cicecore/drivers/unittest/opticep/ice_step_mod.F90 +++ b/cicecore/drivers/unittest/opticep/ice_step_mod.F90 @@ -566,7 +566,7 @@ subroutine step_therm1 (dt, iblk) !opt dpnd_initialn=dpnd_initialn(i,j,:,iblk), & !opt dpnd_dlidn = dpnd_dlidn (i,j,:,iblk), & yday=yday) -!opt prescribed_ice=prescribed_ice) +!opt prescribed_ice=prescribed_ice) !----------------------------------------------------------------- ! handle per-category i2x fields, no merging @@ -674,12 +674,16 @@ subroutine step_therm2 (dt, iblk) tr_fsd, & ! floe size distribution tracers z_tracers ! vertical biogeochemistry + character (len=char_len) :: & + wave_height_type ! type of significant wave height forcing + type (block) :: & this_block ! 
block information for current block character(len=*), parameter :: subname = '(step_therm2)' - call icepack_query_parameters(z_tracers_out=z_tracers) + call icepack_query_parameters(z_tracers_out=z_tracers, & + wave_height_type_out=wave_height_type) call icepack_query_tracer_sizes(ntrcr_out=ntrcr, nbtrcr_out=nbtrcr) call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) call icepack_warnings_flush(nu_diag) @@ -704,57 +708,59 @@ subroutine step_therm2 (dt, iblk) if (tmask(i,j,iblk) .or. opmask(i,j,iblk)) then - ! significant wave height for FSD - if (tr_fsd) & - wave_sig_ht(i,j,iblk) = c4*SQRT(SUM(wave_spectrum(i,j,:,iblk)*dwavefreq(:))) - - call icepack_step_therm2(dt=dt, & - hin_max = hin_max (:), & - aicen = aicen (i,j,:,iblk), & - vicen = vicen (i,j,:,iblk), & - vsnon = vsnon (i,j,:,iblk), & - aicen_init = aicen_init(i,j,:,iblk), & - vicen_init = vicen_init(i,j,:,iblk), & - trcrn = trcrn (i,j,:,:,iblk), & - aice0 = aice0 (i,j, iblk), & - aice = aice (i,j, iblk), & - trcr_depend= trcr_depend(:), & - trcr_base = trcr_base(:,:), & - n_trcr_strata = n_trcr_strata(:), & - nt_strata = nt_strata(:,:), & - Tf = Tf (i,j, iblk), & - sss = sss (i,j, iblk), & - salinz = salinz (i,j,:,iblk), & - rsiden = rsiden (i,j,:,iblk), & - meltl = meltl (i,j, iblk), & -!opt wlat = wlat (i,j, iblk), & - frzmlt = frzmlt (i,j, iblk), & - frazil = frazil (i,j, iblk), & - frain = frain (i,j, iblk), & - fpond = fpond (i,j, iblk), & - fresh = fresh (i,j, iblk), & - fsalt = fsalt (i,j, iblk), & - fhocn = fhocn (i,j, iblk), & - faero_ocn = faero_ocn (i,j,:,iblk), & - first_ice = first_ice (i,j,:,iblk), & - flux_bio = flux_bio (i,j,1:nbtrcr,iblk), & - ocean_bio = ocean_bio (i,j,1:nbtrcr,iblk), & - frazil_diag= frazil_diag(i,j,iblk) & -!opt frz_onset = frz_onset (i,j, iblk), & -!opt yday = yday, & -!opt fiso_ocn = fiso_ocn (i,j,:,iblk), & -!opt HDO_ocn = HDO_ocn (i,j, iblk), & -!opt H2_16O_ocn = H2_16O_ocn(i,j, iblk), & -!opt H2_18O_ocn = H2_18O_ocn(i,j, iblk), & -!opt wave_sig_ht= wave_sig_ht(i,j, iblk), & -!opt wave_spectrum=wave_spectrum(i,j,:,iblk), & -!opt wavefreq = wavefreq (:), & -!opt dwavefreq = dwavefreq (:), & -!opt d_afsd_latg=d_afsd_latg(i,j,:,iblk), & -!opt d_afsd_newi=d_afsd_newi(i,j,:,iblk), & -!opt d_afsd_latm=d_afsd_latm(i,j,:,iblk), & -!opt d_afsd_weld=d_afsd_weld(i,j,:,iblk), & -!opt dpnd_melt = dpnd_melt( i,j, iblk)) + ! significant wave height + if (tr_fsd .and. trim(wave_height_type) == 'internal') then + wave_sig_ht(i,j,iblk) = c4*SQRT(SUM(wave_spectrum(i,j,:,iblk)*dwavefreq(:))) + ! 
else wave_sig_ht = 0 unless provided by coupler or other external data + endif + + call icepack_step_therm2(dt=dt, & + hin_max = hin_max (:), & + aicen = aicen (i,j,:,iblk), & + vicen = vicen (i,j,:,iblk), & + vsnon = vsnon (i,j,:,iblk), & + aicen_init = aicen_init (i,j,:,iblk), & + vicen_init = vicen_init (i,j,:,iblk), & + trcrn = trcrn (i,j,:,:,iblk), & + aice0 = aice0 (i,j, iblk), & + aice = aice (i,j, iblk), & + trcr_depend = trcr_depend(:), & + trcr_base = trcr_base (:,:), & + n_trcr_strata = n_trcr_strata(:), & + nt_strata = nt_strata (:,:), & + Tf = Tf (i,j, iblk), & + sss = sss (i,j, iblk), & + salinz = salinz (i,j,:,iblk), & + rsiden = rsiden (i,j,:,iblk), & + meltl = meltl (i,j, iblk), & +!opt wlat = wlat (i,j, iblk), & + frzmlt = frzmlt (i,j, iblk), & + frazil = frazil (i,j, iblk), & + frain = frain (i,j, iblk), & + fpond = fpond (i,j, iblk), & + fresh = fresh (i,j, iblk), & + fsalt = fsalt (i,j, iblk), & + fhocn = fhocn (i,j, iblk), & + faero_ocn = faero_ocn (i,j,:,iblk), & + first_ice = first_ice (i,j,:,iblk), & + flux_bio = flux_bio (i,j,1:nbtrcr,iblk), & + ocean_bio = ocean_bio (i,j,1:nbtrcr,iblk), & + frazil_diag = frazil_diag(i,j, iblk) & +!opt frz_onset = frz_onset (i,j, iblk), & +!opt yday = yday, & +!opt fiso_ocn = fiso_ocn (i,j,:,iblk), & +!opt HDO_ocn = HDO_ocn (i,j, iblk), & +!opt H2_16O_ocn = H2_16O_ocn (i,j, iblk), & +!opt H2_18O_ocn = H2_18O_ocn (i,j, iblk), & +!opt wave_sig_ht = wave_sig_ht(i,j, iblk), & +!opt wave_spectrum = & +!opt wave_spectrum(i,j,:,iblk), & +!opt wavefreq = wavefreq (:), & +!opt d_afsd_latg = d_afsd_latg(i,j,:,iblk), & +!opt d_afsd_newi = d_afsd_newi(i,j,:,iblk), & +!opt d_afsd_latm = d_afsd_latm(i,j,:,iblk), & +!opt d_afsd_weld = d_afsd_weld(i,j,:,iblk), & +!opt dpnd_melt = dpnd_melt (i,j, iblk)) ) endif ! tmask @@ -891,7 +897,7 @@ end subroutine update_state subroutine step_dyn_wave (dt) - use ice_arrays_column, only: wave_spectrum, & + use ice_arrays_column, only: wave_spectrum, wave_sig_ht, & d_afsd_wave, wavefreq, dwavefreq use ice_domain_size, only: ncat, nfsd, nfreq use ice_state, only: trcrn, aicen, aice, vice @@ -911,14 +917,11 @@ subroutine step_dyn_wave (dt) iblk, & ! block index i, j ! horizontal indices - character (len=char_len) :: wave_spec_type - character(len=*), parameter :: subname = '(step_dyn_wave)' call ice_timer_start(timer_column) call ice_timer_start(timer_fsd) - call icepack_query_parameters(wave_spec_type_out=wave_spec_type) call icepack_warnings_flush(nu_diag) if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & file=__FILE__, line=__LINE__) @@ -935,8 +938,7 @@ subroutine step_dyn_wave (dt) do j = jlo, jhi do i = ilo, ihi d_afsd_wave(i,j,:,iblk) = c0 - call icepack_step_wavefracture(wave_spec_type = wave_spec_type, & - dt = dt, nfreq = nfreq, & + call icepack_step_wavefracture(dt = dt, nfreq = nfreq, & aice = aice (i,j, iblk), & vice = vice (i,j, iblk), & aicen = aicen (i,j,:, iblk), & @@ -944,7 +946,8 @@ subroutine step_dyn_wave (dt) wavefreq = wavefreq (:), & dwavefreq = dwavefreq (:), & trcrn = trcrn (i,j,:,:,iblk), & - d_afsd_wave = d_afsd_wave (i,j,:, iblk)) + d_afsd_wave = d_afsd_wave (i,j,:, iblk), & + wave_height = wave_sig_ht (i,j, iblk)) end do ! i end do ! j end do ! iblk @@ -1156,8 +1159,8 @@ subroutine step_dyn_ridge (dt, ndtd, iblk) fsalt = fsalt (i,j, iblk), & first_ice = first_ice(i,j,:,iblk), & flux_bio = flux_bio (i,j,1:nbtrcr,iblk), & - Tf = Tf (i,j, iblk) & -!opt dpnd_ridge=dpnd_ridge(i,j,iblk)) + Tf = Tf (i,j, iblk) & +!opt dpnd_ridge=dpnd_ridge(i,j, iblk)) ) endif ! 
tmask @@ -1446,7 +1449,7 @@ subroutine step_radiation (dt, iblk) albpndn =albpndn (i,j,: ,iblk), apeffn =apeffn (i,j,: ,iblk), & snowfracn=snowfracn(i,j,: ,iblk), & dhsn =dhsn (i,j,: ,iblk), ffracn =ffracn(i,j,:,iblk), & -!opt rsnow =rsnow (:,:), +!opt rsnow =rsnow (:,:), & l_print_point=l_print_point) endif diff --git a/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 b/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 index 66a5256e0..74589a064 100644 --- a/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 +++ b/cicecore/drivers/unittest/sumchk/CICE_InitMod.F90 @@ -84,6 +84,7 @@ subroutine cice_init faero_default, alloc_forcing_bgc, fiso_default use ice_grid, only: init_grid1, init_grid2, alloc_grid, dealloc_grid use ice_history, only: init_hist, accum_hist + use ice_history_write, only: ice_read_hist use ice_restart_shared, only: restart, runtype use ice_init, only: input_data, init_state use ice_init_column, only: init_thermo_vertical, init_shortwave, init_zbgc, input_zbgc, count_tracers @@ -197,6 +198,7 @@ subroutine cice_init call init_shortwave ! initialize radiative transfer if (write_ic) call accum_hist(dt) ! write initial conditions + call ice_read_hist ! read history restarts ! tcraig, use advance_timestep here ! istep = istep + 1 ! update time step counters diff --git a/cicecore/shared/ice_arrays_column.F90 b/cicecore/shared/ice_arrays_column.F90 index 38f3ee0f7..d72ed63de 100644 --- a/cicecore/shared/ice_arrays_column.F90 +++ b/cicecore/shared/ice_arrays_column.F90 @@ -9,6 +9,7 @@ module ice_arrays_column use ice_kinds_mod + use ice_constants, only : c0 use ice_fileunits, only: nu_diag use ice_blocks, only: nx_block, ny_block use ice_domain_size, only: max_blocks, ncat, nilyr, nslyr, & @@ -25,8 +26,7 @@ module ice_arrays_column ! icepack_atmo.F90 ! Cdn variables on the T-grid - real (kind=dbl_kind), public, & - dimension (:,:,:), allocatable :: & + real (kind=dbl_kind), public, dimension (:,:,:), allocatable :: & Cdn_atm , & ! atm drag coefficient Cdn_ocn , & ! ocn drag coefficient ! form drag @@ -64,16 +64,17 @@ module ice_arrays_column ! icepack_itd.F90 real (kind=dbl_kind), public, allocatable :: & - hin_max(:) ! category limits (m) + hin_max(:) ! category limits (m) - character (len=35), public, allocatable :: c_hi_range(:) + character (len=35), public, allocatable :: & + c_hi_range(:)! string for history output ! icepack_snow.F90 real (kind=dbl_kind), public, dimension (:,:,:), allocatable :: & meltsliq ! snow melt mass (kg/m^2/step-->kg/m^2/day) real (kind=dbl_kind), public, dimension (:,:,:,:), allocatable :: & - meltsliqn ! snow melt mass in category n (kg/m^2) + meltsliqn ! snow melt mass in category n (kg/m^2) ! icepack_meltpond_lvl.F90 real (kind=dbl_kind), public, dimension (:,:,:,:), allocatable :: & @@ -83,10 +84,10 @@ module ice_arrays_column ! icepack_shortwave.F90 ! category albedos real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & - alvdrn , & ! visible direct albedo (fraction) - alidrn , & ! near-ir direct albedo (fraction) - alvdfn , & ! visible diffuse albedo (fraction) - alidfn ! near-ir diffuse albedo (fraction) + alvdrn, & ! visible direct albedo (fraction) + alidrn, & ! near-ir direct albedo (fraction) + alvdfn, & ! visible diffuse albedo (fraction) + alidfn ! near-ir diffuse albedo (fraction) ! albedo components for history real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & @@ -100,14 +101,14 @@ module ice_arrays_column ! 
shortwave components real (kind=dbl_kind), dimension (:,:,:,:,:), allocatable, public :: & - Iswabsn ! SW radiation absorbed in ice layers (W m-2) + Iswabsn ! SW radiation absorbed in ice layers (W m-2) real (kind=dbl_kind), dimension (:,:,:,:,:), allocatable, public :: & - Sswabsn ! SW radiation absorbed in snow layers (W m-2) + Sswabsn ! SW radiation absorbed in snow layers (W m-2) real (kind=dbl_kind), dimension (:,:,:,:), allocatable, public :: & - fswsfcn , & ! SW absorbed at ice/snow surface (W m-2) - fswthrun , & ! SW through ice to ocean (W/m^2) + fswsfcn , & ! SW absorbed at ice/snow surface (W m-2) + fswthrun , & ! SW through ice to ocean (W/m^2) fswthrun_vdr , & ! vis dir SW through ice to ocean (W/m^2) fswthrun_vdf , & ! vis dif SW through ice to ocean (W/m^2) fswthrun_idr , & ! nir dir SW through ice to ocean (W/m^2) @@ -119,7 +120,7 @@ module ice_arrays_column fswintn ! SW absorbed in ice interior, below surface (W m-2) real (kind=dbl_kind), dimension (:,:,:,:,:), allocatable, public :: & - fswpenln ! visible SW entering ice layers (W m-2) + fswpenln ! visible SW entering ice layers (W m-2) ! biogeochemistry components @@ -243,7 +244,7 @@ module ice_arrays_column floe_binwidth ! fsd size bin width in m (radius) real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & - wave_sig_ht ! significant height of waves (m) + wave_sig_ht ! significant height of waves (m) real (kind=dbl_kind), dimension (:), allocatable, public :: & wavefreq, & ! wave frequencies @@ -348,6 +349,71 @@ subroutine alloc_arrays_column stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of Memory1') + Cdn_atm = c0 + Cdn_ocn = c0 + hfreebd = c0 + hdraft = c0 + hridge = c0 + distrdg = c0 + hkeel = c0 + dkeel = c0 + lfloe = c0 + dfloe = c0 + Cdn_atm_skin = c0 + Cdn_atm_floe = c0 + Cdn_atm_pond = c0 + Cdn_atm_rdg = c0 + Cdn_ocn_skin = c0 + Cdn_ocn_floe = c0 + Cdn_ocn_keel = c0 + Cdn_atm_ratio = c0 + grow_net = c0 + PP_net = c0 + hbri = c0 + chl_net = c0 + NO_net = c0 + upNO = c0 + upNH = c0 + meltsliq = c0 + meltsliqn = c0 + dhsn = c0 + ffracn = c0 + alvdrn = c0 + alidrn = c0 + alvdfn = c0 + alidfn = c0 + albicen = c0 + albsnon = c0 + albpndn = c0 + apeffn = c0 + snowfracn = c0 + fswsfcn = c0 + fswthrun = c0 + fswthrun_vdr = c0 + fswthrun_vdf = c0 + fswthrun_idr = c0 + fswthrun_idf = c0 + fswthrun_uvrdr= c0 + fswthrun_uvrdf= c0 + fswthrun_pardr= c0 + fswthrun_pardf= c0 + fswintn = c0 + first_ice_real= c0 + first_ice = .false. + dhbr_top = c0 + dhbr_bot = c0 + darcy_V = c0 + sice_rho = c0 + Iswabsn = c0 + Sswabsn = c0 + fswpenln = c0 + Zoo = c0 + zfswin = c0 + iDi = c0 + iki = c0 + bphi = c0 + bTiz = c0 + allocate( & ocean_bio (nx_block,ny_block,max_nbtrcr,max_blocks), & ! contains all the ocean bgc tracer concentrations fbio_snoice (nx_block,ny_block,max_nbtrcr,max_blocks), & ! fluxes from snow to ice @@ -359,6 +425,14 @@ subroutine alloc_arrays_column stat=ierr) if (ierr/=0) call abort_ice(subname//': Out of Memory2') + ocean_bio = c0 + fbio_snoice = c0 + fbio_atmice = c0 + ocean_bio_all= c0 + ice_bio_net = c0 + snow_bio_net = c0 + algal_peak = 0 + allocate( & hin_max(0:ncat) , & ! category limits (m) c_hi_range(ncat) , & ! @@ -370,6 +444,14 @@ subroutine alloc_arrays_column stat=ierr) if (ierr/=0) call abort_ice(subname//' Out of Memory3') + hin_max = c0 + c_hi_range = '' + bgrid = c0 + igrid = c0 + cgrid = c0 + icgrid = c0 + swgrid = c0 + ! floe size distribution allocate( & floe_rad_l (nfsd) , & ! 
fsd size lower bound in m (radius) @@ -388,6 +470,20 @@ subroutine alloc_arrays_column stat=ierr) if (ierr/=0) call abort_ice(subname//' Out of Memory5') + floe_rad_l = c0 + floe_rad_c = c0 + floe_binwidth = c0 + c_fsd_range = '' + wavefreq = c0 + dwavefreq = c0 + wave_sig_ht = c0 + wave_spectrum = c0 + d_afsd_newi = c0 + d_afsd_latg = c0 + d_afsd_latm = c0 + d_afsd_wave = c0 + d_afsd_weld = c0 + end subroutine alloc_arrays_column !======================================================================= diff --git a/cicecore/shared/ice_calendar.F90 b/cicecore/shared/ice_calendar.F90 index 829e78218..31ac6501c 100644 --- a/cicecore/shared/ice_calendar.F90 +++ b/cicecore/shared/ice_calendar.F90 @@ -123,6 +123,7 @@ module ice_calendar logical (kind=log_kind), public :: & use_leap_years , & ! use leap year functionality if true write_ic , & ! write initial condition now + write_histrest , & ! write history restarts if needed dump_last , & ! write restart file on last time step force_restart_now, & ! force a restart now write_history(max_nstrm) ! write history now @@ -443,6 +444,11 @@ subroutine calendar() if (mod(elapsed_days,histfreq_n(ns))==0) & write_history(ns) = .true. endif + case ("n", "N") + if (new_day .and. histfreq_n(ns)/=0) then + if (mday == histfreq_n(ns)) & + write_history(ns) = .true. + endif case ("h", "H") if (new_hour .and. histfreq_n(ns)/=0) then if (mod(elapsed_hours,histfreq_n(ns))==0) & diff --git a/cicecore/shared/ice_init_column.F90 b/cicecore/shared/ice_init_column.F90 index 5f04a49d6..8abc21c3d 100644 --- a/cicecore/shared/ice_init_column.F90 +++ b/cicecore/shared/ice_init_column.F90 @@ -677,7 +677,7 @@ subroutine init_fsd(floesize) ! initialize floe size distribution the same in every column and category call icepack_init_fsd(ice_ic = ice_ic, & - afsd = afsd) ! floe size distribution + afsd = afsd) ! floe size distribution do iblk = 1, max_blocks do j = 1, ny_block diff --git a/cicecore/shared/ice_restart_shared.F90 b/cicecore/shared/ice_restart_shared.F90 index 32d78e82f..409d0cb16 100644 --- a/cicecore/shared/ice_restart_shared.F90 +++ b/cicecore/shared/ice_restart_shared.F90 @@ -30,6 +30,7 @@ module ice_restart_shared character (len=char_len), public :: & restart_format , & ! format of restart files 'nc' + restart_mod , & ! restart modification option, "none", "adjust_aice" restart_rearranger ! 
restart file rearranger, box or subset for pio integer (kind=int_kind), public :: & diff --git a/cicecore/version.txt b/cicecore/version.txt index 0a244794e..cef257940 100644 --- a/cicecore/version.txt +++ b/cicecore/version.txt @@ -1 +1 @@ -CICE 6.6.1 +CICE 6.6.3 diff --git a/configuration/scripts/cice.batch.csh b/configuration/scripts/cice.batch.csh index 220313bfa..abb64e25c 100755 --- a/configuration/scripts/cice.batch.csh +++ b/configuration/scripts/cice.batch.csh @@ -366,12 +366,21 @@ cat >> ${jobfile} << EOFB #PBS -l walltime=${batchtime} EOFB +else if (${ICE_MACHINE} =~ boreas* ) then +cat >> ${jobfile} << EOFB +#PBS -N ${ICE_CASENAME} +#PBS -j oe +#PBS -q ${queue} +#PBS -l select=${nnodes}:ncpus=${corespernode}:mpiprocs=${taskpernodelimit}:ompthreads=${nthrds} +#PBS -l walltime=${batchtime} +EOFB + else if (${ICE_MACHINE} =~ gaeac5*) then cat >> ${jobfile} << EOFB #SBATCH -J ${ICE_CASENAME} #SBATCH --partition=batch #SBATCH --qos=${queue} -#SBATCH --account=nggps_emc +#SBATCH --account=${acct} #SBATCH --clusters=c5 #SBATCH --time=${batchtime} #SBATCH --nodes=${nnodes} @@ -388,7 +397,7 @@ cat >> ${jobfile} << EOFB #SBATCH -J ${ICE_CASENAME} #SBATCH --partition=batch #SBATCH --qos=${queue} -#SBATCH --account=sfs_emc +#SBATCH --account=${acct} #SBATCH --clusters=c6 #SBATCH --time=${batchtime} #SBATCH --nodes=${nnodes} diff --git a/configuration/scripts/cice.launch.csh b/configuration/scripts/cice.launch.csh index bc68a7ce1..73af50d1b 100755 --- a/configuration/scripts/cice.launch.csh +++ b/configuration/scripts/cice.launch.csh @@ -272,6 +272,19 @@ aprun -n ${ntasks} -N ${taskpernodelimit} -d ${nthrds} ./cice >&! \$ICE_RUNLOG_F EOFR endif +#======= +else if (${ICE_MACHCOMP} =~ boreas*) then +if (${ICE_COMMDIR} =~ serial*) then +cat >> ${jobfile} << EOFR +aprun -n 1 -N 1 -d 1 ./cice >&! \$ICE_RUNLOG_FILE +EOFR +else +cat >> ${jobfile} << EOFR +aprun -n ${ntasks} -N ${taskpernodelimit} -d ${nthrds} ./cice >&! 
\$ICE_RUNLOG_FILE +EOFR +endif + + #======= else if (${ICE_MACHCOMP} =~ gaea*) then cat >> ${jobfile} << EOFR diff --git a/configuration/scripts/cmake/CMakeLists.txt b/configuration/scripts/cmake/CMakeLists.txt index aaafbcb4e..d046cbee8 100644 --- a/configuration/scripts/cmake/CMakeLists.txt +++ b/configuration/scripts/cmake/CMakeLists.txt @@ -59,11 +59,13 @@ if(CMAKE_Fortran_COMPILER_ID MATCHES "GNU") set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -fallow-argument-mismatch") endif() set(CMAKE_Fortran_FLAGS_RELEASE "-O") + set(CMAKE_Fortran_FLAGS_RELWITHDEBINFO "-O -g") set(CMAKE_Fortran_FLAGS_DEBUG "-g -Wall -Og -ffpe-trap=zero,overflow -fcheck=bounds") elseif(CMAKE_Fortran_COMPILER_ID MATCHES "Intel") set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -qno-opt-dynamic-align -convert big_endian -assume byterecl -ftz -traceback -assume realloc_lhs -fp-model precise") set(CMAKE_Fortran_FLAGS_RELEASE "-O2 -debug minimal") - set(CMAKE_Fortran_FLAGS_DEBUG "-O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created") + set(CMAKE_Fortran_FLAGS_RELWITHDEBINFO "-O2 -g") + set(CMAKE_Fortran_FLAGS_DEBUG "-O0 -g -check all -fpe0 -check noarg_temp_created") else() message(WARNING "Fortran compiler with ID ${CMAKE_Fortran_COMPILER_ID} will be used with CMake default options") endif() diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index 776f9f966..98ac2dbbb 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -21,6 +21,7 @@ restart_stride = -99 restart_deflate = 0 restart_chunksize = 0, 0 + restart_mod = 'none' lcdf64 = .false. numin = 21 numax = 89 @@ -67,6 +68,7 @@ history_deflate = 0 history_chunksize = 0, 0 hist_time_axis = 'end' + write_histrest = .true. write_ic = .true. incond_dir = './history/' incond_file = 'iceh_ic' @@ -95,7 +97,6 @@ scale_dxdy = .false. dxscale = 1.d0 dyscale = 1.d0 - close_boundaries = .false. ncat = 5 nfsd = 1 nilyr = 7 @@ -201,8 +202,8 @@ reltol_pgmres = 1e-6 algo_nonlin = 'picard' use_mean_vrel = .true. - dyn_area_min = 0.001d0 - dyn_mass_min = 0.01d0 + dyn_area_min = 1.e-11 + dyn_mass_min = 1.e-10 / &shortwave_nml @@ -281,6 +282,7 @@ saltflux_option = 'constant' ice_ref_salinity = 4.0 oceanmixed_ice = .true. + wave_height_type = 'none' wave_spec_type = 'none' wave_spec_file = 'unknown_wave_spec_file' nfreq = 25 @@ -512,12 +514,15 @@ f_VGRDa = .true. f_bounds = .false. 
f_aice = 'm' + f_aice_init = 'x' f_hi = 'm' f_hs = 'm' f_Tsfc = 'm' f_sice = 'm' f_uvel = 'm' f_vvel = 'm' + f_icedir = 'x' + f_icespd = 'x' f_uatm = 'm' f_vatm = 'm' f_fswdn = 'm' @@ -627,7 +632,78 @@ f_fmelttn_ai = 'x' f_flatn_ai = 'x' f_fsensn_ai = 'x' - f_CMIP = 'x' + f_siage = 'x' + f_siconc = 'x' + f_sifb = 'x' + f_siflcondbot = 'x' + f_siflcondtop = 'x' + f_sifllattop = 'x' + f_sifllwdtop = 'x' + f_sifllwutop = 'x' + f_siflsensbot = 'x' + f_siflsenstop = 'x' + f_siflswdtop = 'x' + f_siflswutop = 'x' + f_sihc = 'x' + f_siitdconc = 'x' + f_siitdsnconc = 'x' + f_siitdsnthick = 'x' + f_siitdthick = 'x' + f_sisali = 'x' + f_sisaltmass = 'x' + f_sisnhc = 'x' + f_sisnthick = 'x' + f_sitempbot = 'x' + f_sitempsnic = 'x' + f_sitemptop = 'x' + f_sithick = 'x' + f_sitimefrac = 'x' + f_siu = 'x' + f_siv = 'x' + f_sicompstren = 'x' + f_sidconcdyn = 'x' + f_sidconcth = 'x' + f_sidivvel = 'x' + f_sidmassdyn = 'x' + f_sidmassevapsubl = 'x' + f_sidmassgrowthbot = 'x' + f_sidmassgrowthsi = 'x' + f_sidmassgrowthwat = 'x' + f_sidmassmeltbot = 'x' + f_sidmassmeltlat = 'x' + f_sidmassmelttop = 'x' + f_sidmassth = 'x' + f_sidmasstranx = 'x' + f_sidmasstrany = 'x' + f_sidragbot = 'x' + f_sidragtop = 'x' + f_siflfwbot = 'x' + f_siflfwdrain = 'x' + f_siflsaltbot = 'x' + f_siforcecoriolx = 'x' + f_siforcecorioly = 'x' + f_siforceintstrx = 'x' + f_siforceintstry = 'x' + f_siforcetiltx = 'x' + f_siforcetilty = 'x' + f_simass = 'x' + f_sipr = 'x' + f_sishearvel = 'x' + f_sisnconc = 'x' + f_sisndmassdyn = 'x' + f_sisndmassmelt = 'x' + f_sisndmasssi = 'x' + f_sisndmasssnf = 'x' + f_sisndmasssubl = 'x' + f_sisnmass = 'x' + f_sispeed = 'x' + f_sistressave = 'x' + f_sistressmax = 'x' + f_sistrxdtop = 'x' + f_sistrydtop = 'x' + f_sistrxubot = 'x' + f_sistryubot = 'x' + f_sivol = 'x' / &icefields_mechred_nml @@ -650,6 +726,8 @@ f_vredistn = 'x' f_araftn = 'x' f_vraftn = 'x' + f_sirdgconc = 'x' + f_sirdgthick = 'x' / &icefields_pond_nml @@ -676,6 +754,10 @@ f_hpond_ai = 'm' f_ipond_ai = 'm' f_apeff_ai = 'm' + f_simpconc = 'x' + f_simpeffconc = 'x' + f_simprefrozen = 'x' + f_simpthick = 'x' / &icefields_snow_nml @@ -691,6 +773,7 @@ f_rsnw = 'm' f_meltsliq = 'm' f_fsloss = 'm' + f_sisndmasswind = 'x' / &icefields_bgc_nml diff --git a/configuration/scripts/machines/Macros.boreas_intel b/configuration/scripts/machines/Macros.boreas_intel new file mode 100644 index 000000000..0fe33a1bf --- /dev/null +++ b/configuration/scripts/machines/Macros.boreas_intel @@ -0,0 +1,70 @@ +#============================================================================== +# Makefile macros for DMI Freya based on ECCC banting +#============================================================================== +# For use with intel compiler +#============================================================================== + +#INCLDIR := -I. 
-I/usr/include +#SLIBS := + +#--- Compiler/preprocessor --- +FC := ftn +CC := cc +CXX := CC +CPP := /usr/bin/cpp +CPPFLAGS := -P -traditional # ALLOW fortran double backslash "\\" +SCC := gcc +SFC := ftn + +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 -fp-model precise +# Additional flags +FIXEDFLAGS := -132 +FREEFLAGS := -FR +FFLAGS := -convert big_endian -assume byterecl +#-xHost + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -check -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -fp-model source -ftz -traceback -no-wrap-margin +# -heap-arrays 1024 +else +# FFLAGS += -O3 -xCORE-AVX512 -qopt-zmm-usage=high -finline-functions -finline -parallel + FFLAGS += -O2 -qopt-zmm-usage=high -finline-functions -finline -parallel +endif +LD := $(FC) +LDFLAGS := $(FFLAGS) -v +#ifeq ($(ICE_BLDDEBUG), true) +#FFLAGS := -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created +#FFLAGS := -g -O0 -traceback -fp-model precise -fp-stack-check -fpe0 +#else +#FFLAGS := -r8 -i4 -O2 -align all -w -ftz -assume byterecl +# FFLAGS := -O2 -fp-model precise -assume byterecl -ftz -traceback -xHost +#endif +# Preprocessor flags +#CPPDEFS := -DLINUX $(ICE_CPPDEFS) + +# Linker flags + +# Additional flags + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -qopenmp + CFLAGS += -qopenmp + FFLAGS += -qopenmp +endif + +#--- NetCDF --- +#ifeq ($(IO_TYPE), netcdf) +# +#endif +# +#ifeq ($(IO_TYPE), netcdf_bin) +# CPPDEFS := $(CPPDEFS) -Dncdf +#endif + +### if using parallel I/O, load all 3 libraries. PIO must be first! +#ifeq ($(ICE_IOTYPE), pio) +# PIO_PATH:=/usr/projects/climate/SHARED_CLIMATE/software/conejo/pio/1.7.2/intel-13.0.1/openmpi-1.6.3/netcdf-3.6.3-parallel-netcdf-1.3.1/include +# INCLDIR += -I$(PIO_PATH) +# SLIBS := $(SLIBS) -L$(PIO_PATH) -lpio +#endif diff --git a/configuration/scripts/machines/Macros.carpenter_gnu b/configuration/scripts/machines/Macros.carpenter_gnu index 61efa80c2..a1fe056fb 100644 --- a/configuration/scripts/machines/Macros.carpenter_gnu +++ b/configuration/scripts/machines/Macros.carpenter_gnu @@ -12,7 +12,8 @@ FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none -fallow-a FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow +# FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow + FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=zero,overflow CFLAGS += -O0 endif diff --git a/configuration/scripts/machines/Macros.carpenter_gnuimpi b/configuration/scripts/machines/Macros.carpenter_gnuimpi index ef0c5e96a..d57dfda2b 100644 --- a/configuration/scripts/machines/Macros.carpenter_gnuimpi +++ b/configuration/scripts/machines/Macros.carpenter_gnuimpi @@ -12,7 +12,8 @@ FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none -fallow-a FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow +# FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow + FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=zero,overflow CFLAGS += -O0 endif diff --git a/configuration/scripts/machines/Macros.gaea_cray b/configuration/scripts/machines/Macros.gaea_cray deleted file mode 100644 index 851134514..000000000 --- a/configuration/scripts/machines/Macros.gaea_cray +++ /dev/null @@ -1,57 +0,0 @@ 
-#============================================================================== -# Makefile macros for NOAA hera, intel compiler -#============================================================================== - -CPP := fpp -CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 - -FIXEDFLAGS := -132 -FREEFLAGS := -FFLAGS := -hbyteswapio -FFLAGS_NOOPT:= -O0 -LDLAGS := -hbyteswapio - -ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -hfp0 -g -Rbcdps -Ktrap=fp -else - FFLAGS += -O2 -hfp0 -endif - -SCC := cc -SFC := ftn -MPICC := cc -MPIFC := ftn - -ifeq ($(ICE_COMMDIR), mpi) - FC := $(MPIFC) - CC := $(MPICC) -else - FC := $(SFC) - CC := $(SCC) -endif -LD:= $(FC) - -NETCDF_PATH := $(NETCDF) - -#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs - -#PNETCDF_PATH := $(PNETCDF) -#PNETCDF_PATH := /glade/u/apps/ch/opt/pio/2.2/mpt/2.15f/intel/17.0.1/lib - -INC_NETCDF := $(NETCDF_PATH)/include -LIB_NETCDF := $(NETCDF_PATH)/lib - -#LIB_PNETCDF := $(PNETCDF_PATH)/lib -#LIB_MPI := $(IMPILIBDIR) - -INCLDIR := $(INCLDIR) -I$(INC_NETCDF) -#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff -L$(LIB_PNETCDF) -lpnetcdf -lgptl -SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff - -ifeq ($(ICE_THREADED), true) - LDFLAGS += -fopenmp - CFLAGS += -fopenmp - FFLAGS += -fopenmp -endif - diff --git a/configuration/scripts/machines/Macros.gaeac5_intel b/configuration/scripts/machines/Macros.gaeac5_intel index 794070214..5a4c384fc 100644 --- a/configuration/scripts/machines/Macros.gaeac5_intel +++ b/configuration/scripts/machines/Macros.gaeac5_intel @@ -4,15 +4,17 @@ CPP := fpp CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 -fp-model precise -xHost +CFLAGS := -c -O2 -fp-model precise -march=core-avx2 FIXEDFLAGS := -132 FREEFLAGS := -FR -FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -align array64byte -xHost +FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -align array64byte -march=core-avx2 FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created -link_mpi=dbg + # 7/2025: cannot use -check uninit + # 7/2025: must use fpe1 rather than fpe0 + FFLAGS += -O0 -g -check bounds -check pointers -fpe1 -check noarg_temp_created -link_mpi=dbg else FFLAGS += -O2 endif diff --git a/configuration/scripts/machines/Macros.gaea_intel b/configuration/scripts/machines/Macros.gaeac6_intel similarity index 76% rename from configuration/scripts/machines/Macros.gaea_intel rename to configuration/scripts/machines/Macros.gaeac6_intel index f4c4d2cbe..9ba174738 100644 --- a/configuration/scripts/machines/Macros.gaea_intel +++ b/configuration/scripts/machines/Macros.gaeac6_intel @@ -1,18 +1,20 @@ #============================================================================== -# Makefile macros for NOAA hera, intel compiler +# Makefile macros for NOAA gaeac6, intel compiler #============================================================================== CPP := fpp CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} -CFLAGS := -c -O2 -fp-model precise -xHost +CFLAGS := -c -O2 -fp-model precise -march=core-avx2 FIXEDFLAGS := -132 FREEFLAGS := -FR -FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -align array64byte -xHost +FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback -align array64byte -march=core-avx2 FFLAGS_NOOPT:= -O0 ifeq ($(ICE_BLDDEBUG), true) - FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check 
noarg_temp_created -link_mpi=dbg +# no -check uninit 25 Jul 2025 +# fpe1 rather than fpe0 due to bug in hdf library 25 Jul 2025 + FFLAGS += -O0 -g -check bounds -check pointers -fpe1 -check noarg_temp_created -link_mpi=dbg else FFLAGS += -O2 endif diff --git a/configuration/scripts/machines/env.boreas_intel b/configuration/scripts/machines/env.boreas_intel new file mode 100644 index 000000000..5525e08eb --- /dev/null +++ b/configuration/scripts/machines/env.boreas_intel @@ -0,0 +1,52 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then +alias module 'eval `/opt/cray/pe/modules/3.2.11.7/bin/modulecmd tcsh \!*`' +module purge +module load craype-x86-milan +module load PrgEnv-intel +module load cray-hdf5-parallel +module load cray-netcdf-hdf5parallel +module swap intel/2022.2.1 intel-classic/2022.2.1 +module load gcc/12.2.0 +module load jasper-3.0.3-cce-14.0.3-dqhpvpm +module load cray-pals/1.2.12 +#asetenv NETCDF_PATH ${NETCDF_DIR} +setenv LD_LIBRARY_PATH /opt/cray/pe/netcdf-hdf5parallel/4.9.0.5/intel/2022.2/lib:${LD_LIBRARY_PATH} +###source /opt/modules/default/init/csh # Initialize modules for csh +##source /opt/cray/pe/modules/3.2.11.7/init/csh +### Clear environment +##module rm PrgEnv-intel +##module rm PrgEnv-cray +##module rm PrgEnv-gnu +##module add PrgEnv-intel +###module load PrgEnv-intel # Intel compiler +###module load cray-mpich # MPI (Cray MPICH) +#module add cray-hdf5 # HDF5 +#module add cray-netcdf # NetCDF +#module load cray-pals +#setenv HDF5_USE_FILE_LOCKING FALSE # necessary since data is on an NFS filesystem + +endif + +setenv ICE_MACHINE_MACHNAME Boreas +setenv ICE_MACHINE_MACHINFO "Cray XC50, Intel Xeon Gold 6148 (Skylake) NOT SURE-TILL" +setenv ICE_MACHINE_ENVNAME intel +setenv ICE_MACHINE_ENVINFO "Intel 18.0.0.128, cray-mpich/7.7.0, cray-netcdf/4.4.1.1.6" +setenv ICE_MACHINE_MAKE make +setenv ICE_MACHINE_WKDIR /data/${USER}/cice_original/run/ +setenv ICE_MACHINE_INPUTDATA /data/${USER}/cice_original/ +setenv ICE_MACHINE_BASELINE /data/${USER}/cice_original/dbaselines/ +setenv ICE_MACHINE_SUBMIT "qsub" +setenv ICE_MACHINE_TPNODE 128 # tasks per node +#setenv ICE_MACHINE_MAXRUNLENGTH 9 +setenv ICE_MACHINE_ACCT P0000000 +setenv ICE_MACHINE_QUEUE "hpc" +setenv ICE_MACHINE_BLDTHRDS 18 +setenv ICE_MACHINE_QSTAT "qstat " +setenv OMP_STACKSIZE 64M diff --git a/configuration/scripts/machines/env.carpenter_cray b/configuration/scripts/machines/env.carpenter_cray index d2c832d8f..0d70e7f54 100644 --- a/configuration/scripts/machines/env.carpenter_cray +++ b/configuration/scripts/machines/env.carpenter_cray @@ -13,21 +13,21 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-pgi -module load PrgEnv-cray/8.4.0 +module load PrgEnv-cray/8.6.0 module unload cce -module load cce/16.0.0 +module load cce/19.0.0 module unload cray-mpich -module load cray-mpich/8.1.26 +module load cray-mpich/8.1.32 module unload cray-hdf5 module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf module unload netcdf -module load cray-netcdf/4.9.0.3 -module load cray-hdf5/1.12.2.3 +module load cray-netcdf/4.9.0.17 +module load cray-hdf5/1.14.3.5 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited @@ -41,7 +41,7 @@ endif setenv ICE_MACHINE_MACHNAME carpenter setenv ICE_MACHINE_MACHINFO "Cray EX4000 AMD 9654 Genoa 2.1GHz, Slingshot Interconnect" setenv ICE_MACHINE_ENVNAME cray -setenv ICE_MACHINE_ENVINFO "Cray Fortran/Clang 
16.0.0, cray-mpich/8.1.26, netcdf/4.9.0.3" +setenv ICE_MACHINE_ENVINFO "Cray Fortran/Clang 19.0.0, cray-mpich/8.1.32, netcdf/4.9.0.17" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium diff --git a/configuration/scripts/machines/env.carpenter_gnu b/configuration/scripts/machines/env.carpenter_gnu index 96a04072f..9c8e4df4e 100644 --- a/configuration/scripts/machines/env.carpenter_gnu +++ b/configuration/scripts/machines/env.carpenter_gnu @@ -13,15 +13,15 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-pgi -module load PrgEnv-gnu/8.4.0 +module load PrgEnv-gnu/8.6.0 module unload gcc -module load gcc/12.2.0 +module load gcc/14.2.0 module unload cray-mpich module unload mpi module unload openmpi -module load cray-mpich/8.1.26 +module load cray-mpich/8.1.32 #module load openmpi/4.1.6 #module load mpi/2021.11 @@ -30,8 +30,8 @@ module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf module unload netcdf -module load cray-netcdf/4.9.0.3 -module load cray-hdf5/1.12.2.3 +module load cray-netcdf/4.9.0.17 +module load cray-hdf5/1.14.3.5 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited @@ -45,7 +45,7 @@ endif setenv ICE_MACHINE_MACHNAME carpenter setenv ICE_MACHINE_MACHINFO "Cray EX4000 AMD 9654 Genoa 2.1GHz, Slingshot Interconnect" setenv ICE_MACHINE_ENVNAME gnu -setenv ICE_MACHINE_ENVINFO "gnu gcc 12.2.0 20220819, mpich/8.1.26, netcdf/4.9.0.3" +setenv ICE_MACHINE_ENVINFO "gnu gcc 14.3.0, mpich/8.1.32, netcdf/4.9.0.17" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium diff --git a/configuration/scripts/machines/env.carpenter_gnuimpi b/configuration/scripts/machines/env.carpenter_gnuimpi index f21bf97a5..900494b3b 100644 --- a/configuration/scripts/machines/env.carpenter_gnuimpi +++ b/configuration/scripts/machines/env.carpenter_gnuimpi @@ -13,25 +13,25 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-pgi -module load PrgEnv-gnu/8.4.0 +module load PrgEnv-gnu/8.6.0 module unload gcc -module load gcc/11.2.0 +module load gcc/14.2.0 module unload cray-mpich module unload mpi module unload openmpi #module load cray-mpich/8.1.26 #module load openmpi/4.1.6 -module load mpi/2021.11 +module load mpi/2021.16 module unload cray-hdf5 module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf module unload netcdf -module load cray-netcdf/4.9.0.3 -module load cray-hdf5/1.12.2.3 +module load cray-netcdf/4.9.0.17 +module load cray-hdf5/1.14.3.5 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited @@ -45,7 +45,7 @@ endif setenv ICE_MACHINE_MACHNAME carpenter setenv ICE_MACHINE_MACHINFO "Cray EX4000 AMD 9654 Genoa 2.1GHz, Slingshot Interconnect" setenv ICE_MACHINE_ENVNAME gnuimpi -setenv ICE_MACHINE_ENVINFO "gnu gcc 11.2.0 20210728, intel mpi 2021.11, netcdf/4.9.0.3" +setenv ICE_MACHINE_ENVINFO "gnu gcc 14.2.0, intel mpi 2021.16, netcdf/4.9.0.17" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium diff --git a/configuration/scripts/machines/env.carpenter_intel b/configuration/scripts/machines/env.carpenter_intel index 141a5a82a..577d9642f 100644 --- a/configuration/scripts/machines/env.carpenter_intel +++ 
b/configuration/scripts/machines/env.carpenter_intel @@ -12,15 +12,15 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-pgi -module load PrgEnv-intel/8.5.0 +module load PrgEnv-intel/8.6.0 module unload intel -module load intel/2023.0.0 +module load intel/2024.2 module unload cray-mpich module unload mpi module unload openmpi -module load cray-mpich/8.1.30 +module load cray-mpich/8.1.32 #module load mpi/2021.11 #module load openmpi/4.1.6 @@ -29,8 +29,8 @@ module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf module unload netcdf -module load cray-netcdf/4.9.0.3 -module load cray-hdf5/1.14.3.1 +module load cray-netcdf/4.9.0.17 +module load cray-hdf5/1.14.3.5 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited @@ -44,7 +44,7 @@ endif setenv ICE_MACHINE_MACHNAME carpenter setenv ICE_MACHINE_MACHINFO "Cray EX4000 AMD 9654 Genoa 2.1GHz, Slingshot Interconnect" setenv ICE_MACHINE_ENVNAME intel -setenv ICE_MACHINE_ENVINFO "ifort 2021.8.0 20221119, cray-mpich/8.1.30, netcdf/4.9.0.3" +setenv ICE_MACHINE_ENVINFO "ifort 2021.13.0 20240602, cray-mpich/8.1.32, netcdf/4.9.0.17" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium diff --git a/configuration/scripts/machines/env.carpenter_intelimpi b/configuration/scripts/machines/env.carpenter_intelimpi index ef43363cd..80f696772 100644 --- a/configuration/scripts/machines/env.carpenter_intelimpi +++ b/configuration/scripts/machines/env.carpenter_intelimpi @@ -12,16 +12,16 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-pgi -module load PrgEnv-intel/8.5.0 +module load PrgEnv-intel/8.6.0 module unload intel -module load intel/2023.0.0 +module load intel/2023.2.0 module unload cray-mpich module unload mpi module unload openmpi #module load cray-mpich/8.1.26 -module load mpi/2021.13 +module load mpi/2021.16 #module load openmpi/4.1.6 module unload cray-hdf5 @@ -29,8 +29,8 @@ module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf module unload netcdf -module load cray-netcdf/4.9.0.3 -module load cray-hdf5/1.14.3.1 +module load cray-netcdf/4.9.0.17 +module load cray-hdf5/1.14.3.5 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited @@ -44,7 +44,7 @@ endif setenv ICE_MACHINE_MACHNAME carpenter setenv ICE_MACHINE_MACHINFO "Cray EX4000 AMD 9654 Genoa 2.1GHz, Slingshot Interconnect" setenv ICE_MACHINE_ENVNAME intelimpi -setenv ICE_MACHINE_ENVINFO "ifort 2021.8.0 20221119, intel mpi 2021.13, netcdf/4.9.0.3" +setenv ICE_MACHINE_ENVINFO "ifort 2021.13.0 20240602, intel mpi 2021.16, netcdf/4.9.0.17" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium diff --git a/configuration/scripts/machines/env.carpenter_inteloneapi b/configuration/scripts/machines/env.carpenter_inteloneapi index 2480918c1..62ba8b3c8 100644 --- a/configuration/scripts/machines/env.carpenter_inteloneapi +++ b/configuration/scripts/machines/env.carpenter_inteloneapi @@ -12,15 +12,15 @@ module unload PrgEnv-cray module unload PrgEnv-gnu module unload PrgEnv-intel module unload PrgEnv-pgi -module load PrgEnv-intel/8.5.0 +module load PrgEnv-intel/8.6.0 module unload intel -module load intel-oneapi/2024.2 +module load intel-oneapi/2025.2 module unload cray-mpich module unload mpi module unload openmpi 
-module load cray-mpich/8.1.30 +module load cray-mpich/8.1.32 #module load mpi/2021.11 #module load openmpi/4.1.6 @@ -29,8 +29,8 @@ module unload cray-hdf5-parallel module unload cray-netcdf-hdf5parallel module unload cray-parallel-netcdf module unload netcdf -module load cray-netcdf/4.9.0.13 -module load cray-hdf5/1.14.3.1 +module load cray-netcdf/4.9.0.17 +module load cray-hdf5/1.14.3.5 setenv NETCDF_PATH ${NETCDF_DIR} limit coredumpsize unlimited @@ -44,7 +44,7 @@ endif setenv ICE_MACHINE_MACHNAME carpenter setenv ICE_MACHINE_MACHINFO "Cray EX4000 AMD 9654 Genoa 2.1GHz, Slingshot Interconnect" setenv ICE_MACHINE_ENVNAME inteloneapi -setenv ICE_MACHINE_ENVINFO "Intel oneAPI DPC++/C++/icx/ifx 2024.2.0 20240602, cray-mpich/8.1.30, netcdf/4.9.0.13" +setenv ICE_MACHINE_ENVINFO "Intel oneAPI DPC++/C++/icx/ifx 2025.2.1 20250806, cray-mpich/8.1.32, netcdf/4.9.0.17" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium diff --git a/configuration/scripts/machines/env.derecho_cray b/configuration/scripts/machines/env.derecho_cray index 4e886ab71..2eef6aaaa 100644 --- a/configuration/scripts/machines/env.derecho_cray +++ b/configuration/scripts/machines/env.derecho_cray @@ -20,6 +20,7 @@ module load netcdf/4.9.2 #module load netcdf-mpi/4.9.2 module load cray-libsci/23.09.1.1 +module load nco if ($?ICE_IOTYPE) then if ($ICE_IOTYPE =~ pio*) then diff --git a/configuration/scripts/machines/env.derecho_gnu b/configuration/scripts/machines/env.derecho_gnu index 330509636..4ddf4c3a9 100644 --- a/configuration/scripts/machines/env.derecho_gnu +++ b/configuration/scripts/machines/env.derecho_gnu @@ -18,6 +18,7 @@ module load ncarcompilers module load cray-mpich/8.1.29 module load netcdf/4.9.2 module load cray-libsci/24.03.0 +module load nco if ($?ICE_IOTYPE) then if ($ICE_IOTYPE =~ pio*) then diff --git a/configuration/scripts/machines/env.derecho_intel b/configuration/scripts/machines/env.derecho_intel index 05c719838..ada9344bf 100644 --- a/configuration/scripts/machines/env.derecho_intel +++ b/configuration/scripts/machines/env.derecho_intel @@ -20,6 +20,7 @@ module load netcdf/4.9.2 #module load netcdf-mpi/4.9.2 module load cray-libsci/23.09.1.1 +module load nco if ($?ICE_IOTYPE) then if ($ICE_IOTYPE =~ pio*) then diff --git a/configuration/scripts/machines/env.derecho_intelclassic b/configuration/scripts/machines/env.derecho_intelclassic index 8d3639a5e..e1ba716be 100644 --- a/configuration/scripts/machines/env.derecho_intelclassic +++ b/configuration/scripts/machines/env.derecho_intelclassic @@ -20,6 +20,7 @@ module load netcdf/4.9.2 #module load netcdf-mpi/4.9.2 #module load cray-libsci/23.02.1.1 +module load nco if ($?ICE_IOTYPE) then if ($ICE_IOTYPE =~ pio*) then diff --git a/configuration/scripts/machines/env.derecho_inteloneapi b/configuration/scripts/machines/env.derecho_inteloneapi index 79715ba2a..b90958e7d 100644 --- a/configuration/scripts/machines/env.derecho_inteloneapi +++ b/configuration/scripts/machines/env.derecho_inteloneapi @@ -21,6 +21,7 @@ module load netcdf/4.9.2 #module load netcdf-mpi/4.9.2 module load cray-libsci/23.09.1.1 +module load nco if ($?ICE_IOTYPE) then if ($ICE_IOTYPE =~ pio*) then diff --git a/configuration/scripts/machines/env.derecho_nvhpc b/configuration/scripts/machines/env.derecho_nvhpc index 99aa6430a..c482972dc 100644 --- a/configuration/scripts/machines/env.derecho_nvhpc +++ b/configuration/scripts/machines/env.derecho_nvhpc @@ -20,6 +20,7 @@ module load netcdf/4.9.2 
#module load netcdf-mpi/4.9.2 module load cray-libsci/23.09.1.1 +module load nco if ($?ICE_IOTYPE) then if ($ICE_IOTYPE =~ pio*) then diff --git a/configuration/scripts/machines/env.gaea_cray b/configuration/scripts/machines/env.gaea_cray deleted file mode 100644 index db62615ee..000000000 --- a/configuration/scripts/machines/env.gaea_cray +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/csh -f - -set inp = "undefined" -if ($#argv == 1) then - set inp = $1 -endif - -if ("$inp" != "-nomodules") then - -#source /lustre/f2/pdata/esrl/gsd/contrib/lua-5.1.4.9/init/init_lmod.csh -source $MODULESHOME/init/csh -module purge -module load PrgEnv-cray -module load cce/18.0.0 -module load cray-libsci/24.07.0 -module load cray-hdf5/1.14.3.1 -module load cray-netcdf/4.9.0.13 -setenv NETCDF $NETCDF_DIR -module list - -# May be needed for OpenMP memory -#setenv OMP_STACKSIZE 64M - -endif - -# May be needed for OpenMP memory -#setenv OMP_STACKSIZE 64M - -endif - -setenv ICE_MACHINE_MACHNAME gaea -setenv ICE_MACHINE_MACHINFO "Cray XC40 Intel Haswell/Broadwell 2.3GHz, Gemini Interconnect" -setenv ICE_MACHINE_ENVNAME intel -setenv ICE_MACHINE_ENVINFO "ifort 18.0.5 20180823, cray-mpich, cray-netcdf" -setenv ICE_MACHINE_MAKE gmake -setenv ICE_MACHINE_WKDIR $HOME/scratch/CICE_RUNS -setenv ICE_MACHINE_INPUTDATA /ncrc/home1/Robert.Grumbine/rgdev/CICE_INPUTDATA -setenv ICE_MACHINE_BASELINE $HOME/scratch/CICE_BASELINE -setenv ICE_MACHINE_SUBMIT "sbatch" -setenv ICE_MACHINE_TPNODE 40 -setenv ICE_MACHINE_ACCT P0000000 -setenv ICE_MACHINE_QUEUE "normal" -setenv ICE_MACHINE_BLDTHRDS 1 -setenv ICE_MACHINE_QSTAT "squeue --jobs=" diff --git a/configuration/scripts/machines/env.gaeac5_intel b/configuration/scripts/machines/env.gaeac5_intel index 69bddb428..2fbbffef8 100644 --- a/configuration/scripts/machines/env.gaeac5_intel +++ b/configuration/scripts/machines/env.gaeac5_intel @@ -7,37 +7,33 @@ endif if ("$inp" != "-nomodules") then -#source /lustre/f2/pdata/esrl/gsd/contrib/lua-5.1.4.9/init/init_lmod.csh source $MODULESHOME/init/csh -#module list -module load PrgEnv-intel -module load intel -#module load intel/2023.2.0 -#module load cce/18.0.0 +module load PrgEnv-intel/8.6.0 +module load intel/2025.0 +module load cray-mpich/8.1.32 module load cray-hdf5/1.14.3.1 module load cray-netcdf/4.9.0.13 setenv NETCDF $NETCDF_DIR -echo zzz final module list -module list -#module avail intel +#echo zzz final module list +#module list # May be needed for OpenMP memory setenv OMP_STACKSIZE 64M endif -env | grep NETCDF +#env | grep NETCDF setenv ICE_MACHINE_MACHNAME gaea -setenv ICE_MACHINE_MACHINFO "Cray XC40 Intel Haswell/Broadwell 2.3GHz, Gemini Interconnect" +setenv ICE_MACHINE_MACHINFO "HPE-EX Cray X3000, AMD EPYC 7H12 2.6 GHz, HPE Slingshot interconnect" setenv ICE_MACHINE_ENVNAME intel -setenv ICE_MACHINE_ENVINFO "ifort 18.0.5 20180823, cray-mpich, cray-netcdf" +setenv ICE_MACHINE_ENVINFO "ifort 2025.0, cray-mpich 8.1.32, cray-netcdf 4.9.0.13" setenv ICE_MACHINE_MAKE gmake setenv ICE_MACHINE_WKDIR $HOME/scratch/CICE_RUNS -setenv ICE_MACHINE_INPUTDATA /ncrc/home1/Robert.Grumbine/rgdev/CICE_INPUTDATA +setenv ICE_MACHINE_INPUTDATA /ncrc/home1/Anthony.Craig/scratch setenv ICE_MACHINE_BASELINE $HOME/scratch/CICE_BASELINE setenv ICE_MACHINE_SUBMIT "sbatch" -setenv ICE_MACHINE_TPNODE 40 +setenv ICE_MACHINE_TPNODE 128 setenv ICE_MACHINE_ACCT P0000000 setenv ICE_MACHINE_QUEUE "normal" setenv ICE_MACHINE_BLDTHRDS 1 diff --git a/configuration/scripts/machines/env.gaea_intel b/configuration/scripts/machines/env.gaeac6_intel similarity index 53% 
rename from configuration/scripts/machines/env.gaea_intel rename to configuration/scripts/machines/env.gaeac6_intel index 69bddb428..0963ecc69 100644 --- a/configuration/scripts/machines/env.gaea_intel +++ b/configuration/scripts/machines/env.gaeac6_intel @@ -10,15 +10,14 @@ if ("$inp" != "-nomodules") then #source /lustre/f2/pdata/esrl/gsd/contrib/lua-5.1.4.9/init/init_lmod.csh source $MODULESHOME/init/csh #module list -module load PrgEnv-intel -module load intel -#module load intel/2023.2.0 -#module load cce/18.0.0 +module load PrgEnv-intel/8.6.0 +module load intel/2025.0 +module load cray-mpich/8.1.32 module load cray-hdf5/1.14.3.1 module load cray-netcdf/4.9.0.13 setenv NETCDF $NETCDF_DIR -echo zzz final module list -module list +#echo zzz final module list +#module list #module avail intel # May be needed for OpenMP memory @@ -26,19 +25,19 @@ setenv OMP_STACKSIZE 64M endif -env | grep NETCDF +#env | grep NETCDF setenv ICE_MACHINE_MACHNAME gaea -setenv ICE_MACHINE_MACHINFO "Cray XC40 Intel Haswell/Broadwell 2.3GHz, Gemini Interconnect" +setenv ICE_MACHINE_MACHINFO "HPE-EX Cray3000, AMD EPYC 9654 2.4GHz, HPE Slingshot interconnect" setenv ICE_MACHINE_ENVNAME intel -setenv ICE_MACHINE_ENVINFO "ifort 18.0.5 20180823, cray-mpich, cray-netcdf" +setenv ICE_MACHINE_ENVINFO "intel 2025.0, cray-mpich 8.1.32, cray-netcdf 4.9.0.13" setenv ICE_MACHINE_MAKE gmake -setenv ICE_MACHINE_WKDIR $HOME/scratch/CICE_RUNS -setenv ICE_MACHINE_INPUTDATA /ncrc/home1/Robert.Grumbine/rgdev/CICE_INPUTDATA -setenv ICE_MACHINE_BASELINE $HOME/scratch/CICE_BASELINE +setenv ICE_MACHINE_WKDIR $HOME/scratch6/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /ncrc/home1/Anthony.Craig/scratch6 +setenv ICE_MACHINE_BASELINE $HOME/scratch6/CICE_BASELINE setenv ICE_MACHINE_SUBMIT "sbatch" -setenv ICE_MACHINE_TPNODE 40 -setenv ICE_MACHINE_ACCT P0000000 +setenv ICE_MACHINE_TPNODE 192 +setenv ICE_MACHINE_ACCT A00000 setenv ICE_MACHINE_QUEUE "normal" setenv ICE_MACHINE_BLDTHRDS 1 setenv ICE_MACHINE_QSTAT "squeue --jobs=" diff --git a/configuration/scripts/options/set_nml.box2001 b/configuration/scripts/options/set_nml.box2001 index ad42a4236..1c2fc3683 100644 --- a/configuration/scripts/options/set_nml.box2001 +++ b/configuration/scripts/options/set_nml.box2001 @@ -11,9 +11,8 @@ grid_type = 'rectangular' kmt_type = 'default' dxrect = 16.e5 dyrect = 16.e5 -close_boundaries = .true. -ew_boundary_type = 'open' -ns_boundary_type = 'open' +ew_boundary_type = 'closed' +ns_boundary_type = 'closed' tr_iage = .false. tr_FY = .false. tr_lvl = .false. diff --git a/configuration/scripts/options/set_nml.boxchan b/configuration/scripts/options/set_nml.boxchan index a3f0fd191..67fdaff9c 100644 --- a/configuration/scripts/options/set_nml.boxchan +++ b/configuration/scripts/options/set_nml.boxchan @@ -9,7 +9,6 @@ grid_type = 'rectangular' kmt_type = 'channel' dxrect = 16.e5 dyrect = 16.e5 -close_boundaries = .false. ew_boundary_type = 'cyclic' ns_boundary_type = 'open' tr_iage = .false. diff --git a/configuration/scripts/options/set_nml.boxchan1e b/configuration/scripts/options/set_nml.boxchan1e index cf8b0d314..0a4e92bef 100644 --- a/configuration/scripts/options/set_nml.boxchan1e +++ b/configuration/scripts/options/set_nml.boxchan1e @@ -9,7 +9,6 @@ grid_type = 'rectangular' kmt_type = 'channel_oneeast' dxrect = 16.e5 dyrect = 16.e5 -close_boundaries = .false. ew_boundary_type = 'cyclic' ns_boundary_type = 'open' tr_iage = .false. 
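[Editorial note, not part of the patch: the box-case option files above and below replace the separate close_boundaries flag with an explicit 'closed' boundary type. As a minimal illustrative sketch using only option names that appear in these set_nml hunks (the namelist group each option belongs to is not shown and all other settings are assumed to keep their ice_in defaults), a closed rectangular box configuration reduces to a fragment like:

    grid_type        = 'rectangular'
    kmt_type         = 'default'
    ew_boundary_type = 'closed'
    ns_boundary_type = 'closed'

Every name and value in this fragment is taken verbatim from the set_nml.box2001 hunk above and the set_nml.boxclosed hunk that follows; only the grouping into a single fragment is illustrative.]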
diff --git a/configuration/scripts/options/set_nml.boxchan1n b/configuration/scripts/options/set_nml.boxchan1n index f90d4da0c..a342f811c 100644 --- a/configuration/scripts/options/set_nml.boxchan1n +++ b/configuration/scripts/options/set_nml.boxchan1n @@ -9,7 +9,6 @@ grid_type = 'rectangular' kmt_type = 'channel_onenorth' dxrect = 16.e5 dyrect = 16.e5 -close_boundaries = .false. ew_boundary_type = 'open' ns_boundary_type = 'cyclic' tr_iage = .false. diff --git a/configuration/scripts/options/set_nml.boxclosed b/configuration/scripts/options/set_nml.boxclosed index d55faa302..ba9d9b4d7 100644 --- a/configuration/scripts/options/set_nml.boxclosed +++ b/configuration/scripts/options/set_nml.boxclosed @@ -9,9 +9,8 @@ grid_type = 'rectangular' kmt_type = 'default' dxrect = 16.e5 dyrect = 16.e5 -close_boundaries = .true. -ew_boundary_type = 'open' -ns_boundary_type = 'open' +ew_boundary_type = 'closed' +ns_boundary_type = 'closed' tr_iage = .false. tr_FY = .false. tr_lvl = .false. diff --git a/configuration/scripts/options/set_nml.boxopen b/configuration/scripts/options/set_nml.boxopen index 84badd373..081865d7a 100644 --- a/configuration/scripts/options/set_nml.boxopen +++ b/configuration/scripts/options/set_nml.boxopen @@ -6,7 +6,6 @@ histfreq = 'd','x','x','x','x' grid_type = 'rectangular' dxrect = 16.e5 dyrect = 16.e5 -close_boundaries = .false. ew_boundary_type = 'cyclic' ns_boundary_type = 'open' ktherm = -1 diff --git a/configuration/scripts/options/set_nml.boxslotcyl b/configuration/scripts/options/set_nml.boxslotcyl index 10f0518c8..de35e21e9 100644 --- a/configuration/scripts/options/set_nml.boxslotcyl +++ b/configuration/scripts/options/set_nml.boxslotcyl @@ -11,9 +11,8 @@ kmt_type = 'default' dxrect = 10.e5 dyrect = 10.e5 kcatbound = 2 -ew_boundary_type = 'open' -ns_boundary_type = 'open' -close_boundaries = .true. +ew_boundary_type = 'closed' +ns_boundary_type = 'closed' tr_lvl = .false. tr_pond_lvl = .false. ktherm = -1 diff --git a/configuration/scripts/options/set_nml.boxwallblock b/configuration/scripts/options/set_nml.boxwallblock index 2e9a34728..352f4c4ad 100644 --- a/configuration/scripts/options/set_nml.boxwallblock +++ b/configuration/scripts/options/set_nml.boxwallblock @@ -9,7 +9,6 @@ grid_type = 'rectangular' kmt_type = 'wall' dxrect = 16.e5 dyrect = 16.e5 -close_boundaries = .false. ew_boundary_type = 'cyclic' ns_boundary_type = 'cyclic' tr_iage = .false. diff --git a/configuration/scripts/options/set_nml.cesm3 b/configuration/scripts/options/set_nml.cesm3 new file mode 100644 index 000000000..35d03f5b6 --- /dev/null +++ b/configuration/scripts/options/set_nml.cesm3 @@ -0,0 +1,12 @@ + +congel_freeze = 'one-step' +hist_time_axis = 'middle' +nfsd = 12 +nfreq = 25 +snwredist = "snwITDrdg" +tr_fsd = .true. +tr_pond_lvl = .false. +tr_pond_sealvl = .true. +tr_snow = .true. +tscale_pnd_drain = 0.5d0 +wave_frac_spec = 'alt' diff --git a/configuration/scripts/options/set_nml.cmip b/configuration/scripts/options/set_nml.cmip new file mode 100644 index 000000000..f38b8a305 --- /dev/null +++ b/configuration/scripts/options/set_nml.cmip @@ -0,0 +1,144 @@ + histfreq = 'm','d','n','x','x' + histfreq_n = 1,1,15,1,1 + hist_avg = .true.,.true.,.false.,.false.,.false.
+ f_aice = 'md' + f_aice_init = 'md' + f_aicen = 'md' + f_siage = 'md' + f_siconc = 'md' + f_sifb = 'md' + f_siflcondbot = 'md' + f_siflcondtop = 'md' + f_sifllattop = 'md' + f_sifllwdtop = 'md' + f_sifllwutop = 'md' + f_siflsensbot = 'md' + f_siflsenstop = 'md' + f_siflswdtop = 'md' + f_siflswutop = 'md' + f_sihc = 'md' + f_siitdconc = 'md' + f_siitdsnconc = 'md' + f_siitdsnthick = 'md' + f_siitdthick = 'md' + f_simpconc = 'md' + f_simpeffconc = 'md' + f_simprefrozen = 'md' + f_simpthick = 'md' + f_sirdgconc = 'md' + f_sirdgthick = 'md' + f_sisali = 'md' + f_sisaltmass = 'md' + f_sisnhc = 'md' + f_sisnthick = 'md' + f_sitempbot = 'md' + f_sitempsnic = 'md' + f_sitemptop = 'md' + f_sithick = 'md' + f_sitimefrac = 'md' + f_siu = 'md' + f_siv = 'md' + f_sicompstren = 'm' + f_sidconcdyn = 'm' + f_sidconcth = 'm' + f_sidivvel = 'n' + f_sidmassdyn = 'm' + f_sidmassevapsubl = 'm' + f_sidmassgrowthbot = 'm' + f_sidmassgrowthsi = 'm' + f_sidmassgrowthwat = 'm' + f_sidmassmeltbot = 'm' + f_sidmassmeltlat = 'm' + f_sidmassmelttop = 'm' + f_sidmassth = 'm' + f_sidmasstranx = 'm' + f_sidmasstrany = 'm' + f_sidragbot = 'm' + f_sidragtop = 'm' + f_siflfwbot = 'm' + f_siflfwdrain = 'm' + f_siflsaltbot = 'm' + f_siforcecoriolx = 'm' + f_siforcecorioly = 'm' + f_siforceintstrx = 'm' + f_siforceintstry = 'm' + f_siforcetiltx = 'm' + f_siforcetilty = 'm' + f_simass = 'm' + f_sipr = 'm' + f_sishearvel = 'n' + f_sisnconc = 'm' + f_sisndmassdyn = 'm' + f_sisndmassmelt = 'm' + f_sisndmasssi = 'm' + f_sisndmasssnf = 'm' + f_sisndmasssubl = 'm' + f_sisndmasswind = 'm' + f_sisnmass = 'm' + f_sispeed = 'm' + f_sistressave = 'n' + f_sistressmax = 'n' + f_sistrxdtop = 'm' + f_sistrydtop = 'm' + f_sistrxubot = 'm' + f_sistryubot = 'm' + f_sivol = 'm' + f_icepresent = 'x' + f_hi = 'x' + f_hs = 'x' + f_Tsfc = 'x' + f_iage = 'x' + f_fswdn = 'x' + f_fswup = 'x' + f_fswthru = 'x' + f_flwdn = 'x' + f_flwup = 'x' + f_fsens = 'x' + f_flat = 'x' + f_fhocn = 'x' + f_rain = 'x' + f_snow = 'x' + f_evap = 'x' + f_fsalt = 'x' + f_fresh = 'x' + f_meltl = 'x' + f_melts = 'x' + f_meltt = 'x' + f_meltb = 'x' + f_strairx = 'x' + f_strairy = 'x' + f_strocnx = 'x' + f_strocny = 'x' + f_strtltx = 'x' + f_strtlty = 'x' + f_strintx = 'x' + f_strinty = 'x' + f_strcorx = 'x' + f_strcory = 'x' + f_strength = 'x' + f_daidtt = 'x' + f_daidtd = 'x' + f_dvidtt = 'x' + f_dvidtd = 'x' + f_congel = 'x' + f_frazil = 'x' + f_snoice = 'x' + f_vicen = 'x' + f_vsnon = 'x' + f_divu = 'x' + f_shear = 'x' + f_snowfrac = 'x' + f_snowfracn = 'x' + f_uvel = 'x' + f_vvel = 'x' + f_icespd = 'x' + f_apeff = 'x' + f_apond = 'md' + f_apond_ai = 'md' + f_hpond = 'x' + f_ipond = 'x' + f_fsloss = 'x' + f_alvl = 'md' + f_vlvl = 'md' + f_ardg = 'md' + f_vrdg = 'md' diff --git a/configuration/scripts/options/set_nml.fsd1 b/configuration/scripts/options/set_nml.fsd1 index 042ed5f25..4c59704da 100644 --- a/configuration/scripts/options/set_nml.fsd1 +++ b/configuration/scripts/options/set_nml.fsd1 @@ -1,5 +1,6 @@ tr_fsd = .true. nfsd = 1 wave_spec_type = 'none' +wave_height_type = 'none' nfreq = 25 diff --git a/configuration/scripts/options/set_nml.fsd12 b/configuration/scripts/options/set_nml.fsd12 index 620b2e96b..796125e0d 100644 --- a/configuration/scripts/options/set_nml.fsd12 +++ b/configuration/scripts/options/set_nml.fsd12 @@ -1,5 +1,6 @@ tr_fsd = .true. 
nfsd = 12 wave_spec_type = 'profile' +wave_height_type = 'internal' nfreq = 25 diff --git a/configuration/scripts/options/set_nml.fsd12ww3 b/configuration/scripts/options/set_nml.fsd12ww3 index a8d5d06ac..18a904160 100644 --- a/configuration/scripts/options/set_nml.fsd12ww3 +++ b/configuration/scripts/options/set_nml.fsd12ww3 @@ -1,5 +1,6 @@ tr_fsd = .true. nfsd = 12 wave_spec_type = 'constant' +wave_height_type = 'internal' nfreq = 25 wave_spec_file = 'ICE_MACHINE_INPUTDATA/CICE_data/forcing/gx3/ww3.20100101_efreq_remapgx3.nc' diff --git a/configuration/scripts/options/set_nml.gridc b/configuration/scripts/options/set_nml.gridc index a04fab4fd..63c5af2bb 100644 --- a/configuration/scripts/options/set_nml.gridc +++ b/configuration/scripts/options/set_nml.gridc @@ -1,2 +1,4 @@ grid_ice = 'C' +dyn_area_min = 0.001d0 +dyn_mass_min = 0.01d0 diff --git a/configuration/scripts/options/set_nml.gridcd b/configuration/scripts/options/set_nml.gridcd index 7889e64f4..4cf521a4b 100644 --- a/configuration/scripts/options/set_nml.gridcd +++ b/configuration/scripts/options/set_nml.gridcd @@ -1,4 +1,6 @@ grid_ice = 'C_override_D' +dyn_area_min = 0.001d0 +dyn_mass_min = 0.01d0 # visc_method=avg_zeta causes some gridcd tests to abort, use avg_strength for now visc_method = 'avg_strength' diff --git a/configuration/scripts/options/set_nml.histall b/configuration/scripts/options/set_nml.histall index 2b9810bdd..e05ff729b 100644 --- a/configuration/scripts/options/set_nml.histall +++ b/configuration/scripts/options/set_nml.histall @@ -21,7 +21,6 @@ f_VGRDb = .true. f_VGRDa = .true. f_bounds = .true. - f_CMIP = 'm' f_aice = 'md1h' f_hi = 'h1dm' f_hs = 'd1m' @@ -29,6 +28,8 @@ f_sice = 'md' f_uvel = 'md' f_vvel = 'dm' + f_icespd = 'md' + f_icedir = 'md' f_uatm = 'dm' f_vatm = 'dm' f_fswdn = 'dm' @@ -180,6 +181,18 @@ f_hpond_ai = 'md' f_ipond_ai = 'md' f_apeff_ai = 'md' + f_smassicen = 'md' + f_smassliqn = 'md' + f_rhos_cmpn = 'md' + f_rhos_cntn = 'md' + f_rsnwn = 'md' + f_smassice = 'md' + f_smassliq = 'md' + f_rhos_cmp = 'md' + f_rhos_cnt = 'md' + f_rsnw = 'md' + f_meltsliq = 'md' + f_fsloss = 'md' f_fiso_atm = 'md' f_fiso_ocn = 'md' f_iso = 'md' @@ -237,3 +250,83 @@ f_aice_ww = 'md' f_diam_ww = 'md' f_hice_ww = 'md' + f_aice_init = 'md' + f_siage = 'md' + f_siconc = 'md' + f_sifb = 'md' + f_siflcondbot = 'md' + f_siflcondtop = 'md' + f_sifllattop = 'md' + f_sifllwdtop = 'md' + f_sifllwutop = 'md' + f_siflsensbot = 'md' + f_siflsenstop = 'md' + f_siflswdtop = 'md' + f_siflswutop = 'md' + f_sihc = 'md' + f_siitdconc = 'md' + f_siitdsnconc = 'md' + f_siitdsnthick = 'md' + f_siitdthick = 'md' + f_simpconc = 'md' + f_simpeffconc = 'md' + f_simprefrozen = 'md' + f_simpthick = 'md' + f_sirdgconc = 'md' + f_sirdgthick = 'md' + f_sisali = 'md' + f_sisaltmass = 'md' + f_sisnhc = 'md' + f_sisnthick = 'md' + f_sitempbot = 'md' + f_sitempsnic = 'md' + f_sitemptop = 'md' + f_sithick = 'md' + f_sitimefrac = 'md' + f_siu = 'md' + f_siv = 'md' + f_sicompstren = 'md' + f_sidconcdyn = 'md' + f_sidconcth = 'md' + f_sidivvel = 'md' + f_sidmassdyn = 'md' + f_sidmassevapsubl = 'md' + f_sidmassgrowthbot = 'md' + f_sidmassgrowthsi = 'md' + f_sidmassgrowthwat = 'md' + f_sidmassmeltbot = 'md' + f_sidmassmeltlat = 'md' + f_sidmassmelttop = 'md' + f_sidmassth = 'md' + f_sidmasstranx = 'md' + f_sidmasstrany = 'md' + f_sidragbot = 'md' + f_sidragtop = 'md' + f_siflfwbot = 'md' + f_siflfwdrain = 'md' + f_siflsaltbot = 'md' + f_siforcecoriolx = 'md' + f_siforcecorioly = 'md' + f_siforceintstrx = 'md' + f_siforceintstry = 'md' + f_siforcetiltx 
= 'md' + f_siforcetilty = 'md' + f_simass = 'md' + f_sipr = 'md' + f_sishearvel = 'md' + f_sisnconc = 'md' + f_sisndmassdyn = 'md' + f_sisndmassmelt = 'md' + f_sisndmasssi = 'md' + f_sisndmasssnf = 'md' + f_sisndmasssubl = 'md' + f_sisndmasswind = 'md' + f_sisnmass = 'md' + f_sispeed = 'md' + f_sistressave = 'md' + f_sistressmax = 'md' + f_sistrxdtop = 'md' + f_sistrydtop = 'md' + f_sistrxubot = 'md' + f_sistryubot = 'md' + f_sivol = 'md' diff --git a/configuration/scripts/options/set_nml.histall10d b/configuration/scripts/options/set_nml.histall10d new file mode 100644 index 000000000..0ba5b6809 --- /dev/null +++ b/configuration/scripts/options/set_nml.histall10d @@ -0,0 +1,334 @@ + grid_outfile = .true. + histfreq = 'd','h','m','x','x' + histfreq_n = 10,24,1 + histfreq_base = 'zero','init','zero' + hist_avg = .true.,.true.,.true. + write_histrest = .true. + write_ic = .true. + f_tmask = .true. + f_blkmask = .true. + f_tarea = .true. + f_uarea = .true. + f_dxt = .true. + f_dyt = .true. + f_dxu = .true. + f_dyu = .true. + f_HTN = .true. + f_HTE = .true. + f_ANGLE = .true. + f_ANGLET = .true. + f_NCAT = .true. + f_VGRDi = .true. + f_VGRDs = .true. + f_VGRDb = .true. + f_VGRDa = .true. + f_bounds = .true. + f_aice = 'mdh' + f_hi = 'hdm' + f_hs = 'dmh' + f_Tsfc = 'mdh' + f_sice = 'hmd' + f_uvel = 'hmd' + f_vvel = 'hdm' + f_icespd = 'hmd' + f_icedir = 'md' + f_uatm = 'dm' + f_vatm = 'dm' + f_fswdn = 'dm' + f_flwdn = 'md' + f_snowfrac = 'md' + f_snow = 'md' + f_snow_ai = 'md' + f_rain = 'md' + f_rain_ai = 'md' + f_sst = 'md' + f_sss = 'md' + f_uocn = 'md' + f_vocn = 'md' + f_frzmlt = 'md' + f_fswfac = 'md' + f_fswint_ai = 'md' + f_fswabs = 'md' + f_fswabs_ai = 'md' + f_albsni = 'md' + f_alvdr = 'md' + f_alidr = 'md' + f_alvdf = 'md' + f_alidf = 'md' + f_alvdr_ai = 'md' + f_alidr_ai = 'md' + f_alvdf_ai = 'md' + f_alidf_ai = 'md' + f_albice = 'md' + f_albsno = 'md' + f_albpnd = 'md' + f_coszen = 'md' + f_flat = 'md' + f_flat_ai = 'md' + f_fsens = 'md' + f_fsens_ai = 'md' + f_fswup = 'md' + f_flwup = 'md' + f_flwup_ai = 'md' + f_evap = 'md' + f_evap_ai = 'md' + f_Tair = 'md' + f_Tref = 'md' + f_Qref = 'md' + f_congel = 'md' + f_frazil = 'md' + f_snoice = 'md' + f_dsnow = 'md' + f_melts = 'md' + f_meltt = 'md' + f_meltb = 'md' + f_meltl = 'md' + f_fresh = 'md' + f_fresh_ai = 'md' + f_fsalt = 'md' + f_fsalt_ai = 'md' + f_fbot = 'md' + f_fhocn = 'md' + f_fhocn_ai = 'md' + f_fswthru = 'md' + f_fswthru_ai = 'md' + f_fsurf_ai = 'md' + f_fcondtop_ai = 'md' + f_fmeltt_ai = 'md' + f_strairx = 'md' + f_strairy = 'md' + f_strtltx = 'md' + f_strtlty = 'md' + f_strcorx = 'md' + f_strcory = 'md' + f_strocnx = 'md' + f_strocny = 'md' + f_strintx = 'md' + f_strinty = 'md' + f_taubx = 'md' + f_tauby = 'md' + f_strength = 'md' + f_divu = 'md' + f_shear = 'md' + f_vort = 'md' + f_sig1 = 'md' + f_sig2 = 'md' + f_sigP = 'md' + f_dvidtt = 'md' + f_dvidtd = 'md' + f_daidtt = 'md' + f_daidtd = 'md' + f_dagedtt = 'md' + f_dagedtd = 'md' + f_mlt_onset = 'x' + f_frz_onset = 'x' + f_hisnap = 'md' + f_aisnap = 'md' + f_trsig = 'md' + f_icepresent = 'md' + f_iage = 'md' + f_FY = 'md' + f_aicen = 'md' + f_vicen = 'md' + f_vsnon = 'md' + f_snowfracn = 'md' + f_keffn_top = 'md' + f_Tinz = 'md' + f_Sinz = 'md' + f_Tsnz = 'md' + f_fsurfn_ai = 'md' + f_fcondtopn_ai = 'md' + f_fmelttn_ai = 'md' + f_flatn_ai = 'md' + f_fsensn_ai = 'md' + f_alvl = 'md' + f_vlvl = 'md' + f_ardg = 'md' + f_vrdg = 'md' + f_dardg1dt = 'md' + f_dardg2dt = 'md' + f_dvirdgdt = 'md' + f_opening = 'md' + f_ardgn = 'md' + f_vrdgn = 'md' + f_dardg1ndt = 'md' + 
f_dardg2ndt = 'md' + f_dvirdgndt = 'md' + f_krdgn = 'md' + f_aparticn = 'md' + f_aredistn = 'md' + f_vredistn = 'md' + f_araftn = 'md' + f_vraftn = 'md' + f_apondn = 'md' + f_apeffn = 'md' + f_hpondn = 'md' + f_dpnd_flushn = 'md' + f_dpnd_exponn = 'md' + f_dpnd_freebdn = 'md' + f_dpnd_initialn= 'md' + f_dpnd_dlidn = 'md' + f_apond = 'md' + f_hpond = 'md' + f_ipond = 'md' + f_apeff = 'md' + f_dpnd_flush = 'md' + f_dpnd_expon = 'md' + f_dpnd_freebd = 'md' + f_dpnd_initial = 'md' + f_dpnd_dlid = 'md' + f_dpnd_melt = 'md' + f_dpnd_ridge = 'md' + f_apond_ai = 'md' + f_hpond_ai = 'md' + f_ipond_ai = 'md' + f_apeff_ai = 'md' + f_smassicen = 'md' + f_smassliqn = 'md' + f_rhos_cmpn = 'md' + f_rhos_cntn = 'md' + f_rsnwn = 'md' + f_smassice = 'md' + f_smassliq = 'md' + f_rhos_cmp = 'md' + f_rhos_cnt = 'md' + f_rsnw = 'md' + f_meltsliq = 'md' + f_fsloss = 'md' + f_fiso_atm = 'md' + f_fiso_ocn = 'md' + f_iso = 'md' + f_faero_atm = 'md' + f_faero_ocn = 'md' + f_aero = 'md' + f_fbio = 'md' + f_fbio_ai = 'md' + f_zaero = 'md' + f_bgc_S = 'md' + f_bgc_N = 'md' + f_bgc_C = 'md' + f_bgc_DOC = 'md' + f_bgc_DIC = 'md' + f_bgc_chl = 'md' + f_bgc_Nit = 'md' + f_bgc_Am = 'md' + f_bgc_Sil = 'md' + f_bgc_DMSPp = 'md' + f_bgc_DMSPd = 'md' + f_bgc_DMS = 'md' + f_bgc_DON = 'md' + f_bgc_Fe = 'md' + f_bgc_hum = 'md' + f_bgc_PON = 'md' + f_bgc_ml = 'md' + f_upNO = 'md' + f_upNH = 'md' + f_bTin = 'md' + f_bphi = 'md' + f_iDi = 'md' + f_iki = 'md' + f_fbri = 'md' + f_hbri = 'md' + f_zfswin = 'md' + f_bionet = 'md' + f_biosnow = 'md' + f_grownet = 'md' + f_PPnet = 'md' + f_algalpeak = 'md' + f_zbgc_frac = 'md' + f_drag = 'md' + f_Cdn_atm = 'md' + f_Cdn_ocn = 'md' + f_fsdrad = 'md' + f_fsdperim = 'md' + f_afsd = 'md' + f_afsdn = 'md' + f_dafsd_newi = 'md' + f_dafsd_latg = 'md' + f_dafsd_latm = 'md' + f_dafsd_wave = 'md' + f_dafsd_weld = 'md' + f_wave_sig_ht = 'md' + f_aice_ww = 'md' + f_diam_ww = 'md' + f_hice_ww = 'md' + f_aice_init = 'md' + f_siage = 'md' + f_siconc = 'md' + f_sifb = 'md' + f_siflcondbot = 'md' + f_siflcondtop = 'md' + f_sifllattop = 'md' + f_sifllwdtop = 'md' + f_sifllwutop = 'md' + f_siflsensbot = 'md' + f_siflsenstop = 'md' + f_siflswdtop = 'md' + f_siflswutop = 'md' + f_sihc = 'md' + f_siitdconc = 'md' + f_siitdsnconc = 'md' + f_siitdsnthick = 'md' + f_siitdthick = 'md' + f_simpconc = 'md' + f_simpeffconc = 'md' + f_simprefrozen = 'md' + f_simpthick = 'md' + f_sirdgconc = 'md' + f_sirdgthick = 'md' + f_sisali = 'md' + f_sisaltmass = 'md' + f_sisnhc = 'md' + f_sisnthick = 'md' + f_sitempbot = 'md' + f_sitempsnic = 'md' + f_sitemptop = 'md' + f_sithick = 'md' + f_sitimefrac = 'md' + f_siu = 'md' + f_siv = 'md' + f_sicompstren = 'md' + f_sidconcdyn = 'md' + f_sidconcth = 'md' + f_sidivvel = 'md' + f_sidmassdyn = 'md' + f_sidmassevapsubl = 'md' + f_sidmassgrowthbot = 'md' + f_sidmassgrowthsi = 'md' + f_sidmassgrowthwat = 'md' + f_sidmassmeltbot = 'md' + f_sidmassmeltlat = 'md' + f_sidmassmelttop = 'md' + f_sidmassth = 'md' + f_sidmasstranx = 'md' + f_sidmasstrany = 'md' + f_sidragbot = 'md' + f_sidragtop = 'md' + f_siflfwbot = 'md' + f_siflfwdrain = 'md' + f_siflsaltbot = 'md' + f_siforcecoriolx = 'md' + f_siforcecorioly = 'md' + f_siforceintstrx = 'md' + f_siforceintstry = 'md' + f_siforcetiltx = 'md' + f_siforcetilty = 'md' + f_simass = 'md' + f_sipr = 'md' + f_sishearvel = 'md' + f_sisnconc = 'md' + f_sisndmassdyn = 'md' + f_sisndmassmelt = 'md' + f_sisndmasssi = 'md' + f_sisndmasssnf = 'md' + f_sisndmasssubl = 'md' + f_sisndmasswind = 'md' + f_sisnmass = 'md' + f_sispeed = 'md' + f_sistressave = 'md' + 
f_sistressmax = 'md' + f_sistrxdtop = 'md' + f_sistrydtop = 'md' + f_sistrxubot = 'md' + f_sistryubot = 'md' + f_sivol = 'md' diff --git a/configuration/scripts/options/set_nml.histdbg b/configuration/scripts/options/set_nml.histdbg index a70e734e5..23a7685c7 100644 --- a/configuration/scripts/options/set_nml.histdbg +++ b/configuration/scripts/options/set_nml.histdbg @@ -20,7 +20,6 @@ f_VGRDb = .true. f_VGRDa = .true. f_bounds = .true. - f_CMIP = 'm' f_aice = 'md1h' f_hi = 'h1dm' f_hs = 'd1m' diff --git a/configuration/scripts/options/set_nml.restaicetest b/configuration/scripts/options/set_nml.restaicetest new file mode 100644 index 000000000..3d779d732 --- /dev/null +++ b/configuration/scripts/options/set_nml.restaicetest @@ -0,0 +1,2 @@ +restart_mod = "adjust_aice_test" + diff --git a/configuration/scripts/tests/base_suite.ts b/configuration/scripts/tests/base_suite.ts index 46f2c1900..4af813211 100644 --- a/configuration/scripts/tests/base_suite.ts +++ b/configuration/scripts/tests/base_suite.ts @@ -86,3 +86,4 @@ restart gx3 4x4 diag1,gx3ncarbulk,short smoke gx3 4x1 calcdragio restart gx3 4x2 atmbndyconstant restart gx3 4x2 atmbndymixed +smoke gx3 12x2 diag1,run5day,restaicetest,debug diff --git a/configuration/scripts/tests/baseline.script b/configuration/scripts/tests/baseline.script index 9fd2fe001..168824010 100644 --- a/configuration/scripts/tests/baseline.script +++ b/configuration/scripts/tests/baseline.script @@ -136,17 +136,17 @@ if (${ICE_BFBCOMP} != ${ICE_SPVAL}) then set cnt = 0 if (${job} =~ [0-9]*) then while ($qstatjob) - ${ICE_MACHINE_QSTAT} $job >&/dev/null - set qstatus = $status + set qstatus = `${ICE_MACHINE_QSTAT} $job | grep $job | wc -l` +# ${ICE_MACHINE_QSTAT} $job # echo $job $qstatus - if ($qstatus != 0) then + if ($qstatus == 0) then echo "Job $job completed" set qstatjob = 0 else @ cnt = $cnt + 1 echo "Waiting for $job to complete $cnt" sleep 60 # Sleep for 1 minute, so as not to overwhelm the queue manager - if ($cnt > 30) then + if ($cnt > 5) then echo "No longer waiting for $job to complete" set qstatjob = 0 # Abandon check after cnt sleep 60 checks endif diff --git a/configuration/scripts/tests/comparebfb.csh b/configuration/scripts/tests/comparebfb.csh index 046f40651..2d2618b21 100755 --- a/configuration/scripts/tests/comparebfb.csh +++ b/configuration/scripts/tests/comparebfb.csh @@ -1,14 +1,14 @@ #!/bin/csh -f -# Compare the binary files +# Compare the CICE files via binary cmp diff #----------------------------------------------------------- # usage: comparebfb.csh base_file test_file -# does binary diff of two files +# does diff of two files # usage: comparebfb.csh base_dir test_dir -# looks for base_iced and iced binary files for comparison +# looks for base_iced and iced files for comparison # usage: comparebfb.csh base_dir -# looks for iced binary files in both directories for comparison +# looks for iced files in both directories for comparison # # Return Codes (depends on quality of error checking) # 0 = pass @@ -38,11 +38,11 @@ else if ( $#argv == 2 ) then else echo "Error in ${0}" echo "Usage: ${0} " - echo " does binary diff of two files" + echo " does diff of two files" echo "Usage: ${0} " - echo " looks for base_iced and iced binary files for comparison" + echo " looks for base_iced and iced files for comparison" echo "Usage: ${0} " - echo " looks for iced binary files in both directories for comparison" + echo " looks for iced files in both directories for comparison" exit 9 endif diff --git a/configuration/scripts/tests/io_suite.ts 
b/configuration/scripts/tests/io_suite.ts index 71bcd00a5..7d7ee2ba3 100644 --- a/configuration/scripts/tests/io_suite.ts +++ b/configuration/scripts/tests/io_suite.ts @@ -7,6 +7,15 @@ smoke gx3 8x2 histall,ionetcdf,iocdf5,run5day #smoke gx3 8x2 histall,iopio2,iocdf5 smoke_gx3_8x2_histall_iocdf5_ionetcdf smoke gx3 8x2 histall,iopio2,iocdf5,run5day +# history restart tests +restart gx3 15x2 gx3ncarbulk,fsd12,isotope,debug,histall10d,iobinary +restart gx3 18x1 debug,fsd12,isotope,bgczm,histall10d,ionetcdf,iocdf5 +restart gx3 20x2 debug,fsd12,isotope,bgczm,histall10d,iopio1,iocdf5 +restart gx3 18x2 debug,fsd12,isotope,bgczm,histall10d,iopio2,iocdf2 +restart gx3 10x2 fsd12,isotope,bgczm,histall10d,ionetcdf,iocdf2 +restart gx3 40x1 fsd12,isotope,bgczm,histall10d,iopio1,iocdf1 +restart gx3 17x2 fsd12,isotope,bgczm,histall10d,iopio2,iocdf5 + # some iobinary configurations fail due to bathymetry netcdf file requirement, remove them # iobinary cannot work with JRA55 because netcdf is turned off restart gx3 8x4 gx3ncarbulk,debug,histall,iobinary,precision8 @@ -68,3 +77,4 @@ restart gx3 8x4 isotope,histall,iopio2,iohdf5,precision8 restart gx3 12x2 fsd12,histall,iopio2,iocdf1,precision8 restart gx3 16x2 debug,histall,iopio2,iocdf2,histinst,precision8 +restart gx3 12x2 cmip,ionetcdf,iocdf2 diff --git a/configuration/scripts/tests/test_restart.script b/configuration/scripts/tests/test_restart.script index 59729b361..e3719f7eb 100644 --- a/configuration/scripts/tests/test_restart.script +++ b/configuration/scripts/tests/test_restart.script @@ -21,20 +21,25 @@ if ( $res != 0 ) then exit 99 endif -# Prepend 'base_' to the final restart file to save for comparison +# Compute date of last file in first run if ( "${ICE_IOTYPE}" == "binary" ) then - set end_date = `ls -t1 ${ICE_RUNDIR}/restart | head -1 | awk -F'.' '{print $NF}'` - foreach file (${ICE_RUNDIR}/restart/*${end_date}) - set surname = `echo $file | awk -F'/' '{print $NF}'` - mv $file ${ICE_RUNDIR}/restart/base_$surname - end + set end_date = `ls -t1 ${ICE_RUNDIR}/restart | head -1 | rev | cut -d "." -f 1 | rev` else - set test_file = `ls -t1 ${ICE_RUNDIR}/restart | head -1` - set test_data = ${ICE_RUNDIR}/restart/${test_file} - set base_data = ${ICE_RUNDIR}/restart/base_${test_file} - mv ${test_data} ${base_data} + set end_date = `ls -t1 ${ICE_RUNDIR}/restart | head -1 | rev | cut -d "." 
-f 2 | rev` endif +# Prepend 'base_' to final restart and history restart files to save for comparison +foreach file (${ICE_RUNDIR}/restart/*${end_date}*) + set surname = `echo $file | awk -F'/' '{print $NF}'` + mv $file ${ICE_RUNDIR}/restart/base_$surname +end + +# Prepend 'base_' to all history files to save for comparison +foreach file (${ICE_RUNDIR}/history/*) + set surname = `echo $file | awk -F'/' '{print $NF}'` + mv $file ${ICE_RUNDIR}/history/base_$surname +end + #----------------------------------------------------------- # Run the CICE model for the restart simulation @@ -69,13 +74,40 @@ else if (${tcolumn} == "") set tcolumn = -1 echo "PASS ${ICE_TESTNAME} run ${ttimeloop} ${tdynamics} ${tcolumn}" >> ${ICE_CASEDIR}/test_output + set restbfb = "PASS" ${ICE_CASEDIR}/casescripts/comparebfb.csh ${ICE_RUNDIR}/restart set bfbstatus = $status - if (${bfbstatus} == 0) then + if (${bfbstatus} != 0) set restbfb = "FAIL" + + set icehrbfb = "NONE" + if (${ICE_TESTNAME} =~ *histall10d* && ${ICE_IOTYPE} !~ *binary* ) then + echo "${restbfb} ${ICE_TESTNAME} restart " >> ${ICE_CASEDIR}/test_output + foreach file1 (${ICE_RUNDIR}/restart/base_iceh* ${ICE_RUNDIR}/history/base_iceh*) + set file2 = `echo $file1 | sed -e 's|base_iceh|iceh|g'` + if (-e ${file2} && `where ncatted` != "") then + if (${icehrbfb} == "NONE") set icehrbfb = "PASS" + # remove global attributes to do bfb binary compare + set file1m = `echo $file1 | sed -e 's|base_iceh|base_iceh_mod|g'` + set file2m = `echo $file1 | sed -e 's|base_iceh|iceh_mod|g'` + ncatted -h -a ,global,d,, ${file1} ${file1m} + ncatted -h -a ,global,d,, ${file2} ${file2m} + ${ICE_CASEDIR}/casescripts/comparebfb.csh ${file1m} ${file2m} + set bfbstatus = $status + if (${bfbstatus} != 0) set icehrbfb = "FAIL" + rm ${file1m} ${file2m} + endif + end + echo "${icehrbfb} ${ICE_TESTNAME} histrest " >> ${ICE_CASEDIR}/test_output + endif + + if (${restbfb} == PASS && ${icehrbfb} == PASS) then + echo "PASS ${ICE_TESTNAME} test " >> ${ICE_CASEDIR}/test_output + else if (${restbfb} == PASS && ${icehrbfb} == NONE) then echo "PASS ${ICE_TESTNAME} test " >> ${ICE_CASEDIR}/test_output else echo "FAIL ${ICE_TESTNAME} test " >> ${ICE_CASEDIR}/test_output endif + endif #----------------------------------------------------------- diff --git a/configuration/scripts/tests/test_restart2.script b/configuration/scripts/tests/test_restart2.script index 67760bbf4..b62a520bc 100644 --- a/configuration/scripts/tests/test_restart2.script +++ b/configuration/scripts/tests/test_restart2.script @@ -21,20 +21,25 @@ if ( $res != 0 ) then exit 99 endif -# Prepend 'base_' to the final restart file to save for comparison +# Compute date of last file in first run if ( "${ICE_IOTYPE}" == "binary" ) then - set end_date = `ls -t1 ${ICE_RUNDIR}/restart | head -1 | awk -F'.' '{print $NF}'` - foreach file (${ICE_RUNDIR}/restart/*${end_date}) - set surname = `echo $file | awk -F'/' '{print $NF}'` - mv $file ${ICE_RUNDIR}/restart/base_$surname - end + set end_date = `ls -t1 ${ICE_RUNDIR}/restart | head -1 | rev | cut -d "." -f 1 | rev` else - set test_file = `ls -t1 ${ICE_RUNDIR}/restart | head -1` - set test_data = ${ICE_RUNDIR}/restart/${test_file} - set base_data = ${ICE_RUNDIR}/restart/base_${test_file} - mv ${test_data} ${base_data} + set end_date = `ls -t1 ${ICE_RUNDIR}/restart | head -1 | rev | cut -d "." 
-f 2 | rev` endif +# Prepend 'base_' to final restart and history restart files to save for comparison +foreach file (${ICE_RUNDIR}/restart/*${end_date}*) + set surname = `echo $file | awk -F'/' '{print $NF}'` + mv $file ${ICE_RUNDIR}/restart/base_$surname +end + +# Prepend 'base_' to all history files to save for comparison +foreach file (${ICE_RUNDIR}/history/*) + set surname = `echo $file | awk -F'/' '{print $NF}'` + mv $file ${ICE_RUNDIR}/history/base_$surname +end + #----------------------------------------------------------- # Run the CICE model for the restart simulation diff --git a/doc/source/cice_index.rst b/doc/source/cice_index.rst index 949ab80b6..eef05641a 100644 --- a/doc/source/cice_index.rst +++ b/doc/source/cice_index.rst @@ -120,7 +120,7 @@ section :ref:`tabnamelist`. "cosw", "cosine of the turning angle in water", "1." "coszen", "cosine of the zenith angle", "" "Cp", "proportionality constant for potential energy", "kg/m\ :math:`^2`/s\ :math:`^2`" - "cpl_frazil", ":math:`\bullet` type of frazil ice coupling", "" + "cpl_frazil", "type of frazil ice coupling", "" "cp_air", "specific heat of air", "1005.0 J/kg/K" "cp_ice", "specific heat of fresh ice", "2106. J/kg/K" "cp_ocn", "specific heat of sea water", "4218. J/kg/K" @@ -208,8 +208,8 @@ section :ref:`tabnamelist`. "dvidtd", "ice volume tendency due to dynamics/transport", "m/s" "dvidtt", "ice volume tendency due to thermodynamics", "m/s" "dvirdg(n)dt", "ice volume ridging rate (category n)", "m/s" - "dyn_area_min", "minimum area concentration for computing velocity", "0.001" - "dyn_mass_min", "minimum mass for computing velocity", "0.01 kg/m\ :math:`^2`" + "dyn_area_min", "minimum area concentration for computing velocity", "1.e-11" + "dyn_mass_min", "minimum mass for computing velocity", "1.e-10 kg/m\ :math:`^2`" "**E**", "", "" "e11, e12, e22", "strain rate tensor components", "" "earea", "area of E-cell", "m\ :math:`^2`" @@ -390,6 +390,8 @@ section :ref:`tabnamelist`. "istep0", "number of steps taken in previous run", "0" "istep1", "total number of steps at current time step", "" "Iswabs", "shortwave radiation absorbed in ice layers", "W/m\ :math:`^2`" + "itd_area_min", "zap residual ice below a minimum area", "dyn_area_min" + "itd_mass_min", "zap residual ice below a minimum mass", "dyn_mass_min" "**J**", "", "" "**K**", "", "" "kalg", "absorption coefficient for algae", "" @@ -595,6 +597,7 @@ section :ref:`tabnamelist`. "restart_file", "restart file prefix", "" "restart_format", "restart file format", "" "restart_iotasks", "restart output total number of tasks used", "" + "restart_mod", "restart modification mode", "" "restart_rearranger", "restart output io rearranger method", "" "restart_root", "restart output io root task id", "" "restart_stride", "restart output io task stride", "" @@ -781,6 +784,8 @@ section :ref:`tabnamelist`. "wind", "wind speed", "m/s" "windmin", "minimum wind speed to compact snow", "10 m/s" "write_history", "if true, write history now", "" + "write_histrest", "if true, write out history restart files if needed", "" + "write_histrest_now", "write out history restart files now", "" "write_ic", "if true, write initial conditions", "" "write_restart", "if 1, write restart now", "" "**X**", "", "" diff --git a/doc/source/conf.py b/doc/source/conf.py index a71bef070..fa93c8a1a 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -57,7 +57,7 @@ # General information about the project. 
project = u'CICE' -copyright = u'1998, 2017, Triad National Security, LLC (code) and National Center for Atmospheric Research (documentation)' +copyright = u'1998-2026, Triad National Security, LLC (code) and National Center for Atmospheric Research (documentation)' author = u'CICE-Consortium' # The version info for the project you're documenting, acts as replacement for @@ -65,9 +65,9 @@ # built documents. # # The short X.Y version. -version = u'6.6.1' +version = u'6.6.3' # The full version, including alpha/beta/rc tags. -version = u'6.6.1' +version = u'6.6.3' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/source/developer_guide/dg_assim.rst b/doc/source/developer_guide/dg_assim.rst new file mode 100644 index 000000000..cdcfb9eaa --- /dev/null +++ b/doc/source/developer_guide/dg_assim.rst @@ -0,0 +1,37 @@ +:tocdepth: 3 + +.. _dataassimilation: + +Data Assimilation +====================== + +Data assimilation (DA) is the scientific process of combining external +data with numerical model forecasts. There are several ways this can +be done including by adjusting the model initial conditions (internally +or externally) or adjusting the model solution as it evolves in time. +Various data assimilation options are being introduced in CICE and are +described below. + +.. _restartmod: + +Data Assimilation on restart +------------------------------------ + +The namelist variable, ``restart_mod``, specifies the restart DA mode. +By default, this namelist value is set to ``none`` which disables the feature. +The current active options are ``adjust_aice`` and ``adjust_aice_test``. + +With ``adjust_aice`` and ``adjust_aice_test``, the category averaged aice +value is modified at restart to specified values using the method implemented in +**cicecore/cicedyn/infrastructure/ice_restart_driver.F90** subroutine +**direct_adjust_aice**. This method adjusts aice, vice, vsno, qice, and +sice in all categories to be consistent with the category average aice +specified. It also adjusts several thermodynamic variables such as +temperature and salinity (see :cite:`Posey15`). +``adjust_aice`` reads in a sea ice concentration +field from an external file. The field is currently hardwired to 'sic' and the +file is currently hardwired to 'sic.nc'. The field must be on the model grid. +``adjust_aice_test`` modifies the +aice field read on restart internally. The current implementation rounds +the aice values read at restart to the nearest 1/100th. This mode exists +primarily to test the feature. diff --git a/doc/source/developer_guide/dg_other.rst b/doc/source/developer_guide/dg_other.rst index a8f6e8b15..2819fa0eb 100644 --- a/doc/source/developer_guide/dg_other.rst +++ b/doc/source/developer_guide/dg_other.rst @@ -81,6 +81,247 @@ This is very likely to be bfb, but is not as fast or accurate as the reprosum implementation. See :cite:`He01` +.. _averages: + +Averages +----------------- + +Coupling and history output quantities may be averaged in different forms, depending on +whether the quantity represents a value averaged over the entire grid cell, the sea ice fraction, +or a subset of the sea ice fraction such as a thickness category or the ponded area. These +distinctions must also be considered for time averaging. + +The SIMIP Project :cite:`Notz16` +categorizes output variables as 'intensive' and 'extensive' based on their characteristics +relative to ice area. 
Extensive variables are proportional to area fraction, and their time +averages include zeroes when and where there is no ice. Intensive variables are not +proportional to area fraction, and their time averages should not include zeroes when and +where there is no ice. This is accomplished by summing area-weighted intensive values across categories +then dividing by the sum of the category areas. Tracers such as ice thickness, surface temperature, +and biogeochemical tracers are examples of intensive variables. + +The following formulas ignore subtleties such as some fluxes being computed on the initial ice area, which then +changes due to frazil ice formation, lateral melting and transport. The ice area used for both averaging and coupling should be carefully +considered in light of the model timestepping. Edge cases such as the complete disappearance or new appearance of ice +cause averaging errors. To address these cases, we could consider interpolating all quantities to the middle of the +timestep, but that is not currently done. + +Ice area +~~~~~~~~~~~~~~~~~ + +If :math:`\mathbf{X}=(x,y)`, :math:`A` is the cell area (:math:`m^2`) and :math:`g` represents +the ice thickness distribution discretized as :math:`a_n` for :math:`n=1,\, ncat`, then the +ice area (:math:`m^2`) is the sum of the thickness category areas :math:`a_n A`: + +.. math:: + A_{i}(t) = \int_{ice} g(\mathbf{X},t) \, d\mathbf{X} \sim \sum_{n=1}^{ncat} a_n(t) \, A + +and the (unitless) ice area fraction is + +.. math:: + a_{ice}(t) = {\int_{ice} g(\mathbf{X},t) \, d\mathbf{X} \over \int_{cell} d\mathbf{X}} \sim \sum_{n=1}^{ncat} a_n(t). + + +The time-averaged ice area over an interval of length :math:`N\Delta t` is + +.. math:: + \bar{A}_{i} = {\int_t A_{i}(t) \, dt \over \int_t \, dt} + \sim {\sum_{\Delta t} \sum_{n=1}^{ncat} a_n \, A \, \Delta t \over N \, \Delta t} + = {A \over N} \sum_{\Delta t} \sum_{n=1}^{ncat} a_n + +and the time-averaged ice area fraction is extensive (by definition): + +.. math:: + \bar{a}_{ice} = {\int_t \int_{ice} g(\mathbf{X},t) \, d\mathbf{X} \, dt \over \int_t \int_{cell} d\mathbf{X} \, dt} + \sim {\sum_{\Delta t} \sum_{n=1}^{ncat} a_n \, A \Delta t \over A \, N \, \Delta t} + = {1 \over N} \sum_{\Delta t} \sum_{n=1}^{ncat} a_n. + +Ice volume +~~~~~~~~~~~~~~~~~ + +Likewise for time averages of ice volume :math:`V_i` (:math:`m^3`), + +.. math:: + \bar{V}_{i} = {\int_t \int_{cell} \int_{0}^{h} g(\mathbf{X},t) \, dz \, d\mathbf{X} \, dt \over \int_{t} dt} + \sim {\sum_{\Delta t} \sum_{n=1}^{ncat} h_n \, a_n \, A \, \Delta t \over N \, \Delta t} + = {A \over N} \sum_{\Delta t} \sum_{n=1}^{ncat} h_n \, a_n + +for ice thickness :math:`h` assumed to be 0 in open water. Then the time-average ice volume per square meter of grid cell (:math:`m`) is + +.. math:: + \bar{v}_{ice} = {\int_t \int_{cell} \int_{0}^{h} g(\mathbf{X},t) \, dz \, d\mathbf{X} \, dt \over \int_{t} \int_{cell} d\mathbf{X} \, dt} + \sim {\sum_{\Delta t} \sum_{n=1}^{ncat} h_n \, a_n \, A \, \Delta t \over A \, N \, \Delta t} + = {1 \over N} \sum_{\Delta t} \sum_{n=1}^{ncat} h_n \, a_n = {1 \over N} \sum_{\Delta t} \sum_{n=1}^{ncat} v_n. + +where :math:`v_n = h_n a_n`. :math:`v_{ice}` is the quantity labeled `hi` in history, which can be thought of as the mean ice thickness averaged over the entire +grid cell. The time-averaged ice volume per square meter of ice (mean 'actual' ice thickness, :math:`m`) is + +..
math:: + \bar{h}_{i} = {\int_t \int_{ice} \int_{0}^{h} g(\mathbf{X},t) \, dz \, d\mathbf{X} \, dt \over \int_{t} \int_{ice} d\mathbf{X} \, dt} + \sim {\sum_{\Delta t} \sum_{n=1}^{ncat} h_n \, a_n \, A \, \Delta t \over \sum_{\Delta t} \sum_{n=1}^{ncat} a_n \, A \, \Delta t} + = {\sum_{\Delta t} \sum_{n=1}^{ncat} v_n \over \sum_{\Delta t} \sum_{n=1}^{ncat} a_n}. + +Snow volume is treated similarly. Ice and snow volumes are extensive, while thicknesses are +intensive. + +The form used here for time-averaging the average 'actual' thickness produces the average over all ice present +during the averaging interval. For intensive variables in particular, this form is slightly different from +the time-average of the category-averaged quantity per time step. The latter, two-step averaging process +requires additional divisions and re-multiplications by ice area, introducing errors where ice areas +are very small or cells change from ice-free to having ice or vice versa. The same is true for other tracers +and intensive variables. While both approaches are valid, averages as written here are preferred when +conservation is important. + +Volume content +~~~~~~~~~~~~~~~~~ + +Total content of tracers such as salt and enthalpy are necessary for conservative coupling. The time-average content +of a volume tracer :math:`b` (with units per :math:`m^3`) is + +.. math:: + \bar{B}_{i} = {\int_t \int_{cell} \int_{0}^{h} b(\mathbf{X},z,t) g(\mathbf{X},t) \, dz \, d\mathbf{X} \, dt \over \int_{t} dt} + \sim {\sum_{\Delta t} \sum_{n=1}^{ncat} b_n \, h_n \, a_n \, A \, \Delta t \over N \, \Delta t} + = {A \over N} \sum_{\Delta t} \sum_{n=1}^{ncat} b_n \, v_n + +and the time-averaged content per square meter of grid cell is + +.. math:: + \bar{b}_{ice} \sim {1 \over N} \sum_{\Delta t} \sum_{n=1}^{ncat} b_n \, v_n. + +The mean tracer value in sea ice is + +.. math:: + \bar{b}_{i} = {\int_t \int_{cell} \int_{0}^{h} b(\mathbf{X},z,t) g(\mathbf{X},t) \, dz \, d\mathbf{X} \, dt \over \int_{t} \int_{cell} \int_{0}^{h} dz \, d\mathbf{X} \, dt} + \sim {\sum_{\Delta t} \sum_{n=1}^{ncat} b_n \, h_n \, a_n \, A \, \Delta t \over \sum_{\Delta t} \sum_{n=1}^{ncat} h_n \, a_n \, A \, \Delta t} + = {\sum_{\Delta t} \sum_{n=1}^{ncat} b_n \, v_n \over \sum_{\Delta t} \sum_{n=1}^{ncat} v_n} + +Thus, volume content variables are extensive, while the tracers themselves are intensive. + +Surface quantities +~~~~~~~~~~~~~~~~~ + +Surface quantities such as temperature are intensive and treated similarly to volume tracers, with integrals taken over +the desired surface area rather than the volume. For example, + +.. math:: + T_{ice}(t) = {\int_{ice} T(\mathbf{X},t) g(\mathbf{X},t) \, d\mathbf{X} \over \int_{ice} g(\mathbf{X},t) \, d\mathbf{X}} + +and the time average is simply + +.. math:: + \bar{T}_{ice} = {\sum_{\Delta t} \sum_{n=1}^{ncat} T_n \, a_n \over \sum_{\Delta t} \sum_{n=1}^{ncat} \, a_n}. + +Note that since :math:`\sum_{n=0}^{ncat} \, a_n \,=\, 1`, a category-merged quantity can be considered the average over the cell area, assuming +the quantity is zero over open water: + +.. math:: + T_{cell} = {\sum_{n=0}^{ncat} T_n \, a_n \over \sum_{n=0}^{ncat} \, a_n} = \sum_{n=1}^{ncat} \, T_n \, a_n, + +and the average value over the ice is then + +.. math:: + T_{ice} = {\sum_{n=1}^{ncat} T_n \, a_n \over \sum_{n=1}^{ncat} \, a_n} = {T_{cell} \over a_{ice}}. + +This simplification is applicable for tracers carried on the ice area (or volume, similarly), which are zero over open water by definition. 
+When time-averaging CICE's history fields, the category-merged value in the numerator is saved (usually in Icepack), then accumulated in time and +later divided by the accumulated ice area fraction (or volume) in CICE. + + + +Tracer hierarchies +~~~~~~~~~~~~~~~~~ + +For tracers that are carried on other tracers, such as melt ponds, averages over different areas of a given cell differ in the denominator. +For melt ponds not carried on the level-ice area, for example, the average pond depths over the grid cell area, the ice area, and the ponded +area are, respectively, + +.. math:: + h_{p\,cell} = \frac{ \int_{cell} h_p \, a_p \, g \, d\mathbf{X} } + { \int_{cell} d\mathbf{X} } + \sim \sum_{n=1}^{ncat} h_{pn} \, a_{pn} \, a_n + +.. math:: + h_{p\,ice} = \frac{ \int_{ice} h_p \, a_p \, g \, d\mathbf{X} } + { \int_{ice} g \, d\mathbf{X} } + = \frac{ \int_{cell} h_p \, a_p \, g \, d\mathbf{X} } + { \int_{ice} g \, d\mathbf{X} } + \sim \frac{ \sum_{n=1}^{ncat} h_{pn} \, a_{pn} \, a_n }{ \sum_{n=1}^{ncat} a_n } + +.. math:: + h_{p\,pond} = \frac{ \int_{pond} h_p \, a_p \, g \, d\mathbf{X} } + { \int_{pond} a_p \, g \, d\mathbf{X} } + = \frac{ \int_{cell} h_p \, a_p \, g \, d\mathbf{X} } + { \int_{ice} a_p \, g \, d\mathbf{X} } + \sim \frac{ \sum_{n=1}^{ncat} h_{pn} \, a_{pn} \, a_n }{ \sum_{n=1}^{ncat} a_{pn} \, a_n }. + +For level-ice ponds, there is an extra factor of :math:`a_{lvl}`. The level-ice pond depth averaged over the grid cell area, total ice area, level ice area and pond area are + +.. math:: + h_{p\,cell} = \frac{ \int_{cell} h_p \, a_p \, a_{lvl} \, g \, d\mathbf{X} } + { \int_{cell} d\mathbf{X} } + \sim \sum_{n=1}^{ncat} h_{pn} \, a_{pn} \, a_{lvln} \, a_n + +.. math:: + h_{p\,ice} = \frac{ \int_{ice} h_p \, a_p \, a_{lvl} \, g \, d\mathbf{X} } + { \int_{ice} g \, d\mathbf{X} } + = \frac{ \int_{cell} h_p \, a_p \, a_{lvl} \, g \, d\mathbf{X} } + { \int_{ice} g \, d\mathbf{X} } + \sim \frac{ \sum_{n=1}^{ncat} h_{pn} \, a_{pn} \, a_{lvln} \, a_n }{ \sum_{n=1}^{ncat} a_n } + +.. math:: + h_{p\,lvl} = \frac{ \int_{lvl} h_p \, a_p \, a_{lvl} \, g \, d\mathbf{X} } + { \int_{lvl} a_{lvl} \, g \, d\mathbf{X} } + = \frac{ \int_{cell} h_p \, a_p \, a_{lvl} \, g \, d\mathbf{X} } + { \int_{ice} a_{lvl} \, g \, d\mathbf{X} } + \sim \frac{ \sum_{n=1}^{ncat} h_{pn} \, a_{pn} \, a_{lvln} \, a_n }{ \sum_{n=1}^{ncat} a_{lvln} \, a_n } + +.. math:: + h_{p\,pond} = \frac{ \int_{pond} h_p \, a_p \, a_{lvl} \, g \, d\mathbf{X} } + { \int_{pond} a_p \, a_{lvl} \, g \, d\mathbf{X} } + = \frac{ \int_{cell} h_p \, a_p \, a_{lvl} \, g \, d\mathbf{X} } + { \int_{ice} a_p \, a_{lvl} \, g \, d\mathbf{X} } + \sim \frac{ \sum_{n=1}^{ncat} h_{pn} \, a_{pn} \, a_{lvln} \, a_n }{ \sum_{n=1}^{ncat} a_{pn} \, a_{lvln} \, a_n }. + +Time averages follow analogously as above. + +Ridged-ice area and volume are handled slightly differently, since they are diagnostic based on +level-ice area and volume. Level-ice area is a tracer on ice area, and level-ice volume is a +tracer on ice volume. The tracer values are fractions of the total ice, and ridged (deformed) ice is +diagnosed as the remainder of the ice fraction or volume: +:math:`T_{ardg} = 1 - T_{alvl}` and :math:`T_{vrdg} = 1 - T_{vlvl}` for the area and volume tracers. +Thus the mean level and ridged ice area fractions of the ice area are + +.. math:: + a_{lvl\,ice} = \frac{ \int_{ice} T_{alvl} \, g \, d\mathbf{X} } + { \int_{ice} g \, d\mathbf{X} } + \sim \frac{ \sum_{n=1}^{ncat} a_{lvln} \, a_n }{ \sum_{n=1}^{ncat} a_n } + +..
math:: + a_{rdg\,ice} = \frac{ \int_{ice} (1 - T_{alvl}) \, g \, d\mathbf{X} } + { \int_{ice} g \, d\mathbf{X} } + \sim \frac{ \sum_{n=1}^{ncat} (1 - a_{lvln}) \, a_n }{ \sum_{n=1}^{ncat} a_n }. + +The mean thickness of level ice, averaging over just the level-ice areas from all categories, is + +.. math:: + h_{lvl} = \frac{ \int_{ice} \int_{0}^{h} T_{vlvl} \, g \, dz \, d\mathbf{X} } + { \int_{ice} T_{alvl} \, g \, d\mathbf{X} } + \sim \frac{ \sum_{n=1}^{ncat} T_{vlvln} \, a_n \, h_n } + { \sum_{n=1}^{ncat} T_{alvln} \, a_n } + \sim \frac{ \sum_{n=1}^{ncat} T_{vlvln} \, v_n } + { \sum_{n=1}^{ncat} T_{alvln} \, a_n } + +and the mean thickness of deformed ice (averaging over just the ridged-ice +areas from all categories) is + +.. math:: + h_{rdg} = \frac{ \int_{ice} \int_{0}^{h} (1 - T_{vlvl}) \, g \, dz \, d\mathbf{X} } + { \int_{ice} (1 - T_{alvl}) \, g \, d\mathbf{X} } + \sim \frac{ \sum_{n=1}^{ncat} (1 - T_{vlvln}) \, a_n \, h_n } + { \sum_{n=1}^{ncat} (1 - T_{alvln}) \, a_n } + \sim \frac{ \sum_{n=1}^{ncat} (1 - T_{vlvln}) \, v_n } + { \sum_{n=1}^{ncat} (1 - T_{alvln}) \, a_n }. + .. _addtimer: Adding Timers diff --git a/doc/source/developer_guide/index.rst b/doc/source/developer_guide/index.rst index 680746beb..e8ed32408 100644 --- a/doc/source/developer_guide/index.rst +++ b/doc/source/developer_guide/index.rst @@ -16,6 +16,7 @@ Developer Guide dg_infra.rst dg_driver.rst dg_forcing.rst + dg_assim.rst dg_icepack.rst dg_scripts.rst dg_tools.rst diff --git a/doc/source/intro/copyright.rst b/doc/source/intro/copyright.rst index c72da7fbd..7d4830aa7 100644 --- a/doc/source/intro/copyright.rst +++ b/doc/source/intro/copyright.rst @@ -5,7 +5,7 @@ Copyright ============================= -© Copyright 1998, 2017, Triad National Security, LLC +Copyright 1998-2025, Triad National Security, LLC All rights reserved. This program was produced under U.S. Government contract 89233218CNA000001 for Los Alamos National Laboratory (LANL), which is operated by Triad National Security, LLC for the U.S. Department of Energy/National Nuclear Security Administration. All rights in the program are reserved by Triad National Security, LLC, and the U.S. Department of Energy/National Nuclear Security Administration. The Government is granted for itself and others acting on its behalf a nonexclusive, paid-up, irrevocable worldwide license in this material to reproduce, prepare. derivative works, distribute copies to the public, perform publicly and display publicly, and to permit others to do so. diff --git a/doc/source/master_list.bib b/doc/source/master_list.bib index 6e3bb9b40..a91511ccd 100644 --- a/doc/source/master_list.bib +++ b/doc/source/master_list.bib @@ -1041,7 +1041,7 @@ @incollection{Arakawa77 @article{Horvat15, author = "C. Horvat and E. Tziperman", - journal = {The Cryosphere}, + journal = TC, number = {6}, pages = {2119-2134}, title = "{A prognostic model of the sea-ice floe size and thickness distribution}", @@ -1113,6 +1113,16 @@ @Article{Tsujino18 pages = {79-139}, url = {http://dx.doi.org/10.1016/j.ocemod.2018.07.002} } + +@Article{Posey15, + author = "P.G. Posey and E.J. Metzger and A.J. Wallcraft and D.A. Hebert and R.A. Allard and O.M. Smedstad and M.W. Phelps and F. Fetterer and J.S. Stewart and W.N. Meier and S.R. 
Helfrich", + title = "{Improving Arctic sea ice edge forecasts by assimilating high horizontal resolution sea ice concentration data into the US Navy's ice forecast system}", + journal = TC, + year = {2015}, + volume = {9}, + pages = {1735-1745}, + url = {https://doi.org/10.5194/tc-9-1735-2015} +} % ********************************************** % For new entries, see example entry in BIB_TEMPLATE.txt % ********************************************** diff --git a/doc/source/science_guide/sg_dynamics.rst b/doc/source/science_guide/sg_dynamics.rst index ce9c3c4ee..23d1fbc33 100644 --- a/doc/source/science_guide/sg_dynamics.rst +++ b/doc/source/science_guide/sg_dynamics.rst @@ -96,9 +96,12 @@ Note that the VP solver has not yet been tested on the ``tx1`` grid. The EVP, rEVP, EAP and VP approaches are all available with the B grid. However, at the moment, only the EVP and rEVP schemes are possible with the C grid. -The dynamics are solved for all gridcells with area concentration greater than ``dyn_area_min`` and mass -greater than ``dyn_mass_min``. These parameters are respectively 0.001 and 0.01 by default but can be set in -namelist. Lower values can improve the solution but also lead to instabilities. +The dynamics are solved for all grid cells with area concentration greater than ``dyn_area_min`` +and mass greater than ``dyn_mass_min``. These parameters can be set in namelist. Lower +values can improve the solution with increased computational expense due to additional +calculations in grid cells with small amounts of ice, but can also lead to instabilities. +For this reason, default values in the code and base namelist file are set for the B-grid, +with different values provided for C-grid tests. Here we summarize the equations and direct the reader to the above references for details. diff --git a/doc/source/science_guide/sg_fundvars.rst b/doc/source/science_guide/sg_fundvars.rst index 2d6f50328..89b095c70 100644 --- a/doc/source/science_guide/sg_fundvars.rst +++ b/doc/source/science_guide/sg_fundvars.rst @@ -13,7 +13,6 @@ modeling is to describe the evolution of the ice thickness distribution (ITD) in time and space. In addition to an ice thickness distribution, CICE includes an optional capability for a floe size distribution. - Ice floe horizontal size may change through vertical and lateral growth and melting of existing floes, freezing of new ice, wave breaking, and welding of floes in freezing conditions. The floe size distribution (FSD) is a probability function that characterizes this variability. The scheme is based on the theoretical framework described in :cite:`Horvat15` for a joint floe size and thickness distribution (FSTD), and was implemented by :cite:`Roach18` and :cite:`Roach19`. The joint floe size distribution is carried as an area-weighted tracer, defined as the fraction of ice belonging to a given thickness category with lateral floe size belong to a given floe size class. This development includes interactions between sea ice and ocean surface waves. Input data on ocean surface wave spectra at a single time is provided for testing, but as with the other CICE datasets, it should not be used for production runs or publications. It is not recommended to use the FSD without ocean surface waves. Additional information about the ITD and joint FSTD for CICE can be found in the @@ -113,3 +112,15 @@ the beginning of the timestep. 
Rather than recompute the albedo and shortwave components at the beginning of the next timestep using new values of the downwelling shortwave forcing, the shortwave components computed at the end of the last timestep are scaled for the new forcing. + +In Icepack, residual amounts of ice may be conservatively removed based +on minimum area and mass parameters ``itd_area_min`` and ``itd_mass_min``. +Initializing these parameters to CICE's ``dyn_area_min`` and ``dyn_mass_min`` +namelist values ensures consistency between Icepack's column physics and +CICE's dynamic calculations by avoiding residual ice not handled in either +place. However, ``dyn_area_min`` and ``dyn_mass_min`` should be relatively +small to avoid removing too much ice. The default behavior sets the column +physics (itd) parameters to the dynamics values (from namelist). +``itd_area_min`` and ``itd_mass_min`` can be added to the namelist file +**ice_in** and set to different values, if desired. Setting them to zero +turns off residual zapping completely. diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index 2f705e64c..ea959868d 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -189,12 +189,13 @@ setup_nml "", "``zero``", "restart output frequency relative to year-month-day of 0000-01-01", "" "``dumpfreq_n``", "integer array", "write restart frequency with ``dumpfreq``", "1,1,1,1,1" "``dump_last``", "logical", "write restart on last time step of simulation", "``.false.``" - "``histfreq``", "``d``", "write history every ``histfreq_n`` days", "'1','h','d','m','y'" + "``histfreq``", "``d``", "write history every ``histfreq_n`` days", "'1','h','d','m','y','n'" "", "``h``", "write history every ``histfreq_n`` hours", "" "", "``m``", "write history every ``histfreq_n`` months", "" "", "``x``", "unused frequency stream (not written)", "" "", "``y``", "write history every ``histfreq_n`` years", "" "", "``1``", "write history every ``histfreq_n`` time step", "" + "", "``n``", "write history on day ``histfreq_n``", "" "``histfreq_base``", "``init``", "history output frequency relative to year_init, month_init, day_init", "'zero','zero','zero','zero','zero'" "", "``zero``", "history output frequency relative to year-month-day of 0000-01-01", "" "``histfreq_n``", "integer array", "frequency history output is written with ``histfreq``", "1,1,1,1,1" @@ -266,6 +267,9 @@ setup_nml "", "``pnetcdf2``", "write restart files with pnetcdf cdf2 (netcdf3-64bit-offset) format", "" "", "``pnetcdf5``", "write restart files with pnetcdf cdf5 (netcdf3-64bit-data) format", "" "``restart_iotasks``", "integer", "pe io tasks for restart output with restart_root and restart_stride (PIO only), -99=internal default", "-99" + "``restart_mod``", "``adjust_aice``", "adjust aice on restart read from file", "none" + "", "``adjust_aice_test``", "adjust aice on restart read rounding", "" + "", "``none``", "no modification of restart at read", "" "``restart_rearranger``", "``box``", "box io rearranger option for restart output (PIO only)", "default" "", "``default``", "internal default io rearranger option for restart output", "" "", "``subset``", "subset io rearranger option for restart output", "" @@ -279,6 +283,7 @@ setup_nml "``use_leap_years``", "logical", "include leap days", "``.false.``" "``use_restart_time``", "logical", "set initial date using restart file on initial runtype only", "``.false.``" "``version_name``", "string", "model version", 
"'unknown_version_name'" + "``write_histrest``", "logical", "write history restarts if needed", "``.true.``" "``write_ic``", "logical", "write initial condition", "``.false.``" "``year_init``", "integer", "the initial year if not using restart", "0" "", "", "", "" @@ -294,7 +299,7 @@ grid_nml "``bathymetry_file``", "string", "name of bathymetry file to be read", "'unknown_bathymetry_file'" "``bathymetry_format``", "``default``", "NetCDF depth field", "'default'" "", "``pop``", "POP thickness file in cm in ascii format", "" - "``close_boundaries``", "logical", "force two gridcell wide land mask on boundaries for rectangular grids", "``.false.``" + "``close_boundaries``", "logical", "deprecated Nov, 2025, use ew_boundary_type and ns_boundary_type", "``.false.``" "``dxrect``", "real", "x-direction grid spacing for rectangular grid in cm", "0.0" "``dxscale``", "real", "user defined rectgrid x-grid scale factor", "1.0" "``dyrect``", "real", "y-direction grid spacing for rectangular grid in cm", "0.0" @@ -308,6 +313,7 @@ grid_nml "``grid_format``", "``bin``", "read direct access grid and kmt files", "``bin``" "", "``geosnc``", "read grid and kmt file in GEOS netcdf format", "" "", "``pop_nc``", "read grid and kmt files in POP netcdf format", "" + "", "``pop_nc_ext``", "read extended grid and kmt files in POP netcdf format", "" "", "``meshnc``", "coupled model grid option, no CICE code support", "" "", "``mom_nc``", "read grid in MOM (supergrid) format and kmt files", "" "``grid_ice``", "``B``", "use B grid structure with T at center and U at NE corner", "``B``" @@ -373,7 +379,8 @@ domain_nml "", "``blockfull``", "block method with NO land block elimination and full weight given to land blocks", "" "", "``latitude``", "latitude/ocean sets ``work_per_block``", "" "``distribution_wght_file``", "string", "distribution weight file when distribution_type is ``wghtfile``", "'unknown'" - "``ew_boundary_type``", "``cyclic``", "periodic boundary conditions in x-direction", "``cyclic``" + "``ew_boundary_type``", "``closed``", "force two gridcell wide land mask on x-direction boundaries for rectangular grids", "``cyclic``" + "", "``cyclic``", "periodic boundary conditions in x-direction", "" "", "``open``", "Dirichlet boundary conditions in x", "" "``maskhalo_dyn``", "logical", "mask unused halo cells for dynamics", "``.false.``" "``maskhalo_remap``", "logical", "mask unused halo cells for transport", "``.false.``" @@ -382,7 +389,8 @@ domain_nml "", "``-1``", "find number of blocks per MPI task automatically", "" "``nprocs``", "integer", "number of MPI tasks to use", "-1" "", "``-1``", "find number of MPI tasks automatically", "" - "``ns_boundary_type``", "``cyclic``", "periodic boundary conditions in y-direction", "``open``" + "``ns_boundary_type``", "``closed``", "force two gridcell wide land mask on y-direction boundaries for rectangular grids", "``cyclic``" + "", "``cyclic``", "periodic boundary conditions in y-direction", "" "", "``open``", "Dirichlet boundary conditions in y", "" "", "``tripole``", "U-fold tripole boundary conditions in y", "" "", "``tripoleT``", "T-fold tripole boundary conditions in y", "" @@ -461,6 +469,8 @@ thermo_nml "``ktherm``", "``-1``", "thermodynamic model disabled", "1" "", "``1``", "Bitz and Lipscomb thermodynamic model", "" "", "``2``", "mushy-layer thermodynamic model", "" + "``itd_area_min``", "real", "area below which ice is zapped", "1.e-11" + "``itd_mass_min``", "real", "mass below which ice is zapped", "1.e-10" "``phi_c_slow_mode``", ":math:`0<\phi_c < 1`", 
"critical liquid fraction", "0.05" "``phi_i_mushy``", ":math:`0<\phi_i < 1`", "solid fraction at lower boundary", "0.85" "``Rac_rapid_mode``", "real", "critical Rayleigh number", "10.0" @@ -507,8 +517,8 @@ dynamics_nml "``deltaminVP``", "real", "minimum delta for viscosities", "2e-9" "``dim_fgmres``", "integer", "maximum number of Arnoldi iterations for FGMRES solver", "50" "``dim_pgmres``", "integer", "maximum number of Arnoldi iterations for PGMRES preconditioner", "5" - "``dyn_area_min``", "real", "min ice area concentration to activate dynamics", "0.001" - "``dyn_mass_min``", "real", "min ice mass to activate dynamics (kg/m\ :math:`^2`)", "0.01" + "``dyn_area_min``", "real", "min ice area concentration to activate dynamics", "1.e-11" + "``dyn_mass_min``", "real", "min ice mass to activate dynamics (kg/m\ :math:`^2`)", "1.e-10" "``e_plasticpot``", "real", "aspect ratio of elliptical plastic potential", "2.0" "``e_yieldcurve``", "real", "aspect ratio of elliptical yield curve", "2.0" "``elasticDamp``", "real", "elastic damping parameter", "0.36" @@ -940,6 +950,7 @@ source code for a full list of supported output fields. "", "``x``", "do not write var to history", "" "", "``y``", "write field var every ``histfreq_n`` years", "" "", "``1``", "write field var every time step", "" + "", "``n``", "write field var on day ``histfreq_n`` instantaneously", "" "", "``md``", "*e.g.,* write both monthly and daily files", "" "``f__ai``", "``d``", "write field cell average var every ``histfreq_n`` days", "" "", "``h``", "write field cell average var every ``histfreq_n`` hours", "" diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index 69c288ee8..15c0decd1 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -421,7 +421,7 @@ Tinz and Tsnz, and the ice salinity profile, Sinz. These variables also include category as a fourth dimension. ******************* -Boundary conditions +Boundary Conditions ******************* Much of the infrastructure used in CICE, including the boundary @@ -430,33 +430,39 @@ communications among processors when MPI is in use and among blocks whenever there is more than one block per processor. Boundary conditions are defined by the ``ns_boundary_type`` and ``ew_boundary_type`` -namelist inputs. Valid values are ``open`` and ``cyclic``. In addition, +namelist inputs. Valid values are ``open``, ``closed``, and ``cyclic``. In addition, ``tripole`` and ``tripoleT`` are options for the ``ns_boundary_type``. -Closed boundary conditions are not supported currently. -The domain can be physically closed with the ``close_boundaries`` -namelist which forces a land mask on the boundary with a two gridcell depth. -Where the boundary is land, the boundary_type settings play no role. -For example, in the displaced-pole grids, at least one row of grid cells along the north -and south boundaries is land. Along the east/west domain boundaries not -masked by land, periodic conditions wrap the domain around the globe. In +``closed`` imposes a land mask on the boundary with a two gridcell depth +and is only supported for rectangular grids. In general, +where the boundary is land or where there is no ice on the boundary, +the boundary_type settings and boundary conditions play no role. + +In the displaced-pole global grids, the mask (kmt) file has at least one row of +grid cells along the north and south boundaries that is land. 
Along the east/west
+domain boundaries, periodic conditions wrap the domain around the globe. In
+this example,
-the appropriate namelist settings are ``nsboundary_type`` = ``open``,
-``ew_boundary_type`` = ``cyclic``, and ``close_boundaries`` = ``.false.``.
-
-CICE can be run on regional grids with open boundary conditions; except
-for variables describing grid lengths, non-land halo cells along the
-grid edge must be filled by restoring them to specified values. The
-namelist variable ``restore_ice`` turns this functionality on and off; the
+the appropriate namelist settings are ``ns_boundary_type`` = ``open`` and
+``ew_boundary_type`` = ``cyclic``.
+
+CICE can be run on regional grids with ``open``, ``closed``, or ``cyclic``
+boundary conditions.
+Except for variables describing grid lengths, non-land halo cells along the
+grid edge must be filled by some boundary condition
+if ice is present at that location. The outside halo is handled automatically
+with ``closed`` or ``cyclic`` conditions. With ``open`` boundary conditions, the
+outside boundary could be set in several different ways, for example by reading
+values from an external file or by deriving halo values from the interior
+solution under a zero-gradient, constant-gradient, specified-state,
+zero-flux, or other boundary condition. Mathematically specified boundary
+conditions of this kind are currently not supported in the CICE model.
+
+The namelist variable ``restore_ice`` turns on a restoring capability on the
+boundary by setting the boundary halo to values read from a file. The
 restoring timescale ``trestore`` may be used (it is also used for
 restoring ocean sea surface temperature in stand-alone ice runs). This
 implementation is only intended to provide the “hooks" for a more
-sophisticated treatment; the rectangular grid option can be used to test
-this configuration. The ‘displaced_pole’ grid option should not be used
-unless the regional grid contains land all along the north and south
-boundaries. The current form of the boundary condition routines does not
-allow Neumann boundary conditions, which must be set explicitly. This
-has been done in an unreleased branch of the code; contact Elizabeth for
-more information.
+sophisticated treatment. The rectangular grid option can be used to test
+this configuration.
 
 For exact restarts using restoring, set ``restart_ext`` = true in namelist
 to use the extended-grid subroutines.
@@ -1254,7 +1260,7 @@ to the Macros machine file explicity when needed.
 
 .. _history:
 
 *************
-History files
+History Files
 *************
 
 CICE provides history data output in binary unformatted or netCDF formats via
@@ -1290,8 +1296,8 @@ collected in their own history modules
 (**ice_history_bgc.F90**, **ice_history_drag.F90**, **ice_history_mechred.F90**,
 **ice_history_pond.F90**).
 
-The history modules allow output at different frequencies. Five output
-options (``1``, ``h``, ``d``, ``m``, ``y``) are available simultaneously for ``histfreq``
+The history modules allow output at different frequencies. Six output
+options (``1``, ``h``, ``d``, ``m``, ``y``, ``n``) are available simultaneously for ``histfreq``
 during a run, and each stream must have a unique value for ``histfreq``. In
 other words, ``d`` cannot be used by two different streams. Each stream has
 an associated frequency set by ``histfreq_n``. The frequency is
@@ -1390,17 +1396,26 @@ subroutine **define_hist_field**. 
 ``cona`` and ``conb`` are multiplicative and terms respectively that are hardwired
 into the source code to convert model units to history units.
 
-Beginning with CICE v6, history variables requested by the Sea Ice Model Intercomparison
-Project (SIMIP) :cite:`Notz16` have been added as possible history output variables (e.g.
-``f_sithick``, ``f_sidmassgrowthbottom``, etc.). The lists of
-`monthly `_ and
-`daily `_
-requested SIMIP variables provide the names of possible history fields in CICE.
-However, each of the additional variables can be output at any temporal frequency
-specified in the **icefields_nml** section of **ice_in** as detailed above.
-Additionally, a new history output variable, ``f_CMIP``, has been added. When ``f_CMIP``
-is added to the **icefields_nml** section of **ice_in** then all SIMIP variables
-will be turned on for output at the frequency specified by ``f_CMIP``.
+Beginning with CICE v6, history variables requested by the Sea Ice Model Intercomparison
+Project (SIMIP) :cite:`Notz16` are available as history output variables
+(e.g. ``f_sithick``, ``f_sidmassgrowthbottom``, etc.). The lists of
+`monthly `_ and
+`daily `_
+requested SIMIP variables provide their history field names in CICE.
+These variables have been updated for the
+`CMIP7 data request `_.
+
+The ``f_CMIP`` flag has been removed. Its functionality is now provided by the ``set_nml.cmip``
+namelist option, which can be invoked with the ``-s cmip`` option during cice.setup. This optional
+namelist setting turns on the CMIP data request and turns off CICE duplicates of
+SIMIP variables. However, these settings can be changed by the user in the case ``ice_in`` file.
+Note that all SIMIP variables have been updated to correspond to the new
+`CMIP7 data request `_.
+
+Note that some SIMIP variables require division by ice or sub-ice areas, which can be extremely
+small and cause the output variables to appear unphysically large. Please interpret these
+quantities (such as ``sithick``) very carefully. A future release will have an option to mask
+these regions.
 
 It may also be helpful for debugging to increase the precision of the history
 file output from 4 bytes to 8 bytes. This is changed through the ``history_precision``
@@ -1409,8 +1424,35 @@ namelist flag.
 
 Note that the dpnd pond history fields have not yet been implemented for the topo ponds scheme and are turned off in the code.
 
+************************
+History Restart Files
+************************
+
+CICE has a history restart capability. History restart files are needed and written when
+a restart file is written while history fields are still accumulating. The implementation dumps
+the accumulated history data, one file per history stream, to the restart directory using
+a naming convention that takes the history filename, appends '_r' plus the ``histfreq`` character
+string, and then appends the model time. This occurs only for streams with
+``hist_avg = .true.`` and where the accumulator count is greater than zero when the data is
+written. Only the accumulating data associated with each history stream is written. This feature
+can be turned off by setting ``write_histrest = .false.`` in namelist.
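+
+As a minimal illustration (only the relevant line is shown; in practice it would be
+edited within the existing ``setup_nml`` group of an **ice_in** file rather than given
+as a standalone group), history restart output could be disabled with::
+
+  &setup_nml
+    write_histrest = .false.
+  /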
+
+On restart, CICE looks for the appropriate history restart files and reads them if they exist.
+If the files do not exist or fields cannot be read, the model continues with the history
+accumulator set to zero. Messages written to the log file indicate which history restart
+files and fields were read and which were not. In a production run, where the history streams
+are set and unchanging, this should result in bit-for-bit history restart capability. If
+a user changes the history stream output, CICE will read only the files and fields that exist,
+and any new fields will be initialized with zero accumulation and a potentially erroneous
+accumulation counter.
+
+There is a settings option, **histall10d**, that specifies 10-day and monthly time-average
+history streams for all history variables. In tests that use this option, the test script
+compares the generated history and history restart output to verify bit-for-bit history
+capability for a restart run.
+
 ****************
-Diagnostic files
+Diagnostic Files
 ****************
 
 Like ``histfreq``, the parameter ``diagfreq`` can be used to regulate how often
@@ -1519,7 +1561,7 @@ The timers use *MPI_WTIME* for parallel runs and the F90 intrinsic
 
 .. _restartfiles:
 
 *************
-Restart files
+Restart Files
 *************
 
 CICE reads and writes restart data in binary unformatted or netCDF formats via
diff --git a/doc/source/user_guide/ug_testing.rst b/doc/source/user_guide/ug_testing.rst
index e0cbd2c2a..36ca2f13b 100644
--- a/doc/source/user_guide/ug_testing.rst
+++ b/doc/source/user_guide/ug_testing.rst
@@ -285,7 +285,7 @@ To run the test::
 
 .. _testsuites:
 
-Test suites
+Test Suites
 ------------
 
 Test suites support running multiple tests specified via
diff --git a/icepack b/icepack
index b91f1dd73..2f31ee37f 160000
--- a/icepack
+++ b/icepack
@@ -1 +1 @@
-Subproject commit b91f1dd73d6c8475e6d94611f592b8036623bf78
+Subproject commit 2f31ee37f3a70a70b5e33cae43476f09dbb33da7