diff --git a/cicecore/drivers/access/cmeps/CICE_FinalMod.F90 b/cicecore/drivers/access/cmeps/CICE_FinalMod.F90 new file mode 100644 index 000000000..0be3636f0 --- /dev/null +++ b/cicecore/drivers/access/cmeps/CICE_FinalMod.F90 @@ -0,0 +1,61 @@ +!======================================================================= +! +! This module contains routines for the final exit of the CICE model, +! including final output and clean exit from any message passing +! environments and frameworks. +! +! authors: Philip W. Jones, LANL +! 2006: Converted to free source form (F90) by Elizabeth Hunke +! 2008: E. Hunke moved ESMF code to its own driver + + module CICE_FinalMod + + use ice_kinds_mod + use ice_communicate, only: my_task, master_task + use ice_exit, only: end_run, abort_ice + use ice_fileunits, only: nu_diag, release_all_fileunits + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + + implicit none + private + public :: CICE_Finalize + +!======================================================================= + + contains + +!======================================================================= +! +! This routine shuts down CICE by exiting all relevent environments. + + subroutine CICE_Finalize + + use ice_restart_shared, only: runid + use ice_timers, only: ice_timer_stop, ice_timer_print_all, timer_total + + character(len=*), parameter :: subname = '(CICE_Finalize)' + + !------------------------------------------------------------------- + ! stop timers and print timer info + !------------------------------------------------------------------- + + call ice_timer_stop(timer_total) ! stop timing entire run + call ice_timer_print_all(stats=.false.) ! print timing information + +!echmod if (nu_diag /= 6) close (nu_diag) ! diagnostic output + call release_all_fileunits + + !------------------------------------------------------------------- + ! quit MPI + !------------------------------------------------------------------- + +! standalone +! 
call end_run ! quit MPI + + end subroutine CICE_Finalize + +!======================================================================= + + end module CICE_FinalMod + +!======================================================================= diff --git a/cicecore/drivers/access/cmeps/CICE_InitMod.F90 b/cicecore/drivers/access/cmeps/CICE_InitMod.F90 new file mode 100644 index 000000000..4a4984687 --- /dev/null +++ b/cicecore/drivers/access/cmeps/CICE_InitMod.F90 @@ -0,0 +1,491 @@ +module CICE_InitMod + + ! Initialize CICE model. + + use ice_kinds_mod + use ice_exit , only: abort_ice + use ice_fileunits, only: init_fileunits, nu_diag + use icepack_intfc, only: icepack_aggregate + use icepack_intfc, only: icepack_init_itd, icepack_init_itd_hist + use icepack_intfc, only: icepack_init_fsd_bounds, icepack_init_wave + use icepack_intfc, only: icepack_init_snow, icepack_init_radiation + use icepack_intfc, only: icepack_configure + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_query_parameters, icepack_query_tracer_flags + use icepack_intfc, only: icepack_query_tracer_indices, icepack_query_tracer_sizes + + implicit none + private + public :: cice_init1 + public :: cice_init2 + + private :: init_restart + +!======================================================================= +contains +!======================================================================= + + subroutine cice_init1() + + ! Initialize the basic state, grid and all necessary parameters for + ! running the CICE model. 
+ + use ice_init , only: input_data + use ice_init_column , only: input_zbgc, count_tracers + use ice_grid , only: init_grid1, alloc_grid + use ice_domain , only: init_domain_blocks + use ice_arrays_column , only: alloc_arrays_column + use ice_state , only: alloc_state + use ice_flux_bgc , only: alloc_flux_bgc + use ice_flux , only: alloc_flux + use ice_timers , only: timer_total, init_ice_timers, ice_timer_start + + character(len=*), parameter :: subname = '(cice_init1)' + !---------------------------------------------------- + + call init_fileunits ! unit numbers + call icepack_configure() ! initialize icepack + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + call input_data ! namelist variables + call access_verify_inputs + + call input_zbgc ! vertical biogeochemistry namelist + call count_tracers ! count tracers + + call init_domain_blocks ! set up block decomposition + call init_grid1 ! domain distribution + call alloc_grid ! allocate grid arrays + call alloc_arrays_column ! allocate column arrays + call alloc_state ! allocate state arrays + call alloc_flux_bgc ! allocate flux_bgc arrays + call alloc_flux ! allocate flux arrays + call init_ice_timers ! initialize all timers + call ice_timer_start(timer_total) ! start timing entire run + + end subroutine cice_init1 + + !======================================================================= + subroutine cice_init2() + + ! Initialize the basic state, and all necessary parameters for + ! running the CICE model. 
+ + use ice_arrays_column , only: hin_max, c_hi_range + use ice_arrays_column , only: floe_rad_l, floe_rad_c, floe_binwidth, c_fsd_range + use ice_calendar , only: dt, dt_dyn, istep, istep1, write_ic, init_calendar, calendar + use ice_communicate , only: my_task, master_task + use ice_diagnostics , only: init_diags + use ice_domain_size , only: ncat, nfsd, nfreq + use ice_dyn_eap , only: init_eap + use ice_dyn_evp , only: init_evp + use ice_dyn_vp , only: init_vp + use ice_dyn_shared , only: kdyn + use ice_flux , only: init_coupler_flux, init_history_therm + use ice_flux , only: init_history_dyn, init_flux_atm, init_flux_ocn + use ice_forcing , only: init_snowtable + use ice_forcing_bgc , only: get_forcing_bgc, get_atm_bgc + use ice_forcing_bgc , only: faero_default, alloc_forcing_bgc, fiso_default + use ice_grid , only: dealloc_grid + use ice_history , only: init_hist, accum_hist + use ice_restart_shared , only: restart, runtype + use ice_init , only: input_data, init_state + use ice_init_column , only: init_thermo_vertical, init_shortwave, init_zbgc + use ice_restoring , only: ice_HaloRestore_init + use ice_timers , only: timer_total, init_ice_timers, ice_timer_start + use ice_transport_driver , only: init_transport + use ice_arrays_column , only: wavefreq, dwavefreq + + logical(kind=log_kind) :: tr_aero, tr_zaero, skl_bgc, z_tracers + logical(kind=log_kind) :: tr_iso, tr_fsd, wave_spec, tr_snow + character(len=char_len) :: snw_aging_table + real(kind=dbl_kind), dimension(25) :: wave_spectrum_profile ! hardwire for now + character(len=*), parameter :: subname = '(cice_init2)' + !---------------------------------------------------- + + call init_zbgc ! vertical biogeochemistry initialization + call init_calendar ! initialize some calendar stuff + call init_hist (dt) ! initialize output history file + + if (kdyn == 1) then + call init_evp ! define evp dynamics parameters, variables + elseif (kdyn == 2) then + call init_eap ! 
define eap dynamics parameters, variables + else if (kdyn == 3) then + call init_vp ! define vp dynamics parameters, variables + endif + + call init_coupler_flux ! initialize fluxes exchanged with coupler + call init_thermo_vertical ! initialize vertical thermodynamics + + call icepack_init_itd(hin_max=hin_max) ! ice thickness distribution + if (my_task == master_task) then + call icepack_init_itd_hist(hin_max=hin_max, c_hi_range=c_hi_range) ! output + endif + + call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (tr_fsd) call icepack_init_fsd_bounds ( & + floe_rad_l_out = floe_rad_l, & ! fsd size lower bound in m (radius) + floe_rad_c_out = floe_rad_c, & ! fsd size bin centre in m (radius) + floe_binwidth_out = floe_binwidth, & ! fsd size bin width in m (radius) + c_fsd_range_out = c_fsd_range, & ! string for history output + write_diags=(my_task == master_task)) ! write diag on master only + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call calendar() ! determine the initial date + + call init_state ! initialize the ice state + call init_transport ! initialize horizontal transport + call ice_HaloRestore_init ! restored boundary conditions + + call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers, & + wave_spec_out=wave_spec, snw_aging_table_out=snw_aging_table) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (skl_bgc .or. z_tracers) call alloc_forcing_bgc ! allocate biogeochemistry arrays + + call init_restart ! initialize restart variables + call init_diags ! initialize diagnostic output points + call init_history_therm ! initialize thermo history variables + call init_history_dyn ! 
initialize dynamic history variables + call icepack_init_radiation ! initialize icepack shortwave tables + + call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_zaero_out=tr_zaero) + call icepack_query_tracer_flags(tr_iso_out=tr_iso, tr_snow_out=tr_snow) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + ! snow aging lookup table initialization + if (tr_snow) then ! advanced snow physics + call icepack_init_snow() + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + if (snw_aging_table(1:4) /= 'test') then + call init_snowtable() + endif + endif + + if (wave_spec) then + call icepack_init_wave(nfreq=nfreq, & + wave_spectrum_profile=wave_spectrum_profile, wavefreq=wavefreq, dwavefreq=dwavefreq) + end if + + ! Initialize shortwave components using swdn from previous timestep + ! if restarting. These components will be scaled to current forcing + ! in prep_radiation. + + if (trim(runtype) == 'continue' .or. restart) then + call init_shortwave ! initialize radiative transfer + end if + + !-------------------------------------------------------------------- + ! coupler communication or forcing data initialization + !-------------------------------------------------------------------- + + if (z_tracers) call get_atm_bgc ! biogeochemistry + + if (runtype == 'initial' .and. .not. restart) then + call init_shortwave ! initialize radiative transfer using current swdn + end if + + call init_flux_atm ! initialize atmosphere fluxes sent to coupler + call init_flux_ocn ! initialize ocean fluxes sent to coupler + + call dealloc_grid ! 
deallocate temporary grid arrays + + end subroutine cice_init2 + + !======================================================================= + + subroutine init_restart() + + use ice_arrays_column, only: dhsn + use ice_blocks, only: nx_block, ny_block + use ice_calendar, only: calendar + use ice_constants, only: c0 + use ice_domain, only: nblocks + use ice_domain_size, only: ncat, n_iso, n_aero, nfsd, nslyr + use ice_dyn_eap, only: read_restart_eap + use ice_dyn_shared, only: kdyn + use ice_flux, only: Tf + use ice_grid, only: tmask + use ice_init, only: ice_ic + use ice_init_column, only: init_age, init_FY, init_lvl, init_snowtracers, & + init_meltponds_lvl, init_meltponds_topo, & + init_isotope, init_aerosol, init_hbrine, init_bgc, init_fsd + use ice_restart_column, only: restart_age, read_restart_age, & + restart_FY, read_restart_FY, restart_lvl, read_restart_lvl, & + restart_pond_lvl, read_restart_pond_lvl, & + restart_pond_topo, read_restart_pond_topo, & + restart_snow, read_restart_snow, & + restart_fsd, read_restart_fsd, & + restart_iso, read_restart_iso, & + restart_aero, read_restart_aero, & + restart_hbrine, read_restart_hbrine, & + restart_bgc + use ice_restart_driver, only: restartfile + use ice_restart_shared, only: runtype, restart + use ice_state ! almost everything + + integer(kind=int_kind) :: & + i, j , & ! horizontal indices + iblk ! 
block index + logical(kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, tr_pond_lvl, & + tr_pond_topo, tr_fsd, tr_iso, tr_aero, tr_brine, tr_snow, & + skl_bgc, z_tracers + integer(kind=int_kind) :: & + ntrcr + integer(kind=int_kind) :: & + nt_alvl, nt_vlvl, nt_apnd, nt_hpnd, nt_ipnd, & + nt_smice, nt_smliq, nt_rhos, nt_rsnw, & + nt_iage, nt_FY, nt_aero, nt_fsd, nt_isosno, nt_isoice + + character(len=*), parameter :: subname = '(init_restart)' + !---------------------------------------------------- + + call icepack_query_tracer_sizes(ntrcr_out=ntrcr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + call icepack_query_parameters(skl_bgc_out=skl_bgc, z_tracers_out=z_tracers) + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo, tr_aero_out=tr_aero, tr_brine_out=tr_brine, & + tr_snow_out=tr_snow, tr_fsd_out=tr_fsd, tr_iso_out=tr_iso) + call icepack_query_tracer_indices(nt_alvl_out=nt_alvl, nt_vlvl_out=nt_vlvl, & + nt_apnd_out=nt_apnd, nt_hpnd_out=nt_hpnd, nt_ipnd_out=nt_ipnd, & + nt_iage_out=nt_iage, nt_FY_out=nt_FY, nt_aero_out=nt_aero, nt_fsd_out=nt_fsd, & + nt_smice_out=nt_smice, nt_smliq_out=nt_smliq, & + nt_rhos_out=nt_rhos, nt_rsnw_out=nt_rsnw, & + nt_isosno_out=nt_isosno, nt_isoice_out=nt_isoice) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + if (trim(runtype) == 'continue') then + ! start from core restart file + call restartfile() ! given by pointer in ice_in + call calendar() ! update time parameters + if (kdyn == 2) call read_restart_eap ! EAP + else if (restart) then ! ice_ic = core restart file + call restartfile (ice_ic) ! or 'default' or 'none' +!!! uncomment to create netcdf + ! call restartfile_v4 (ice_ic) ! CICE v4.1 binary restart file +!!! 
uncomment if EAP restart data exists + ! if (kdyn == 2) call read_restart_eap + endif + + ! tracers + ! ice age tracer + if (tr_iage) then + if (trim(runtype) == 'continue') & + restart_age = .true. + if (restart_age) then + call read_restart_age + else + do iblk = 1, nblocks + call init_age(trcrn(:,:,nt_iage,:,iblk)) + enddo ! iblk + endif + endif + ! first-year area tracer + if (tr_FY) then + if (trim(runtype) == 'continue') restart_FY = .true. + if (restart_FY) then + call read_restart_FY + else + do iblk = 1, nblocks + call init_FY(trcrn(:,:,nt_FY,:,iblk)) + enddo ! iblk + endif + endif + ! level ice tracer + if (tr_lvl) then + if (trim(runtype) == 'continue') restart_lvl = .true. + if (restart_lvl) then + call read_restart_lvl + else + do iblk = 1, nblocks + call init_lvl(iblk,trcrn(:,:,nt_alvl,:,iblk), & + trcrn(:,:,nt_vlvl,:,iblk)) + enddo ! iblk + endif + endif + ! level-ice melt ponds + if (tr_pond_lvl) then + if (trim(runtype) == 'continue') & + restart_pond_lvl = .true. + if (restart_pond_lvl) then + call read_restart_pond_lvl + else + do iblk = 1, nblocks + call init_meltponds_lvl(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk), & + dhsn(:,:,:,iblk)) + enddo ! iblk + endif + endif + ! topographic melt ponds + if (tr_pond_topo) then + if (trim(runtype) == 'continue') & + restart_pond_topo = .true. + if (restart_pond_topo) then + call read_restart_pond_topo + else + do iblk = 1, nblocks + call init_meltponds_topo(trcrn(:,:,nt_apnd,:,iblk), & + trcrn(:,:,nt_hpnd,:,iblk), & + trcrn(:,:,nt_ipnd,:,iblk)) + enddo ! iblk + endif ! .not. restart_pond + endif + ! snow redistribution/metamorphism + if (tr_snow) then + if (trim(runtype) == 'continue') restart_snow = .true. 
+ if (restart_snow) then + call read_restart_snow + else + do iblk = 1, nblocks + call init_snowtracers(trcrn(:,:,nt_smice:nt_smice+nslyr-1,:,iblk), & + trcrn(:,:,nt_smliq:nt_smliq+nslyr-1,:,iblk), & + trcrn(:,:,nt_rhos :nt_rhos +nslyr-1,:,iblk), & + trcrn(:,:,nt_rsnw :nt_rsnw +nslyr-1,:,iblk)) + enddo ! iblk + endif + endif + + ! floe size distribution + if (tr_fsd) then + if (trim(runtype) == 'continue') restart_fsd = .true. + if (restart_fsd) then + call read_restart_fsd + else + call init_fsd(trcrn(:,:,nt_fsd:nt_fsd+nfsd-1,:,:)) + endif + endif + ! isotopes + if (tr_iso) then + if (trim(runtype) == 'continue') restart_iso = .true. + if (restart_iso) then + call read_restart_iso + else + do iblk = 1, nblocks + call init_isotope(trcrn(:,:,nt_isosno:nt_isosno+n_iso-1,:,iblk), & + trcrn(:,:,nt_isoice:nt_isoice+n_iso-1,:,iblk)) + enddo ! iblk + endif + endif + + if (tr_aero) then ! ice aerosol + if (trim(runtype) == 'continue') restart_aero = .true. + if (restart_aero) then + call read_restart_aero + else + do iblk = 1, nblocks + call init_aerosol(trcrn(:,:,nt_aero:nt_aero+4*n_aero-1,:,iblk)) + enddo ! iblk + endif ! .not. restart_aero + endif + + if (trim(runtype) == 'continue') then + if (tr_brine) & + restart_hbrine = .true. + if (skl_bgc .or. z_tracers) & + restart_bgc = .true. + endif + + if (tr_brine .or. skl_bgc) then ! brine height tracer + call init_hbrine + if (tr_brine .and. restart_hbrine) call read_restart_hbrine + endif + + if (skl_bgc .or. z_tracers) then ! biogeochemistry + if (tr_fsd) then + write (nu_diag,*) 'FSD implementation incomplete for use with BGC' + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + endif + call init_bgc + endif + + !----------------------------------------------------------------- + ! 
aggregate tracers + !----------------------------------------------------------------- + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + if (tmask(i,j,iblk)) then + call icepack_aggregate( & + aicen = aicen(i,j,:,iblk), & + trcrn = trcrn(i,j,:,:,iblk), & + vicen = vicen(i,j,:,iblk), & + vsnon = vsnon(i,j,:,iblk), & + aice = aice (i,j, iblk), & + trcr = trcr (i,j,:,iblk), & + vice = vice (i,j, iblk), & + vsno = vsno (i,j, iblk), & + aice0 = aice0(i,j, iblk), & + trcr_depend = trcr_depend, & + trcr_base = trcr_base, & + n_trcr_strata = n_trcr_strata, & + nt_strata = nt_strata, & + Tf = Tf(i,j,iblk)) + else + ! tcraig, reset all tracer values on land to zero + trcrn(i,j,:,:,iblk) = c0 + endif + enddo + enddo + enddo + !$OMP END PARALLEL DO + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + end subroutine init_restart + + !======================================================================= + + subroutine access_verify_inputs() + ! 
Check required namelist settings for ACCESS CM3 + logical(kind=log_kind) :: & + tr_pond_lvl + character(len=*), parameter :: subname = '(access_verify_inputs)' + + + call icepack_query_tracer_flags(tr_pond_lvl_out=tr_pond_lvl) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(trim(subname), & + file=__FILE__,line= __LINE__) + + if (tr_pond_lvl) then + write(nu_diag,*) subname//' ERROR: Wrong meltpond scheme selected' + write(nu_diag,*) subname//' ERROR: tr_pond_lvl = ', tr_pond_lvl + call abort_ice (error_message=subname//' Level pond scheme not supported in ACCESS CM3 coupling', & + file=__FILE__, line=__LINE__) + end if + + end subroutine access_verify_inputs + !======================================================================= + +end module CICE_InitMod + +!======================================================================= diff --git a/cicecore/drivers/access/cmeps/CICE_RunMod.F90 b/cicecore/drivers/access/cmeps/CICE_RunMod.F90 new file mode 100644 index 000000000..80905080e --- /dev/null +++ b/cicecore/drivers/access/cmeps/CICE_RunMod.F90 @@ -0,0 +1,740 @@ +!======================================================================= +! +! Main driver for time stepping of CICE. +! +! authors Elizabeth C. Hunke, LANL +! Philip W. Jones, LANL +! William H. Lipscomb, LANL +! +! 2006 ECH: moved exit timeLoop to prevent execution of unnecessary timestep +! 2006 ECH: Streamlined for efficiency +! 2006 ECH: Converted to free source form (F90) +! 2007 BPB: Modified Delta-Eddington shortwave interface +! 
2008 ECH: moved ESMF code to its own driver + + module CICE_RunMod + + use ice_kinds_mod + use cice_wrapper_mod, only : t_startf, t_stopf, t_barrierf + use cice_wrapper_mod, only : ufs_logfhour + use ice_fileunits, only: nu_diag + use ice_arrays_column, only: oceanmixed_ice + use ice_constants, only: c0, c1 + use ice_constants, only: field_loc_center, field_type_scalar + use ice_exit, only: abort_ice + use icepack_intfc, only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc, only: icepack_max_iso, icepack_max_aero + use icepack_intfc, only: icepack_query_parameters + use icepack_intfc, only: icepack_query_tracer_flags, icepack_query_tracer_sizes + + implicit none + private + public :: CICE_Run, ice_step + +!======================================================================= + + contains + +!======================================================================= +! +! This is the main driver routine for advancing CICE forward in time. +! +! author Elizabeth C. Hunke, LANL +! Philip W. Jones, LANL +! William H. Lipscomb, LANL + + subroutine CICE_Run + + use ice_calendar, only: istep, istep1, dt, calendar, advance_timestep + use ice_forcing, only: get_forcing_atmo, get_forcing_ocn, & + get_wave_spec + use ice_forcing_bgc, only: get_forcing_bgc, get_atm_bgc, & + fiso_default, faero_default + use ice_flux, only: init_flux_atm, init_flux_ocn + use ice_timers, only: ice_timer_start, ice_timer_stop, & + timer_couple, timer_step + logical (kind=log_kind) :: & + tr_iso, tr_aero, tr_zaero, skl_bgc, z_tracers, wave_spec, tr_fsd + character(len=*), parameter :: subname = '(CICE_Run)' + + !-------------------------------------------------------------------- + ! initialize error code and step timer + !-------------------------------------------------------------------- + + call ice_timer_start(timer_step) ! 
start timing entire run + + call icepack_query_parameters(skl_bgc_out=skl_bgc, & + z_tracers_out=z_tracers, & + wave_spec_out=wave_spec) + call icepack_query_tracer_flags(tr_iso_out=tr_iso, & + tr_aero_out=tr_aero, & + tr_zaero_out=tr_zaero, & + tr_fsd_out=tr_fsd) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !-------------------------------------------------------------------- + ! timestep loop + !-------------------------------------------------------------------- + + call ice_timer_start(timer_couple) ! atm/ocn coupling + + call advance_timestep() ! advance timestep and update calendar data + + if (z_tracers) call get_atm_bgc ! biogeochemistry + + call init_flux_atm ! Initialize atmosphere fluxes sent to coupler + call init_flux_ocn ! initialize ocean fluxes sent to coupler + + call ice_timer_stop(timer_couple) ! atm/ocn coupling + + call ice_step + + !-------------------------------------------------------------------- + ! end of timestep loop + !-------------------------------------------------------------------- + + call ice_timer_stop(timer_step) ! end timestepping loop timer + + end subroutine CICE_Run + +!======================================================================= +! +! Calls drivers for physics components, some initialization, and output +! +! author Elizabeth C. Hunke, LANL +! William H. 
Lipscomb, LANL + + subroutine ice_step + + use ice_constants, only: c3600 + use ice_boundary, only: ice_HaloUpdate + use ice_calendar, only: dt, dt_dyn, ndtd, diagfreq, write_restart, istep + use ice_calendar, only: idate, myear, mmonth, mday, msec, timesecs + use ice_calendar, only: calendar_sec2hms, write_history, nstreams, histfreq + use ice_diagnostics, only: init_mass_diags, runtime_diags, debug_model, debug_ice + use ice_diagnostics_bgc, only: hbrine_diags, bgc_diags + use ice_domain, only: halo_info, nblocks + use ice_dyn_eap, only: write_restart_eap + use ice_dyn_shared, only: kdyn, kridge + use ice_flux, only: scale_factor, init_history_therm, & + daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd + use ice_history, only: accum_hist + use ice_history_bgc, only: init_history_bgc + use ice_restart, only: final_restart + use ice_restart_column, only: write_restart_age, write_restart_FY, & + write_restart_lvl, write_restart_pond_lvl, & + write_restart_pond_topo, write_restart_aero, write_restart_fsd, & + write_restart_iso, write_restart_bgc, write_restart_hbrine, & + write_restart_snow + use ice_restart_driver, only: dumpfile + use ice_restoring, only: restore_ice, ice_HaloRestore + use ice_step_mod, only: prep_radiation, step_therm1, step_therm2, & + update_state, step_dyn_horiz, step_dyn_ridge, step_radiation, & + biogeochemistry, step_prep, step_dyn_wave, step_snow + use ice_timers, only: ice_timer_start, ice_timer_stop, & + timer_diags, timer_column, timer_thermo, timer_bound, & + timer_hist, timer_readwrite + use ice_communicate, only: MPI_COMM_ICE, my_task, master_task + use ice_prescribed_mod + + integer (kind=int_kind) :: & + iblk , & ! block index + k , & ! dynamics supercycling index + ktherm ! thermodynamics is off when ktherm = -1 + + real (kind=dbl_kind) :: & + offset ! 
d(age)/dt time offset + + logical (kind=log_kind) :: & + tr_iage, tr_FY, tr_lvl, tr_fsd, tr_snow, & + tr_pond_lvl, tr_pond_topo, tr_brine, tr_iso, tr_aero, & + calc_Tsfc, skl_bgc, z_tracers, wave_spec + + character(len=*), parameter :: subname = '(ice_step)' + + character (len=char_len) :: plabeld + integer (kind=int_kind) :: hh,mm,ss,ns + character (len=char_len) :: logmsg + + if (debug_model) then + plabeld = 'beginning time step' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo + endif + + call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc, skl_bgc_out=skl_bgc, & + z_tracers_out=z_tracers, ktherm_out=ktherm, wave_spec_out=wave_spec) + call icepack_query_tracer_flags(tr_iage_out=tr_iage, tr_FY_out=tr_FY, & + tr_lvl_out=tr_lvl, tr_pond_lvl_out=tr_pond_lvl, & + tr_pond_topo_out=tr_pond_topo, tr_brine_out=tr_brine, tr_aero_out=tr_aero, & + tr_iso_out=tr_iso, tr_fsd_out=tr_fsd, tr_snow_out=tr_snow) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + !----------------------------------------------------------------- + ! restoring on grid boundaries + !----------------------------------------------------------------- + + if (restore_ice) call ice_HaloRestore + + !----------------------------------------------------------------- + ! initialize diagnostics and save initial state values + !----------------------------------------------------------------- + + call ice_timer_start(timer_diags) ! diagnostics/history + call init_mass_diags ! diagnostics per timestep + call init_history_therm + call init_history_bgc + call ice_timer_stop(timer_diags) ! diagnostics/history + + if (prescribed_ice) then ! read prescribed ice + call t_barrierf('cice_run_presc_BARRIER',MPI_COMM_ICE) + call t_startf ('cice_run_presc') + call ice_prescribed_run(idate, msec) + call t_stopf ('cice_run_presc') + endif + + call step_prep + + call ice_timer_start(timer_column) ! 
column physics + call ice_timer_start(timer_thermo) ! thermodynamics + + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + + if (ktherm >= 0) then + + !----------------------------------------------------------------- + ! scale radiation fields + !----------------------------------------------------------------- + + if (calc_Tsfc) call prep_radiation (iblk) + if (debug_model) then + plabeld = 'post prep_radiation' + call debug_ice (iblk, plabeld) + endif + + !----------------------------------------------------------------- + ! thermodynamics and biogeochemistry + !----------------------------------------------------------------- + + call step_therm1 (dt, iblk) ! vertical thermodynamics + if (debug_model) then + plabeld = 'post step_therm1' + call debug_ice (iblk, plabeld) + endif + + call biogeochemistry (dt, iblk) ! biogeochemistry + if (debug_model) then + plabeld = 'post biogeochemistry' + call debug_ice (iblk, plabeld) + endif + + if (.not.prescribed_ice) & + call step_therm2 (dt, iblk) ! ice thickness distribution thermo + if (debug_model) then + plabeld = 'post step_therm2' + call debug_ice (iblk, plabeld) + endif + + endif ! ktherm > 0 + + enddo ! iblk + !$OMP END PARALLEL DO + + ! clean up, update tendency diagnostics + offset = dt + call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & + dagedt=dagedtt, offset=offset) + + call ice_timer_stop(timer_thermo) ! thermodynamics + call ice_timer_stop(timer_column) ! column physics + + !----------------------------------------------------------------- + ! dynamics, transport, ridging + !----------------------------------------------------------------- + + if (.not.prescribed_ice) then + + ! wave fracture of the floe size distribution + ! note this is called outside of the dynamics subcycling loop + if (tr_fsd .and. wave_spec) call step_dyn_wave(dt) + + do k = 1, ndtd + + ! 
momentum, stress, transport + call step_dyn_horiz (dt_dyn) + if (debug_model) then + plabeld = 'post step_dyn_horiz' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo ! iblk + endif + + ! ridging + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + if (kridge > 0) call step_dyn_ridge (dt_dyn, ndtd, iblk) + enddo + !$OMP END PARALLEL DO + if (debug_model) then + plabeld = 'post step_dyn_ridge' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo ! iblk + endif + + ! clean up, update tendency diagnostics + offset = c0 + call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & + dagedt=dagedtd, offset=offset) + + enddo + if (debug_model) then + plabeld = 'post dynamics' + do iblk = 1, nblocks + call debug_ice (iblk, plabeld) + enddo + endif + + endif ! not prescribed ice + + !----------------------------------------------------------------- + ! albedo, shortwave radiation + !----------------------------------------------------------------- + + call ice_timer_start(timer_column) ! column physics + call ice_timer_start(timer_thermo) ! thermodynamics + + !----------------------------------------------------------------- + ! snow redistribution and metamorphosis + !----------------------------------------------------------------- + + if (tr_snow) then ! advanced snow physics + do iblk = 1, nblocks + call step_snow (dt, iblk) + enddo + call update_state (dt=dt) ! clean up + endif + +!MHRI: CHECK THIS OMP + !$OMP PARALLEL DO PRIVATE(iblk) + do iblk = 1, nblocks + + if (ktherm >= 0) call step_radiation (dt, iblk) + if (debug_model) then + plabeld = 'post step_radiation' + call debug_ice (iblk, plabeld) + endif + + !----------------------------------------------------------------- + ! 
get ready for coupling and the next time step + !----------------------------------------------------------------- + + call coupling_prep (iblk) + if (debug_model) then + plabeld = 'post coupling_prep' + call debug_ice (iblk, plabeld) + endif + enddo ! iblk + !$OMP END PARALLEL DO + + call ice_timer_start(timer_bound) + call ice_HaloUpdate (scale_factor, halo_info, & + field_loc_center, field_type_scalar) + call ice_timer_stop(timer_bound) + + call ice_timer_stop(timer_thermo) ! thermodynamics + call ice_timer_stop(timer_column) ! column physics + + !----------------------------------------------------------------- + ! write data + !----------------------------------------------------------------- + + call ice_timer_start(timer_diags) ! diagnostics + if (mod(istep,diagfreq) == 0) then + call runtime_diags(dt) ! log file + if (skl_bgc .or. z_tracers) call bgc_diags + if (tr_brine) call hbrine_diags + endif + call ice_timer_stop(timer_diags) ! diagnostics + + call ice_timer_start(timer_hist) ! history + call accum_hist (dt) ! history file + call ice_timer_stop(timer_hist) ! history + + call ice_timer_start(timer_readwrite) ! reading/writing + if (write_restart == 1) then + call dumpfile ! core variables for restarting + if (tr_iage) call write_restart_age + if (tr_FY) call write_restart_FY + if (tr_lvl) call write_restart_lvl + if (tr_pond_lvl) call write_restart_pond_lvl + if (tr_pond_topo) call write_restart_pond_topo + if (tr_snow) call write_restart_snow + if (tr_fsd) call write_restart_fsd + if (tr_iso) call write_restart_iso + if (tr_aero) call write_restart_aero + if (skl_bgc .or. z_tracers) & + call write_restart_bgc + if (tr_brine) call write_restart_hbrine + if (kdyn == 2) call write_restart_eap + call final_restart + endif + + call ice_timer_stop(timer_readwrite) ! reading/writing + if (my_task == master_task) then + do ns = 1,nstreams + if (write_history(ns) .and. histfreq(ns) .eq. 
'h') then
+               call calendar_sec2hms(msec,hh,mm,ss)
+               write(logmsg,'(6(i4,2x))')myear,mmonth,mday,hh,mm,ss
+               call ufs_logfhour(trim(logmsg),timesecs/c3600)
+            end if
+         end do
+      end if
+      end subroutine ice_step
+
+!=======================================================================
+!
+! Prepare for coupling
+!
+! authors: Elizabeth C. Hunke, LANL
+
+      subroutine coupling_prep (iblk)
+
+      use ice_arrays_column, only: alvdfn, alidfn, alvdrn, alidrn, &
+          albicen, albsnon, albpndn, apeffn, snowfracn
+      use ice_blocks, only: nx_block, ny_block, get_block, block
+      use ice_domain, only: blocks_ice
+      use ice_calendar, only: dt, nstreams
+      use ice_domain_size, only: ncat
+      use ice_flux, only: alvdf, alidf, alvdr, alidr, albice, albsno, &
+          albpnd, albcnt, apeff_ai, fpond, fresh, l_mpond_fresh, &
+          alvdf_ai, alidf_ai, alvdr_ai, alidr_ai, fhocn_ai, &
+          fresh_ai, fsalt_ai, fsalt, &
+          fswthru_ai, fhocn, fswthru, scale_factor, snowfrac, &
+          fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf, &
+          swvdr, swidr, swvdf, swidf, Tf, Tair, Qa, strairxT, strairyT, &
+          fsens, flat, fswabs, flwout, evap, Tref, Qref, &
+          scale_fluxes, frzmlt_init, frzmlt, Uref, wind
+      use ice_flux_bgc, only: faero_ocn, fiso_ocn, Qref_iso, fiso_evap, &
+          flux_bio, flux_bio_ai, fnit, fsil, famm, fdmsp, fdms, fhum, &
+          fdust, falgalN, fdoc, fdic, fdon, ffep, ffed, bgcflux_ice_to_ocn
+      use ice_grid, only: tmask
+      use ice_state, only: aicen, aice
+      use ice_state, only: aice_init
+      use ice_flux, only: flatn_f, fsurfn_f
+      use ice_step_mod, only: ocean_mixed_layer
+      use ice_timers, only: timer_couple, ice_timer_start, ice_timer_stop
+
+      integer (kind=int_kind), intent(in) :: &
+         iblk            ! block index
+
+      ! local variables
+
+      integer (kind=int_kind) :: &
+         ilo,ihi,jlo,jhi, & ! beginning and end of physical domain
+         n           , & ! thickness category index
+         i,j         , & ! horizontal indices
+         k           , & ! tracer index
+         nbtrcr          ! number of bio tracers (from icepack_query_tracer_sizes)
+
+      type (block) :: &
+         this_block      ! block information for current block
+
+      logical (kind=log_kind) :: &
+         skl_bgc     , & ! skeletal-layer bgc flag (from icepack_query_parameters)
+         calc_Tsfc       ! true if CICE computes Tsfc internally (Icepack option)
+
+      real (kind=dbl_kind) :: &
+         cszn        , & ! counter for history averaging
+         puny        , & ! small positive number (from icepack_query_parameters)
+         rhofresh    , & ! fresh water density (from icepack_query_parameters)
+         netsw           ! flag for shortwave radiation presence
+
+      character(len=*), parameter :: subname = '(coupling_prep)'
+
+      !-----------------------------------------------------------------
+
+      ! Retrieve constants and run-time options from Icepack; abort on
+      ! any buffered Icepack warning that flags an error.
+      call icepack_query_parameters(puny_out=puny, rhofresh_out=rhofresh)
+      call icepack_query_parameters(skl_bgc_out=skl_bgc)
+      call icepack_query_tracer_sizes(nbtrcr_out=nbtrcr)
+      call icepack_query_parameters(calc_Tsfc_out=calc_Tsfc)
+      call icepack_warnings_flush(nu_diag)
+      if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+         file=__FILE__, line=__LINE__)
+
+      !-----------------------------------------------------------------
+      ! Save current value of frzmlt for diagnostics.
+      ! Update mixed layer with heat and radiation from ice.
+      !-----------------------------------------------------------------
+
+      do j = 1, ny_block
+      do i = 1, nx_block
+         frzmlt_init  (i,j,iblk) = frzmlt(i,j,iblk)
+      enddo
+      enddo
+
+      call ice_timer_start(timer_couple,iblk) ! atm/ocn coupling
+
+      if (oceanmixed_ice) &
+         call ocean_mixed_layer (dt,iblk) ! ocean surface fluxes and sst
+
+      !-----------------------------------------------------------------
+      ! Aggregate albedos
+      !-----------------------------------------------------------------
+
+      do j = 1, ny_block
+      do i = 1, nx_block
+         alvdf(i,j,iblk) = c0
+         alidf(i,j,iblk) = c0
+         alvdr(i,j,iblk) = c0
+         alidr(i,j,iblk) = c0
+
+         albice(i,j,iblk) = c0
+         albsno(i,j,iblk) = c0
+         albpnd(i,j,iblk) = c0
+         apeff_ai(i,j,iblk) = c0
+         snowfrac(i,j,iblk) = c0
+
+         ! for history averaging: count this step only when any shortwave
+         ! is present (cszn = 1), so albedo averages exclude polar night
+         cszn = c0
+         netsw = swvdr(i,j,iblk)+swidr(i,j,iblk)+swvdf(i,j,iblk)+swidf(i,j,iblk)
+         if (netsw > puny) cszn = c1
+         do n = 1, nstreams
+            albcnt(i,j,iblk,n) = albcnt(i,j,iblk,n) + cszn
+         enddo
+      enddo
+      enddo
+
+      this_block = get_block(blocks_ice(iblk),iblk)
+      ilo = this_block%ilo
+      ihi = this_block%ihi
+      jlo = this_block%jlo
+      jhi = this_block%jhi
+
+      ! Area-weighted aggregation of per-category albedos over the
+      ! physical (non-halo) portion of the block.
+      do n = 1, ncat
+      do j = jlo, jhi
+      do i = ilo, ihi
+         if (aicen(i,j,n,iblk) > puny) then
+
+            alvdf(i,j,iblk) = alvdf(i,j,iblk) &
+               + alvdfn(i,j,n,iblk)*aicen(i,j,n,iblk)
+            alidf(i,j,iblk) = alidf(i,j,iblk) &
+               + alidfn(i,j,n,iblk)*aicen(i,j,n,iblk)
+            alvdr(i,j,iblk) = alvdr(i,j,iblk) &
+               + alvdrn(i,j,n,iblk)*aicen(i,j,n,iblk)
+            alidr(i,j,iblk) = alidr(i,j,iblk) &
+               + alidrn(i,j,n,iblk)*aicen(i,j,n,iblk)
+
+            netsw = swvdr(i,j,iblk) + swidr(i,j,iblk) &
+                  + swvdf(i,j,iblk) + swidf(i,j,iblk)
+            if (netsw > puny) then ! sun above horizon
+               albice(i,j,iblk) = albice(i,j,iblk) &
+                  + albicen(i,j,n,iblk)*aicen(i,j,n,iblk)
+               albsno(i,j,iblk) = albsno(i,j,iblk) &
+                  + albsnon(i,j,n,iblk)*aicen(i,j,n,iblk)
+               albpnd(i,j,iblk) = albpnd(i,j,iblk) &
+                  + albpndn(i,j,n,iblk)*aicen(i,j,n,iblk)
+            endif
+
+            apeff_ai(i,j,iblk) = apeff_ai(i,j,iblk) &      ! for history
+               + apeffn(i,j,n,iblk)*aicen(i,j,n,iblk)
+            snowfrac(i,j,iblk) = snowfrac(i,j,iblk) &      ! for history
+               + snowfracn(i,j,n,iblk)*aicen(i,j,n,iblk)
+
+         endif ! aicen > puny
+      enddo
+      enddo
+      enddo
+
+      do j = 1, ny_block
+      do i = 1, nx_block
+
+         !-----------------------------------------------------------------
+         ! reduce fresh by fpond for coupling
+         !-----------------------------------------------------------------
+
+         if (l_mpond_fresh) then
+            fpond(i,j,iblk) = fpond(i,j,iblk) * rhofresh/dt
+            fresh(i,j,iblk) = fresh(i,j,iblk) - fpond(i,j,iblk)
+         endif
+
+         !----------------------------------------------------------------
+         ! Store grid box mean albedos and fluxes before scaling by aice
+         !----------------------------------------------------------------
+
+         alvdf_ai  (i,j,iblk) = alvdf  (i,j,iblk)
+         alidf_ai  (i,j,iblk) = alidf  (i,j,iblk)
+         alvdr_ai  (i,j,iblk) = alvdr  (i,j,iblk)
+         alidr_ai  (i,j,iblk) = alidr  (i,j,iblk)
+         fresh_ai  (i,j,iblk) = fresh  (i,j,iblk)
+         fsalt_ai  (i,j,iblk) = fsalt  (i,j,iblk)
+         fhocn_ai  (i,j,iblk) = fhocn  (i,j,iblk)
+         fswthru_ai(i,j,iblk) = fswthru(i,j,iblk)
+
+         if (nbtrcr > 0) then
+            do k = 1, nbtrcr
+               flux_bio_ai  (i,j,k,iblk) = flux_bio  (i,j,k,iblk)
+            enddo
+         endif
+
+         !-----------------------------------------------------------------
+         ! Save net shortwave for scaling factor in scale_factor
+         !-----------------------------------------------------------------
+         scale_factor(i,j,iblk) = &
+                swvdr(i,j,iblk)*(c1 - alvdr_ai(i,j,iblk)) &
+              + swvdf(i,j,iblk)*(c1 - alvdf_ai(i,j,iblk)) &
+              + swidr(i,j,iblk)*(c1 - alidr_ai(i,j,iblk)) &
+              + swidf(i,j,iblk)*(c1 - alidf_ai(i,j,iblk))
+
+      enddo
+      enddo
+
+      !-----------------------------------------------------------------
+      ! Divide fluxes by ice area
+      !  - the CESM coupler assumes fluxes are per unit ice area
+      !  - also needed for global budget in diagnostics
+      !-----------------------------------------------------------------
+
+      call scale_fluxes (nx_block,            ny_block, &
+                         tmask    (:,:,iblk), nbtrcr, &
+                         icepack_max_aero, &
+                         aice     (:,:,iblk), Tf      (:,:,iblk), &
+                         Tair     (:,:,iblk), Qa      (:,:,iblk), &
+                         strairxT (:,:,iblk), strairyT(:,:,iblk), &
+                         fsens    (:,:,iblk), flat    (:,:,iblk), &
+                         fswabs   (:,:,iblk), flwout  (:,:,iblk), &
+                         evap     (:,:,iblk), &
+                         Tref     (:,:,iblk), Qref    (:,:,iblk), &
+                         fresh    (:,:,iblk), fsalt   (:,:,iblk), &
+                         fhocn    (:,:,iblk), &
+                         fswthru  (:,:,iblk), &
+                         fswthru_vdr (:,:,iblk), &
+                         fswthru_vdf (:,:,iblk), &
+                         fswthru_idr (:,:,iblk), &
+                         fswthru_idf (:,:,iblk), &
+                         faero_ocn(:,:,:,iblk), &
+                         alvdr    (:,:,iblk), alidr   (:,:,iblk), &
+                         alvdf    (:,:,iblk), alidf   (:,:,iblk), &
+                         flux_bio (:,:,1:nbtrcr,iblk), &
+                         Qref_iso =Qref_iso (:,:,:,iblk), &
+                         fiso_evap=fiso_evap(:,:,:,iblk), &
+                         fiso_ocn =fiso_ocn (:,:,:,iblk), &
+                         Uref=Uref(:,:,iblk), wind=wind(:,:,iblk) )
+
+      !-----------------------------------------------------------------
+      ! Define ice-ocean bgc fluxes
+      !-----------------------------------------------------------------
+
+      if (nbtrcr > 0 .or. skl_bgc) then
+         call bgcflux_ice_to_ocn (nx_block, ny_block, &
+                                  flux_bio(:,:,1:nbtrcr,iblk), &
+                                  fnit(:,:,iblk), fsil(:,:,iblk), &
+                                  famm(:,:,iblk), fdmsp(:,:,iblk), &
+                                  fdms(:,:,iblk), fhum(:,:,iblk), &
+                                  fdust(:,:,iblk), falgalN(:,:,:,iblk), &
+                                  fdoc(:,:,:,iblk), fdic(:,:,:,iblk), &
+                                  fdon(:,:,:,iblk), ffep(:,:,:,iblk), &
+                                  ffed(:,:,:,iblk))
+      endif
+
+!echmod - comment this out for efficiency, if .not. calc_Tsfc
+      if (.not. calc_Tsfc) then
+
+         !---------------------------------------------------------------
+         ! If surface fluxes were provided, conserve these fluxes at ice
+         ! free points by passing to ocean.
+         !---------------------------------------------------------------
+
+         call sfcflux_to_ocn &
+                         (nx_block,  ny_block, &
+                          tmask    (:,:,iblk), aice_init(:,:,iblk), &
+                          fsurfn_f (:,:,:,iblk), flatn_f(:,:,:,iblk), &
+                          fresh    (:,:,iblk), fhocn    (:,:,iblk))
+      endif
+!echmod
+      call ice_timer_stop(timer_couple,iblk) ! atm/ocn coupling
+
+      end subroutine coupling_prep
+
+!=======================================================================
+!
+! If surface heat fluxes are provided to CICE instead of CICE calculating
+! them internally (i.e. .not. calc_Tsfc), then these heat fluxes can
+! be provided at points which do not have ice. (This could be due to
+! the heat fluxes being calculated on a lower-resolution grid, or to the
+! heat fluxes not being recalculated at every CICE timestep.) At ice-free
+! points, conserve energy and water by passing these fluxes to the ocean.
+!
+! author: A. McLaren, Met Office
+
+      subroutine sfcflux_to_ocn(nx_block, ny_block, &
+                                tmask,    aice, &
+                                fsurfn_f, flatn_f, &
+                                fresh,    fhocn)
+
+      use ice_domain_size, only: ncat
+
+      integer (kind=int_kind), intent(in) :: &
+          nx_block, ny_block  ! block dimensions
+
+      logical (kind=log_kind), dimension (nx_block,ny_block), intent(in) :: &
+          tmask       ! land/boundary mask, thickness (T-cell)
+
+      real (kind=dbl_kind), dimension(nx_block,ny_block), intent(in):: &
+          aice        ! initial ice concentration
+
+      real (kind=dbl_kind), dimension(nx_block,ny_block,ncat), intent(in) :: &
+          fsurfn_f, & ! net surface heat flux (provided as forcing)
+          flatn_f     ! latent heat flux (provided as forcing)
+
+      real (kind=dbl_kind), dimension(nx_block,ny_block), intent(inout):: &
+          fresh    , & ! fresh water flux to ocean (kg/m2/s)
+          fhocn        ! actual ocn/ice heat flx (W/m**2)
+
+#ifdef CICE_IN_NEMO
+
+      ! local variables
+      integer (kind=int_kind) :: &
+          i, j, n    ! horizontal indices
+
+      real (kind=dbl_kind) :: &
+          puny, &    ! small positive number (from icepack_query_parameters)
+          Lsub, &    ! latent heat of sublimation (from icepack_query_parameters)
+          rLsub      ! 1/Lsub
+
+      character(len=*), parameter :: subname = '(sfcflux_to_ocn)'
+
+      call icepack_query_parameters(puny_out=puny, Lsub_out=Lsub)
+      call icepack_warnings_flush(nu_diag)
+      if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+          file=__FILE__, line=__LINE__)
+      rLsub = c1 / Lsub
+
+      ! At ocean points carrying no ice, hand the forcing heat fluxes to
+      ! the ocean; the latent-heat part also contributes the matching
+      ! mass flux (flatn_f/Lsub) to the freshwater budget.
+      do n = 1, ncat
+         do j = 1, ny_block
+         do i = 1, nx_block
+            if (tmask(i,j) .and. aice(i,j) <= puny) then
+               fhocn(i,j) = fhocn(i,j) &
+                          + fsurfn_f(i,j,n) + flatn_f(i,j,n)
+               fresh(i,j) = fresh(i,j) &
+                          + flatn_f(i,j,n) * rLsub
+            endif
+         enddo   ! i
+         enddo   ! j
+      enddo      ! n
+
+#endif
+
+      end subroutine sfcflux_to_ocn
+
+!=======================================================================
+
+      end module CICE_RunMod
+
+!=======================================================================
diff --git a/cicecore/drivers/access/cmeps/CICE_copyright.txt b/cicecore/drivers/access/cmeps/CICE_copyright.txt
new file mode 100644
index 000000000..3f81ec782
--- /dev/null
+++ b/cicecore/drivers/access/cmeps/CICE_copyright.txt
@@ -0,0 +1,17 @@
+! Copyright (c) 2024, Triad National Security, LLC
+! All rights reserved.
+!
+! Copyright 2024. Triad National Security, LLC. This software was
+! produced under U.S. Government contract DE-AC52-06NA25396 for Los
+! Alamos National Laboratory (LANL), which is operated by Triad
+! National Security, LLC for the U.S. Department of Energy. The U.S.
+! Government has rights to use, reproduce, and distribute this software.
+! NEITHER THE GOVERNMENT NOR TRIAD NATIONAL SECURITY, LLC MAKES ANY
+! WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF
+! THIS SOFTWARE. If software is modified to produce derivative works,
+! such modified software should be clearly marked, so as not to confuse
+! it with the version available from LANL.
+!
+! The full license and distribution policy are available from
+! https://github.com/CICE-Consortium
+!
diff --git a/cicecore/drivers/access/cmeps/cice_wrapper_mod.F90 b/cicecore/drivers/access/cmeps/cice_wrapper_mod.F90
new file mode 100644
index 000000000..db0140b3c
--- /dev/null
+++ b/cicecore/drivers/access/cmeps/cice_wrapper_mod.F90
@@ -0,0 +1,103 @@
+module cice_wrapper_mod
+
+#ifdef CESMCOUPLED
+  use perf_mod    , only : t_startf, t_stopf, t_barrierf
+  use shr_log_mod , only : shr_log_getlogunit, shr_log_setlogunit
+
+  use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long
+
+  implicit none
+
+  real(dbl_kind) :: wtime = 0.0
+contains
+  ! Define stub routines that do nothing - they are just here to avoid
+  ! having cppdefs in the main program (the UFS timers are inactive
+  ! when CESMCOUPLED is defined)
+  subroutine ufs_settimer(timevalue)
+    real(dbl_kind), intent(inout) :: timevalue
+  end subroutine ufs_settimer
+  subroutine ufs_logtimer(nunit,elapsedsecs,string,runtimelog,time0)
+    integer, intent(in) :: nunit
+    integer(int_kind), intent(in) :: elapsedsecs
+    character(len=*), intent(in) :: string
+    logical, intent(in) :: runtimelog
+    real(dbl_kind), intent(in) :: time0
+  end subroutine ufs_logtimer
+  subroutine ufs_file_setLogUnit(filename,nunit,runtimelog)
+    character(len=*), intent(in) :: filename
+    logical, intent(in) :: runtimelog
+    integer, intent(out) :: nunit
+  end subroutine ufs_file_setLogUnit
+  subroutine ufs_logfhour(msg,hour)
+    character(len=*), intent(in) :: msg
+    real(dbl_kind), intent(in) :: hour
+  end subroutine ufs_logfhour
+#else
+
+  use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long
+
+  implicit none
+
+  real(dbl_kind) :: wtime = 0.0
+contains
+  ! Record the current MPI wall-clock time in timevalue.
+  subroutine ufs_settimer(timevalue)
+    real(dbl_kind), intent(inout) :: timevalue
+    real(dbl_kind) :: MPI_Wtime   ! external MPI function (implicit interface)
+    timevalue = MPI_Wtime()
+  end subroutine ufs_settimer
+
+  ! Append an elapsed-time record to unit nunit; does nothing unless
+  ! runtimelog is enabled and a start time (time0 > 0) was recorded.
+  subroutine ufs_logtimer(nunit,elapsedsecs,string,runtimelog,time0)
+    integer, intent(in) :: nunit
+    integer(int_kind), intent(in) :: elapsedsecs
+    character(len=*), intent(in) :: string
+    logical, intent(in) :: runtimelog
+    real(dbl_kind), intent(in) :: time0
+    real(dbl_kind) :: MPI_Wtime, timevalue
+    if (.not. runtimelog) return
+    if (time0 > 0.) then
+      timevalue = MPI_Wtime()-time0
+      write(nunit,*)elapsedsecs,' CICE '//trim(string),timevalue
+    end if
+  end subroutine ufs_logtimer
+
+  ! Open the run-time log file and return its unit number in nunit.
+  ! NOTE(review): nunit is intent(out) but left undefined when
+  ! runtimelog is off - confirm callers guard their writes likewise.
+  subroutine ufs_file_setLogUnit(filename,nunit,runtimelog)
+    character(len=*), intent(in) :: filename
+    logical, intent(in) :: runtimelog
+    integer, intent(out) :: nunit
+    if (.not. runtimelog) return
+    open (newunit=nunit, file=trim(filename))
+  end subroutine ufs_file_setLogUnit
+
+  ! Write the per-forecast-hour marker file log.ice.fHHHH with a
+  ! completion stamp, the forecast hour, and the valid-time string.
+  subroutine ufs_logfhour(msg,hour)
+    character(len=*), intent(in) :: msg
+    real(dbl_kind), intent(in) :: hour
+    character(len=char_len)  :: filename
+    integer(int_kind) :: nunit
+    write(filename,'(a,i4.4)')'log.ice.f',int(hour)
+    open(newunit=nunit,file=trim(filename))
+    write(nunit,'(a)')'completed: cice'
+    write(nunit,'(a,f10.3)')'forecast hour:',hour
+    write(nunit,'(a)')'valid time: '//trim(msg)
+    close(nunit)
+  end subroutine ufs_logfhour
+
+  ! Define stub routines that do nothing - they are just here to avoid
+  ! having cppdefs in the main program
+  subroutine shr_log_setLogUnit(nunit)
+    integer, intent(in) :: nunit
+  end subroutine shr_log_setLogUnit
+  subroutine shr_log_getLogUnit(nunit)
+    ! NOTE(review): intent(in) on a "get" stub means no unit can be
+    ! returned here; confirm callers ignore the result in this branch.
+    integer, intent(in) :: nunit
+  end subroutine shr_log_getLogUnit
+  subroutine t_startf(string)
+    character(len=*) :: string
+  end subroutine t_startf
+  subroutine t_stopf(string)
+    character(len=*) :: string
+  end subroutine t_stopf
+  subroutine t_barrierf(string, comm)
+    character(len=*) :: string
+    integer:: comm
+  end subroutine t_barrierf
+#endif
+
+end module cice_wrapper_mod
diff --git a/cicecore/drivers/access/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/access/cmeps/ice_comp_nuopc.F90
new file mode 100644
index 000000000..eae3b4c46
--- /dev/null
+++ b/cicecore/drivers/access/cmeps/ice_comp_nuopc.F90
@@ -0,0 +1,1606 @@
+module ice_comp_nuopc
+
+  !----------------------------------------------------------------------------
+  ! 
This is the NUOPC cap for CICE + !---------------------------------------------------------------------------- + + use ESMF + use NUOPC , only : NUOPC_CompDerive, NUOPC_CompSetEntryPoint, NUOPC_CompSpecialize + use NUOPC , only : NUOPC_CompFilterPhaseMap, NUOPC_IsUpdated, NUOPC_IsAtTime + use NUOPC , only : NUOPC_CompAttributeGet, NUOPC_Advertise + use NUOPC , only : NUOPC_SetAttribute, NUOPC_CompAttributeGet, NUOPC_CompAttributeSet + use NUOPC_Model , only : model_routine_SS => SetServices + use NUOPC_Model , only : model_label_Advance => label_Advance + use NUOPC_Model , only : model_label_DataInitialize => label_DataInitialize + use NUOPC_Model , only : model_label_SetRunClock => label_SetRunClock + use NUOPC_Model , only : model_label_Finalize => label_Finalize + use NUOPC_Model , only : NUOPC_ModelGet, SetVM + use ice_constants , only : ice_init_constants, c0 + use ice_shr_methods , only : chkerr, state_setscalar, state_getscalar, state_diagnose, alarmInit + use ice_shr_methods , only : get_component_instance, state_flddebug, state_reset + + use ice_import_export , only : ice_import, ice_export, ice_advertise_fields, ice_realize_fields, ice_increment_fluxes + use ice_domain_size , only : nx_global, ny_global + use ice_grid , only : grid_format, init_grid2 + use ice_communicate , only : init_communicate, my_task, master_task, mpi_comm_ice + use ice_calendar , only : force_restart_now, write_ic + use ice_calendar , only : idate, idate0, mday, mmonth, myear, year_init, month_init, day_init + use ice_calendar , only : msec, dt, calendar, calendar_type, nextsw_cday, istep + use ice_calendar , only : ice_calendar_noleap, ice_calendar_gregorian, use_leap_years + use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long + use ice_fileunits , only : nu_diag, nu_diag_set, inst_index, inst_name + use ice_fileunits , only : inst_suffix, release_all_fileunits, flush_fileunit + use ice_restart_shared , only : runid, runtype, restart, use_restart_time, 
restart_dir, restart_file, & + restart_format, restart_chunksize, pointer_date + use ice_history , only : accum_hist + use ice_history_shared , only : history_format, history_chunksize + use ice_exit , only : abort_ice + use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc , only : icepack_init_orbit, icepack_init_parameters, icepack_query_orbit + use icepack_intfc , only : icepack_query_tracer_flags, icepack_query_parameters + use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf + use cice_wrapper_mod , only : shr_log_getlogunit, shr_log_setlogunit + use cice_wrapper_mod , only : ufs_settimer, ufs_logtimer, ufs_file_setlogunit, wtime +#ifdef CESMCOUPLED + use shr_const_mod + use shr_orb_mod , only : shr_orb_decl, shr_orb_params, SHR_ORB_UNDEF_REAL, SHR_ORB_UNDEF_INT + use ice_scam , only : scmlat, scmlon, scol_mask, scol_frac, scol_ni, scol_nj, scol_area + use nuopc_shr_methods , only : set_component_logging +#else + use ice_shr_methods , only : set_component_logging +#endif + use ice_timers + use CICE_InitMod , only : cice_init1, cice_init2 + use CICE_RunMod , only : cice_run + use ice_mesh_mod , only : ice_mesh_set_distgrid, ice_mesh_setmask_from_maskfile, ice_mesh_check + use ice_mesh_mod , only : ice_mesh_init_tlon_tlat_area_hm, ice_mesh_create_scolumn + use ice_prescribed_mod , only : ice_prescribed_init + use ice_scam , only : scol_valid, single_column + + implicit none + private + + public :: SetServices + public :: SetVM + + private :: InitializeP0 + private :: InitializeAdvertise + private :: InitializeRealize + private :: ModelAdvance + private :: ModelSetRunClock + private :: ModelFinalize + private :: ice_orbital_init ! only valid for cesm + + character(len=char_len_long) :: flds_scalar_name = '' + integer :: flds_scalar_num = 0 + integer :: flds_scalar_index_nx = 0 + integer :: flds_scalar_index_ny = 0 + integer :: flds_scalar_index_nextsw_cday = 0 + + character(len=char_len_long) :: orb_mode ! 
attribute - orbital mode
+  integer                      :: orb_iyear          ! attribute - orbital year
+  integer                      :: orb_iyear_align    ! attribute - associated with model year
+  real(dbl_kind)               :: orb_obliq          ! attribute - obliquity in degrees
+  real(dbl_kind)               :: orb_mvelp          ! attribute - moving vernal equinox longitude
+  real(dbl_kind)               :: orb_eccen          ! attribute and update- orbital eccentricity
+
+  character(len=*) , parameter :: orb_fixed_year       = 'fixed_year'
+  character(len=*) , parameter :: orb_variable_year    = 'variable_year'
+  character(len=*) , parameter :: orb_fixed_parameters = 'fixed_parameters'
+
+  type(ESMF_Mesh)              :: ice_mesh
+
+  integer                      :: nthrds             ! Number of threads to use in this component
+  integer                      :: nu_timer = 6       ! Simple timer log, unused except by UFS
+  integer                      :: dbug = 0           ! cap debug verbosity (from dbug_flag attribute)
+  logical                      :: profile_memory = .false.
+  logical                      :: mastertask
+  logical                      :: runtimelog = .false.
+  logical                      :: restart_eor = .false. !End of run restart flag
+  integer                      :: start_ymd          ! Start date (YYYYMMDD)
+  integer                      :: start_tod          ! start time of day (s)
+  integer                      :: curr_ymd           ! Current date (YYYYMMDD)
+  integer                      :: curr_tod           ! Current time of day (s)
+  integer                      :: stop_ymd           ! stop date (YYYYMMDD)
+  integer                      :: stop_tod           ! stop time of day (sec)
+  integer                      :: ref_ymd            ! Reference date (YYYYMMDD)
+  integer                      :: ref_tod            ! reference time of day (s)
+  integer , parameter          :: debug_import = 0   ! internal debug level
+  integer , parameter          :: debug_export = 0   ! internal debug level
+  character(*), parameter      :: modName = "(ice_comp_nuopc)"
+  character(*), parameter      :: u_FILE_u = &
+       __FILE__
+
+!=======================================================================
contains
+!===============================================================================
+
+  ! Register the generic NUOPC model services, then attach the
+  ! CICE-specific initialize / advance / run-clock / finalize routines.
+  subroutine SetServices(gcomp, rc)
+
+    ! Arguments
+    type(ESMF_GridComp)  :: gcomp
+    integer, intent(out) :: rc
+
+    ! Local variables
+    character(len=*),parameter :: subname=trim(modName)//':(SetServices) '
+    !--------------------------------
+
+    rc = ESMF_SUCCESS
+    if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO)
+
+    ! the NUOPC gcomp component will register the generic methods
+    call NUOPC_CompDerive(gcomp, model_routine_SS, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    ! switching to IPD versions
+    call ESMF_GridCompSetEntryPoint(gcomp, ESMF_METHOD_INITIALIZE, &
+         userRoutine=InitializeP0, phase=0, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    ! set entry point for methods that require specific implementation
+    call NUOPC_CompSetEntryPoint(gcomp, ESMF_METHOD_INITIALIZE, &
+         phaseLabelList=(/"IPDv01p1"/), userRoutine=InitializeAdvertise, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    call NUOPC_CompSetEntryPoint(gcomp, ESMF_METHOD_INITIALIZE, &
+         phaseLabelList=(/"IPDv01p3"/), userRoutine=InitializeRealize, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    ! attach specializing method(s)
+    call NUOPC_CompSpecialize(gcomp, specLabel=model_label_Advance, &
+         specRoutine=ModelAdvance, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    ! replace the generic SetRunClock handler with the CICE-specific one
+    call ESMF_MethodRemove(gcomp, label=model_label_SetRunClock, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    call NUOPC_CompSpecialize(gcomp, specLabel=model_label_SetRunClock, &
+         specRoutine=ModelSetRunClock, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    call NUOPC_CompSpecialize(gcomp, specLabel=model_label_Finalize, &
+         specRoutine=ModelFinalize, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO)
+
+  end subroutine SetServices
+
+  !===============================================================================
+
+  ! Phase-0 initialize: restrict the phase map to IPDv01 and read the
+  ! ProfileMemory component attribute.
+  subroutine InitializeP0(gcomp, importState, exportState, clock, rc)
+
+    ! Arguments
+    type(ESMF_GridComp)   :: gcomp
+    type(ESMF_State)      :: importState, exportState
+    type(ESMF_Clock)      :: clock
+    integer, intent(out)  :: rc
+
+    logical                      :: isPresent, isSet
+    character(len=64)            :: value
+    character(len=char_len_long) :: logmsg
+    !--------------------------------
+
+    rc = ESMF_SUCCESS
+
+    ! Switch to IPDv01 by filtering all other phaseMap entries
+    call NUOPC_CompFilterPhaseMap(gcomp, ESMF_METHOD_INITIALIZE, &
+         acceptStringList=(/"IPDv01p"/), rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    profile_memory = .false.
+    call NUOPC_CompAttributeGet(gcomp, name="ProfileMemory", value=value, &
+         isPresent=isPresent, isSet=isSet, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    if (isPresent .and. isSet) profile_memory=(trim(value)=="true")
+    write(logmsg,*) profile_memory
+    call ESMF_LogWrite('CICE_cap:ProfileMemory = '//trim(logmsg), ESMF_LOGMSG_INFO)
+
+  end subroutine InitializeP0
+
+  !===============================================================================
+
+  subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc)
+
+    ! Arguments
+    type(ESMF_GridComp)  :: gcomp
+    type(ESMF_State)     :: importState, exportState
+    type(ESMF_Clock)     :: clock
+    integer, intent(out) :: rc
+
+    ! Local variables
+    character(len=char_len_long) :: cvalue
+    character(len=char_len_long) :: ice_meshfile
+    character(len=char_len_long) :: ice_maskfile
+    character(len=char_len_long) :: errmsg
+    logical                      :: isPresent, isSet
+    real(dbl_kind)               :: eccen, obliqr, lambm0, mvelpp
+    type(ESMF_DistGrid)          :: ice_distGrid
+    real(kind=dbl_kind)          :: atmiter_conv
+    real(kind=dbl_kind)          :: atmiter_conv_driver
+    integer (kind=int_kind)      :: natmiter
+    integer (kind=int_kind)      :: natmiter_driver
+    integer                      :: localPet
+    integer                      :: npes
+    type(ESMF_VM)                :: vm
+    integer                      :: lmpicom ! local communicator
+    type(ESMF_Time)              :: currTime     ! Current time
+    type(ESMF_Time)              :: startTime    ! Start time
+    type(ESMF_Time)              :: stopTime     ! Stop time
+    type(ESMF_Time)              :: refTime      ! 
Ref time + type(ESMF_TimeInterval) :: timeStep ! Model timestep + type(ESMF_CalKind_Flag) :: esmf_caltype ! esmf calendar type + integer :: yy,mm,dd ! Temporaries for time query + integer :: dtime ! time step + integer :: shrlogunit ! original log unit + character(len=char_len) :: starttype ! infodata start type + integer :: lsize ! local size of coupling array + integer :: n,c,g,i,j,m ! indices + integer :: iblk, jblk ! indices + integer :: ig, jg ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + + character(len=char_len_long) :: diag_filename = 'unset' + character(len=char_len_long) :: logmsg + character(len=char_len_long) :: single_column_lnd_domainfile + real(dbl_kind) :: scol_lon + real(dbl_kind) :: scol_lat + real(dbl_kind) :: scol_spval + character(len=char_len) :: tfrz_option ! tfrz_option from cice namelist + character(len=char_len) :: tfrz_option_driver ! tfrz_option from cice namelist + character(len=*), parameter :: subname=trim(modName)//':(InitializeAdvertise) ' + !-------------------------------- + + call ufs_settimer(wtime) + + call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldName", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + flds_scalar_name = trim(cvalue) + call ESMF_LogWrite(trim(subname)//' flds_scalar_name = '//trim(flds_scalar_name), ESMF_LOGMSG_INFO) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call abort_ice(subname//'Need to set attribute ScalarFieldName') + endif + + call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldCount", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. 
isSet) then + read(cvalue, *) flds_scalar_num + write(logmsg,*) flds_scalar_num + call ESMF_LogWrite(trim(subname)//' flds_scalar_num = '//trim(logmsg), ESMF_LOGMSG_INFO) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call abort_ice(subname//'Need to set attribute ScalarFieldCount') + endif + + call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldIdxGridNX", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) flds_scalar_index_nx + write(logmsg,*) flds_scalar_index_nx + call ESMF_LogWrite(trim(subname)//' : flds_scalar_index_nx = '//trim(logmsg), ESMF_LOGMSG_INFO) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call abort_ice(subname//'Need to set attribute ScalarFieldIdxGridNX') + endif + + call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldIdxGridNY", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) flds_scalar_index_ny + write(logmsg,*) flds_scalar_index_ny + call ESMF_LogWrite(trim(subname)//' : flds_scalar_index_ny = '//trim(logmsg), ESMF_LOGMSG_INFO) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call abort_ice(subname//'Need to set attribute ScalarFieldIdxGridNY') + endif + + call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldIdxNextSwCday", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) flds_scalar_index_nextsw_cday + write(logmsg,*) flds_scalar_index_nextsw_cday + call ESMF_LogWrite(trim(subname)//' : flds_scalar_index_nextsw_cday = '//trim(logmsg), ESMF_LOGMSG_INFO) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + endif + + call NUOPC_CompAttributeGet(gcomp, name='dbug_flag', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. 
isSet) then + read(cvalue,*) dbug + end if + write(logmsg,'(i6)') dbug + call ESMF_LogWrite('CICE_cap: dbug = '//trim(logmsg), ESMF_LOGMSG_INFO) + + call NUOPC_CompAttributeGet(gcomp, name="RunTimeLog", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) runtimelog=(trim(cvalue)=="true") + write(logmsg,*) runtimelog + call ESMF_LogWrite('CICE_cap:RunTimeLog = '//trim(logmsg), ESMF_LOGMSG_INFO) + + call NUOPC_CompAttributeGet(gcomp, name="write_restart_at_endofrun", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + if (trim(cvalue) .eq. '.true.') restart_eor = .true. + endif + +#ifdef CESMCOUPLED + pointer_date = .true. +#endif + + ! set CICE internal pointer_date variable based on nuopc settings + ! this appends a datestamp to the "rpointer" file + call NUOPC_CompAttributeGet(gcomp, name="restart_pointer_append_date", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) pointer_date = (trim(cvalue) .eq. ".true.") + !---------------------------------------------------------------------------- + ! 
generate local mpi comm + !---------------------------------------------------------------------------- + + call ESMF_GridCompGet(gcomp, vm=vm, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_VMGet(vm, mpiCommunicator=lmpicom, localPet=localPet, PetCount=npes, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + +#ifdef CESMCOUPLED + call ESMF_VMGet(vm, pet=localPet, peCount=nthrds, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (nthrds==1) then + call NUOPC_CompAttributeGet(gcomp, "nthreads", value=cvalue, rc=rc) + if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=u_FILE_u)) return + read(cvalue,*) nthrds + endif +!$ call omp_set_num_threads(nthrds) +#endif + + !---------------------------------------------------------------------------- + ! Initialize cice communicators + !---------------------------------------------------------------------------- + + call init_communicate(lmpicom) ! initial setup for message passing + mastertask = .false. + if (my_task == master_task) mastertask = .true. + + !---------------------------------------------------------------------------- + ! determine instance information + !---------------------------------------------------------------------------- + + call get_component_instance(gcomp, inst_suffix, inst_index, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + +! inst_name = "ICE"//trim(inst_suffix) + inst_name = "ICE" + + !---------------------------------------------------------------------------- + ! start cice timers + !---------------------------------------------------------------------------- + + call t_startf ('cice_init_total') + + !---------------------------------------------------------------------------- + ! Initialize constants + !---------------------------------------------------------------------------- + +#ifdef CESMCOUPLED + call ice_init_constants(omega_in=SHR_CONST_OMEGA, radius_in=SHR_CONST_REARTH, & + spval_dbl_in=SHR_CONST_SPVAL) + + ! 
TODO: get tfrz_option from driver + + call icepack_init_parameters( & + secday_in = SHR_CONST_CDAY, & + rhoi_in = SHR_CONST_RHOICE, & + rhow_in = SHR_CONST_RHOSW, & + cp_air_in = SHR_CONST_CPDAIR, & + cp_ice_in = SHR_CONST_CPICE, & + cp_ocn_in = SHR_CONST_CPSW, & + gravit_in = SHR_CONST_G, & + rhofresh_in = SHR_CONST_RHOFW, & + zvir_in = SHR_CONST_ZVIR, & + vonkar_in = SHR_CONST_KARMAN, & + cp_wv_in = SHR_CONST_CPWV, & + stefan_boltzmann_in = SHR_CONST_STEBOL, & + Tffresh_in = SHR_CONST_TKFRZ, & + Lsub_in = SHR_CONST_LATSUB, & + Lvap_in = SHR_CONST_LATVAP, & + !Lfresh_in = SHR_CONST_LATICE, & ! computed in init_parameters as Lsub-Lvap + Timelt_in = SHR_CONST_TKFRZ-SHR_CONST_TKFRZ, & + Tsmelt_in = SHR_CONST_TKFRZ-SHR_CONST_TKFRZ, & + ice_ref_salinity_in = SHR_CONST_ICE_REF_SAL, & + depressT_in = 0.054_dbl_kind, & + Tocnfrz_in = -34.0_dbl_kind*0.054_dbl_kind, & + pi_in = SHR_CONST_PI, & + snowpatch_in = 0.005_dbl_kind) + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) +#endif + + !---------------------------------------------------------------------------- + ! Determine attributes - also needed in realize phase to get grid information + !---------------------------------------------------------------------------- + + ! Get orbital values + ! Note that these values are obtained in a call to init_orbit in ice_shortwave.F90 + ! if CESMCOUPLED is not defined + + call ice_orbital_init(gcomp, clock, nu_diag, mastertask, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Determine runtype and possibly nextsw_cday + call NUOPC_CompAttributeGet(gcomp, name='start_type', value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) starttype + if (trim(starttype) == trim('startup')) then + runtype = "initial" + else if (trim(starttype) == trim('continue') ) then + runtype = "continue" + restart = .true. 
+ use_restart_time = .true. + else if (trim(starttype) == trim('branch')) then + runtype = "continue" + restart = .true. + use_restart_time = .true. + else + call abort_ice( subname//' ERROR: unknown starttype' ) + end if + + ! We assume here that on startup - nextsw_cday is just the current time + ! TOOD (mvertens, 2019-03-21): need to get the perpetual run working + if (trim(runtype) /= 'initial') then + ! Set nextsw_cday to -1 (this will skip an orbital calculation on initialization + nextsw_cday = -1.0_dbl_kind + else + call ESMF_ClockGet( clock, currTime=currTime, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_TimeGet( currTime, dayOfYear_r8=nextsw_cday, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + else + runtype = 'initial' ! determined from the namelist in ice_init if CESMCOUPLED is not defined + end if + + ! Determine runid + call NUOPC_CompAttributeGet(gcomp, name='case_name', value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc) + if (isPresent .and. isSet) then + read(cvalue,*) runid + else + ! read in from the namelist in ice_init.F90 if this is not an attribute passed from the driver + runid = 'unknown' + end if + + ! 
Get clock information before call to cice_init + call ESMF_ClockGet( clock, & + currTime=currTime, startTime=startTime, stopTime=stopTime, refTime=RefTime, & + timeStep=timeStep, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_TimeGet( currTime, yy=yy, mm=mm, dd=dd, s=curr_tod, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ice_cal_ymd2date(yy,mm,dd,curr_ymd) + + call ESMF_TimeGet( startTime, yy=yy, mm=mm, dd=dd, s=start_tod, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ice_cal_ymd2date(yy,mm,dd,start_ymd) + + call ESMF_TimeGet( stopTime, yy=yy, mm=mm, dd=dd, s=stop_tod, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ice_cal_ymd2date(yy,mm,dd,stop_ymd) + + call ESMF_TimeGet( refTime, yy=yy, mm=mm, dd=dd, s=ref_tod, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ice_cal_ymd2date(yy,mm,dd,ref_ymd) + + call ESMF_TimeIntervalGet( timeStep, s=dtime, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + dt = real(dtime) + + call ESMF_TimeGet( currTime, calkindflag=esmf_caltype, rc=rc ) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (esmf_caltype == ESMF_CALKIND_NOLEAP) then + calendar_type = ice_calendar_noleap + else if (esmf_caltype == ESMF_CALKIND_GREGORIAN) then + calendar_type = ice_calendar_gregorian + else + call abort_ice( subname//'ERROR:: bad calendar for ESMF' ) + end if + + !---------------------------------------------------------------------------- + ! Set cice logging + !---------------------------------------------------------------------------- + ! Note - this must be done AFTER the communicators are set + ! Note that sets the nu_diag module variable in ice_fileunits + ! 
Set the nu_diag_set flag so it's not reset later + + call shr_log_setLogUnit (shrlogunit) + call ufs_file_setLogUnit('./log.ice.timer',nu_timer,runtimelog) + + call NUOPC_CompAttributeGet(gcomp, name="diro", value=cvalue, & + isPresent=isPresent, isSet=isSet, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + diag_filename = trim(cvalue) + end if + call NUOPC_CompAttributeGet(gcomp, name="logfile", value=cvalue, & + isPresent=isPresent, isSet=isSet, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + diag_filename = trim(diag_filename) // '/' // trim(cvalue) + end if + + if (trim(diag_filename) /= 'unset') then + call set_component_logging(gcomp, mastertask, nu_diag, shrlogunit, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + nu_diag_set = .true. + end if + + !---------------------------------------------------------------------------- + ! First cice initialization phase - before initializing grid info + !---------------------------------------------------------------------------- + +#ifdef CESMCOUPLED + ! Determine if single column + + call NUOPC_CompAttributeGet(gcomp, name='scol_lon', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scmlon + call NUOPC_CompAttributeGet(gcomp, name='scol_lat', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scmlat + call NUOPC_CompAttributeGet(gcomp, name='scol_spval', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_spval + + if (scmlon > scol_spval .and. scmlat > scol_spval) then + call NUOPC_CompAttributeGet(gcomp, name='single_column_lnd_domainfile', & + value=single_column_lnd_domainfile, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (trim(single_column_lnd_domainfile) /= 'UNSET') then + single_column = .true. 
+ else + call abort_ice('single_column_domainfile cannot be null for single column mode') + end if + call NUOPC_CompAttributeGet(gcomp, name='scol_ocnmask', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_mask + call NUOPC_CompAttributeGet(gcomp, name='scol_ocnfrac', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_frac + call NUOPC_CompAttributeGet(gcomp, name='scol_ni', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_ni + call NUOPC_CompAttributeGet(gcomp, name='scol_nj', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_nj + call NUOPC_CompAttributeGet(gcomp, name='scol_area', value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) scol_area + + call ice_mesh_create_scolumn(scmlon, scmlat, ice_mesh, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + scol_valid = (scol_mask == 1) + if (.not. scol_valid) then + ! Read the cice namelist as part of the call to cice_init1 + ! Note that if single_column is true and scol_valid is not - will never get here + call t_startf ('cice_init1') + call cice_init1 + call t_stopf ('cice_init1') + ! Advertise fields + call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call t_stopf ('cice_init_total') + + ! ******************* + ! *** RETURN HERE *** + ! ******************* + RETURN + end if + end if + ! Read the cice namelist as part of the call to cice_init1 + ! Note that if single_column is true and scol_valid is not - will never get here + call t_startf ('cice_init1') + call cice_init1 + call t_stopf ('cice_init1') + + !----------------------------------------------------------------- + ! 
Advertise fields + !----------------------------------------------------------------- + call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + + ! Form of ocean freezing temperature + ! 'minus1p8' = -1.8 C + ! 'linear_salt' = -depressT * sss + ! 'mushy' conforms with ktherm=2 + call NUOPC_CompAttributeGet(gcomp, name="tfreeze_option", value=tfrz_option_driver, & + isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (.not. isPresent) then + tfrz_option_driver = 'linear_salt' + end if + call icepack_query_parameters( tfrz_option_out=tfrz_option) + if (tfrz_option_driver /= tfrz_option) then + write(errmsg,'(a)') trim(subname)//'WARNING: tfrz_option from driver '//trim(tfrz_option_driver)//& + ' is overwriting tfrz_option from cice namelist '//trim(tfrz_option) + if (mastertask) write(nu_diag,*) trim(errmsg) + call icepack_warnings_flush(nu_diag) + call icepack_init_parameters(tfrz_option_in=tfrz_option_driver) + endif + + ! Flux convergence tolerance - always use the driver attribute value + call NUOPC_CompAttributeGet(gcomp, name="flux_convergence", value=cvalue, & + isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) atmiter_conv_driver + call icepack_query_parameters( atmiter_conv_out=atmiter_conv) + if (atmiter_conv_driver /= atmiter_conv) then + write(errmsg,'(a,d13.5,a,d13.5)') trim(subname)//'WARNING: atmiter_ from driver ',& + atmiter_conv_driver,' is overwritting atmiter_conv from cice namelist ',atmiter_conv + if(mastertask) write(nu_diag,*) trim(errmsg) + call icepack_warnings_flush(nu_diag) + call icepack_init_parameters(atmiter_conv_in=atmiter_conv_driver) + end if + end if + + ! 
Number of iterations for boundary layer calculations + call NUOPC_CompAttributeGet(gcomp, name="flux_max_iteration", value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent) then + read(cvalue,*) natmiter_driver + else + natmiter_driver = 5 + end if + call icepack_query_parameters( natmiter_out=natmiter) + if (natmiter_driver /= natmiter) then + write(errmsg,'(a,i8,a,i8)') trim(subname)//'error: natmiter_driver ',natmiter_driver, & + ' must be the same as natmiter from cice namelist ',natmiter + call abort_ice(trim(errmsg)) + endif + + ! Netcdf output created by PIO + call NUOPC_CompAttributeGet(gcomp, name="pio_typename", value=cvalue, & + isPresent=isPresent, isSet=isSet, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + if (trim(history_format)/='cdf1' .and. mastertask) then + write(nu_diag,*) trim(subname)//history_format//'WARNING: history_format from cice_namelist ignored' + write(nu_diag,*) trim(subname)//'WARNING: using '//trim(cvalue)//' from ICE_modelio' + endif + if (trim(restart_format)/='cdf1' .and. mastertask) then + write(nu_diag,*) trim(subname)//restart_format//'WARNING: restart_format from cice_namelist ignored' + write(nu_diag,*) trim(subname)//'WARNING: using '//trim(cvalue)//' from ICE_modelio' + endif + + ! The only reason to set these is to detect in ice_history_write if the chunk/deflate settings are ok. + select case (trim(cvalue)) + case ('netcdf4p') + history_format='hdf5' + restart_format='hdf5' + case ('netcdf4c') + if (mastertask) write(nu_diag,*) trim(subname)//'WARNING: pio_typename = netcdf4c is superseded, use netcdf4p' + history_format='hdf5' + restart_format='hdf5' + case default !pio_typename=netcdf or pnetcdf + ! do nothing + end select + else + if(mastertask) write(nu_diag,*) trim(subname)//'WARNING: pio_typename from driver needs to be set for netcdf output to work' + end if + +#else + + ! 
Read the cice namelist as part of the call to cice_init1 + call t_startf ('cice_init1') + call cice_init1 + call t_stopf ('cice_init1') + + !----------------------------------------------------------------- + ! Advertise fields + !----------------------------------------------------------------- + call ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + +#endif + + !---------------------------------------------------------------------------- + ! Initialize grid info + !---------------------------------------------------------------------------- + + if (single_column .and. scol_valid) then + call ice_mesh_init_tlon_tlat_area_hm() + else + ! Determine mesh input file + call NUOPC_CompAttributeGet(gcomp, name='mesh_ice', value=ice_meshfile, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Determine mask input file + call NUOPC_CompAttributeGet(gcomp, name='mesh_mask', value=cvalue, isPresent=isPresent, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + ice_maskfile = trim(cvalue) + else + ice_maskfile = ice_meshfile + end if + if (my_task == master_task) then + write(nu_diag,*)'mesh file for cice domain is ',trim(ice_meshfile) + write(nu_diag,*)'mask file for cice domain is ',trim(ice_maskfile) + end if + + ! Determine the model distgrid using the decomposition obtained in + ! call to init_grid1 called from cice_init1 + call ice_mesh_set_distgrid(localpet, npes, ice_distgrid, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Read in the ice mesh on the cice distribution + ice_mesh = ESMF_MeshCreate(filename=trim(ice_meshfile), fileformat=ESMF_FILEFORMAT_ESMFMESH, & + elementDistGrid=ice_distgrid, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Initialize the cice mesh and the cice mask + if (trim(grid_format) == 'meshnc') then + ! 
! In this case cap code determines the mask file
            call ice_mesh_setmask_from_maskfile(ice_maskfile, ice_mesh, rc=rc)
            if (ChkErr(rc,__LINE__,u_FILE_u)) return
            call ice_mesh_init_tlon_tlat_area_hm()
            if (ChkErr(rc,__LINE__,u_FILE_u)) return
         else
            ! In this case init_grid2 will initialize tlon, tlat, area and hm
            call init_grid2()
            call ice_mesh_check(gcomp,ice_mesh, rc=rc)
            if (ChkErr(rc,__LINE__,u_FILE_u)) return
         end if
      end if

      call t_stopf ('cice_init_total')
      if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeAdvertise time: ',runtimelog,wtime)
   end subroutine InitializeAdvertise

   !===============================================================================

   !> Second NUOPC initialization phase, run after the grid/mesh information is
   !> available: completes the CICE internal setup (cice_init2), reconciles the
   !> CICE calendar with the ESMF driver clock, realizes the coupling fields on
   !> the ice mesh, and fills the initial export state.
   subroutine InitializeRealize(gcomp, importState, exportState, clock, rc)

      ! Arguments
      type(ESMF_GridComp)  :: gcomp         ! ice gridded component
      type(ESMF_State)     :: importState   ! fields received from the mediator
      type(ESMF_State)     :: exportState   ! fields sent to the mediator
      type(ESMF_Clock)     :: clock         ! driver clock
      integer, intent(out) :: rc            ! return code (ESMF_SUCCESS on success)

      ! Local variables
      integer                      :: n
      integer                      :: fieldcount
      type(ESMF_Field)             :: lfield
      character(len=char_len_long) :: cvalue
      real(dbl_kind)               :: scol_lon
      real(dbl_kind)               :: scol_lat
      real(dbl_kind)               :: scol_spval
      real(dbl_kind), pointer      :: fldptr1d(:)
      real(dbl_kind), pointer      :: fldptr2d(:,:)
      integer                      :: rank
      character(len=char_len)      :: tfrz_option ! tfrz_option from cice namelist
      integer(int_kind)            :: ktherm

      character(len=char_len_long) :: single_column_lnd_domainfile
      character(len=char_len_long) , pointer :: lfieldnamelist(:) => null()
      character(len=*), parameter :: subname=trim(modName)//':(InitializeRealize) '
      !--------------------------------

      rc = ESMF_SUCCESS
      if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO)

      call ufs_settimer(wtime)

      !----------------------------------------------------------------------------
      ! Second cice initialization phase - after initializing grid info
      !----------------------------------------------------------------------------
      ! Note that cice_init2 also sets time manager info as well as mpi communicator info,
      ! including master_task and my_task
      ! Note that cice_init2 calls ice_init() which in turn calls icepack_init_parameters
      ! which sets the tfrz_option
      call t_startf ('cice_init2')
      call cice_init2()
      call t_stopf ('cice_init2')

      !---------------------------------------------------------------------------
      ! use EClock to reset calendar information
      !---------------------------------------------------------------------------

      ! - on initial run
      !   - iyear, month and mday obtained from sync clock
      !   - time determined from myear, month and mday
      !   - istep0 and istep1 are set to 0
      ! - on restart run
      !   - istep0, time and time_forc are read from restart file
      !   - istep1 is set to istep0
      !   - idate is determined from time via the call to calendar (see below)

      if (runtype == 'initial') then
         ! diagnose (but do not abort on) a reference date that differs from the start date
         if (ref_ymd /= start_ymd .or. ref_tod /= start_tod) then
            if (my_task == master_task) then
               write(nu_diag,*) trim(subname),': ref_ymd ',ref_ymd, ' must equal start_ymd ',start_ymd
               write(nu_diag,*) trim(subname),': ref_tod',ref_tod, ' must equal start_tod ',start_tod
            end if
         end if

         if (my_task == master_task) then
            write(nu_diag,*) trim(subname),' idate from sync clock = ', start_ymd
            write(nu_diag,*) trim(subname),'   tod from sync clock = ', start_tod
            write(nu_diag,*) trim(subname),' resetting idate to match sync clock'
         end if
         idate = curr_ymd

         if (idate < 0) then
            if (my_task == master_task) then
               write(nu_diag,*) trim(subname),' ERROR curr_ymd,year_init =',curr_ymd,year_init
               write(nu_diag,*) trim(subname),' ERROR idate lt zero',idate
            end if
            call abort_ice(subname//' :: ERROR idate lt zero')
         endif
         ! decompose idate (YYYYMMDD) into CICE time-manager components
         myear = (idate/10000)                    ! integer year of basedate
         mmonth= (idate-myear*10000)/100          ! integer month of basedate
         mday  = idate-myear*10000-mmonth*100     ! day of month of basedate
         msec  = start_tod                        ! start from basedate

         if (my_task == master_task) then
            write(nu_diag,*) trim(subname),' curr_ymd = ',curr_ymd
            write(nu_diag,*) trim(subname),' cice year_init = ',year_init
            write(nu_diag,*) trim(subname),' cice start date = ',idate
            write(nu_diag,*) trim(subname),' cice start ymds = ',myear,mmonth,mday,start_tod
            write(nu_diag,*) trim(subname),' cice calendar_type = ',trim(calendar_type)
         endif

      end if

      ! - start time from ESMF clock. Used to set history time units
      idate0    = start_ymd
      year_init = (idate0/10000)
      month_init= (idate0-year_init*10000)/100     ! integer month of basedate
      day_init  = idate0-year_init*10000-month_init*100

      ! - Set use_leap_years based on calendar (as some CICE calls use this instead of the calendar type)
      if (calendar_type == ice_calendar_gregorian) then
         use_leap_years = .true.
      else
         use_leap_years = .false. ! no_leap calendars
      endif

      call calendar()     ! update calendar info

      !----------------------------------------------------------------------------
      ! reset shr logging to my log file
      !----------------------------------------------------------------------------

      call icepack_query_parameters(ktherm_out=ktherm)
      call icepack_query_parameters(tfrz_option_out=tfrz_option)
      call icepack_warnings_flush(nu_diag)
      if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
           file=__FILE__, line=__LINE__)

      ! Now write output to nu_diag - this must happen AFTER call to cice_init
      if (mastertask) then
         write(nu_diag,'(a,d21.14)') trim(subname)//' cice init nextsw_cday = ',nextsw_cday
         write(nu_diag,'(a)') trim(subname)//' tfrz_option = '//trim(tfrz_option)
         ! warn (do not abort) when mushy thermodynamics is paired with a non-mushy freezing option
         if (ktherm == 2 .and. trim(tfrz_option) /= 'mushy') then
            write(nu_diag,*) trim(subname),' Warning: Using ktherm = 2 and tfrz_option = ', trim(tfrz_option)
         endif
         write(nu_diag,'(a    )') trim(subname)//' inst_name   = '//trim(inst_name)
         write(nu_diag,'(a,i8 )') trim(subname)//' inst_index  = ',inst_index
         write(nu_diag,'(a    )') trim(subname)//' inst_suffix = ',trim(inst_suffix)
      endif


      if (write_ic) then
         call accum_hist(dt)      ! write initial conditions
      end if

      !-----------------------------------------------------------------
      ! Prescribed ice initialization
      !-----------------------------------------------------------------

      call ice_prescribed_init(gcomp, clock, ice_mesh, rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return

#ifdef CESMCOUPLED
      ! if single column is not valid - set all export state fields to zero and return
      if (single_column .and. .not. scol_valid) then
         write(nu_diag,'(a)')' (ice_comp_nuopc) single column mode point does not contain any ocn/ice '&
              //' - setting all export data to 0'
         call ice_realize_fields(gcomp, mesh=ice_mesh, &
              flds_scalar_name=flds_scalar_name, flds_scalar_num=flds_scalar_num, rc=rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
         call ESMF_StateGet(exportState, itemCount=fieldCount, rc=rc)
         if (chkerr(rc,__LINE__,u_FILE_u)) return
         allocate(lfieldnamelist(fieldCount))
         call ESMF_StateGet(exportState, itemNameList=lfieldnamelist, rc=rc)
         if (chkerr(rc,__LINE__,u_FILE_u)) return
         ! zero every non-scalar export field, handling both 1d and 2d field layouts
         do n = 1, fieldCount
            if (trim(lfieldnamelist(n)) /= flds_scalar_name) then
               call ESMF_StateGet(exportState, itemName=trim(lfieldnamelist(n)), field=lfield, rc=rc)
               if (chkerr(rc,__LINE__,u_FILE_u)) return
               call ESMF_FieldGet(lfield, rank=rank, rc=rc)
               if (chkerr(rc,__LINE__,u_FILE_u)) return
               if (rank == 2) then
                  call ESMF_FieldGet(lfield, farrayPtr=fldptr2d, rc=rc)
                  if (ChkErr(rc,__LINE__,u_FILE_u)) return
                  fldptr2d(:,:) = 0._dbl_kind
               else
                  call ESMF_FieldGet(lfield, farrayPtr=fldptr1d, rc=rc)
                  if (ChkErr(rc,__LINE__,u_FILE_u)) return
                  fldptr1d(:) = 0._dbl_kind
               end if
            end if
         enddo
         deallocate(lfieldnamelist)
         call State_SetScalar(dble(0), flds_scalar_index_nx, exportState, &
              flds_scalar_name, flds_scalar_num, rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
         call State_SetScalar(dble(0), flds_scalar_index_ny, exportState, &
              flds_scalar_name, flds_scalar_num, rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
         ! *******************
         ! *** RETURN HERE ***
         ! *******************
         RETURN
      else if(single_column) then
         write(nu_diag,'(a,3(f10.5,2x))')' (ice_comp_nuopc) single column mode lon/lat/frac is ',&
              scmlon,scmlat,scol_frac
      end if
#endif

      !-----------------------------------------------------------------
      ! Realize the actively coupled fields
      !-----------------------------------------------------------------

      call ice_realize_fields(gcomp, mesh=ice_mesh, &
           flds_scalar_name=flds_scalar_name, flds_scalar_num=flds_scalar_num, rc=rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return

      !-----------------------------------------------------------------
      ! Create cice export state
      !-----------------------------------------------------------------
      call state_reset(exportstate, c0, rc)
      call state_reset(importState, c0, rc)
      call ice_export (importState, exportstate, 3600, rc) ! setting cpl dt to 3600 shouldn't matter
      if (ChkErr(rc,__LINE__,u_FILE_u)) return

      ! advertise the global grid dimensions through the scalar field
      call State_SetScalar(dble(nx_global), flds_scalar_index_nx, exportState, &
           flds_scalar_name, flds_scalar_num, rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return
      call State_SetScalar(dble(ny_global), flds_scalar_index_ny, exportState, &
           flds_scalar_name, flds_scalar_num, rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return

      !--------------------------------
      ! diagnostics
      !--------------------------------

      ! TODO (mvertens, 2018-12-21): fill in iceberg_prognostic as .false.
      if (debug_export > 0 .and. my_task==master_task) then
         call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', &
              idate, msec, nu_diag, rc=rc)
      end if

      if (dbug > 0) then
         call state_diagnose(exportState,subname//':ES',rc=rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
      endif

      if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO)

      call flush_fileunit(nu_diag)

      if (mastertask) call ufs_logtimer(nu_timer,msec,'InitializeRealize time: ',runtimelog,wtime)
   end subroutine InitializeRealize

   !===============================================================================

   !> Advance CICE over one driver coupling interval: unpack the import state,
   !> step the model, and repack the export state.
   subroutine ModelAdvance(gcomp, rc)

      !---------------------------------------------------------------------------
      ! Run CICE
      !---------------------------------------------------------------------------

      ! Arguments
      type(ESMF_GridComp)  :: gcomp       ! ice gridded component
      integer, intent(out) :: rc          ! return code (ESMF_SUCCESS on success)

      ! Local variables
      type(ESMF_Clock)        :: clock
      type(ESMF_Alarm)        :: alarm
      type(ESMF_Time)         :: startTime
      type(ESMF_Time)         :: currTime
      type(ESMF_Time)         :: nextTime
      type(ESMF_TimeInterval) :: timeStep
      type(ESMF_State)        :: importState, exportState
      character(ESMF_MAXSTR)  :: cvalue
      real(dbl_kind)          :: eccen, obliqr, lambm0, mvelpp
      integer                 :: shrlogunit ! original log unit
      integer                 :: k,n        ! index
      logical                 :: stop_now   ! .true. ==> stop at the end of this run phase
      integer                 :: ymd        ! Current date (YYYYMMDD)
      integer                 :: tod        ! Current time of day (sec)
      integer                 :: curr_ymd   ! Current date (YYYYMMDD)
      integer                 :: curr_tod   ! Current time of day (s)
      integer                 :: yy,mm,dd   ! year, month, day, time of day
      integer                 :: ymd_sync   ! Sync date (YYYYMMDD)
      integer                 :: yr_sync    ! Sync current year
      integer                 :: mon_sync   ! Sync current month
      integer                 :: day_sync   ! Sync current day
      integer                 :: tod_sync   ! Sync current time of day (sec)
      integer                 :: nsteps     ! Number of model timeteps per coupling timestep
      integer                 :: cpl_dt
! Coupling timestep in seconds
      character(char_len_long)   :: restart_date
      character(char_len_long)   :: restart_filename
      logical                    :: isPresent, isSet
      character(len=*),parameter :: subname=trim(modName)//':(ModelAdvance) '
      character(char_len_long)   :: msgString
      !--------------------------------

      rc = ESMF_SUCCESS
      if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time since last step: ',runtimelog,wtime)
      call ufs_settimer(wtime)

      call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO)

      ! single-column point with no ocean/ice: nothing to advance
      if (single_column .and. .not. scol_valid) then
         ! *******************
         ! *** RETURN HERE ***
         ! *******************
         RETURN
      end if

      if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO)

      ! query the Component for its clock, importState and exportState
      call ESMF_GridCompGet(gcomp, clock=clock, importState=importState, &
           exportState=exportState, rc=rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return

      call ESMF_ClockPrint(clock, options="currTime", &
           preString="------>Advancing ICE from: ", unit=msgString, rc=rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return
      call ESMF_LogWrite(subname//trim(msgString), ESMF_LOGMSG_INFO)

      call ESMF_ClockGet(clock, startTime=startTime, currTime=currTime, &
           timeStep=timeStep, rc=rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return

      call ESMF_TimePrint(currTime + timeStep, &
           preString="--------------------------------> to: ", unit=msgString, rc=rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return
      call ESMF_LogWrite(trim(msgString), ESMF_LOGMSG_INFO)

      !--------------------------------
      ! Turn on timers
      !--------------------------------

      call ice_timer_start(timer_total)        ! time entire run
      call t_barrierf('cice_run_total_BARRIER',mpi_comm_ice)
      call t_startf ('cice_run_total')

      !--------------------------------
      ! Reset shr logging to my log file
      !--------------------------------

      call shr_log_getLogUnit (shrlogunit)
      call shr_log_setLogUnit (nu_diag)

      !--------------------------------
      ! Query the Component for its clock, importState and exportState
      !--------------------------------

      call NUOPC_ModelGet(gcomp, modelClock=clock, importState=importState, exportState=exportState, rc=rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return

      !--------------------------------
      ! Determine time of next atmospheric shortwave calculation
      !--------------------------------

      ! prefer the scalar passed through the import state; otherwise derive it from the clock
      call NUOPC_CompAttributeGet(gcomp, name="ScalarFieldIdxNextSwCday", value=cvalue, isPresent=isPresent, isSet=isSet, rc=rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return
      if (isPresent .and. isSet) then
         call State_GetScalar(importState, flds_scalar_index_nextsw_cday, nextsw_cday, &
              flds_scalar_name, flds_scalar_num, rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
      else
         call ESMF_ClockGetNextTime(clock, nextTime, rc=rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
         call ESMF_TimeGet(nextTime, dayOfYear_r8=nextsw_cday, rc=rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
      end if
      if (my_task == master_task) then
         write(nu_diag,'(a,2x,i8,2x,d24.14)') trim(subname)//' cice istep, nextsw_cday = ',istep, nextsw_cday
      end if

      !--------------------------------
      ! Obtain orbital values
      !--------------------------------
      call ice_orbital_init(gcomp, clock, nu_diag, my_task==master_task, rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return

      !--------------------------------
      ! check that cice internal time is in sync with master clock before timestep update
      !--------------------------------

      ! cice clock
      tod = msec
      ymd = idate

      ! model clock
      call ESMF_ClockGet( clock, currTime=currTime, rc=rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return
      call ESMF_TimeGet( currTime, yy=yr_sync, mm=mon_sync, dd=day_sync, s=tod_sync, rc=rc )
      if (ChkErr(rc,__LINE__,u_FILE_u)) return
      call ice_cal_ymd2date(yr_sync, mon_sync, day_sync, ymd_sync)

      ! error check - a mismatch is fatal for the run phase
      if ( (ymd /= ymd_sync) .or. (tod /= tod_sync) ) then
         if (my_task == master_task) then
            write(nu_diag,*)' cice ymd=',ymd     ,' cice tod= ',tod
            write(nu_diag,*)' sync ymd=',ymd_sync,' sync tod= ',tod_sync
         end if
         call ESMF_LogWrite(subname//" CICE clock not in sync with ESMF model clock",ESMF_LOGMSG_ERROR)
         rc = ESMF_FAILURE
         return
      end if

      !--------------------------------
      ! Determine if time to write restart
      !--------------------------------

      ! Note this logic triggers off of the component clock rather than the internal cice time
      ! The component clock does not get advanced until the end of the loop - not at the beginning

      call ESMF_ClockGetAlarm(clock, alarmname='alarm_restart', alarm=alarm, rc=rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return

      force_restart_now = .false.

      if (ESMF_AlarmIsRinging(alarm, rc=rc)) then
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
         force_restart_now = .true.
         call ESMF_AlarmRingerOff( alarm, rc=rc )
         if (ChkErr(rc,__LINE__,u_FILE_u)) return

         ! restart is stamped with the time at the END of this coupling interval
         call ESMF_ClockGetNextTime(clock, nextTime=nextTime, rc=rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
         call ESMF_TimeGet(nexttime, yy=yy, mm=mm, dd=dd, s=tod, rc=rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return

         write(restart_date,"(i4.4,a,i2.2,a,i2.2,a,i5.5)") yy, '-', mm, '-',dd,'-',tod
         write(restart_filename,'(4a)') trim(restart_dir), trim(restart_file), '.', trim(restart_date)
      endif

      ! Handle end of run restart
      if (restart_eor) then
         call ESMF_ClockGetAlarm(clock, alarmname='alarm_stop', alarm=alarm, rc=rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
         if (ESMF_AlarmIsRinging(alarm, rc=rc)) then
            if (ChkErr(rc,__LINE__,u_FILE_u)) return
            force_restart_now = .true.
            call ESMF_AlarmRingerOff( alarm, rc=rc )
            if (ChkErr(rc,__LINE__,u_FILE_u)) return
         endif
      endif

      !--------------------------------
      ! Unpack import state
      !--------------------------------

      call t_barrierf('cice_run_import_BARRIER',mpi_comm_ice)
      call t_startf ('cice_run_import')
      call ice_import(importState, rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return
      call t_stopf ('cice_run_import')

      ! write Debug output
      if (debug_import > 0 .and. my_task==master_task) then
         call State_fldDebug(importState, flds_scalar_name, 'cice_import:', &
              idate, msec, nu_diag, rc=rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
      end if
      if (dbug > 0) then
         call state_diagnose(importState,subname//':IS',rc=rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
      end if

      !--------------------------------
      ! Advance cice and timestep update
      !--------------------------------

      ! run nsteps cice timesteps per coupling interval, accumulating fluxes
      ! into the export state across the subcycle
      if(profile_memory) call ESMF_VMLogMemInfo("Entering CICE_Run : ")
      call ESMF_TimeIntervalGet(timeStep, s=cpl_dt)
      call state_reset(exportState, c0, rc)
      nsteps = INT(cpl_dt / dt)
      do k=1, nsteps
         call CICE_Run()
         call ice_increment_fluxes(exportState, nsteps, rc)
      end do

      if(profile_memory) call ESMF_VMLogMemInfo("Leaving CICE_Run : ")

      !--------------------------------
      ! Create export state
      !--------------------------------

      call t_barrierf('cice_run_export_BARRIER',mpi_comm_ice)
      call t_startf ('cice_run_export')
      call ice_export(importState, exportState, cpl_dt, rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return
      call t_stopf ('cice_run_export')

      ! write Debug output
      if (debug_export > 0 .and. my_task==master_task) then
         call State_fldDebug(exportState, flds_scalar_name, 'cice_export:', &
              idate, msec, nu_diag, rc=rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
      end if
      if (dbug > 0) then
         call state_diagnose(exportState,subname//':ES',rc=rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
      end if

      ! reset shr logging to my original values
      call shr_log_setLogUnit (shrlogunit)

      !--------------------------------
      ! stop timers and print timer info
      !--------------------------------
      ! Need to have this logic here instead of in finalize phase
      ! since the finalize phase will still be called even in aqua-planet mode

      !--------------------------------
      ! Determine if time to stop
      !--------------------------------

      call ESMF_ClockGetAlarm(clock, alarmname='alarm_stop', alarm=alarm, rc=rc)
      if (ChkErr(rc,__LINE__,u_FILE_u)) return

      if (ESMF_AlarmIsRinging(alarm, rc=rc)) then
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
         stop_now = .true.
         call ESMF_AlarmRingerOff( alarm, rc=rc )
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
      else
         stop_now = .false.
      endif

      call t_stopf ('cice_run_total')

      ! Need to stop this at the end of every run phase in a coupled run.
      call ice_timer_stop(timer_total)
      if (stop_now) then
         call ice_timer_print_all(stats=.true.)  ! print timing information
         call release_all_fileunits
      endif

105   format( A, 2i8, A, f10.2, A, f10.2, A)

      if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO)

      if (mastertask) call ufs_logtimer(nu_timer,msec,'ModelAdvance time: ',runtimelog,wtime)
      call ufs_settimer(wtime)

   end subroutine ModelAdvance

   !===============================================================================

   subroutine ModelSetRunClock(gcomp, rc)

      ! input/output variables
      type(ESMF_GridComp)  :: gcomp
      integer, intent(out) :: rc

      !
local variables + type(ESMF_Clock) :: mclock, dclock + type(ESMF_Time) :: mcurrtime, dcurrtime + type(ESMF_Time) :: mstoptime + type(ESMF_TimeInterval) :: mtimestep, dtimestep + character(len=256) :: cvalue + character(len=256) :: restart_option ! Restart option units + integer :: restart_n ! Number until restart interval + integer :: restart_ymd ! Restart date (YYYYMMDD) + type(ESMF_ALARM) :: restart_alarm + character(len=256) :: stop_option ! Stop option units + integer :: stop_n ! Number until stop interval + integer :: stop_ymd ! Stop date (YYYYMMDD) + type(ESMF_ALARM) :: stop_alarm + character(len=128) :: name + integer :: alarmcount + character(len=*),parameter :: subname=trim(modName)//':(ModelSetRunClock) ' + !-------------------------------- + + rc = ESMF_SUCCESS + if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + + ! query the Component for its clocks + call NUOPC_ModelGet(gcomp, driverClock=dclock, modelClock=mclock, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_ClockGet(dclock, currTime=dcurrtime, timeStep=dtimestep, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_ClockGet(mclock, currTime=mcurrtime, timeStep=mtimestep, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + !-------------------------------- + ! force model clock currtime and timestep to match driver and set stoptime + !-------------------------------- + + mstoptime = mcurrtime + dtimestep + call ESMF_ClockSet(mclock, currTime=dcurrtime, timeStep=dtimestep, stopTime=mstoptime, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + !-------------------------------- + ! 
set restart and stop alarms + !-------------------------------- + + call ESMF_ClockGetAlarmList(mclock, alarmlistflag=ESMF_ALARMLIST_ALL, alarmCount=alarmCount, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (alarmCount == 0) then + + call ESMF_GridCompGet(gcomp, name=name, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_LogWrite(subname//'setting alarms for ' // trim(name), ESMF_LOGMSG_INFO) + + !---------------- + ! Stop alarm + !---------------- + call NUOPC_CompAttributeGet(gcomp, name="stop_option", value=stop_option, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call NUOPC_CompAttributeGet(gcomp, name="stop_n", value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) stop_n + + call NUOPC_CompAttributeGet(gcomp, name="stop_ymd", value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) stop_ymd + + call alarmInit(mclock, stop_alarm, stop_option, & + opt_n = stop_n, & + opt_ymd = stop_ymd, & + RefTime = mcurrTime, & + alarmname = 'alarm_stop', rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_AlarmSet(stop_alarm, clock=mclock, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + !---------------- + ! 
Restart alarm + !---------------- + call NUOPC_CompAttributeGet(gcomp, name="restart_option", value=restart_option, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call NUOPC_CompAttributeGet(gcomp, name="restart_n", value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) restart_n + + call NUOPC_CompAttributeGet(gcomp, name="restart_ymd", value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) restart_ymd + + call alarmInit(mclock, restart_alarm, restart_option, & + opt_n = restart_n, & + opt_ymd = restart_ymd, & + RefTime = mcurrTime, & + alarmname = 'alarm_restart', rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_AlarmSet(restart_alarm, clock=mclock, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + end if + + !-------------------------------- + ! Advance model clock to trigger alarms then reset model clock back to currtime + !-------------------------------- + + call ESMF_ClockAdvance(mclock,rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_ClockSet(mclock, currTime=dcurrtime, timeStep=dtimestep, stopTime=mstoptime, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + + end subroutine ModelSetRunClock + + !=============================================================================== + + subroutine ModelFinalize(gcomp, rc) + + !-------------------------------- + ! Finalize routine + !-------------------------------- + + type(ESMF_GridComp) :: gcomp + integer, intent(out) :: rc + + ! 
local variables + character(*), parameter :: F91 = "('(ice_comp_nuopc) ',73('-'))" + character(len=*),parameter :: subname=trim(modName)//':(ModelFinalize) ' + !-------------------------------- + + rc = ESMF_SUCCESS + call ufs_settimer(wtime) + if (dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + if (my_task == master_task) then + write(nu_diag,F91) + write(nu_diag,'(a)') 'CICE: end of main integration loop' + write(nu_diag,F91) + end if + if (dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + + if(mastertask) call ufs_logtimer(nu_timer,msec,'ModelFinalize time: ',runtimelog,wtime) + + end subroutine ModelFinalize + + !=============================================================================== + + subroutine ice_orbital_init(gcomp, clock, logunit, mastertask, rc) + + !---------------------------------------------------------- + ! Initialize orbital related values for cesm coupled + !---------------------------------------------------------- + + ! input/output variables + type(ESMF_GridComp) :: gcomp + type(ESMF_Clock) , intent(in) :: clock + integer , intent(in) :: logunit + logical , intent(in) :: mastertask + integer , intent(out) :: rc ! output error + + ! local variables + real(dbl_kind) :: eccen, obliqr, lambm0, mvelpp + character(len=char_len_long) :: msgstr ! temporary + character(len=char_len_long) :: cvalue ! temporary + type(ESMF_Time) :: CurrTime ! current time + integer :: year ! model year at current time + integer :: orb_year ! orbital year for current orbital computation + integer, save :: prev_orb_year=0 ! orbital year for previous orbital computation + logical :: lprint + logical, save :: first_time = .true. + character(len=*) , parameter :: subname = "(cice_orbital_init)" + !------------------------------------------------------------------------------- + + rc = ESMF_SUCCESS + +#ifndef CESMCOUPLED + return +#else + if (first_time) then + + ! 
Determine orbital attributes from input + call NUOPC_CompAttributeGet(gcomp, name='orb_mode', value=cvalue, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) orb_mode + call NUOPC_CompAttributeGet(gcomp, name="orb_iyear", value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) orb_iyear + call NUOPC_CompAttributeGet(gcomp, name="orb_iyear_align", value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) orb_iyear_align + call NUOPC_CompAttributeGet(gcomp, name="orb_obliq", value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) orb_obliq + call NUOPC_CompAttributeGet(gcomp, name="orb_eccen", value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) orb_eccen + call NUOPC_CompAttributeGet(gcomp, name="orb_mvelp", value=cvalue, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + read(cvalue,*) orb_mvelp + + ! Error checks + if (trim(orb_mode) == trim(orb_fixed_year)) then + orb_obliq = SHR_ORB_UNDEF_REAL + orb_eccen = SHR_ORB_UNDEF_REAL + orb_mvelp = SHR_ORB_UNDEF_REAL + if (orb_iyear == SHR_ORB_UNDEF_INT) then + if (mastertask) then + write(logunit,*) trim(subname),' ERROR: invalid settings orb_mode =',trim(orb_mode) + write(logunit,*) trim(subname),' ERROR: fixed_year settings = ',orb_iyear + write (msgstr, *) ' ERROR: invalid settings for orb_mode '//trim(orb_mode) + end if + call ESMF_LogSetError(ESMF_RC_NOT_VALID, msg=msgstr, line=__LINE__, file=__FILE__, rcToReturn=rc) + return ! bail out + endif + elseif (trim(orb_mode) == trim(orb_variable_year)) then + orb_obliq = SHR_ORB_UNDEF_REAL + orb_eccen = SHR_ORB_UNDEF_REAL + orb_mvelp = SHR_ORB_UNDEF_REAL + if (orb_iyear == SHR_ORB_UNDEF_INT .or. 
orb_iyear_align == SHR_ORB_UNDEF_INT) then + if (mastertask) then + write(logunit,*) trim(subname),' ERROR: invalid settings orb_mode =',trim(orb_mode) + write(logunit,*) trim(subname),' ERROR: variable_year settings = ',orb_iyear, orb_iyear_align + write (msgstr, *) subname//' ERROR: invalid settings for orb_mode '//trim(orb_mode) + end if + call ESMF_LogSetError(ESMF_RC_NOT_VALID, msg=msgstr, line=__LINE__, file=__FILE__, rcToReturn=rc) + return ! bail out + endif + elseif (trim(orb_mode) == trim(orb_fixed_parameters)) then + !-- force orb_iyear to undef to make sure shr_orb_params works properly + orb_iyear = SHR_ORB_UNDEF_INT + orb_iyear_align = SHR_ORB_UNDEF_INT + if (orb_eccen == SHR_ORB_UNDEF_REAL .or. & + orb_obliq == SHR_ORB_UNDEF_REAL .or. & + orb_mvelp == SHR_ORB_UNDEF_REAL) then + if (mastertask) then + write(logunit,*) trim(subname),' ERROR: invalid settings orb_mode =',trim(orb_mode) + write(logunit,*) trim(subname),' ERROR: orb_eccen = ',orb_eccen + write(logunit,*) trim(subname),' ERROR: orb_obliq = ',orb_obliq + write(logunit,*) trim(subname),' ERROR: orb_mvelp = ',orb_mvelp + write (msgstr, *) subname//' ERROR: invalid settings for orb_mode '//trim(orb_mode) + end if + call ESMF_LogSetError(ESMF_RC_NOT_VALID, msg=msgstr, line=__LINE__, file=__FILE__, rcToReturn=rc) + return ! bail out + endif + else + write (msgstr, *) subname//' ERROR: invalid orb_mode '//trim(orb_mode) + call ESMF_LogSetError(ESMF_RC_NOT_VALID, msg=msgstr, line=__LINE__, file=__FILE__, rcToReturn=rc) + rc = ESMF_FAILURE + return ! bail out + endif + end if + lprint = .false. + if (trim(orb_mode) == trim(orb_variable_year)) then + call ESMF_ClockGet(clock, CurrTime=CurrTime, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_TimeGet(CurrTime, yy=year, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + orb_year = orb_iyear + (year - orb_iyear_align) + else + orb_year = orb_iyear + end if + + if (orb_year .ne. prev_orb_year) then + lprint = mastertask + ! 
this prevents the orbital print happening before the log file is opened. + if (.not. first_time) prev_orb_year = orb_year + endif + eccen = orb_eccen + + call shr_orb_params(orb_year, eccen, orb_obliq, orb_mvelp, obliqr, lambm0, mvelpp, lprint) + + if ( eccen == SHR_ORB_UNDEF_REAL .or. obliqr == SHR_ORB_UNDEF_REAL .or. & + mvelpp == SHR_ORB_UNDEF_REAL .or. lambm0 == SHR_ORB_UNDEF_REAL) then + write (msgstr, *) subname//' ERROR: orb params incorrect' + call ESMF_LogSetError(ESMF_RC_NOT_VALID, msg=msgstr, line=__LINE__, file=__FILE__, rcToReturn=rc) + return ! bail out + endif + + call icepack_init_orbit(eccen_in=eccen, mvelpp_in=mvelpp, lambm0_in=lambm0, obliqr_in=obliqr) + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + first_time = .false. +#endif + end subroutine ice_orbital_init + + !=============================================================================== + subroutine ice_cal_ymd2date(year, month, day, date) + + ! input/output parameters: + integer,intent(in ) :: year,month,day ! calendar year,month,day + integer,intent(out) :: date ! coded (yyyymmdd) calendar date + + !--- local --- + character(*),parameter :: subName = "(ice_cal_ymd2date)" + !------------------------------------------------------------------------------- + ! NOTE: + ! this calendar has a year zero (but no day or month zero) + !------------------------------------------------------------------------------- + + date = abs(year)*10000 + month*100 + day ! 
coded calendar date + if (year < 0) date = -date + + end subroutine ice_cal_ymd2date + +end module ice_comp_nuopc diff --git a/cicecore/drivers/access/cmeps/ice_import_export.F90 b/cicecore/drivers/access/cmeps/ice_import_export.F90 new file mode 100644 index 000000000..445fbb2ee --- /dev/null +++ b/cicecore/drivers/access/cmeps/ice_import_export.F90 @@ -0,0 +1,2394 @@ +module ice_import_export + + use ESMF + use NUOPC + use NUOPC_Model + use ice_kinds_mod , only : int_kind, dbl_kind, char_len, char_len_long, log_kind + use ice_constants , only : c0, c1, spval_dbl, radius + use ice_constants , only : field_loc_center, field_type_scalar, field_type_vector + use ice_blocks , only : block, get_block, nx_block, ny_block + use ice_domain , only : nblocks, blocks_ice, halo_info, distrb_info + use ice_domain_size , only : nx_global, ny_global, block_size_x, block_size_y, max_blocks, ncat + use ice_domain_size , only : nfreq, nfsd + use ice_exit , only : abort_ice + use ice_flux , only : strairxT, strairyT, strocnxT_iavg, strocnyT_iavg + use ice_flux , only : alvdr, alidr, alvdf, alidf, Tref, Qref, Uref + use ice_flux , only : flat, fsens, flwout, evap, fswabs, fhocn_ai, fswthru + use ice_flux , only : fswthru_vdr, fswthru_vdf, fswthru_idr, fswthru_idf + use ice_flux , only : send_i2x_per_cat, fswthrun_ai + use ice_flux , only : flatn_f, fcondtopn_f, fsurfn_f + use ice_flux_bgc , only : faero_atm, faero_ocn + use ice_flux_bgc , only : fiso_atm, fiso_ocn, fiso_evap + use ice_flux_bgc , only : Qa_iso, Qref_iso, HDO_ocn, H2_18O_ocn, H2_16O_ocn + use ice_flux , only : fresh_ai, fsalt_ai, zlvl, uatm, vatm, potT, Tair, Qa + use ice_flux , only : rhoa, swvdr, swvdf, swidr, swidf, flw, frain + use ice_flux , only : fsnow, uocn, vocn, sst, ss_tltx, ss_tlty, frzmlt + use ice_flux , only : send_i2x_per_cat + use ice_flux , only : sss, Tf, wind, fsw + use ice_arrays_column , only : floe_rad_c, wave_spectrum + use ice_state , only : vice, vsno, aice, aicen_init, trcr, trcrn + use 
ice_grid , only : tlon, tlat, tarea, tmask, anglet, hm + use ice_grid , only : grid_format + use ice_mesh_mod , only : ocn_gridcell_frac + use ice_boundary , only : ice_HaloUpdate + use ice_fileunits , only : nu_diag, flush_fileunit + use ice_communicate , only : my_task, master_task, MPI_COMM_ICE + use ice_prescribed_mod , only : prescribed_ice + use ice_shr_methods , only : chkerr, state_reset + use icepack_intfc , only : icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc , only : icepack_query_parameters, icepack_query_tracer_flags + use icepack_intfc , only : icepack_liquidus_temperature + use icepack_intfc , only : icepack_sea_freezing_temperature + use icepack_intfc , only : icepack_query_tracer_indices + use icepack_parameters , only : puny, c2 + use cice_wrapper_mod , only : t_startf, t_stopf, t_barrierf + use ice_read_write , only : ice_open_nc, ice_read_nc, ice_close_nc +#ifdef CESMCOUPLED + use shr_frz_mod , only : shr_frz_freezetemp + use shr_mpi_mod , only : shr_mpi_min, shr_mpi_max +#endif + + implicit none + public + + public :: ice_advertise_fields + public :: ice_realize_fields + public :: ice_import + public :: ice_export + public :: ice_increment_fluxes + private :: fldlist_add + private :: fldlist_realize + private :: state_FldChk + + interface state_getfldptr + module procedure state_getfldptr_1d + module procedure state_getfldptr_2d + end interface state_getfldptr + private :: state_getfldptr + + interface state_getimport + module procedure state_getimport_4d + module procedure state_getimport_3d + end interface state_getimport + private :: state_getimport + + interface state_setexport + module procedure state_setexport_4d + module procedure state_setexport_3d + end interface state_setexport + private :: state_setexport + + ! Private module data + + type fld_list_type + character(len=128) :: stdname + integer :: ungridded_lbound = 0 + integer :: ungridded_ubound = 0 + end type fld_list_type + + ! 
area correction factors for fluxes send and received from mediator + real(dbl_kind), allocatable :: mod2med_areacor(:) ! ratios of model areas to input mesh areas + real(dbl_kind), allocatable :: med2mod_areacor(:) ! ratios of input mesh areas to model areas + + real(kind=dbl_kind), dimension(:,:,:), allocatable :: lice_nth, lice_sth, msk_nth, msk_sth, amsk_nth, amsk_sth + + integer, parameter :: fldsMax = 100 + integer :: fldsToIce_num = 0 + integer :: fldsFrIce_num = 0 + type (fld_list_type) :: fldsToIce(fldsMax) + type (fld_list_type) :: fldsFrIce(fldsMax) + + integer , parameter :: io_dbug = 10 ! i/o debug messages + character(*), parameter :: u_FILE_u = & + __FILE__ + +!============================================================================== +contains +!============================================================================== + + subroutine ice_advertise_fields(gcomp, importState, exportState, flds_scalar_name, rc) + + ! input/output variables + type(ESMF_GridComp) :: gcomp + type(ESMF_State) :: importState + type(ESMF_State) :: exportState + character(len=*) , intent(in) :: flds_scalar_name + integer , intent(out) :: rc + + ! local variables + integer :: n + character(char_len) :: stdname + character(char_len) :: cvalue + logical :: flds_wiso ! use case + logical :: flds_wave ! use case + logical :: isPresent, isSet + character(len=*), parameter :: subname='(ice_import_export:ice_advertise_fields)' + !------------------------------------------------------------------------------- + + rc = ESMF_SUCCESS + if (io_dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + + ! call get_lice_discharge_masks_or_iceberg('lice_discharge_masks_iceberg.nc') + + ! Determine if ice sends multiple ice category info back to mediator + send_i2x_per_cat = .false. + call NUOPC_CompAttributeGet(gcomp, name='flds_i2o_per_cat', value=cvalue, & + isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. 
isSet) then + read(cvalue,*) send_i2x_per_cat + end if + if (my_task == master_task) then + write(nu_diag,*)'send_i2x_per_cat = ',send_i2x_per_cat + end if + if (.not.send_i2x_per_cat) then + if (allocated(fswthrun_ai)) then + deallocate(fswthrun_ai) + end if + end if + + ! Determine if the following attributes are sent by the driver and if so read them in + flds_wiso = .false. + call NUOPC_CompAttributeGet(gcomp, name='flds_wiso', value=cvalue, & + isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) flds_wiso + end if + if (my_task == master_task) then + write(nu_diag,*)'flds_wiso = ',flds_wiso + end if + + flds_wave = .false. + call NUOPC_CompAttributeGet(gcomp, name='wav_coupling_to_cice', value=cvalue, & + isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) flds_wave + end if + if (my_task == master_task) then + write(nu_diag,*)'flds_wave = ',flds_wave + end if + + !----------------- + ! advertise import fields + !----------------- + + call fldlist_add(fldsToIce_num, fldsToIce, trim(flds_scalar_name)) + + ! from ocean + call fldlist_add(fldsToIce_num, fldsToIce, 'So_dhdx' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'So_dhdy' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'So_t' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'So_s' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'So_u' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'So_v' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Fioo_q' ) + if (flds_wiso) then + call fldlist_add(fldsToIce_num, fldsToIce, 'So_roce_wiso', ungridded_lbound=1, ungridded_ubound=3) + end if + + ! 
from atmosphere + call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_z' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_u' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_v' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_shum' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_tbot' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_pbot' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_swvdr' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_swvdf' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_swndr' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_swndf' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_lwdn' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_rain' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_snow' ) + call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_ptem' ) !cesm + call fldlist_add(fldsToIce_num, fldsToIce, 'Sa_dens' ) !cesm + + ! the following are advertised but might not be connected if they are not present + ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific + ! from atm - black carbon deposition fluxes (3) + call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_bcph', ungridded_lbound=1, ungridded_ubound=3) + ! from atm - wet dust deposition fluxes (4 sizes) + call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstwet', ungridded_lbound=1, ungridded_ubound=4) + ! from atm - dry dust deposition fluxes (4 sizes) + call fldlist_add(fldsToIce_num, fldsToIce, 'Faxa_dstdry', ungridded_lbound=1, ungridded_ubound=4) + + ! the following are advertised but might not be connected if they are not advertised in the + ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific + ! 
from wave + if (flds_wave) then + call fldlist_add(fldsToIce_num, fldsToIce, 'Sw_elevation_spectrum', ungridded_lbound=1, & + ungridded_ubound=25) + end if + + call ice_advertise_fields_access_import(gcomp, importState, exportState, flds_scalar_name, rc) + + do n = 1,fldsToIce_num + call NUOPC_Advertise(importState, standardName=fldsToIce(n)%stdname, & + TransferOfferGeomObject='will provide', rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end do + + !----------------- + ! advertise export fields + !----------------- + + call fldlist_add(fldsFrIce_num, fldsFrIce, trim(flds_scalar_name)) + + ! ice states + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_imask' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_ifrac' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_t' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_vice' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_vsno' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_tref' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_snowh' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_u10' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_avsdr' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_avsdf' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_anidr' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_anidf' ) + + ! the following are advertised but might not be connected if they are not present + ! in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific + if (send_i2x_per_cat) then + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_ifrac_n', & + ungridded_lbound=1, ungridded_ubound=ncat) + end if + if (flds_wave) then + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_thick' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_floediam' ) + end if + + ! 
ice/atm fluxes computed by ice + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_taux' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_tauy' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_lat' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_sen' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_lwup' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_evap' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_swnet' ) + + ! ice/ocn fluxes computed by ice + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_melth' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_vdr' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_vdf' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_idr' ) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_idf' ) + + if (send_i2x_per_cat) then + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_swpen_ifrac_n', & + ungridded_lbound=1, ungridded_ubound=ncat) + end if + call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_meltw' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_salt' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_taux' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_tauy' ) + + ! the following are advertised but might not be connected if they are not present + ! 
in the cmeps esmFldsExchange_xxx_mod.F90 that is model specific + call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcpho' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_bcphi' ) + call fldlist_add(fldsFrIce_num , fldsFrIce, 'Fioi_flxdst' ) + + if (flds_wiso) then + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Fioi_meltw_wiso', & + ungridded_lbound=1, ungridded_ubound=3) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Faii_evap_wiso', & + ungridded_lbound=1, ungridded_ubound=3) + call fldlist_add(fldsFrIce_num, fldsFrIce, 'Si_qref_wiso', & + ungridded_lbound=1, ungridded_ubound=3) + end if + + call ice_advertise_fields_access_export(gcomp, importState, exportState, flds_scalar_name, rc) + + do n = 1,fldsFrIce_num + call NUOPC_Advertise(exportState, standardName=fldsFrIce(n)%stdname, & + TransferOfferGeomObject='will provide', rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + enddo + + if (io_dbug > 5) call ESMF_LogWrite(subname//' done', ESMF_LOGMSG_INFO) + + end subroutine ice_advertise_fields + + !============================================================================== + subroutine ice_realize_fields(gcomp, mesh, flds_scalar_name, flds_scalar_num, rc) + use ice_scam, only : single_column + + ! input/output variables + type(ESMF_GridComp) :: gcomp + type(ESMF_Mesh) , intent(in) :: mesh + character(len=*) , intent(in) :: flds_scalar_name + integer , intent(in) :: flds_scalar_num + integer , intent(out) :: rc + + ! local variables + type(ESMF_State) :: importState + type(ESMF_State) :: exportState + type(ESMF_Field) :: lfield + integer :: numOwnedElements + integer :: i, j, iblk, n + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + type(block) :: this_block ! 
block information for current block + real(dbl_kind), allocatable :: mesh_areas(:) + real(dbl_kind), allocatable :: model_areas(:) + real(dbl_kind), pointer :: dataptr(:) + real(dbl_kind) :: max_mod2med_areacor + real(dbl_kind) :: max_med2mod_areacor + real(dbl_kind) :: min_mod2med_areacor + real(dbl_kind) :: min_med2mod_areacor + real(dbl_kind) :: max_mod2med_areacor_glob + real(dbl_kind) :: max_med2mod_areacor_glob + real(dbl_kind) :: min_mod2med_areacor_glob + real(dbl_kind) :: min_med2mod_areacor_glob + character(len=*), parameter :: subname='(ice_import_export:realize_fields)' + !--------------------------------------------------------------------------- + + rc = ESMF_SUCCESS + + call NUOPC_ModelGet(gcomp, importState=importState, exportState=exportState, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call fldlist_realize( & + state=ExportState, & + fldList=fldsFrIce, & + numflds=fldsFrIce_num, & + flds_scalar_name=flds_scalar_name, & + flds_scalar_num=flds_scalar_num, & + tag=subname//':CICE_Export',& + mesh=mesh, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call fldlist_realize( & + state=importState, & + fldList=fldsToIce, & + numflds=fldsToIce_num, & + flds_scalar_name=flds_scalar_name, & + flds_scalar_num=flds_scalar_num, & + tag=subname//':CICE_Import',& + mesh=mesh, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return +#ifdef CESMCOUPLED + + ! allocate area correction factors + call ESMF_MeshGet(mesh, numOwnedElements=numOwnedElements, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + allocate (mod2med_areacor(numOwnedElements)) + allocate (med2mod_areacor(numOwnedElements)) + + if (single_column) then + + mod2med_areacor(:) = 1._dbl_kind + med2mod_areacor(:) = 1._dbl_kind + + else + + ! Get mesh areas from second field - using second field since the + ! 
first field is the scalar field + + call ESMF_StateGet(exportState, itemName=trim(fldsFrIce(2)%stdname), field=lfield, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldRegridGetArea(lfield, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldGet(lfield, farrayPtr=dataptr, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + allocate(mesh_areas(numOwnedElements)) + mesh_areas(:) = dataptr(:) + + ! Determine flux correction factors (module variables) + allocate(model_areas(numOwnedElements)) + mod2med_areacor(:) = 1._dbl_kind + med2mod_areacor(:) = 1._dbl_kind + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + model_areas(n) = tarea(i,j,iblk)/(radius*radius) + mod2med_areacor(n) = model_areas(n) / mesh_areas(n) + med2mod_areacor(n) = mesh_areas(n) / model_areas(n) + enddo + enddo + enddo + deallocate(model_areas) + deallocate(mesh_areas) + end if + + min_mod2med_areacor = minval(mod2med_areacor) + max_mod2med_areacor = maxval(mod2med_areacor) + min_med2mod_areacor = minval(med2mod_areacor) + max_med2mod_areacor = maxval(med2mod_areacor) + call shr_mpi_max(max_mod2med_areacor, max_mod2med_areacor_glob, mpi_comm_ice) + call shr_mpi_min(min_mod2med_areacor, min_mod2med_areacor_glob, mpi_comm_ice) + call shr_mpi_max(max_med2mod_areacor, max_med2mod_areacor_glob, mpi_comm_ice) + call shr_mpi_min(min_med2mod_areacor, min_med2mod_areacor_glob, mpi_comm_ice) + + if (my_task == master_task) then + write(nu_diag,'(2A,2g23.15,A )') trim(subname),' : min_mod2med_areacor, max_mod2med_areacor ',& + min_mod2med_areacor_glob, max_mod2med_areacor_glob, 'CICE6' + write(nu_diag,'(2A,2g23.15,A )') trim(subname),' : min_med2mod_areacor, max_med2mod_areacor ',& + min_med2mod_areacor_glob, max_med2mod_areacor_glob, 'CICE6' + end if +#endif + + end subroutine ice_realize_fields + + 
!============================================================================== + subroutine ice_import( importState, rc ) + + use icepack_tracers, only: nt_Tsfc + use icepack_parameters, only: Lsub + use ice_state, only: aicen + + ! input/output variables + type(ESMF_State) , intent(in) :: importState + integer , intent(out) :: rc + + ! local variables + integer,parameter :: nflds=16 + integer,parameter :: nfldv=6 + integer :: i, j, iblk, n, k + integer :: ilo, ihi, jlo, jhi !beginning and end of physical domain + type(block) :: this_block ! block information for current block + real (kind=dbl_kind),allocatable :: aflds(:,:,:,:) + real (kind=dbl_kind) :: workx, worky + real (kind=dbl_kind) :: MIN_RAIN_TEMP, MAX_SNOW_TEMP + real (kind=dbl_kind) :: Tffresh + real (kind=dbl_kind) :: inst_pres_height_lowest + real (kind=dbl_kind), pointer :: dataptr2d(:,:) + real (kind=dbl_kind), pointer :: dataptr1d(:) + real (kind=dbl_kind), pointer :: dataptr2d_dstwet(:,:) + real (kind=dbl_kind), pointer :: dataptr2d_dstdry(:,:) + character(len=char_len) :: tfrz_option + integer(int_kind) :: ktherm + character(len=*), parameter :: subname = 'ice_import' + character(len=1024) :: msgString + !----------------------------------------------------- + + call icepack_query_parameters(Tffresh_out=Tffresh) + call icepack_query_parameters(tfrz_option_out=tfrz_option) + call icepack_query_parameters(ktherm_out=ktherm) + + call log_state_info(importState, fldsToIce, fldsToIce_num, importState) + + if (io_dbug > 5) then + write(msgString,'(A,i8)')trim(subname)//' tfrz_option = ' & + // trim(tfrz_option)//', ktherm = ',ktherm + call ESMF_LogWrite(trim(msgString), ESMF_LOGMSG_INFO) + end if + + ! call icepack_query_parameters(tfrz_option_out=tfrz_option, & + ! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & + ! Tffresh_out=Tffresh) + ! call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_iage_out=tr_iage, & + ! 
tr_FY_out=tr_FY, tr_pond_out=tr_pond, tr_lvl_out=tr_lvl, & + ! tr_zaero_out=tr_zaero, tr_bgc_Nit_out=tr_bgc_Nit) + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=u_FILE_u, line=__LINE__) + + ! Note that the precipitation fluxes received from the mediator + ! are in units of kg/s/m^2 which is what CICE requires. + ! Note also that the read in below includes only values needed + ! by the thermodynamic component of CICE. Variables uocn, vocn, + ! ss_tltx, and ss_tlty are excluded. Also, because the SOM and + ! DOM don't compute SSS. SSS is not read in and is left at + ! the initilized value (see ice_flux.F init_coupler_flux) of + ! 34 ppt + + ! Use aflds to gather the halo updates of multiple fields + ! Need to separate the scalar from the vector halo updates + + allocate(aflds(nx_block,ny_block,nflds,nblocks)) + aflds = c0 + + ! import ocean states + + call state_getimport(importState, 'So_t', output=aflds, index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'So_s', output=aflds, index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! import atm states + + call state_getimport(importState, 'Sa_z', output=aflds, index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (State_FldChk(importState, 'Sa_ptem') .and. 
State_fldchk(importState, 'Sa_dens')) then + call state_getimport(importState, 'Sa_ptem', output=aflds, index=4, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Sa_dens', output=aflds, index=5, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else if (State_FldChk(importState, 'Sa_pbot')) then + call state_getimport(importState, 'Sa_pbot', output=aflds, index=6, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call abort_ice(trim(subname)//& + ": ERROR either Sa_ptem and Sa_dens OR Sa_pbot must be in import state") + end if + + call state_getimport(importState, 'Sa_tbot', output=aflds, index=7, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'Sa_shum', output=aflds, index=8, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! import ocn/ice fluxes + + call state_getimport(importState, 'Fioo_q', output=aflds, index=9, & + areacor=med2mod_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 
import atm fluxes + + call state_getimport(importState, 'Faxa_swvdr', output=aflds, index=10, & + areacor=med2mod_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'Faxa_swndr', output=aflds, index=11, & + areacor=med2mod_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'Faxa_swvdf', output=aflds, index=12, & + areacor=med2mod_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'Faxa_swndf', output=aflds, index=13, & + areacor=med2mod_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'Faxa_lwdn', output=aflds, index=14, & + areacor=med2mod_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'Faxa_rain', output=aflds, index=15, & + areacor=med2mod_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'Faxa_snow', output=aflds, index=16, & + areacor=med2mod_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! perform a halo update + + if (.not.prescribed_ice) then + call t_startf ('cice_imp_halo') + call ice_HaloUpdate(aflds, halo_info, field_loc_center, field_type_scalar) + call t_stopf ('cice_imp_halo') + endif + + ! now fill in the ice internal data types + do i=1,ncat + call state_getimport(importState, 'sublim', output=flatn_f, index=i, ungridded_index=i, rc=rc) + call state_getimport(importState, 'botmelt', output=fcondtopn_f, index=i, ungridded_index=i, rc=rc) + call state_getimport(importState, 'topmelt', output=fsurfn_f, index=i, ungridded_index=i, rc=rc) + call state_getimport(importState, 'tstar_sice', output=trcrn(:,:,nt_Tsfc,:,:), index=i, ungridded_index=i, rc=rc) + end do + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1,ny_block + do i = 1,nx_block + do k=1,ncat + flatn_f(i,j,k,iblk) = -flatn_f(i,j,k,iblk) * Lsub ! 
convert to W m-2 + fsurfn_f(i,j,k,iblk) = fsurfn_f(i,j,k,iblk) + fcondtopn_f(i,j,k,iblk) + + if (trcrn(i,j,nt_Tsfc,k,iblk) > 200.0) then + trcrn(i,j,nt_Tsfc,k,iblk) = trcrn(i,j,nt_Tsfc,k,iblk) - Tffresh + end if + trcrn(i,j,nt_Tsfc,k,iblk) = max(trcrn(i,j,nt_Tsfc,k,iblk), -60.0) + trcrn(i,j,nt_Tsfc,k,iblk) = min(trcrn(i,j,nt_Tsfc,k,iblk), 0.0) + + fsurfn_f(i,j,k,iblk) = fsurfn_f(i,j,k,iblk) * aicen(i,j,k,iblk) + flatn_f(i,j,k,iblk) = flatn_f(i,j,k,iblk) * aicen(i,j,k,iblk) + fcondtopn_f(i,j,k,iblk) = fcondtopn_f(i,j,k,iblk) * aicen(i,j,k,iblk) + + end do + end do + end do + end do + !$OMP END PARALLEL DO + + ! flatn_f = - Foxx_evap(i,j,cat,k) * Lsub !! latent heat - + ! fcondtopn_f = botmelt + ! fsurfn_f (:,:,cat,:) = topmelt(:,:,cat,:) + botmelt(:,:,cat,:) + ! if (um_tsfice(i,j,cat,k) > 0.0) then + ! trcrn(i,j,nt_Tsfc,cat,k) = 0.0 + ! else if (um_tsfice(i,j,cat,k) < -60.0) then + ! trcrn(i,j,nt_Tsfc,cat,k) = -60.0 + ! else + ! trcrn(i,j,nt_Tsfc,cat,k) = um_tsfice(i,j,cat,k) + ! endif + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1,ny_block + do i = 1,nx_block + sst (i,j,iblk) = aflds(i,j, 1,iblk) + sss (i,j,iblk) = aflds(i,j, 2,iblk) + zlvl (i,j,iblk) = aflds(i,j, 3,iblk) + ! see below for 4,5,6 + Tair (i,j,iblk) = aflds(i,j, 7,iblk) + Qa (i,j,iblk) = aflds(i,j, 8,iblk) + frzmlt (i,j,iblk) = aflds(i,j, 9,iblk) + swvdr(i,j,iblk) = 0.0 ! aflds(i,j,10,iblk) + swidr(i,j,iblk) = 0.0 !aflds(i,j,11,iblk) + swvdf(i,j,iblk) = 0.0 !aflds(i,j,12,iblk) + swidf(i,j,iblk) = 0.0 !aflds(i,j,13,iblk) + flw (i,j,iblk) = 0.0 ! aflds(i,j,14,iblk) + frain(i,j,iblk) = aflds(i,j,15,iblk) + fsnow(i,j,iblk) = aflds(i,j,16,iblk) + ! strax !! windstress - already handled, come back to this + ! stray !! windstress + + end do + end do + end do + !$OMP END PARALLEL DO + + ! 
import wave elevation spectrum from wave (frequencies 1-25, assume that nfreq is 25) + if (State_FldChk(importState, 'Sw_elevation_spectrum')) then + if (nfreq /= 25) then + call abort_ice(trim(subname)//": ERROR nfreq not equal to 25 ") + end if + call state_getfldptr(importState, 'Sw_elevation_spectrum', fldptr=dataPtr2d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + do k = 1,nfreq + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo; ihi = this_block%ihi + jlo = this_block%jlo; jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + wave_spectrum(i,j,k,iblk) = dataPtr2d(k,n) + end do + end do + end do + end do + end if + + if ( State_fldChk(importState, 'Sa_ptem') .and. State_fldchk(importState,'Sa_dens')) then + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1,ny_block + do i = 1,nx_block + potT (i,j,iblk) = aflds(i,j, 4,iblk) + rhoa (i,j,iblk) = aflds(i,j, 5,iblk) + end do + end do + end do + !$OMP END PARALLEL DO + else if (State_fldChk(importState, 'Sa_pbot')) then + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1,ny_block + do i = 1,nx_block + inst_pres_height_lowest = aflds(i,j,6,iblk) + if (inst_pres_height_lowest > 0.0_ESMF_KIND_R8) then + potT (i,j,iblk) = Tair(i,j,iblk) * (100000._ESMF_KIND_R8/inst_pres_height_lowest)**0.286_ESMF_KIND_R8 + else + potT (i,j,iblk) = 0.0_ESMF_KIND_R8 + end if + if (Tair(i,j,iblk) /= 0._ESMF_KIND_R8) then + rhoa(i,j,iblk) = inst_pres_height_lowest / & + (287.058_ESMF_KIND_R8*(1._ESMF_KIND_R8+0.608_ESMF_KIND_R8*Qa(i,j,iblk))*Tair(i,j,iblk)) + else + rhoa(i,j,iblk) = 1.2_ESMF_KIND_R8 + endif + end do !i + end do !j + end do !iblk + !$OMP END PARALLEL DO + end if + + deallocate(aflds) + allocate(aflds(nx_block,ny_block,nfldv,nblocks)) + aflds = c0 + + ! 
Get velocity fields from ocean and atm and slope fields from ocean + + call state_getimport(importState, 'So_u', output=aflds, index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'So_v', output=aflds, index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'Sa_u', output=aflds, index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Sa_v', output=aflds, index=4, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'So_dhdx', output=aflds, index=5, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'So_dhdy', output=aflds, index=6, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + + if (.not.prescribed_ice) then + call t_startf ('cice_imp_halo') + call ice_HaloUpdate(aflds, halo_info, field_loc_center, field_type_vector) + call t_stopf ('cice_imp_halo') + endif + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1,ny_block + do i = 1,nx_block + uocn (i,j,iblk) = aflds(i,j, 1,iblk) + vocn (i,j,iblk) = aflds(i,j, 2,iblk) + uatm (i,j,iblk) = aflds(i,j, 3,iblk) + vatm (i,j,iblk) = aflds(i,j, 4,iblk) + ss_tltx(i,j,iblk) = aflds(i,j, 5,iblk) + ss_tlty(i,j,iblk) = aflds(i,j, 6,iblk) + enddo !i + enddo !j + enddo !iblk + !$OMP END PARALLEL DO + + deallocate(aflds) + + !------------------------------------------------------- + ! Get aerosols from mediator + !------------------------------------------------------- + + if (State_FldChk(importState, 'Faxa_bcph')) then + ! the following indices are based on what the atmosphere is sending + ! bcphidry ungridded_index=1 + ! bcphodry ungridded_index=2 + ! 
bcphiwet ungridded_index=3 + + call state_getfldptr(importState, 'Faxa_bcph', fldptr=dataPtr2d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo; ihi = this_block%ihi + jlo = this_block%jlo; jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + faero_atm(i,j,1,iblk) = dataPtr2d(2,n) * med2mod_areacor(n) ! bcphodry + faero_atm(i,j,2,iblk) = (dataptr2d(1,n) + dataPtr2d(3,n)) * med2mod_areacor(n) ! bcphidry + bcphiwet + end do + end do + end do + end if + + ! Sum over all dry and wet dust fluxes from ath atmosphere + if (State_FldChk(importState, 'Faxa_dstwet') .and. State_FldChk(importState, 'Faxa_dstdry')) then + call state_getfldptr(importState, 'Faxa_dstwet', fldptr=dataPtr2d_dstwet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getfldptr(importState, 'Faxa_dstdry', fldptr=dataPtr2d_dstdry, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo; ihi = this_block%ihi + jlo = this_block%jlo; jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + faero_atm(i,j,3,iblk) = dataPtr2d_dstwet(1,n) + dataptr2d_dstdry(1,n) + & + dataPtr2d_dstwet(2,n) + dataptr2d_dstdry(2,n) + & + dataPtr2d_dstwet(3,n) + dataptr2d_dstdry(3,n) + & + dataPtr2d_dstwet(4,n) + dataptr2d_dstdry(4,n) + faero_atm(i,j,3,iblk) = faero_atm(i,j,3,iblk) * med2mod_areacor(n) + end do + end do + end do + end if + + !------------------------------------------------------- + ! Water isotopes from the mediator + !------------------------------------------------------- + + ! 16O => ungridded_index=1 + ! 18O => ungridded_index=2 + ! 
HDO => ungridded_index=3 + + if (State_FldChk(importState, 'shum_wiso')) then + call state_getimport(importState, 'Sa_shum_wiso', output=Qa_iso, index=1, ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Sa_shum_wiso', output=Qa_iso, index=2, ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Sa_shum_wiso', output=Qa_iso, index=3, ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=1, ungridded_index=3, & + ! areacor=med2mod_areacor, rc=rc) + ! if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=2, ungridded_index=1, & + ! areacor=med2mod_areacor, rc=rc) + ! if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! call state_getimport(importState, 'mean_prec_rate_wiso', output=fiso_rain, index=3, ungridded_index=2, & + ! areacor=med2mod_areacor, rc=rc) + ! 
if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'Faxa_snow_wiso', output=fiso_atm, index=1, ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_snow_wiso', output=fiso_atm, index=2, ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'Faxa_snow_wiso', output=fiso_atm, index=3, ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call state_getimport(importState, 'So_roce_wiso', output=HDO_ocn , ungridded_index=3, & + areacor=med2mod_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'So_roce_wiso', output=H2_16O_ocn, ungridded_index=1, & + areacor=med2mod_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_getimport(importState, 'So_roce_wiso', output=H2_18O_ocn, ungridded_index=2, & + areacor=med2mod_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + !----------------------------------------------------------------- + ! rotate zonal/meridional vectors to local coordinates + ! compute data derived quantities + !----------------------------------------------------------------- + + ! Vector fields come in on T grid, but are oriented geographically + ! need to rotate to pop-grid FIRST using ANGLET + ! then interpolate to the U-cell centers (otherwise we + ! interpolate across the pole) + ! use ANGLET which is on the T grid ! + + call t_startf ('cice_imp_ocn') + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) + do iblk = 1, nblocks + + do j = 1,ny_block + do i = 1,nx_block + ! ocean + workx = uocn (i,j,iblk) ! currents, m/s + worky = vocn (i,j,iblk) + + uocn(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! rotate to align with model i,j + + worky*sin(ANGLET(i,j,iblk)) + vocn(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & + - workx*sin(ANGLET(i,j,iblk)) + + workx = ss_tltx (i,j,iblk) ! 
sea sfc tilt, m/m + worky = ss_tlty (i,j,iblk) + + ss_tltx(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! rotate to align with model i,j + + worky*sin(ANGLET(i,j,iblk)) + ss_tlty(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & + - workx*sin(ANGLET(i,j,iblk)) + + sst(i,j,iblk) = sst(i,j,iblk) - Tffresh ! sea sfc temp (C) + + sss(i,j,iblk) = max(sss(i,j,iblk),c0) + + enddo + enddo + end do + + !$OMP PARALLEL DO PRIVATE(iblk,i,j) + do iblk = 1, nblocks + do j = 1,ny_block + do i = 1,nx_block + Tf(i,j,iblk) = icepack_sea_freezing_temperature(sss(i,j,iblk)) + end do + end do + end do + !$OMP END PARALLEL DO + + call t_stopf ('cice_imp_ocn') + + ! Interpolate ocean dynamics variables from T-cell centers to + ! U-cell centers. + + if (.not.prescribed_ice) then + call t_startf ('cice_imp_t2u') + call ice_HaloUpdate(uocn, halo_info, field_loc_center, field_type_vector) + call ice_HaloUpdate(vocn, halo_info, field_loc_center, field_type_vector) + call ice_HaloUpdate(ss_tltx, halo_info, field_loc_center, field_type_vector) + call ice_HaloUpdate(ss_tlty, halo_info, field_loc_center, field_type_vector) + call t_stopf ('cice_imp_t2u') + end if + + ! Atmosphere variables are needed in T cell centers in + ! subroutine stability and are interpolated to the U grid + ! later as necessary. + + call t_startf ('cice_imp_atm') + !$OMP PARALLEL DO PRIVATE(iblk,i,j,workx,worky) + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + + ! atmosphere + workx = uatm(i,j,iblk) ! wind velocity, m/s + worky = vatm(i,j,iblk) + uatm (i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) & ! convert to POP grid + + worky*sin(ANGLET(i,j,iblk)) ! note uatm, vatm, wind + vatm (i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) & ! 
are on the T-grid here + - workx*sin(ANGLET(i,j,iblk)) + + wind (i,j,iblk) = sqrt(uatm(i,j,iblk)**2 + vatm(i,j,iblk)**2) + fsw (i,j,iblk) = swvdr(i,j,iblk) + swvdf(i,j,iblk) & + + swidr(i,j,iblk) + swidf(i,j,iblk) + enddo + enddo + enddo + !$OMP END PARALLEL DO + call t_stopf ('cice_imp_atm') + + end subroutine ice_import + + !=============================================================================== + subroutine ice_export(importState, exportState, cpl_dt, rc ) + + use ice_scam, only : single_column + + ! input/output variables + type(ESMF_State), intent(inout) :: importState, exportState + integer , intent(in) :: cpl_dt + integer , intent(out) :: rc + + ! local variables + type(block) :: this_block ! block information for current block + integer :: i, j, iblk, n, k ! indices + integer :: n2 ! thickness category index + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + real (kind=dbl_kind) :: workx, worky ! tmps for converting grid + integer (kind=int_kind) :: icells ! number of ocean/ice cells + logical :: flag + integer (kind=int_kind) :: indxi (nx_block*ny_block) ! compressed indices in i + integer (kind=int_kind) :: indxj (nx_block*ny_block) ! compressed indices in i + real (kind=dbl_kind) :: Tsrf (nx_block,ny_block,max_blocks) ! surface temperature + real (kind=dbl_kind) :: tauxa (nx_block,ny_block,max_blocks) ! atmo/ice stress + real (kind=dbl_kind) :: tauya (nx_block,ny_block,max_blocks) ! atm/ice stress + real (kind=dbl_kind) :: tauxo (nx_block,ny_block,max_blocks) ! ice/ocean stress + real (kind=dbl_kind) :: tauyo (nx_block,ny_block,max_blocks) ! ice/ocean stress + real (kind=dbl_kind) :: ailohi(nx_block,ny_block,max_blocks) ! fractional ice area + real (kind=dbl_kind) :: floediam(nx_block,ny_block,max_blocks) + real (kind=dbl_kind) :: floethick(nx_block,ny_block,max_blocks) ! 
ice thickness + logical (kind=log_kind) :: tr_fsd + integer (kind=int_kind) :: nt_fsd + real (kind=dbl_kind) :: Tffresh + real (kind=dbl_kind), allocatable :: tempfld(:,:,:) + real (kind=dbl_kind), pointer :: dataptr_ifrac_n(:,:) + real (kind=dbl_kind), pointer :: dataptr_swpen_n(:,:) + logical (kind=log_kind), save :: first_call = .true. + character(len=*),parameter :: subname = 'ice_export' + !----------------------------------------------------- + + rc = ESMF_SUCCESS + if (io_dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + + call icepack_query_parameters(Tffresh_out=Tffresh) + ! call icepack_query_parameters(tfrz_option_out=tfrz_option, & + ! modal_aero_out=modal_aero, z_tracers_out=z_tracers, skl_bgc_out=skl_bgc, & + ! Tffresh_out=Tffresh) + ! call icepack_query_tracer_flags(tr_aero_out=tr_aero, tr_iage_out=tr_iage, & + ! tr_FY_out=tr_FY, tr_pond_out=tr_pond, tr_lvl_out=tr_lvl, & + ! tr_zaero_out=tr_zaero, tr_bgc_Nit_out=tr_bgc_Nit) + + call icepack_query_tracer_indices(nt_fsd_out=nt_fsd) + call icepack_query_tracer_flags(tr_fsd_out=tr_fsd) + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=u_FILE_u, line=__LINE__) + + !calculate ice thickness from aice and vice. Also + !create Tsrf from the first tracer (trcr) in ice_state.F + + ailohi(:,:,:) = c0 + Tsrf(:,:,:) = c0 + tauxa(:,:,:) = c0 + tauya(:,:,:) = c0 + tauxo(:,:,:) = c0 + tauyo(:,:,:) = c0 + floediam(:,:,:) = c0 + floethick(:,:,:) = c0 + + !$OMP PARALLEL DO PRIVATE(iblk,i,j,k,workx,worky, this_block, ilo, ihi, jlo, jhi) + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo,jhi + do i = ilo,ihi + ! ice fraction + ailohi(i,j,iblk) = min(aice(i,j,iblk), c1) + + ! surface temperature + Tsrf(i,j,iblk) = Tffresh + trcr(i,j,1,iblk) !Kelvin (original ???) + + if (tr_fsd) then + ! 
floe thickness (m) + if (aice(i,j,iblk) > puny) then + floethick(i,j,iblk) = vice(i,j,iblk) / aice(i,j,iblk) + else + floethick(i,j,iblk) = c0 + end if + + ! floe diameter (m) + workx = c0 + worky = c0 + do n = 1, ncat + do k = 1, nfsd + workx = workx + floe_rad_c(k) * aicen_init(i,j,n,iblk) * trcrn(i,j,nt_fsd+k-1,n,iblk) + worky = worky + aicen_init(i,j,n,iblk) * trcrn(i,j,nt_fsd+k-1,n,iblk) + end do + end do + if (worky > c0) workx = c2*workx / worky + floediam(i,j,iblk) = MAX(c2*floe_rad_c(1),workx) + endif + + ! wind stress (on POP T-grid: convert to lat-lon) + workx = strairxT(i,j,iblk) ! N/m^2 + worky = strairyT(i,j,iblk) ! N/m^2 + tauxa(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) - worky*sin(ANGLET(i,j,iblk)) + tauya(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) + workx*sin(ANGLET(i,j,iblk)) + + ! ice/ocean stress (on POP T-grid: convert to lat-lon) + workx = -strocnxT_iavg(i,j,iblk) ! N/m^2 + worky = -strocnyT_iavg(i,j,iblk) ! N/m^2 + tauxo(i,j,iblk) = workx*cos(ANGLET(i,j,iblk)) - worky*sin(ANGLET(i,j,iblk)) + tauyo(i,j,iblk) = worky*cos(ANGLET(i,j,iblk)) + workx*sin(ANGLET(i,j,iblk)) + enddo + enddo + enddo + !$OMP END PARALLEL DO + + flag=.false. + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo,jhi + do i = ilo,ihi + if (tmask(i,j,iblk) .and. ailohi(i,j,iblk) < c0 ) then + flag = .true. + endif + end do + end do + end do + if (flag) then + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo,jhi + do i = ilo,ihi + if (tmask(i,j,iblk) .and. ailohi(i,j,iblk) < c0 ) then + write(nu_diag,*) & + ' (ice) send: ERROR ailohi < 0.0 ',i,j,ailohi(i,j,iblk) + call flush_fileunit(nu_diag) + endif + end do + end do + end do + endif + + !--------------------------------- + ! 
Create the export state + !--------------------------------- + + ! Create a temporary field + allocate(tempfld(nx_block,ny_block,nblocks)) + + ! Fractions and mask + call state_setexport(exportState, 'Si_ifrac', input=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (trim(grid_format) == 'meshnc') then + call state_setexport(exportState, 'Si_imask', input=ocn_gridcell_frac, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + tempfld(i,j,iblk) = real(nint(hm(i,j,iblk)),kind=dbl_kind) + end do + end do + end do + call state_setexport(exportState, 'Si_imask', input=tempfld, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! ---- + ! States from ice + ! ---- + + ! surface temperature of ice covered portion (degK) + call state_setexport(exportState, 'Si_t', input=Tsrf , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! albedo vis dir + call state_setexport(exportState, 'Si_avsdr', input=alvdr, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! albedo nir dir + call state_setexport(exportState, 'Si_anidr', input=alidr, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! albedo vis dif + call state_setexport(exportState, 'Si_avsdf', input=alvdf, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! albedo nir dif + call state_setexport(exportState, 'Si_anidf', input=alidf, lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 10m atm reference wind speed (m/s) + call state_setexport(exportState, 'Si_u10' , input=Uref , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 
2m atm reference temperature (K) + call state_setexport(exportState, 'Si_tref' , input=Tref , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 2m atm reference spec humidity (kg/kg) + call state_setexport(exportState, 'Si_qref' , input=Qref , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Snow volume + call state_setexport(exportState, 'Si_vsno' , input=vsno , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Ice volume + call state_setexport(exportState, 'Si_vice' , input=vice , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Snow height + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + if ( tmask(i,j,iblk) .and. ailohi(i,j,iblk) > c0 ) then + tempfld(i,j,iblk) = vsno(i,j,iblk)/ailohi(i,j,iblk) + end if + end do + end do + end do + call state_setexport(exportState, 'Si_snowh' , input=tempfld , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! ------ + ! optional floe diameter and ice thickness to wave + ! ------ + + ! Sea ice thickness (m) + if (State_FldChk(exportState, 'Si_thick')) then + call state_setexport(exportState, 'Si_thick' , input=floethick , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! Sea ice floe diameter (m) + if (State_FldChk(exportState, 'Si_floediam')) then + call state_setexport(exportState, 'Si_floediam' , input=floediam , lmask=tmask, ifrac=ailohi, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! ------ + ! ice/atm fluxes computed by ice + ! ------ + + ! Zonal air/ice stress + call state_setexport(exportState, 'Faii_taux' , input=tauxa, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 
Meridional air/ice stress + call state_setexport(exportState, 'Faii_tauy' , input=tauya, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Latent heat flux (atm into ice) + call state_setexport(exportState, 'Faii_lat' , input=flat, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Sensible heat flux (atm into ice) + call state_setexport(exportState, 'Faii_sen' , input=fsens, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! longwave outgoing (upward), average over ice fraction only + call state_setexport(exportState, 'Faii_lwup' , input=flwout, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Evaporative water flux (kg/m^2/s) + call state_setexport(exportState, 'Faii_evap' , input=evap, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Shortwave flux absorbed in ice and ocean (W/m^2) + call state_setexport(exportState, 'Faii_swnet' , input=fswabs, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! ------ + ! ice/ocn fluxes computed by ice + ! ------ + + ! flux of shortwave through ice to ocean + call state_setexport(exportState, 'Fioi_swpen' , input=fswthru, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! flux of vis dir shortwave through ice to ocean + call state_setexport(exportState, 'Fioi_swpen_vdr' , input=fswthru_vdr, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 
flux of vis dif shortwave through ice to ocean + call state_setexport(exportState, 'Fioi_swpen_vdf' , input=fswthru_vdf, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! flux of ir dir shortwave through ice to ocean + call state_setexport(exportState, 'Fioi_swpen_idr' , input=fswthru_idr, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! flux of ir dif shortwave through ice to ocean + call state_setexport(exportState, 'Fioi_swpen_idf' , input=fswthru_idf, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! ! flux of heat exchange with ocean + ! call state_setexport(exportState, 'Fioi_melth' , input=fhocn, lmask=tmask, ifrac=ailohi, & + ! areacor=mod2med_areacor, rc=rc) + ! if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! ! flux fresh water to ocean (h2o flux from melting) + ! call state_setexport(exportState, 'Fioi_meltw' , input=fresh, lmask=tmask, ifrac=ailohi, & + ! areacor=mod2med_areacor, rc=rc) + ! if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! ! flux of salt to ocean (salt flux from melting) + ! call state_setexport(exportState, 'Fioi_salt' , input=fsalt, lmask=tmask, ifrac=ailohi, & + ! areacor=mod2med_areacor, rc=rc) + ! if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! stress n i/o zonal + call state_setexport(exportState, 'Fioi_taux' , input=tauxo, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! stress n i/o meridional + call state_setexport(exportState, 'Fioi_tauy' , input=tauyo, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! ------ + ! optional aerosol fluxes to ocean + ! ------ + + ! 
hydrophobic bc + if (State_FldChk(exportState, 'Fioi_bcpho')) then + call state_setexport(exportState, 'Fioi_bcpho' , input=faero_ocn, index=1, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! hydrophilic bc + if (State_FldChk(exportState, 'Fioi_bcphi')) then + call state_setexport(exportState, 'Fioi_bcphi' , input=faero_ocn, index=2, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! dust + if (State_FldChk(exportState, 'Fioi_flxdst')) then + call state_setexport(exportState, 'Fioi_flxdst' , input=faero_ocn, index=3, lmask=tmask, ifrac=ailohi, & + areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! ------ + ! optional water isotope fluxes to ocean + ! ------ + + if (State_FldChk(exportState, 'Fioi_meltw_wiso')) then + ! 16O => ungridded_index=1 + ! 18O => ungridded_index=2 + ! HDO => ungridded_index=3 + + call state_setexport(exportState, 'Fioi_meltw_wiso' , input=fiso_ocn, index=1, & + lmask=tmask, ifrac=ailohi, ungridded_index=3, areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_setexport(exportState, 'Fioi_meltw_wiso' , input=fiso_ocn, index=2, & + lmask=tmask, ifrac=ailohi, ungridded_index=1, areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_setexport(exportState, 'Fioi_meltw_wiso' , input=fiso_ocn, index=3, & + lmask=tmask, ifrac=ailohi, ungridded_index=2, areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! ------ + ! optional water isotope fluxes to atmospehre + ! ------ + + if (State_FldChk(exportState, 'Faii_evap_wiso')) then + ! 
Isotope evap to atm + call state_setexport(exportState, 'Faii_evap_wiso' , input=fiso_evap, index=1, & + lmask=tmask, ifrac=ailohi, ungridded_index=3, areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_setexport(exportState, 'Faii_evap_wiso' , input=fiso_evap, index=2, & + lmask=tmask, ifrac=ailohi, ungridded_index=1, areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_setexport(exportState, 'Faii_evap_wiso' , input=fiso_evap, index=3, & + lmask=tmask, ifrac=ailohi, ungridded_index=2, areacor=mod2med_areacor, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! qref to atm + call state_setexport(exportState, 'Si_qref_wiso' , input=Qref_iso, index=1, & + lmask=tmask, ifrac=ailohi, ungridded_index=3, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_setexport(exportState, 'Si_qref_wiso' , input=Qref_iso, index=2, & + lmask=tmask, ifrac=ailohi, ungridded_index=1, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call state_setexport(exportState, 'Si_qref_wiso' , input=Qref_iso, index=3, & + lmask=tmask, ifrac=ailohi, ungridded_index=2, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + endif + + ! ------ + ! optional short wave penetration to ocean ice category + ! ------ + + ! ice fraction by category + if ( State_FldChk(exportState, 'Si_ifrac_n') .and. & + State_FldChk(exportState, 'Fioi_swpen_ifrac_n')) then + do n = 1,ncat + call state_setexport(exportState, 'Si_ifrac_n', input=aicen_init, index=n, & + ungridded_index=n, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! penetrative shortwave by category + ! Note: no need zero out pass-through fields over land for benefit of x2oacc fields in cpl hist files since + ! 
the export state has been zeroed out at the beginning
         call state_setexport(exportState, 'Fioi_swpen_ifrac_n', input=fswthrun_ai, index=n, &
              lmask=tmask, ifrac=ailohi, ungridded_index=n, areacor=mod2med_areacor, rc=rc)
         if (ChkErr(rc,__LINE__,u_FILE_u)) return
      end do
   end if

   call ice_export_access(importState, exportState, ailohi, cpl_dt, rc)
   first_call = .false.
   call log_state_info(exportState, fldsFrIce, fldsFrIce_num, exportState)

 end subroutine ice_export


 subroutine ice_increment_fluxes( state, nsteps, rc )

   ! Accumulate time-averaged ice-to-ocean fluxes (heat, fresh water, salt)
   ! into the export state.  Each call adds 1/nsteps of the current
   ! area-corrected per-gridcell flux, so after nsteps calls the state
   ! fields hold the mean over the coupling interval.

   type(ESMF_State), intent(inout) :: state   ! export state holding the running sums
   integer, intent(in)             :: nsteps  ! number of model substeps averaged over a coupling interval
   integer, intent(out)            :: rc      ! ESMF return code

   ! local variables
   type(block) :: this_block              ! block information for current block
   integer :: ilo, ihi, jlo, jhi          ! beginning and end of physical domain
   integer :: i, j, iblk, n               ! indices
   real(kind=dbl_kind), pointer :: fhocn_ptr(:), fresh_ptr(:), fsalt_ptr(:)
   character(len=*) , parameter :: subname='(ice_import_export:ice_increment_fluxes)'
   ! ----------------------------------------------

   rc = ESMF_SUCCESS

   ! Bail out if any flux field cannot be retrieved from the state.
   ! (Previously the return codes from these calls were silently ignored,
   ! unlike every other state access in this module.)
   call state_getfldptr(state, 'Fioi_melth', fhocn_ptr, rc)
   if (ChkErr(rc,__LINE__,u_FILE_u)) return
   call state_getfldptr(state, 'Fioi_meltw', fresh_ptr, rc)
   if (ChkErr(rc,__LINE__,u_FILE_u)) return
   call state_getfldptr(state, 'Fioi_salt', fsalt_ptr, rc)
   if (ChkErr(rc,__LINE__,u_FILE_u)) return

   ! Walk the physical (non-halo) cells of every block in the same order the
   ! mediator packs its 1-d field arrays, so index n lines up with the
   ! mediator-side pointers and with mod2med_areacor.
   n = 0
   do iblk = 1, nblocks
      this_block = get_block(blocks_ice(iblk),iblk)
      ilo = this_block%ilo
      ihi = this_block%ihi
      jlo = this_block%jlo
      jhi = this_block%jhi
      do j = jlo, jhi
         do i = ilo, ihi
            n = n+1
            ! real/integer division: nsteps is promoted, no integer truncation
            fhocn_ptr(n) = fhocn_ptr(n) + fhocn_ai(i,j,iblk) * mod2med_areacor(n) / nsteps
            fresh_ptr(n) = fresh_ptr(n) + fresh_ai(i,j,iblk) * mod2med_areacor(n) / nsteps
            fsalt_ptr(n) = fsalt_ptr(n) + fsalt_ai(i,j,iblk) * mod2med_areacor(n) / nsteps
         end do
      end do
   end do

 end subroutine ice_increment_fluxes

 subroutine log_state_info(state, field_list, field_num, exportState)
   type(ESMF_State) :: state, exportState
   type(fld_list_type) :: field_list(:)
   integer :: field_num

   ! 
local variables + type(ESMF_Field) :: field + character(len=320) :: msgString, tmpString + character(len=20) :: fld_name + integer :: i, rc, j, k, n + real(ESMF_KIND_R8), pointer :: fld_ptr1(:), fld_ptr2(:, :), sea_ice_mask(:) + real :: lo, hi + real(ESMF_KIND_R8), pointer :: esmf_arr(:) + + ! call ESMF_StateGet(exportState, itemName='ice_mask', field=field) + ! call ESMF_FieldGet(field, farrayptr=sea_ice_mask) + + do i = 1,field_num + + if (field_list(i)%stdname == 'cpl_scalars') cycle + + if (State_FldChk(state, trim(field_list(i)%stdname))) then + + write (tmpString, *) i + call ESMF_LogWrite('i: ' // trim(tmpString) // ' - ' // trim(field_list(i)%stdname), ESMF_LOGMSG_DEBUG, rc=rc) + + call ESMF_StateGet(state, itemName=trim(field_list(i)%stdname), field=field) + + if (field_list(i)%ungridded_lbound > 0 .and. field_list(i)%ungridded_ubound > 0) then + call ESMF_FieldGet(field, farrayptr=fld_ptr2) + lo = minval(fld_ptr2) + hi = maxval(fld_ptr2) + write (tmpString, *) nan_check(pack(fld_ptr2, .true.)) + + else + call ESMF_FieldGet(field, farrayptr=fld_ptr1) + lo = minval(fld_ptr1) + hi = maxval(fld_ptr1) + write (tmpString, *) nan_check(fld_ptr1) + + end if + + call ESMF_LogWrite(trim(field_list(i)%stdname) // ' any nans: ' // trim(tmpString), ESMF_LOGMSG_DEBUG, rc=rc) + write (tmpString, *) lo + call ESMF_LogWrite(trim(field_list(i)%stdname) // ' min: ' // trim(tmpString), ESMF_LOGMSG_DEBUG, rc=rc) + write (tmpString, *) hi + call ESMF_LogWrite(trim(field_list(i)%stdname) // ' max: ' // trim(tmpString), ESMF_LOGMSG_DEBUG, rc=rc) + + end if + + end do + + end subroutine log_state_info + + logical function nan_check(arr) + use, intrinsic :: ieee_arithmetic + real(ESMF_KIND_R8), intent(in) :: arr(:) + + integer :: i + + nan_check = .false. + + do i=1,size(arr) + nan_check = nan_check .or. 
ieee_is_nan(arr(i)) !.not.(arr(i) == arr(i)) + end do + return + + end function nan_check + + !=============================================================================== + subroutine fldlist_add(num, fldlist, stdname, ungridded_lbound, ungridded_ubound) + + ! input/output variables + integer , intent(inout) :: num + type(fld_list_type) , intent(inout) :: fldlist(:) + character(len=*) , intent(in) :: stdname + integer, optional , intent(in) :: ungridded_lbound + integer, optional , intent(in) :: ungridded_ubound + + ! local variables + character(len=*), parameter :: subname='(fldlist_add)' + !------------------------------------------------------------------------------- + + ! Set up a list of field information + + num = num + 1 + if (num > fldsMax) then + call abort_ice(trim(subname)//": ERROR num > fldsMax "//trim(stdname)) + endif + fldlist(num)%stdname = trim(stdname) + + if (present(ungridded_lbound) .and. present(ungridded_ubound)) then + fldlist(num)%ungridded_lbound = ungridded_lbound + fldlist(num)%ungridded_ubound = ungridded_ubound + end if + + end subroutine fldlist_add + + !=============================================================================== + subroutine fldlist_realize(state, fldList, numflds, flds_scalar_name, flds_scalar_num, mesh, grid, tag, rc) + + use NUOPC, only : NUOPC_IsConnected, NUOPC_Realize + use ESMF , only : ESMF_MeshLoc_Element, ESMF_FieldCreate, ESMF_TYPEKIND_R8 + use ESMF , only : ESMF_MAXSTR, ESMF_Field, ESMF_State, ESMF_Mesh, ESMF_StateRemove + use ESMF , only : ESMF_LogFoundError, ESMF_LOGMSG_INFO, ESMF_SUCCESS + use ESMF , only : ESMF_LogWrite, ESMF_LOGMSG_ERROR, ESMF_LOGERR_PASSTHRU + use ESMF , only : ESMF_VM + + ! 
input/output variables + type(ESMF_State) , intent(inout) :: state + type(fld_list_type) , intent(in) :: fldList(:) + integer , intent(in) :: numflds + character(len=*) , intent(in) :: flds_scalar_name + integer , intent(in) :: flds_scalar_num + character(len=*) , intent(in) :: tag + type(ESMF_Mesh), optional , intent(in) :: mesh + type(ESMF_Grid), optional , intent(in) :: grid + integer , intent(inout) :: rc + + ! local variables + integer :: n + type(ESMF_Field) :: field + character(len=80) :: stdname + character(ESMF_MAXSTR) :: msg + character(len=*),parameter :: subname='(ice_import_export:fld_list_realize)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + do n = 1, numflds + stdname = fldList(n)%stdname + if (NUOPC_IsConnected(state, fieldName=stdname)) then + if (stdname == trim(flds_scalar_name)) then + call ESMF_LogWrite(trim(subname)//trim(tag)//" Field = "//trim(stdname)//" is connected on root pe", & + ESMF_LOGMSG_INFO) + ! Create the scalar field + call SetScalarField(field, flds_scalar_name, flds_scalar_num, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + if (present(mesh)) then + ! Create the field + if (fldlist(n)%ungridded_lbound > 0 .and. 
fldlist(n)%ungridded_ubound > 0) then + field = ESMF_FieldCreate(mesh, ESMF_TYPEKIND_R8, name=stdname, meshloc=ESMF_MESHLOC_ELEMENT, & + ungriddedLbound=(/fldlist(n)%ungridded_lbound/), & + ungriddedUbound=(/fldlist(n)%ungridded_ubound/), & + gridToFieldMap=(/2/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + write(msg, '(a,i4,2x,i4)') trim(subname)//trim(tag)//" Field = "//trim(stdname)//& + " is connected using mesh with lbound, ubound = ",& + fldlist(n)%ungridded_lbound,fldlist(n)%ungridded_ubound + call ESMF_LogWrite(msg, ESMF_LOGMSG_INFO) + else + field = ESMF_FieldCreate(mesh, ESMF_TYPEKIND_R8, name=stdname, meshloc=ESMF_MESHLOC_ELEMENT, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + write(msg, '(a,i4,a,i4)') trim(subname)//trim(tag)//" Field = "//trim(stdname)//& + " is connected using mesh without ungridded dimension" + call ESMF_LogWrite(msg, ESMF_LOGMSG_INFO) + end if + else if (present(grid)) then + call ESMF_LogWrite(trim(subname)//trim(tag)//" Field = "//trim(stdname)//" is connected using grid", & + ESMF_LOGMSG_INFO) + if (fldlist(n)%ungridded_lbound > 0 .and. fldlist(n)%ungridded_ubound > 0) then + field = ESMF_FieldCreate(grid, ESMF_TYPEKIND_R8, name=stdname, indexflag=ESMF_INDEX_DELOCAL, & + ungriddedLBound=(/1,1/), ungriddedUBound=(/max_blocks,fldlist(n)%ungridded_ubound/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + field = ESMF_FieldCreate(grid, ESMF_TYPEKIND_R8, name=stdname, indexflag=ESMF_INDEX_DELOCAL, & + ungriddedLBound=(/1/), ungriddedUBound=(/max_blocks/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + else + call ESMF_LogWrite(subname // 'input must be grid or mesh', ESMF_LOGMSG_INFO) + rc = ESMF_FAILURE + return + end if + end if ! if not scalar field + + ! 
NOW call NUOPC_Realize + call NUOPC_Realize(state, field=field, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + if (stdname /= trim(flds_scalar_name)) then + call ESMF_LogWrite(subname // trim(tag) // " Field = "// trim(stdname) // " is not connected.", & + ESMF_LOGMSG_INFO) + call ESMF_StateRemove(state, (/stdname/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + end if + end do + + contains !- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + subroutine SetScalarField(field, flds_scalar_name, flds_scalar_num, rc) + ! ---------------------------------------------- + ! create a field with scalar data on the root pe + ! ---------------------------------------------- + use ESMF, only : ESMF_Field, ESMF_DistGrid, ESMF_Grid + use ESMF, only : ESMF_DistGridCreate, ESMF_GridCreate, ESMF_LogFoundError, ESMF_LOGERR_PASSTHRU + use ESMF, only : ESMF_FieldCreate, ESMF_GridCreate, ESMF_TYPEKIND_R8 + + type(ESMF_Field) , intent(inout) :: field + character(len=*) , intent(in) :: flds_scalar_name + integer , intent(in) :: flds_scalar_num + integer , intent(inout) :: rc + + ! local variables + type(ESMF_Distgrid) :: distgrid + type(ESMF_Grid) :: grid + real(ESMF_KIND_R8), pointer :: fldptr2d(:,:) + character(len=*), parameter :: subname='(ice_import_export:SetScalarField)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + ! create a DistGrid with a single index space element, which gets mapped onto DE 0. + distgrid = ESMF_DistGridCreate(minIndex=(/1/), maxIndex=(/1/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + grid = ESMF_GridCreate(distgrid, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + field = ESMF_FieldCreate(name=trim(flds_scalar_name), grid=grid, typekind=ESMF_TYPEKIND_R8, & + ungriddedLBound=(/1/), ungriddedUBound=(/flds_scalar_num/), gridToFieldMap=(/2/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 
initialize fldptr to zero + call ESMF_FieldGet(field, farrayPtr=fldptr2d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + fldptr2d(:,:) = 0.0 + + end subroutine SetScalarField + + end subroutine fldlist_realize + + !=============================================================================== + logical function State_FldChk(State, fldname) + ! ---------------------------------------------- + ! Determine if field is in state + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(in) :: State + character(len=*) , intent(in) :: fldname + + ! local variables + type(ESMF_StateItem_Flag) :: itemType + ! ---------------------------------------------- + + call ESMF_StateGet(State, trim(fldname), itemType) + State_FldChk = (itemType /= ESMF_STATEITEM_NOTFOUND) + + end function State_FldChk + + !=============================================================================== + subroutine state_getimport_4d(state, fldname, output, index, ungridded_index, areacor, rc) + + ! ---------------------------------------------- + ! Map import state field to output array + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(in) :: state + character(len=*) , intent(in) :: fldname + real (kind=dbl_kind) , intent(inout) :: output(:,:,:,:) + integer , intent(in) :: index + integer, optional , intent(in) :: ungridded_index + real(kind=dbl_kind), optional , intent(in) :: areacor(:) + integer , intent(out) :: rc + + ! local variables + type(block) :: this_block ! block information for current block + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + integer :: i, j, iblk, n, i1, j1 ! incides + real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh + real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh + character(len=*), parameter :: subname='(ice_import_export:state_getimport_4d)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + ! 
check that fieldname exists + if (.not. State_FldChk(state, trim(fldname))) return + + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! set values of output array + n=0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if (present(ungridded_index)) then + output(i,j,index,iblk) = dataPtr2d(ungridded_index,n) + else + output(i,j,index,iblk) = dataPtr1d(n) + end if + end do + end do + end do + if (present(areacor)) then + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo; ihi = this_block%ihi + jlo = this_block%jlo; jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n + 1 + output(i,j,index,iblk) = output(i,j,index,iblk) * areacor(n) + end do + end do + end do + end if + + end subroutine state_getimport_4d + + !=============================================================================== + subroutine state_getimport_3d(state, fldname, output, ungridded_index, areacor, rc) + + ! ---------------------------------------------- + ! Map import state field to output array + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State) , intent(in) :: state + character(len=*) , intent(in) :: fldname + real (kind=dbl_kind) , intent(inout) :: output(:,:,:) + integer, optional , intent(in) :: ungridded_index + real(kind=dbl_kind), optional , intent(in) :: areacor(:) + integer , intent(out) :: rc + + ! local variables + type(block) :: this_block ! block information for current block + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + integer :: i, j, iblk, n, i1, j1 ! 
incides + real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh + real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh + character(len=*) , parameter :: subname='(ice_import_export:state_getimport_3d)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + ! check that fieldname exists + if (.not. State_FldChk(state, trim(fldname))) return + + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end if + + ! determine output array + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if (present(ungridded_index)) then + output(i,j,iblk) = dataPtr2d(ungridded_index,n) + else + output(i,j,iblk) = dataPtr1d(n) + end if + end do + end do + end do + if (present(areacor)) then + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo; ihi = this_block%ihi + jlo = this_block%jlo; jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n + 1 + output(i,j,iblk) = output(i,j,iblk) * areacor(n) + end do + end do + end do + end if + + end subroutine state_getimport_3d + + !=============================================================================== + subroutine state_setexport_4d(state, fldname, input, index, lmask, ifrac, ungridded_index, areacor, rc) + + ! ---------------------------------------------- + ! Map 4d input array to export state field + ! ---------------------------------------------- + + ! 
input/output variables + type(ESMF_State) , intent(inout) :: state + character(len=*) , intent(in) :: fldname + real(kind=dbl_kind) , intent(in) :: input(:,:,:,:) + integer , intent(in) :: index + logical , optional, intent(in) :: lmask(:,:,:) + real(kind=dbl_kind) , optional, intent(in) :: ifrac(:,:,:) + integer , optional, intent(in) :: ungridded_index + real(kind=dbl_kind) , optional, intent(in) :: areacor(:) + integer , intent(out) :: rc + + ! local variables + type(block) :: this_block ! block information for current block + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + integer :: i, j, iblk, n, i1, j1 ! indices + real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh + real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh + integer :: ice_num + character(len=*), parameter :: subname='(ice_import_export:state_setexport_4d)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + ! check that fieldname exists + if (.not. State_FldChk(state, trim(fldname))) return + + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + dataptr2d(ungridded_index,:) = c0 + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo; ihi = this_block%ihi + jlo = this_block%jlo; jhi = this_block%jhi + if (present(lmask) .and. present(ifrac)) then + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if ( lmask(i,j,iblk) .and. 
ifrac(i,j,iblk) > c0 ) then + dataPtr2d(ungridded_index,n) = input(i,j,index,iblk) + else + dataPtr2d(ungridded_index,n) = c0 + end if + end do + end do + else + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + dataPtr2d(ungridded_index,n) = input(i,j,index,iblk) + end do + end do + end if + end do + ice_num = n + if (present(areacor)) then + do n = 1,ice_num + dataPtr2d(ungridded_index,n) = dataPtr2d(ungridded_index,n) * areacor(n) + end do + end if + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + dataptr1d(:) = c0 + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo; ihi = this_block%ihi + jlo = this_block%jlo; jhi = this_block%jhi + if (present(lmask) .and. present(ifrac)) then + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if ( lmask(i,j,iblk) .and. ifrac(i,j,iblk) > c0 ) then + dataPtr1d(n) = input(i,j,index,iblk) + end if + end do + end do + else + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + dataPtr1d(n) = input(i,j,index,iblk) + end do + end do + end if + end do + ice_num = n + if (present(areacor)) then + do n = 1,ice_num + dataPtr1d(n) = dataPtr1d(n) * areacor(n) + end do + end if + end if + + end subroutine state_setexport_4d + + !=============================================================================== + subroutine state_setexport_3d(state, fldname, input, lmask, ifrac, ungridded_index, areacor, rc) + + ! ---------------------------------------------- + ! Map 3d input array to export state field + ! ---------------------------------------------- + + ! 
input/output variables + type(ESMF_State) , intent(inout) :: state + character(len=*) , intent(in) :: fldname + real(kind=dbl_kind) , intent(in) :: input(:,:,:) + logical , optional , intent(in) :: lmask(:,:,:) + real(kind=dbl_kind) , optional , intent(in) :: ifrac(:,:,:) + integer , optional , intent(in) :: ungridded_index + real(kind=dbl_kind) , optional , intent(in) :: areacor(:) + integer , intent(out) :: rc + + ! local variables + type(block) :: this_block ! block information for current block + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + integer :: i, j, iblk, n, i1, j1 ! incides + real(kind=dbl_kind), pointer :: dataPtr1d(:) ! mesh + real(kind=dbl_kind), pointer :: dataPtr2d(:,:) ! mesh + integer :: num_ice + character(len=*), parameter :: subname='(ice_import_export:state_setexport_3d)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + ! check that fieldname exists + if (.not. State_FldChk(state, trim(fldname))) return + + ! get field pointer + if (present(ungridded_index)) then + call state_getfldptr(state, trim(fldname), dataPtr2d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + dataPtr2d(ungridded_index, :) = c0 + else + call state_getfldptr(state, trim(fldname), dataPtr1d, rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + dataPtr1d(:) = c0 + end if + + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + if (present(lmask) .and. present(ifrac)) then + if ( lmask(i,j,iblk) .and. 
ifrac(i,j,iblk) > c0 ) then
+                  if (present(ungridded_index)) then
+                     dataPtr2d(ungridded_index,n) = input(i,j,iblk)
+                  else
+                     dataPtr1d(n) = input(i,j,iblk)
+                  end if
+               end if
+            else
+               if (present(ungridded_index)) then
+                  dataPtr2d(ungridded_index,n) = input(i,j,iblk)
+               else
+                  dataPtr1d(n) = input(i,j,iblk)
+               end if
+            end if
+         end do
+      end do
+    end do
+    num_ice = n
+
+    ! Optional model->mediator area correction.  Only the slice that was just
+    ! filled (ungridded_index) may be scaled here: scaling every ungridded row
+    ! via dataPtr2d(:,n) would re-scale slices written by earlier calls for
+    ! other category indices, compounding areacor once per call.  This now
+    ! matches the equivalent logic in state_setexport_4d.
+    if (present(areacor)) then
+       if (present(ungridded_index)) then
+          do n = 1,num_ice
+             dataPtr2d(ungridded_index,n) = dataPtr2d(ungridded_index,n) * areacor(n)
+          end do
+       else
+          do n = 1,num_ice
+             dataPtr1d(n) = dataPtr1d(n) * areacor(n)
+          end do
+       end if
+    end if
+
+  end subroutine state_setexport_3d
+
+  !===============================================================================
+  subroutine State_GetFldPtr_1d(State, fldname, fldptr, rc)
+    ! ----------------------------------------------
+    ! Get pointer to the 1d (mesh) data of a state field
+    ! ----------------------------------------------
+
+    ! input/output variables
+    type(ESMF_State)    ,  intent(in)    :: State
+    character(len=*)    ,  intent(in)    :: fldname
+    real(kind=dbl_kind) , pointer , intent(inout) :: fldptr(:)
+    integer, optional   ,  intent(out)   :: rc
+
+    ! local variables
+    type(ESMF_Field) :: lfield
+    character(len=*),parameter :: subname='(ice_import_export:State_GetFldPtr_1d)'
+    ! ----------------------------------------------
+
+    ! NOTE(review): rc is declared optional but is assigned unconditionally;
+    ! a call that omits rc would be invalid.  All visible call sites pass rc
+    ! -- consider making it non-optional.
+    rc = ESMF_SUCCESS
+
+    call ESMF_StateGet(State, itemName=trim(fldname), field=lfield, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+  end subroutine State_GetFldPtr_1d
+
+  !===============================================================================
+  subroutine State_GetFldPtr_2d(State, fldname, fldptr, rc)
+    ! ----------------------------------------------
+    ! Get pointer to the 2d (ungridded dim x mesh) data of a state field
+    ! ----------------------------------------------
+
+    ! 
input/output variables
+    type(ESMF_State)    ,  intent(in)    :: State
+    character(len=*)    ,  intent(in)    :: fldname
+    real(kind=dbl_kind) , pointer , intent(inout) :: fldptr(:,:)
+    integer , optional  ,  intent(out)   :: rc
+
+    ! local variables
+    type(ESMF_Field) :: lfield
+    character(len=*),parameter :: subname='(ice_import_export:State_GetFldPtr_2d)'
+    ! ----------------------------------------------
+
+    rc = ESMF_SUCCESS
+
+    call ESMF_StateGet(State, itemName=trim(fldname), field=lfield, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    call ESMF_FieldGet(lfield, farrayPtr=fldptr, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+  end subroutine State_GetFldPtr_2d
+
+
+  ! Advertise the ACCESS/UM-specific import fields (atmosphere -> ice) by
+  ! appending them to fldsToIce prior to realization.  Category-resolved
+  ! fields carry an ungridded dimension of size ncat.
+  subroutine ice_advertise_fields_access_import(gcomp, importState, exportState, flds_scalar_name, rc)
+
+    ! input/output variables
+    type(ESMF_GridComp)            :: gcomp
+    type(ESMF_State)               :: importState
+    type(ESMF_State)               :: exportState
+    character(len=*) , intent(in)  :: flds_scalar_name
+    integer          , intent(out) :: rc
+
+    character(len=100) :: tmpString
+
+    ! rc is intent(out) and was previously never assigned, leaving it
+    ! undefined for the caller; initialize to success.
+    rc = ESMF_SUCCESS
+
+    call fldlist_add(fldsToIce_num, fldsToIce, 'um_icenth')
+    call fldlist_add(fldsToIce_num, fldsToIce, 'um_icesth')
+    call fldlist_add(fldsToIce_num, fldsToIce, 'pen_rad',    ungridded_lbound=1, ungridded_ubound=ncat)
+    call fldlist_add(fldsToIce_num, fldsToIce, 'topmelt',    ungridded_lbound=1, ungridded_ubound=ncat)
+    call fldlist_add(fldsToIce_num, fldsToIce, 'botmelt',    ungridded_lbound=1, ungridded_ubound=ncat)
+    call fldlist_add(fldsToIce_num, fldsToIce, 'tstar_sice', ungridded_lbound=1, ungridded_ubound=ncat)
+    call fldlist_add(fldsToIce_num, fldsToIce, 'sublim',     ungridded_lbound=1, ungridded_ubound=ncat)
+
+    write (tmpString, *) ncat
+    call ESMF_LogWrite("CICE number of ice categories: " // trim(tmpString))
+  end subroutine ice_advertise_fields_access_import
+
+
+  subroutine ice_advertise_fields_access_export(gcomp, importState, exportState, flds_scalar_name, rc)
+
+    ! 
input/output variables
+    type(ESMF_GridComp)            :: gcomp
+    type(ESMF_State)               :: importState
+    type(ESMF_State)               :: exportState
+    character(len=*) , intent(in)  :: flds_scalar_name
+    integer          , intent(out) :: rc
+
+    character(len=100) :: tmpString
+
+    ! rc is intent(out) and was previously never assigned, leaving it
+    ! undefined for the caller; initialize to success.
+    rc = ESMF_SUCCESS
+
+    call fldlist_add(fldsFrIce_num , fldsFrIce, 'ia_aicen', ungridded_lbound=1, ungridded_ubound=ncat) ! from ice_state field: aicen
+    call fldlist_add(fldsFrIce_num , fldsFrIce, 'ia_snown', ungridded_lbound=1, ungridded_ubound=ncat) ! from ice_state field: vsnon
+    call fldlist_add(fldsFrIce_num , fldsFrIce, 'ia_thikn', ungridded_lbound=1, ungridded_ubound=ncat) ! from ice_state field: vicen
+
+    call fldlist_add(fldsFrIce_num , fldsFrIce, 'ia_itopt', ungridded_lbound=1, ungridded_ubound=ncat) ! from ice flux: Tn_top
+    call fldlist_add(fldsFrIce_num , fldsFrIce, 'ia_itopk', ungridded_lbound=1, ungridded_ubound=ncat) ! from ice flux: keffn_top
+    call fldlist_add(fldsFrIce_num , fldsFrIce, 'ia_pndfn', ungridded_lbound=1, ungridded_ubound=ncat) ! from icepack_shortwave: apeffn
+    call fldlist_add(fldsFrIce_num , fldsFrIce, 'ia_pndtn', ungridded_lbound=1, ungridded_ubound=ncat) ! from ice state field: trcrn
+    call fldlist_add(fldsFrIce_num , fldsFrIce, 'sstfrz')
+
+    write (tmpString, *) ncat
+    call ESMF_LogWrite("CICE number of ice categories: " // trim(tmpString))
+  end subroutine ice_advertise_fields_access_export
+
+
+  ! Fill the ACCESS-specific export fields (ice -> atmosphere): per-category
+  ! ice/snow state, pond fields (area-weighted before mapping so that pond
+  ! area/volume are conserved under remapping), and per-category top-layer
+  ! temperature / effective conductivity.
+  subroutine ice_export_access(importState, exportState, ailohi, cpl_dt, rc)
+
+    use ice_scam, only : single_column
+    use ice_domain_size, only: nslyr, nilyr
+    use icepack_parameters, only: hs_min, Lfresh, rhos, ksno, cp_ice, depressT, ktherm, rhoi
+    use icepack_mushy_physics, only: liquidus_temperature_mush
+    use icepack_therm_shared, only: calculate_Tin_from_qin
+    use ice_state, only: aicen, vsnon, vicen, trcrn
+    ! use icepack_therm_itd, only: nt_hpnd,
+    use icepack_tracers, only: nt_qsno, nt_hpnd, nt_apnd, nt_sice, nt_qice
+    use ice_arrays_column, only: apeffn
+
+    ! 
input/output variables + type(ESMF_State), intent(inout) :: importState, exportState + integer , intent(in) :: cpl_dt + integer , intent(out) :: rc + + real (kind=dbl_kind) :: ailohi(nx_block,ny_block,max_blocks) + + ! local variables + type(block) :: this_block ! block information for current block + integer :: i, j, iblk, n, k ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + logical :: flag + real (kind=dbl_kind), allocatable :: tempfld(:,:,:), tempfld1(:,:,:), ki_fld(:,:,:,:), hi1_fld(:,:,:,:) + real (kind=dbl_kind), allocatable :: pndfn_scaled(:,:,:,:), pndtn_scaled(:,:,:,:) + real(kind=dbl_kind), pointer :: fhocn_ptr(:), fresh_ptr(:), um_icenth(:), um_icesth(:) + real (kind=dbl_kind) :: hs1, hi1, Tmlt1, ki, rnslyr, rnilyr, licefw, liceht + character(len=*),parameter :: subname = 'ice_export_access' + character(len=200) :: tmpString + !----------------------------------------------------- + + rc = ESMF_SUCCESS + if (io_dbug > 5) call ESMF_LogWrite(subname//' called', ESMF_LOGMSG_INFO) + + call icepack_warnings_flush(nu_diag) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=u_FILE_u, line=__LINE__) + + ! Create temporary fields + allocate(tempfld(nx_block,ny_block,nblocks)) + allocate(tempfld1(nx_block,ny_block,nblocks)) + allocate(pndfn_scaled(nx_block,ny_block,ncat,nblocks)) + allocate(pndtn_scaled(nx_block,ny_block,ncat,nblocks)) + + do n = 1, ncat + call state_setexport(exportState, 'ia_aicen', input=aicen , lmask=tmask, ifrac=ailohi, rc=rc, index=n, ungridded_index=n) + call state_setexport(exportState, 'ia_snown', input=vsnon , lmask=tmask, ifrac=ailohi, rc=rc, index=n, ungridded_index=n) + call state_setexport(exportState, 'ia_thikn', input=vicen , lmask=tmask, ifrac=ailohi, rc=rc, index=n, ungridded_index=n) + end do + + call state_setexport(exportState, 'sstfrz', input=Tf , lmask=tmask, ifrac=ailohi, rc=rc) + + ! To conserve pond areas, scale by ice fractions before mapping. 
Unscale in the atmosphere after mapping + pndfn_scaled(:,:,:,:) = apeffn(:,:,:,:) * aicen(:,:,:,:) + ! To conserve pond volumes, scale by the pond gridcell fraction before mapping. Unscale in the atmosphere + ! after mapping + pndtn_scaled(:,:,:,:) = trcrn(:,:,nt_hpnd,:,:) * apeffn(:,:,:,:) * aicen(:,:,:,:) + + do n = 1, ncat + call state_setexport(exportState, 'ia_pndfn', input=pndfn_scaled, lmask=tmask, ifrac=ailohi, rc=rc, index=n, ungridded_index=n) + call state_setexport(exportState, 'ia_pndtn', input=pndtn_scaled, lmask=tmask, ifrac=ailohi, rc=rc, index=n, ungridded_index=n) + end do + + rnslyr = real(nslyr,kind=dbl_kind) + rnilyr = real(nilyr,kind=dbl_kind) + + do n = 1, ncat + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + if (aicen(i,j,n,iblk) > puny) then + hs1 = vsnon(i,j,n,iblk)/(aicen(i,j,n,iblk)*rnslyr) + if (hs1 > hs_min/rnslyr) then + !snow is top layer + tempfld(i,j,iblk) = (Lfresh + trcrn(i,j,nt_qsno,n,iblk) / rhos)/cp_ice + tempfld1(i,j,iblk) = c2 * ksno / hs1 + else + !ice is top layer + hi1 = vicen(i,j,n,iblk)/(aicen(i,j,n,iblk)*rnilyr) + if (ktherm == 2) then + Tmlt1 = liquidus_temperature_mush(trcrn(i,j,nt_sice,n,iblk)) + else + Tmlt1 = - trcrn(i,j,nt_sice,n,iblk) * depressT + endif + + tempfld(i,j,iblk) = calculate_Tin_from_qin(trcrn(i,j,nt_qice,n,iblk),Tmlt1) + ki = calculate_ki_from_Tin(tempfld(i,j,iblk), trcrn(i,j,nt_sice,n,iblk)) + tempfld1(i,j,iblk) = (c2 * ki / hi1) + end if + + tempfld(i,j,iblk) = tempfld(i,j,iblk) * aicen(i,j,n,iblk) + tempfld1(i,j,iblk) = tempfld1(i,j,iblk) * aicen(i,j,n,iblk) + + else + tempfld(i,j,iblk) = 0.0 + tempfld1(i,j,iblk) = 0.0 + endif + end do + end do + end do + call state_setexport(exportState, 'ia_itopt', input=tempfld, lmask=tmask, ifrac=ailohi, rc=rc, ungridded_index=n) + call state_setexport(exportState, 'ia_itopk', input=tempfld1, lmask=tmask, 
ifrac=ailohi, rc=rc, ungridded_index=n) + end do + + call state_getfldptr(importState, 'um_icenth', um_icenth, rc) + call state_getfldptr(importState, 'um_icesth', um_icesth, rc) + + call state_getfldptr(exportState, 'Fioi_melth', fhocn_ptr, rc) + call state_getfldptr(exportState, 'Fioi_meltw', fresh_ptr, rc) + + ! n = 0 + ! do iblk = 1, nblocks + ! this_block = get_block(blocks_ice(iblk),iblk) + ! ilo = this_block%ilo + ! ihi = this_block%ihi + ! jlo = this_block%jlo + ! jhi = this_block%jhi + ! do j = jlo, jhi + ! do i = ilo, ihi + ! n = n + 1 + + ! if (tmask(i,j,iblk)) then + + ! if (lice_nth(i,j,iblk) == 0.0) then + ! lice_nth(i,j,iblk) = um_icenth(n) + ! end if + + ! if (lice_sth(i,j,iblk) == 0.0) then + ! lice_sth(i,j,iblk) = um_icesth(n) + ! end if + + ! if (amsk_nth(i,j,iblk) > 0.0) then + ! licefw = max(0.0, um_icenth(n) - lice_nth(i,j,iblk)) * msk_nth(i,j,iblk) / amsk_nth(i,j,iblk) + ! else if (amsk_sth(i,j,iblk) > 0.0) then + ! licefw = max(0.0, um_icesth(n) - lice_sth(i,j,iblk)) * msk_sth(i,j,iblk) / amsk_sth(i,j,iblk) + ! else + ! licefw = 0.0 + ! end if + + ! licefw = licefw / cpl_dt + + ! liceht = -licefw * Lfresh + + ! ! fresh_ptr(n) = fresh_ptr(n) + licefw + ! ! fhocn_ptr(n) = fhocn_ptr(n) + liceht + + ! lice_nth(i,j,iblk) = um_icenth(n) + ! lice_sth(i,j,iblk) = um_icesth(n) + + ! end if + + ! end do + ! end do + ! end do + + end subroutine ice_export_access + + + function calculate_ki_from_Tin (Tink, salink) & + result(ki) + + use icepack_parameters, only: kice, conduct, rhoi + ! use icepack_therm_bl99, only: kimin, betak + ! + ! !USES: + ! + ! !INPUT PARAMETERS: + ! + real (kind=dbl_kind), intent(in) :: & + Tink , & ! ice layer temperature + salink ! salinity at one level + ! + ! !OUTPUT PARAMETERS + ! + real (kind=dbl_kind) :: & + ki ! ice conductivity + + + real (kind=dbl_kind), parameter :: & + betak = 0.13_dbl_kind, & ! constant in formula for k (W m-1 ppt-1) + kimin = 0.10_dbl_kind + ! + !EOP + ! + if (conduct == 'MU71') then + ! 
Maykut and Untersteiner 1971 form (with Wettlaufer 1991 constants) + ki = kice + betak*salink/min(-puny,Tink) + else + ! Pringle et al JGR 2007 'bubbly brine' + ki = (2.11_dbl_kind - 0.011_dbl_kind*Tink & + + 0.09_dbl_kind*salink/min(-puny,Tink)) & + * rhoi / 917._dbl_kind + endif + + ki = max (ki, kimin) + + end function calculate_ki_from_Tin + + subroutine get_lice_discharge_masks_or_iceberg(fname) + + ! Called at beginning of each run trunk to read in land ice discharge mask or iceberg + ! (off Antarctica and Greenland). + + implicit none + + character(len=*), intent(in) :: fname + character*80 :: myvar = 'ficeberg' + integer(kind=int_kind) :: ncid_i2o, im, k + logical :: dbug + !!! + !character(:), allocatable :: fname_trim + !!! + + allocate (lice_nth(nx_block,ny_block,max_blocks)); lice_nth(:,:,:) = 0 + allocate (lice_sth(nx_block,ny_block,max_blocks)); lice_sth(:,:,:) = 0 + allocate (msk_nth(nx_block,ny_block,max_blocks)); msk_nth(:,:,:) = 0 + allocate (msk_sth(nx_block,ny_block,max_blocks)); msk_sth(:,:,:) = 0 + allocate (amsk_nth(nx_block,ny_block,max_blocks)); amsk_nth(:,:,:) = 0 + allocate (amsk_sth(nx_block,ny_block,max_blocks)); amsk_sth(:,:,:) = 0 + + dbug = .true. + + !!! + !fname_trim = trim(fname) + !!! 
+
+    ! Fixed: the original lines used a nonstandard comma between the format
+    ! specifier and the output list (write(*,'(a,a)'),'...'), a legacy
+    ! extension rejected by standard-conforming compilers.
+    if (my_task == 0) write(*,'(a,a)') 'BBB1: opening file ',fname
+    if (my_task == 0) write(*,'(a,a)') 'BBB2: opening file ',trim(fname)
+
+    call ice_open_nc(trim(fname), ncid_i2o)
+
+    call ice_read_nc(ncid_i2o, 1, 'msk_nth', msk_nth, dbug)
+    call ice_read_nc(ncid_i2o, 1, 'msk_sth', msk_sth, dbug)
+    call ice_read_nc(ncid_i2o, 1, 'amsk_nth', amsk_nth, dbug)
+    call ice_read_nc(ncid_i2o, 1, 'amsk_sth', amsk_sth, dbug)
+
+    ! NOTE(review): ncid_i2o is never closed after the reads -- confirm
+    ! whether an ice_close_nc call is needed here to avoid leaking the
+    ! netCDF file handle.
+
+    return
+  end subroutine get_lice_discharge_masks_or_iceberg
+
+end module ice_import_export
diff --git a/cicecore/drivers/access/cmeps/ice_mesh_mod.F90 b/cicecore/drivers/access/cmeps/ice_mesh_mod.F90
new file mode 100644
index 000000000..ae0a2d070
--- /dev/null
+++ b/cicecore/drivers/access/cmeps/ice_mesh_mod.F90
@@ -0,0 +1,699 @@
+module ice_mesh_mod
+
+  use ESMF
+  use NUOPC           , only : NUOPC_CompAttributeGet
+  use ice_kinds_mod   , only : dbl_kind, int_kind, char_len, char_len_long
+  use ice_domain_size , only : nx_global, ny_global, max_blocks
+  use ice_domain      , only : nblocks, blocks_ice, distrb_info
+  use ice_blocks      , only : block, get_block, nx_block, ny_block, nblocks_x, nblocks_y
+  use ice_shr_methods , only : chkerr
+  use ice_fileunits   , only : nu_diag
+  use ice_communicate , only : my_task, master_task
+  use ice_exit        , only : abort_ice
+  use icepack_intfc   , only : icepack_query_parameters
+  use icepack_intfc   , only : icepack_warnings_flush, icepack_warnings_aborted
+
+  implicit none
+  private
+
+  public :: ice_mesh_set_distgrid
+  public :: ice_mesh_setmask_from_maskfile
+  public :: ice_mesh_create_scolumn
+  public :: ice_mesh_init_tlon_tlat_area_hm
+  public :: ice_mesh_check
+
+  ! 
Only relevant for lat-lon grids gridcell value of [1 - (land fraction)] (T-cell) + real (dbl_kind), allocatable, public :: ocn_gridcell_frac(:,:,:) + + character(*), parameter :: u_FILE_u = & + __FILE__ + +!======================================================================= +contains +!======================================================================= + + subroutine ice_mesh_set_distgrid(localpet, npes, distgrid, rc) + + ! Determine the global index space needed for the distgrid + + ! input/output variables + integer , intent(in) :: localpet + integer , intent(in) :: npes + type(ESMF_DistGrid) , intent(inout) :: distgrid + integer , intent(out) :: rc + + ! local variables + integer :: n,c,g,i,j,m ! indices + integer :: iblk, jblk ! indices + integer :: ig, jg ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + integer :: lsize ! local size of coupling array + type(block) :: this_block ! block information for current block + integer :: num_elim_global + integer :: num_elim_local + integer :: num_elim + integer :: num_ice + integer :: num_elim_gcells ! local number of eliminated gridcells + integer :: num_elim_blocks ! local number of eliminated blocks + integer :: num_total_blocks + integer :: my_elim_start, my_elim_end + integer , allocatable :: gindex(:) + integer , allocatable :: gindex_ice(:) + integer , allocatable :: gindex_elim(:) + integer :: globalID + character(len=*), parameter :: subname = ' ice_mesh_set_distgrid: ' + !---------------------------------------------------------------- + + rc = ESMF_SUCCESS + + ! number the local grid to get allocation size for gindex_ice + lsize = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + lsize = lsize + 1 + enddo + enddo + enddo + + ! 
set global index array + allocate(gindex_ice(lsize)) + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + ig = this_block%i_glob(i) + jg = this_block%j_glob(j) + gindex_ice(n) = (jg-1)*nx_global + ig + enddo + enddo + enddo + + ! Determine total number of eliminated blocks globally + globalID = 0 + num_elim_global = 0 ! number of eliminated blocks + num_total_blocks = 0 + do jblk=1,nblocks_y + do iblk=1,nblocks_x + globalID = globalID + 1 + num_total_blocks = num_total_blocks + 1 + if (distrb_info%blockLocation(globalID) == 0) then + num_elim_global = num_elim_global + 1 + end if + end do + end do + + if (num_elim_global > 0) then + + ! Distribute the eliminated blocks in a round robin fashion amoung processors + num_elim_local = num_elim_global / npes + my_elim_start = num_elim_local*localPet + min(localPet, mod(num_elim_global, npes)) + 1 + if (localPet < mod(num_elim_global, npes)) then + num_elim_local = num_elim_local + 1 + end if + my_elim_end = my_elim_start + num_elim_local - 1 + + ! Determine the number of eliminated gridcells locally + globalID = 0 + num_elim_blocks = 0 ! local number of eliminated blocks + num_elim_gcells = 0 + do jblk=1,nblocks_y + do iblk=1,nblocks_x + globalID = globalID + 1 + if (distrb_info%blockLocation(globalID) == 0) then + num_elim_blocks = num_elim_blocks + 1 + if (num_elim_blocks >= my_elim_start .and. num_elim_blocks <= my_elim_end) then + this_block = get_block(globalID, globalID) + num_elim_gcells = num_elim_gcells + & + (this_block%jhi-this_block%jlo+1) * (this_block%ihi-this_block%ilo+1) + end if + end if + end do + end do + + ! Determine the global index space of the eliminated gridcells + allocate(gindex_elim(num_elim_gcells)) + globalID = 0 + num_elim_gcells = 0 ! local number of eliminated gridcells + num_elim_blocks = 0 ! 
local number of eliminated blocks + do jblk=1,nblocks_y + do iblk=1,nblocks_x + globalID = globalID + 1 + if (distrb_info%blockLocation(globalID) == 0) then + this_block = get_block(globalID, globalID) + num_elim_blocks = num_elim_blocks + 1 + if (num_elim_blocks >= my_elim_start .and. num_elim_blocks <= my_elim_end) then + do j=this_block%jlo,this_block%jhi + do i=this_block%ilo,this_block%ihi + num_elim_gcells = num_elim_gcells + 1 + ig = this_block%i_glob(i) + jg = this_block%j_glob(j) + gindex_elim(num_elim_gcells) = (jg-1)*nx_global + ig + end do + end do + end if + end if + end do + end do + + ! create a global index that includes both active and eliminated gridcells + num_ice = size(gindex_ice) + num_elim = size(gindex_elim) + allocate(gindex(num_elim + num_ice)) + do n = 1,num_ice + gindex(n) = gindex_ice(n) + end do + do n = num_ice+1,num_ice+num_elim + gindex(n) = gindex_elim(n-num_ice) + end do + + deallocate(gindex_elim) + + else + + ! No eliminated land blocks + num_ice = size(gindex_ice) + allocate(gindex(num_ice)) + do n = 1,num_ice + gindex(n) = gindex_ice(n) + end do + + end if + + !--------------------------------------------------------------------------- + ! Create distGrid from global index array + !--------------------------------------------------------------------------- + + DistGrid = ESMF_DistGridCreate(arbSeqIndexList=gindex, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + deallocate(gindex_ice) + deallocate(gindex) + + end subroutine ice_mesh_set_distgrid + + !======================================================================= + subroutine ice_mesh_setmask_from_maskfile(ice_maskfile, ice_mesh, rc) + + use ice_grid , only : tlon, tlat, hm, tarea + use ice_constants , only : c0, c1, c2, p25, radius + + ! input/output variables + character(len=*) , intent(in) :: ice_maskfile + type(ESMF_Mesh) , intent(inout) :: ice_mesh + integer , intent(out) :: rc + + ! 
local variables + integer :: i, j, n + integer (int_kind) :: ni, nj + integer :: iblk, jblk ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + type (block) :: this_block ! block information for current block + real(dbl_kind) , pointer :: ice_frac(:) + type(ESMF_Field) :: areaField + type(ESMF_Mesh) :: mesh_mask + type(ESMF_Field) :: field_mask + type(ESMF_Field) :: field_dst + type(ESMF_RouteHandle) :: rhandle + integer :: srcMaskValue = 0 + integer :: dstMaskValue = -987987 ! spval for RH mask values + integer :: srcTermProcessing_Value = 0 + logical :: checkflag = .false. + integer, pointer :: ice_mask(:) + real(dbl_kind) , pointer :: mask_src(:) ! on mesh created from ice_maskfile + real(dbl_kind) , pointer :: dataptr1d(:) + type(ESMF_DistGrid) :: distgrid_mask + type(ESMF_Array) :: elemMaskArray + integer :: lsize_mask, lsize_dst + integer :: spatialDim + real(dbl_kind) :: fminval = 0.001_dbl_kind ! TODO: make this a share constant + real(dbl_kind) :: fmaxval = 1._dbl_kind + real(dbl_kind) :: lfrac + real(dbl_kind) , pointer :: mesh_areas(:) + integer :: numownedelements + real(dbl_kind) , pointer :: ownedElemCoords(:) + real(dbl_kind) :: pi + real(dbl_kind) :: c180 + real(dbl_kind) :: puny + real(dbl_kind) :: deg_to_rad + character(len=*), parameter :: subname = ' ice_mesh_setmask_from_maskfile' + !--------------------------------------------------- + + rc = ESMF_SUCCESS + + mesh_mask = ESMF_MeshCreate(trim(ice_maskfile), fileformat=ESMF_FILEFORMAT_ESMFMESH, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call ESMF_MeshGet(ice_mesh, spatialDim=spatialDim, numOwnedElements=lsize_dst, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + allocate(ice_mask(lsize_dst)) + allocate(ice_frac(lsize_dst)) + + ! 
create fields on source and destination meshes + field_mask = ESMF_FieldCreate(mesh_mask, ESMF_TYPEKIND_R8, meshloc=ESMF_MESHLOC_ELEMENT, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + field_dst = ESMF_FieldCreate(ice_mesh, ESMF_TYPEKIND_R8, meshloc=ESMF_MESHLOC_ELEMENT, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! create route handle to map source mask (assume ocean) to destination mesh (assume atm/lnd) + call ESMF_FieldRegridStore(field_mask, field_dst, routehandle=rhandle, & + srcMaskValues=(/srcMaskValue/), dstMaskValues=(/dstMaskValue/), & + regridmethod=ESMF_REGRIDMETHOD_CONSERVE, normType=ESMF_NORMTYPE_DSTAREA, & + srcTermProcessing=srcTermProcessing_Value, & + ignoreDegenerate=.true., unmappedaction=ESMF_UNMAPPEDACTION_IGNORE, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + ! fill in values for field_mask with mask on source mesh + call ESMF_MeshGet(mesh_mask, elementdistGrid=distgrid_mask, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_DistGridGet(distgrid_mask, localDe=0, elementCount=lsize_mask, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + allocate(mask_src(lsize_mask)) + elemMaskArray = ESMF_ArrayCreate(distgrid_mask, mask_src, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! The following call fills in the values of mask_src + call ESMF_MeshGet(mesh_mask, elemMaskArray=elemMaskArray, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + ! The following call fills in the values of field_mask + call ESMF_FieldGet(field_mask, farrayptr=dataptr1d, rc=rc) + dataptr1d(:) = mask_src(:) + + ! 
map source mask to destination mesh - to obtain destination mask and frac + call ESMF_FieldRegrid(field_mask, field_dst, routehandle=rhandle, & + termorderflag=ESMF_TERMORDER_SRCSEQ, checkflag=checkflag, zeroregion=ESMF_REGION_TOTAL, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + call ESMF_MeshGet(ice_mesh, spatialDim=spatialDim, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldGet(field_dst, farrayptr=dataptr1d, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + ! now determine ice_mask and ice_frac + do n = 1,size(dataptr1d) + lfrac = c1 - dataptr1d(n) + if (lfrac > fmaxval) lfrac = c1 + if (lfrac < fminval) lfrac = c0 + ice_frac(n) = c1 - lfrac + if (ice_frac(n) == c0) then + ice_mask(n) = 0 + else + ice_mask(n) = 1 + end if + enddo + + ! reset the model mesh mask + call ESMF_MeshSet(ice_mesh, elementMask=ice_mask, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! deallocate memory + call ESMF_RouteHandleDestroy(rhandle, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldDestroy(field_mask, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldDestroy(field_dst, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + deallocate(mask_src) + + ! Allocate module variable ocn_gridcell_frac + allocate(ocn_gridcell_frac(nx_block,ny_block,max_blocks)) + + ! Obtain mesh areas in radians^2 + areaField = ESMF_FieldCreate(ice_mesh, ESMF_TYPEKIND_R8, meshloc=ESMF_MESHLOC_ELEMENT, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldRegridGetArea(areaField, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldGet(areaField, farrayPtr=mesh_areas, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 
Obtain mesh lons and lats in degrees + call ESMF_MeshGet(ice_mesh, spatialDim=spatialDim, numOwnedElements=numOwnedElements, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + allocate(ownedElemCoords(spatialDim*numownedelements)) + call ESMF_MeshGet(ice_mesh, ownedElemCoords=ownedElemCoords) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_MeshGet(ice_mesh, ownedElemCoords=ownedElemCoords, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! Get required constants + call icepack_query_parameters(pi_out=pi, c180_out=c180) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + deg_to_rad = pi/c180 + + ! Set tlon, tlat, tarea, hm + ! Convert mesh areas from radians^2 to m^2 (tarea is in m^2) + ! Convert lons and lats from degrees to radians + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n + 1 + tlon(i,j,iblk) = ownedElemCoords(2*n-1) * deg_to_rad + tlat(i,j,iblk) = ownedElemCoords(2*n) * deg_to_rad + tarea(i,j,iblk) = mesh_areas(n) * (radius*radius) + hm(i,j,iblk) = real(ice_mask(n),kind=dbl_kind) + ocn_gridcell_frac(i,j,iblk) = ice_frac(n) + enddo + enddo + enddo + + ! Dealocate memory + deallocate(ownedElemCoords) + call ESMF_FieldDestroy(areaField) + + end subroutine ice_mesh_setmask_from_maskfile + + !=============================================================================== + subroutine ice_mesh_create_scolumn(scol_lon, scol_lat, ice_mesh, rc) + + use ice_constants , only : c0, c1 + use ice_scam , only : scmlat, scmlon, scol_area, scol_mask, scol_frac, scol_nj + use netcdf + + ! Create the model mesh from the domain file - for either single column mode + ! or for a regional grid + + ! 
input/output variables + real(dbl_kind) , intent(in) :: scol_lon + real(dbl_kind) , intent(in) :: scol_lat + type(ESMF_Mesh) , intent(inout) :: ice_mesh + integer , intent(out) :: rc + + ! local variables + type(ESMF_Grid) :: lgrid + integer :: maxIndex(2) + real(dbl_kind) :: mincornerCoord(2) + real(dbl_kind) :: maxcornerCoord(2) + integer :: i, j,iblk, jblk ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + type (block) :: this_block ! block information for current block + character(len=*), parameter :: subname = ' ice_mesh_create_scolumn' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + ! Use center and come up with arbitrary area delta lon and lat = .1 degree + maxIndex(1) = 1 ! number of lons + maxIndex(2) = 1 ! number of lats + mincornerCoord(1) = scol_lon - .1_dbl_kind ! min lon + mincornerCoord(2) = scol_lat - .1_dbl_kind ! min lat + maxcornerCoord(1) = scol_lon + .1_dbl_kind ! max lon + maxcornerCoord(2) = scol_lat + .1_dbl_kind ! max lat + + ! create the ESMF grid + lgrid = ESMF_GridCreateNoPeriDimUfrm (maxindex=maxindex, & + mincornercoord=mincornercoord, maxcornercoord= maxcornercoord, & + staggerloclist=(/ESMF_STAGGERLOC_CENTER, ESMF_STAGGERLOC_CORNER/), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! create the mesh from the lgrid + ice_mesh = ESMF_MeshCreate(lgrid, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! 
Allocate module variable ocn_gridcell_frac + allocate(ocn_gridcell_frac(2,2,1)) + ocn_gridcell_frac(:,:,:) = scol_frac + + end subroutine ice_mesh_create_scolumn + + !=============================================================================== + subroutine ice_mesh_init_tlon_tlat_area_hm() + + use ice_grid , only : tlon, tlat, hm, tarea, ULON, ULAT, HTN, HTE, ANGLE, ANGLET + use ice_grid , only : uarea, uarear, tarear!, tinyarea + use ice_grid , only : dxT, dyT, dxU, dyU + use ice_grid , only : makemask + use ice_boundary , only : ice_HaloUpdate + use ice_domain , only : blocks_ice, nblocks, halo_info, distrb_info + use ice_constants , only : c0, c1, p25 + use ice_constants , only : field_loc_center, field_type_scalar + use ice_scam , only : scmlat, scmlon, scol_area, scol_mask, scol_frac, scol_nj, single_column + + ! local variables + integer :: i,j,n + integer :: iblk, jblk ! indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + type (block) :: this_block ! block information for current block + real(dbl_kind) :: puny + real(dbl_kind) :: pi + character(len=*), parameter :: subname = ' ice_mesh_init_tlon_tlat_area_hm' + ! ---------------------------------------------- + + ! Get required constants + call icepack_query_parameters(pi_out=pi, puny_out=puny) + if (icepack_warnings_aborted()) call abort_ice(error_message=subname, & + file=__FILE__, line=__LINE__) + + ! Check for consistency + if (single_column) then + if ((nx_global /= 1).or. 
(ny_global /= 1)) then + write(nu_diag,*) 'nx_global = ',nx_global + write(nu_diag,*) 'ny_global = ',ny_global + write(nu_diag,*) 'Because you have selected the column model flag' + write(nu_diag,*) 'then require nx_global=ny_global=1 in file ice_domain_size.F' + call abort_ice(' ice_mesh_init_tlon_tlat_area_hm: nx_global and ny_global need to be 1 for single column') + else + write(nu_diag,'(a,f10.5)')' single column mode lon/lat does contain ocn with ocn fraction ',scol_frac + end if + + TLON = scmlon + TLAT = scmlat + tarea = scol_area + hm = scol_mask + ULAT = TLAT + pi/scol_nj + end if + + call ice_HaloUpdate (TLON , halo_info, field_loc_center, field_type_scalar, fillValue=c1) + call ice_HaloUpdate (TLAT , halo_info, field_loc_center, field_type_scalar, fillValue=c1) + call ice_HaloUpdate (tarea , halo_info, field_loc_center, field_type_scalar, fillValue=c1) + call ice_HaloUpdate (hm , halo_info, field_loc_center, field_type_scalar, fillValue=c1) + + !----------------------------------------------------------------- + ! CALCULATE various geometric 2d arrays + ! The U grid (velocity) is not used when run with sequential CAM + ! because we only use thermodynamic sea ice. However, ULAT is used + ! in the default initialization of CICE so we calculate it here as + ! a "dummy" so that CICE will initialize with ice. If a no ice + ! initialization is OK (or desired) this can be commented out and + ! ULAT will remain 0 as specified above. ULAT is located at the + ! NE corner of the grid cell, TLAT at the center, so here ULAT is + ! hacked by adding half the latitudinal spacing (in radians) to TLAT. 
+ !----------------------------------------------------------------- + + ANGLET(:,:,:) = c0 + + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + + do j = jlo, jhi + do i = ilo, ihi + + if (ny_global == 1) then + uarea(i,j,iblk) = tarea(i,j, iblk) + else + uarea(i,j,iblk) = p25* & + (tarea(i,j, iblk) + tarea(i+1,j, iblk) & + + tarea(i,j+1,iblk) + tarea(i+1,j+1,iblk)) + endif + tarear(i,j,iblk) = c1/tarea(i,j,iblk) + uarear(i,j,iblk) = c1/uarea(i,j,iblk) +! tinyarea(i,j,iblk) = puny*tarea(i,j,iblk) + + if (.not. single_column) then + if (ny_global == 1) then + ULAT(i,j,iblk) = TLAT(i,j,iblk) + else + ULAT(i,j,iblk) = TLAT(i,j,iblk)+(pi/ny_global) + endif + endif + ULON (i,j,iblk) = c0 + ANGLE (i,j,iblk) = c0 + + HTN (i,j,iblk) = 1.e36_dbl_kind + HTE (i,j,iblk) = 1.e36_dbl_kind + dxT (i,j,iblk) = 1.e36_dbl_kind + dyT (i,j,iblk) = 1.e36_dbl_kind + dxU (i,j,iblk) = 1.e36_dbl_kind + dyU (i,j,iblk) = 1.e36_dbl_kind + enddo + enddo + enddo + + call ice_HaloUpdate (ULAT, halo_info, field_loc_center, field_type_scalar, fillValue=c1) + + ! Set the boundary values for the T cell land mask (hm) and + ! make the logical land masks for T and U cells (tmask, umask). + ! Also create hemisphere masks (mask-n northern, mask-s southern) + call makemask() + + end subroutine ice_mesh_init_tlon_tlat_area_hm + + !=============================================================================== + subroutine ice_mesh_check(gcomp, ice_mesh, rc) + + ! Check CICE mesh + + use ice_constants, only : c1,c0,c180,c360 + use ice_grid , only : tlon, tlat, hm + + ! input/output parameters + type(ESMF_GridComp) , intent(inout) :: gcomp + type(ESMF_Mesh) , intent(inout) :: ice_mesh + integer , intent(out) :: rc + + ! local variables + type(ESMF_DistGrid) :: distGrid + type(ESMF_Array) :: elemMaskArray + integer :: n,i,j ! indices + integer :: iblk, jblk ! 
indices + integer :: ilo, ihi, jlo, jhi ! beginning and end of physical domain + type(block) :: this_block ! block information for current block + integer :: spatialDim + integer :: numOwnedElements + real(dbl_kind), pointer :: ownedElemCoords(:) + real(dbl_kind), pointer :: lat(:), latMesh(:) + real(dbl_kind), pointer :: lon(:), lonMesh(:) + integer , pointer :: model_mask(:) + real(dbl_kind) :: diff_lon + real(dbl_kind) :: diff_lat + real(dbl_kind) :: rad_to_deg + real(dbl_kind) :: eps_imesh + logical :: isPresent, isSet + logical :: mask_error + integer :: mask_internal + integer :: mask_file + character(len=char_len_long) :: cvalue + character(len=char_len_long) :: logmsg + character(len=*), parameter :: subname = ' ice_mesh_check: ' + !--------------------------------------------------- + + ! Determine allowed mesh error + call NUOPC_CompAttributeGet(gcomp, name='eps_imesh', value=cvalue, & + isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) eps_imesh + else + eps_imesh = 1.0e-1_dbl_kind + end if + write(logmsg,*) eps_imesh + call ESMF_LogWrite(trim(subname)//' eps_imesh = '//trim(logmsg), ESMF_LOGMSG_INFO) + + ! error check differences between internally generated lons and those read in + call ESMF_MeshGet(ice_mesh, spatialDim=spatialDim, numOwnedElements=numOwnedElements, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + allocate(ownedElemCoords(spatialDim*numownedelements)) + allocate(lonmesh(numOwnedElements)) + allocate(latmesh(numOwnedElements)) + call ESMF_MeshGet(ice_mesh, ownedElemCoords=ownedElemCoords, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + do n = 1,numOwnedElements + lonMesh(n) = ownedElemCoords(2*n-1) + latMesh(n) = ownedElemCoords(2*n) + end do + + ! 
obtain internally generated cice lats and lons for error checks
+    call icepack_query_parameters(rad_to_deg_out=rad_to_deg)
+    allocate(lon(numOwnedElements))
+    allocate(lat(numOwnedElements))
+    lon(:) = c0
+    lat(:) = c0
+    n = 0
+    do iblk = 1, nblocks
+       this_block = get_block(blocks_ice(iblk),iblk)
+       ilo = this_block%ilo
+       ihi = this_block%ihi
+       jlo = this_block%jlo
+       jhi = this_block%jhi
+       do j = jlo, jhi
+          do i = ilo, ihi
+             n = n + 1
+             lon(n) = tlon(i,j,iblk)*rad_to_deg
+             lat(n) = tlat(i,j,iblk)*rad_to_deg
+
+             ! error check differences between internally generated lons and those read in
+             diff_lon = mod(abs(lonMesh(n) - lon(n)),c360)
+             if (diff_lon > c180) then
+                diff_lon = diff_lon - c360
+             endif
+             if (abs(diff_lon) > eps_imesh ) then
+                write(6,100)n,lonMesh(n),lon(n), diff_lon
+                call abort_ice(error_message=subname, file=__FILE__, line=__LINE__)
+             end if
+             diff_lat = abs(latMesh(n) - lat(n))
+             if (diff_lat > eps_imesh) then
+                write(6,101)n,latMesh(n),lat(n), diff_lat
+                call abort_ice(error_message=subname, file=__FILE__, line=__LINE__)
+             end if
+          enddo
+       enddo
+    enddo
+
+    ! obtain internally generated ice mask for error checks
+    allocate(model_mask(numOwnedElements))
+    call ESMF_MeshGet(ice_mesh, elementdistGrid=distGrid, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    elemMaskArray = ESMF_ArrayCreate(distGrid, model_mask, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+    call ESMF_MeshGet(ice_mesh, elemMaskArray=elemMaskArray, rc=rc)
+    if (ChkErr(rc,__LINE__,u_FILE_u)) return
+
+    mask_error = .false.
+    n=0
+    do iblk = 1, nblocks
+       this_block = get_block(blocks_ice(iblk),iblk)
+       ilo = this_block%ilo
+       ihi = this_block%ihi
+       jlo = this_block%jlo
+       jhi = this_block%jhi
+       do j = jlo, jhi
+          do i = ilo, ihi
+             n = n + 1
+             mask_internal = nint(hm(i,j,iblk),kind=int_kind)
+             mask_file = model_mask(n)
+             if (mask_internal /= mask_file) then
+                write(6,102) n,mask_internal,mask_file
+                mask_error = .true. 
+ end if + enddo !i + enddo !j + enddo !iblk + if (mask_error) then + call abort_ice(error_message=subname, file=__FILE__, line=__LINE__) + end if + + call ESMF_ArrayDestroy(elemMaskArray, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + +100 format('ERROR: CICE n, mesh_lon , lon, diff_lon = ',i8,2(f21.13,3x),d21.5) +101 format('ERROR: CICE n, mesh_lat , lat, diff_lat = ',i8,2(f21.13,3x),d21.5) +102 format('ERROR: CICE n, mesh_internal, mask_file = ',i8,2(i2,2x)) + + ! deallocate memory + deallocate(ownedElemCoords) + deallocate(lon, lonMesh) + deallocate(lat, latMesh) + + end subroutine ice_mesh_check + +end module ice_mesh_mod diff --git a/cicecore/drivers/access/cmeps/ice_prescribed_mod.F90 b/cicecore/drivers/access/cmeps/ice_prescribed_mod.F90 new file mode 100644 index 000000000..7113fa915 --- /dev/null +++ b/cicecore/drivers/access/cmeps/ice_prescribed_mod.F90 @@ -0,0 +1,495 @@ +module ice_prescribed_mod + + ! !DESCRIPTION: + ! The prescribed ice model reads in ice concentration data from a netCDF + ! file. Ice thickness, temperature, the ice temperature profile are + ! prescribed. Air/ice fluxes are computed to get surface temperature, + ! Ice/ocean fluxes are set to zero, and ice dynamics are not calculated. + ! Regridding and data cycling capabilities are included. + + ! Note (8/8/2024): This code is dependent on CDEPS (to input ice data). + ! In the interests of cleaner code, drivers/nuopc/cmeps now is too. + ! If problematic, please see https://github.com/CICE-Consortium/CICE/pull/964 for alternatives. 
+ + use ESMF, only : ESMF_GridComp, ESMF_Clock, ESMF_Mesh, ESMF_SUCCESS, ESMF_FAILURE + use ESMF, only : ESMF_LogFoundError, ESMF_LOGERR_PASSTHRU, ESMF_Finalize, ESMF_END_ABORT + + use ice_kinds_mod + use shr_nl_mod , only : shr_nl_find_group_name + use dshr_strdata_mod , only : shr_strdata_type, shr_strdata_print + use dshr_strdata_mod , only : shr_strdata_init_from_inline, shr_strdata_advance + use dshr_methods_mod , only : dshr_fldbun_getfldptr + use dshr_mod , only : dshr_pio_init + use ice_broadcast + use ice_communicate , only : my_task, master_task, MPI_COMM_ICE + use ice_fileunits + use ice_exit , only : abort_ice + use ice_domain_size , only : nx_global, ny_global, ncat, nilyr, nslyr, max_blocks + use ice_constants + use ice_blocks , only : nx_block, ny_block, block, get_block + use ice_domain , only : nblocks, distrb_info, blocks_ice + use ice_grid , only : TLAT, TLON, hm, tmask, tarea, grid_type, ocn_gridcell_frac + use ice_calendar , only : idate, msec, calendar_type + use ice_arrays_column , only : hin_max + use ice_read_write + use ice_exit , only: abort_ice + use icepack_intfc , only: icepack_warnings_flush, icepack_warnings_aborted + use icepack_intfc , only: icepack_query_tracer_indices, icepack_query_tracer_sizes + use icepack_intfc , only: icepack_query_parameters + use ice_shr_methods , only: chkerr + + implicit none + private ! except + + ! public member functions: + public :: ice_prescribed_init ! initialize input data stream + public :: ice_prescribed_run ! get time slices and time interp + public :: ice_prescribed_phys ! set prescribed ice state and fluxes + + ! public data members: + logical(kind=log_kind), public :: prescribed_ice ! true if prescribed ice + + ! private data members: + type(shr_strdata_type) :: sdat ! prescribed data stream + real(kind=dbl_kind),allocatable :: ice_cov(:,:,:) ! 
ice cover
+
+  character(*), parameter :: u_FILE_u = &
+       __FILE__
+
+!=======================================================================
+contains
+!===============================================================================
+
+  subroutine ice_prescribed_init(gcomp, clock, mesh, rc)
+
+    ! Prescribed ice initialization
+
+#ifndef SERIAL_REMOVE_MPI
+    !TODO: add 1d character array to cicecore/cicedyn/infrastructure/comm/mpi/ice_broadcast.F90
+    use mpi   ! MPI Fortran module
+#endif
+
+    ! input/output parameters
+    type(ESMF_GridComp)  , intent(in)  :: gcomp
+    type(ESMF_Clock)     , intent(in)  :: clock
+    type(ESMF_Mesh)      , intent(in)  :: mesh
+    integer              , intent(out) :: rc
+
+    ! local parameters
+    integer(kind=int_kind),parameter :: nFilesMaximum = 400 ! max number of files
+    integer(kind=int_kind)       :: n, nFile, ierr
+    integer(kind=int_kind)       :: nml_error ! namelist i/o error flag
+    character(len=char_len_long) :: stream_meshFile
+    character(len=char_len_long) :: stream_dataFiles(nFilesMaximum)
+    character(len=char_len_long) :: stream_varname
+    character(len=char_len_long) :: stream_mapalgo
+    character(len=char_len_long) :: stream_taxmode
+    integer(kind=int_kind)       :: stream_yearfirst ! first year in stream to use
+    integer(kind=int_kind)       :: stream_yearlast  ! last year in stream to use
+    integer(kind=int_kind)       :: stream_yearalign ! align stream_year_first
+    integer(kind=int_kind)       :: nu_nml
+    logical                      :: prescribed_ice_mode
+    character(*),parameter :: subName = "('ice_prescribed_init')"
+    character(*),parameter :: F00 = "('(ice_prescribed_init) ',4a)"
+    character(*),parameter :: F01 = "('(ice_prescribed_init) ',a,i0)"
+    character(*),parameter :: F02 = "('(ice_prescribed_init) ',2a,i0)"
+    !--------------------------------
+
+    namelist /ice_prescribed_nml/  &
+         prescribed_ice_mode,      &
+         stream_meshfile,          &
+         stream_varname ,          &
+         stream_datafiles,         &
+         stream_mapalgo,           &
+         stream_taxmode,           &
+         stream_yearalign,         &
+         stream_yearfirst ,        &
+         stream_yearlast
+
+    rc = ESMF_SUCCESS
+
+    ! 
default values for namelist + prescribed_ice_mode = .false. ! if true, prescribe ice + stream_yearfirst = 1 ! first year in pice stream to use + stream_yearlast = 1 ! last year in pice stream to use + stream_yearalign = 1 ! align stream_year_first with this model year + stream_varname = 'ice_cov' + stream_meshfile = ' ' + stream_datafiles(:) = ' ' + stream_mapalgo = 'bilinear' + stream_taxmode = 'cycle' + + ! read namelist on master task + if (my_task == master_task) then + open (newunit=nu_nml, file=nml_filename, status='old',iostat=nml_error) + call shr_nl_find_group_name(nu_nml, 'ice_prescribed_nml', status=nml_error) + if (nml_error /= 0) then + write(nu_diag,F00) "ERROR: problem on read of ice_prescribed_nml namelist" + call abort_ice(subName) + endif + read(nu_nml, ice_prescribed_nml, iostat=nml_error) + close(nu_nml) + end if + + ! broadcast namelist input + call broadcast_scalar(prescribed_ice_mode, master_task) + + ! set module variable 'prescribed_ice' + prescribed_ice = prescribed_ice_mode + + ! -------------------------------------------------- + ! only do the following if prescribed ice mode is on + ! -------------------------------------------------- + + if (prescribed_ice_mode) then + + call broadcast_scalar(stream_yearalign , master_task) + call broadcast_scalar(stream_yearfirst , master_task) + call broadcast_scalar(stream_yearlast , master_task) + call broadcast_scalar(stream_meshfile , master_task) + call broadcast_scalar(stream_mapalgo , master_task) + call broadcast_scalar(stream_taxmode , master_task) + call broadcast_scalar(stream_varname , master_task) + call mpi_bcast(stream_dataFiles, len(stream_datafiles(1))*NFilesMaximum, MPI_CHARACTER, 0, MPI_COMM_ICE, ierr) + + nFile = 0 + do n = 1,nFilesMaximum + if (stream_datafiles(n) /= ' ') nFile = nFile + 1 + end do + + if (my_task == master_task) then + write(nu_diag,*) ' ' + write(nu_diag,F00) 'This is the prescribed ice coverage option.' 
+ write(nu_diag,F01) ' stream_yearfirst = ',stream_yearfirst + write(nu_diag,F01) ' stream_yearlast = ',stream_yearlast + write(nu_diag,F01) ' stream_yearalign = ',stream_yearalign + write(nu_diag,F00) ' stream_meshfile = ',trim(stream_meshfile) + write(nu_diag,F00) ' stream_varname = ',trim(stream_varname) + write(nu_diag,F00) ' stream_mapalgo = ',trim(stream_mapalgo) + write(nu_diag,F00) ' stream_taxmode = ',trim(stream_taxmode) + do n = 1,nFile + write(nu_diag,F00) ' stream_datafiles = ',trim(stream_dataFiles(n)) + end do + write(nu_diag,*) ' ' + endif + +#ifndef CESMCOUPLED + !CESM does this elsewhere + call dshr_pio_init(gcomp, sdat, nu_diag, rc) +#endif + + ! initialize sdat + call shr_strdata_init_from_inline(sdat, & + my_task = my_task, & + logunit = nu_diag, & + compname = 'ICE', & + model_clock = clock, & + model_mesh = mesh, & + stream_meshfile = stream_meshfile, & + stream_lev_dimname = 'null', & + stream_mapalgo = trim(stream_mapalgo), & + stream_filenames = stream_datafiles(1:nfile), & + stream_fldlistFile = (/trim(stream_varname)/), & + stream_fldListModel = (/trim(stream_varname)/), & + stream_yearFirst = stream_yearFirst, & + stream_yearLast = stream_yearLast, & + stream_yearAlign = stream_yearAlign , & + stream_offset = 0, & + stream_taxmode = trim(stream_taxmode), & + stream_dtlimit = 1.5_dbl_kind, & + stream_tintalgo = 'linear', & + rc = rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + ! print out sdat info + if (my_task == master_task) then + call shr_strdata_print(sdat,'ice coverage prescribed data') + endif + + ! For one ice category, set hin_max(1) to something big + if (ncat == 1) then + hin_max(1) = 999._dbl_kind + end if + +#ifndef CESMCOUPLED + ! If need initial cice values for coupling + call ice_prescribed_run(idate, msec) +#endif + + end if ! 
end of if prescribed ice mode
+
+  end subroutine ice_prescribed_init
+
+  !=======================================================================
+  subroutine ice_prescribed_run(mDateIn, secIn)
+
+    ! Finds two time slices bounding current model time, remaps if necessary
+    ! Interpolate to new ice coverage
+
+    ! input/output parameters:
+    integer(kind=int_kind), intent(in) :: mDateIn ! Current model date (yyyymmdd)
+    integer(kind=int_kind), intent(in) :: secIn ! Elapsed seconds on model date
+
+    ! local variables
+    integer(kind=int_kind) :: i,j,n,iblk ! loop indices and counter
+    integer(kind=int_kind) :: ilo,ihi,jlo,jhi ! beginning and end of physical domain
+    type (block) :: this_block
+    real(kind=dbl_kind) :: aice_max ! maximum ice concentration
+    real(kind=dbl_kind), pointer :: dataptr(:)
+    integer :: rc ! ESMF return code
+    character(*),parameter :: subName = "('ice_prescribed_run')"
+    character(*),parameter :: F00 = "('(ice_prescribed_run) ',a,2g20.13)"
+    logical :: first_time = .true.
+    !------------------------------------------------------------------------
+
+    rc = ESMF_SUCCESS
+
+    ! Advance sdat stream
+    call shr_strdata_advance(sdat, ymd=mDateIn, tod=SecIn, logunit=nu_diag, istr='cice_pice', rc=rc)
+    if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=__FILE__)) then
+       call ESMF_Finalize(endflag=ESMF_END_ABORT)
+    end if
+
+    ! Get pointer for stream data that is time and spatially interpolate to model time and grid
+    call dshr_fldbun_getFldPtr(sdat%pstrm(1)%fldbun_model, 'ice_cov', dataptr, rc=rc)
+    if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=__FILE__)) then
+       call ESMF_Finalize(endflag=ESMF_END_ABORT)
+    end if
+
+    ! Fill in module ice_cov array
+    if (.not. allocated(ice_cov)) then
+       allocate(ice_cov(nx_block,ny_block,max_blocks))
+    end if
+    ice_cov(:,:,:) = c0 ! 
This initializes ghost cells as well + n = 0 + do iblk = 1, nblocks + this_block = get_block(blocks_ice(iblk),iblk) + ilo = this_block%ilo + ihi = this_block%ihi + jlo = this_block%jlo + jhi = this_block%jhi + do j = jlo, jhi + do i = ilo, ihi + n = n+1 + ice_cov(i,j,iblk) = dataptr(n) + end do + end do + end do + + ! Check to see that ice concentration is in fraction, not percent + if (first_time) then + aice_max = maxval(ice_cov) + if (aice_max > c10) then + write(nu_diag,F00) "ERROR: Ice conc data must be in fraction, aice_max= ", aice_max + rc = ESMF_FAILURE + if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=__FILE__)) then + call ESMF_Finalize(endflag=ESMF_END_ABORT) + end if + end if + first_time = .false. + end if + + ! Set prescribed ice state and fluxes + call ice_prescribed_phys() + + end subroutine ice_prescribed_run + + !======================================================================= + subroutine ice_prescribed_phys() + + ! Set prescribed ice state using input ice concentration; + ! set surface ice temperature to atmospheric value; use + ! linear temperature gradient in ice to ocean temperature. + + use ice_flux + use ice_state + use icepack_intfc, only : icepack_aggregate + use ice_dyn_evp + + !----- Local ------ + integer(kind=int_kind) :: layer ! level index + integer(kind=int_kind) :: nc ! ice category index + integer(kind=int_kind) :: i,j,k ! longitude, latitude and level indices + integer(kind=int_kind) :: iblk + integer(kind=int_kind) :: nt_Tsfc + integer(kind=int_kind) :: nt_sice + integer(kind=int_kind) :: nt_qice + integer(kind=int_kind) :: nt_qsno + integer(kind=int_kind) :: ntrcr + real(kind=dbl_kind) :: slope ! diff in underlying ocean tmp and ice surface tmp + real(kind=dbl_kind) :: Ti ! ice level temperature + real(kind=dbl_kind) :: Tmlt ! ice level melt temperature + real(kind=dbl_kind) :: qin_save(nilyr) + real(kind=dbl_kind) :: qsn_save(nslyr) + real(kind=dbl_kind) :: hi ! 
ice prescribed (hemispheric) ice thickness
+    real(kind=dbl_kind) :: hs ! snow thickness
+    real(kind=dbl_kind) :: zn ! normalized ice thickness
+    real(kind=dbl_kind) :: salin(nilyr) ! salinity (ppt)
+    real(kind=dbl_kind) :: rad_to_deg, pi, puny
+    real(kind=dbl_kind) :: rhoi
+    real(kind=dbl_kind) :: rhos
+    real(kind=dbl_kind) :: cp_ice
+    real(kind=dbl_kind) :: cp_ocn
+    real(kind=dbl_kind) :: lfresh
+    real(kind=dbl_kind) :: depressT
+    real(kind=dbl_kind), parameter :: nsal = 0.407_dbl_kind
+    real(kind=dbl_kind), parameter :: msal = 0.573_dbl_kind
+    real(kind=dbl_kind), parameter :: saltmax = 3.2_dbl_kind ! max salinity at ice base (ppm)
+    character(*),parameter :: subName = '(ice_prescribed_phys)'
+    !-----------------------------------------------------------------
+
+    call icepack_query_tracer_indices(nt_Tsfc_out=nt_Tsfc, nt_sice_out=nt_sice, &
+         nt_qice_out=nt_qice, nt_qsno_out=nt_qsno)
+    call icepack_query_tracer_sizes(ntrcr_out=ntrcr)
+    call icepack_query_parameters(rad_to_deg_out=rad_to_deg, pi_out=pi, &
+         puny_out=puny, rhoi_out=rhoi, rhos_out=rhos, cp_ice_out=cp_ice, cp_ocn_out=cp_ocn, &
+         lfresh_out=lfresh, depressT_out=depressT)
+    call icepack_warnings_flush(nu_diag)
+    if (icepack_warnings_aborted()) call abort_ice(error_message=subname, &
+         file=__FILE__, line=__LINE__)
+
+    !-----------------------------------------------------------------
+    ! Set ice cover over land to zero, not sure if this should
+    ! be done earlier, before time/spatial interp??????
+    !-----------------------------------------------------------------
+    do iblk = 1,nblocks
+       do j = 1,ny_block
+          do i = 1,nx_block
+             if (tmask(i,j,iblk)) then
+                if (ice_cov(i,j,iblk) .lt. eps04) ice_cov(i,j,iblk) = c0
+                if (ice_cov(i,j,iblk) .gt. c1) ice_cov(i,j,iblk) = c1
+             else
+                ice_cov(i,j,iblk) = c0
+             end if
+          enddo
+       enddo
+    enddo
+
+    do iblk = 1,nblocks
+       do j = 1,ny_block
+          do i = 1,nx_block
+
+             if (tmask(i,j,iblk)) then ! 
Over ocean points + + !-------------------------------------------------------------- + ! Place ice where ice concentration > .0001 + !-------------------------------------------------------------- + + if (ice_cov(i,j,iblk) >= eps04) then + + hi = 0.0_dbl_kind + !---------------------------------------------------------- + ! Set ice thickness in each hemisphere + !---------------------------------------------------------- + if(TLAT(i,j,iblk)*rad_to_deg > 40.0_dbl_kind) then + hi = 2.0_dbl_kind + else if(TLAT(i,j,iblk)*rad_to_deg < -40.0_dbl_kind) then + hi = 1.0_dbl_kind + end if + + !---------------------------------------------------------- + ! All ice in appropriate thickness category + !---------------------------------------------------------- + do nc = 1,ncat + + if(hin_max(nc-1) < hi .and. hi < hin_max(nc)) then + + if (aicen(i,j,nc,iblk) > c0) then + hs = vsnon(i,j,nc,iblk) / aicen(i,j,nc,iblk) + else + hs = c0 + endif + + aicen(i,j,nc,iblk) = ice_cov(i,j,iblk) + vicen(i,j,nc,iblk) = hi*aicen(i,j,nc,iblk) + vsnon(i,j,nc,iblk) = hs*aicen(i,j,nc,iblk) + + !--------------------------------------------------------- + ! make linear temp profile and compute enthalpy + !--------------------------------------------------------- + + if (abs(trcrn(i,j,nt_qice,nc,iblk)) < puny) then + + if (aice(i,j,iblk) < puny) & + trcrn(i,j,nt_Tsfc,nc,iblk) = Tf(i,j,iblk) + + slope = Tf(i,j,iblk) - trcrn(i,j,nt_Tsfc,nc,iblk) + do k = 1, nilyr + zn = (real(k,kind=dbl_kind)-p5) / real(nilyr,kind=dbl_kind) + Ti = trcrn(i,j,nt_Tsfc,nc,iblk) + slope*zn + salin(k) = (saltmax/c2)*(c1-cos(pi*zn**(nsal/(msal+zn)))) + Tmlt = -salin(k)*depressT + trcrn(i,j,nt_sice+k-1,nc,iblk) = salin(k) + trcrn(i,j,nt_qice+k-1,nc,iblk) = & + -(rhoi * (cp_ice*(Tmlt-Ti) & + + Lfresh*(c1-Tmlt/Ti) - cp_ocn*Tmlt)) + enddo + + do k=1,nslyr + trcrn(i,j,nt_qsno+k-1,nc,iblk) = & + -rhos*(Lfresh - cp_ice*trcrn(i,j,nt_Tsfc,nc,iblk)) + enddo + + endif ! aice < puny + end if ! hin_max + enddo ! 
ncat + else + trcrn(i,j,nt_Tsfc,:,iblk) = Tf(i,j,iblk) + aicen(i,j,:,iblk) = c0 + vicen(i,j,:,iblk) = c0 + vsnon(i,j,:,iblk) = c0 + trcrn(i,j,nt_sice:nt_sice+nilyr-1,:,iblk) = c0 + trcrn(i,j,nt_qice:nt_qice+nilyr-1,:,iblk) = c0 + trcrn(i,j,nt_qsno:nt_qsno+nslyr-1,:,iblk) = c0 + end if ! ice_cov >= eps04 + + !-------------------------------------------------------------------- + ! compute aggregate ice state and open water area + !-------------------------------------------------------------------- + call icepack_aggregate(aicen = aicen(i,j,:,iblk), & + trcrn = trcrn(i,j,1:ntrcr,:,iblk), & + vicen = vicen(i,j,:,iblk), & + vsnon = vsnon(i,j,:,iblk), & + aice = aice (i,j, iblk), & + trcr = trcr (i,j,1:ntrcr,iblk), & + vice = vice (i,j, iblk), & + vsno = vsno (i,j, iblk), & + aice0 = aice0(i,j, iblk), & + trcr_depend = trcr_depend(1:ntrcr), & + trcr_base = trcr_base(1:ntrcr,:), & + n_trcr_strata = n_trcr_strata(1:ntrcr), & + nt_strata = nt_strata(1:ntrcr,:), & + Tf = Tf(i,j,iblk)) + + end if ! tmask + enddo ! i + enddo ! j + enddo ! iblk + + do iblk = 1, nblocks + do j = 1, ny_block + do i = 1, nx_block + aice_init(i,j,iblk) = aice(i,j,iblk) + enddo + enddo + enddo + + !-------------------------------------------------------------------- + ! set non-computed fluxes, ice velocities, ice-ocn stresses to zero + !-------------------------------------------------------------------- + + frzmlt (:,:,:) = c0 + uvel (:,:,:) = c0 + vvel (:,:,:) = c0 + strocnxT_iavg(:,:,:) = c0 + strocnyT_iavg(:,:,:) = c0 + + !----------------------------------------------------------------- + ! 
other atm and ocn fluxes + !----------------------------------------------------------------- + call init_flux_atm + call init_flux_ocn + + end subroutine ice_prescribed_phys + +end module ice_prescribed_mod diff --git a/cicecore/drivers/access/cmeps/ice_scam.F90 b/cicecore/drivers/access/cmeps/ice_scam.F90 new file mode 100644 index 000000000..b92900e4f --- /dev/null +++ b/cicecore/drivers/access/cmeps/ice_scam.F90 @@ -0,0 +1,20 @@ +module ice_scam + + use ice_kinds_mod + + implicit none + + ! single column control variables (only used for latlon grid) + + logical :: single_column = .false. ! true => single column mode + real (kind=dbl_kind) :: scmlat ! single column latitude (degrees) + real (kind=dbl_kind) :: scmlon ! single column longitude (degrees) + real (kind=dbl_kind) :: scol_frac ! single column ocn fraction + real (kind=dbl_kind) :: scol_mask ! single column ocn mask + real (kind=dbl_kind) :: scol_area ! single column ocn area + integer :: scol_ni ! ni size of single column domain file + integer :: scol_nj ! nj size of single column domain file + logical :: scol_valid = .false. ! 
true => single column mask is 1 + +end module ice_scam + diff --git a/cicecore/drivers/access/cmeps/ice_shr_methods.F90 b/cicecore/drivers/access/cmeps/ice_shr_methods.F90 new file mode 100644 index 000000000..0a3a72840 --- /dev/null +++ b/cicecore/drivers/access/cmeps/ice_shr_methods.F90 @@ -0,0 +1,1014 @@ +module ice_shr_methods + + use ESMF , only : operator(<), operator(/=), operator(+) + use ESMF , only : operator(-), operator(*) , operator(>=) + use ESMF , only : operator(<=), operator(>), operator(==) + use ESMF , only : ESMF_LOGERR_PASSTHRU, ESMF_LogFoundError, ESMF_LOGMSG_ERROR, ESMF_MAXSTR + use ESMF , only : ESMF_SUCCESS, ESMF_LogWrite, ESMF_LOGMSG_INFO, ESMF_FAILURE + use ESMF , only : ESMF_State, ESMF_StateGet + use ESMF , only : ESMF_Field, ESMF_FieldGet + use ESMF , only : ESMF_GridComp, ESMF_GridCompGet, ESMF_GridCompSet + use ESMF , only : ESMF_GeomType_Flag, ESMF_FieldStatus_Flag + use ESMF , only : ESMF_Mesh, ESMF_MeshGet + use ESMF , only : ESMF_GEOMTYPE_MESH, ESMF_GEOMTYPE_GRID, ESMF_FIELDSTATUS_COMPLETE + use ESMF , only : ESMF_Clock, ESMF_ClockCreate, ESMF_ClockGet, ESMF_ClockSet + use ESMF , only : ESMF_ClockPrint, ESMF_ClockAdvance, ESMF_ClockGetAlarm + use ESMF , only : ESMF_Alarm, ESMF_AlarmCreate, ESMF_AlarmGet, ESMF_AlarmSet + use ESMF , only : ESMF_Calendar, ESMF_CALKIND_NOLEAP, ESMF_CALKIND_GREGORIAN + use ESMF , only : ESMF_Time, ESMF_TimeGet, ESMF_TimeSet + use ESMF , only : ESMF_TimeInterval, ESMF_TimeIntervalSet, ESMF_TimeIntervalGet + use ESMF , only : ESMF_VM, ESMF_VMGet, ESMF_VMBroadcast, ESMF_VMGetCurrent + use NUOPC , only : NUOPC_CompAttributeGet + use NUOPC_Model , only : NUOPC_ModelGet + use ice_kinds_mod, only : r8 => dbl_kind, cl=>char_len_long, cs=>char_len + use ice_exit , only : abort_ice +#ifdef CESMCOUPLED + use shr_log_mod , only : shr_log_setlogunit +#endif + + implicit none + private + + public :: memcheck + public :: get_component_instance + public :: set_component_logging + public :: log_clock_advance + public 
:: state_getscalar + public :: state_setscalar + public :: state_reset + public :: state_flddebug + public :: state_diagnose + public :: alarmInit + public :: chkerr + + private :: timeInit + private :: field_getfldptr + + ! Clock and alarm options + character(len=*), private, parameter :: & + optNONE = "none" , & + optNever = "never" , & + optNSteps = "nsteps" , & + optNStep = "nstep" , & + optNSeconds = "nseconds" , & + optNSecond = "nsecond" , & + optNMinutes = "nminutes" , & + optNMinute = "nminute" , & + optNHours = "nhours" , & + optNHour = "nhour" , & + optNDays = "ndays" , & + optNDay = "nday" , & + optNMonths = "nmonths" , & + optNMonth = "nmonth" , & + optNYears = "nyears" , & + optNYear = "nyear" , & + optMonthly = "monthly" , & + optYearly = "yearly" , & + optDate = "date" , & + optEnd = "end" , & + optIfdays0 = "ifdays0" + + ! Module data + integer, parameter :: SecPerDay = 86400 ! Seconds per day + integer, parameter :: memdebug_level=1 + character(len=1024) :: msgString + character(len=*), parameter :: u_FILE_u = & + __FILE__ + +!=============================================================================== +contains +!=============================================================================== + + subroutine memcheck(string, level, mastertask) + + ! input/output variables + character(len=*) , intent(in) :: string + integer , intent(in) :: level + logical , intent(in) :: mastertask + + ! local variables + integer :: ierr + integer, external :: GPTLprint_memusage + character(len=*), parameter :: subname='(memcheck)' + !----------------------------------------------------------------------- + +#ifdef CESMCOUPLED + if ((mastertask .and. memdebug_level > level) .or. memdebug_level > level+1) then + ierr = GPTLprint_memusage(string) + endif +#endif + + end subroutine memcheck + +!=============================================================================== + + subroutine get_component_instance(gcomp, inst_suffix, inst_index, rc) + + ! 
input/output variables + type(ESMF_GridComp) :: gcomp + character(len=*) , intent(out) :: inst_suffix + integer , intent(out) :: inst_index + integer , intent(out) :: rc + + ! local variables + logical :: isPresent + character(len=4) :: cvalue + character(len=*), parameter :: subname='(get_component_instance)' + !----------------------------------------------------------------------- + + rc = ESMF_SUCCESS + + call NUOPC_CompAttributeGet(gcomp, name="inst_suffix", isPresent=isPresent, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + if (isPresent) then + call NUOPC_CompAttributeGet(gcomp, name="inst_suffix", value=inst_suffix, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + cvalue = inst_suffix(2:) + read(cvalue, *) inst_index + else + inst_suffix = "" + inst_index=1 + endif + + end subroutine get_component_instance + +!=============================================================================== + + subroutine set_component_logging(gcomp, mastertask, logunit, shrlogunit, rc) + + ! input/output variables + type(ESMF_GridComp) :: gcomp + logical, intent(in) :: mastertask + integer, intent(out) :: logunit + integer, intent(out) :: shrlogunit + integer, intent(out) :: rc + + ! 
local variables + character(len=CL) :: diro + character(len=CL) :: logfile + character(len=*), parameter :: subname='(set_component_logging)' + !----------------------------------------------------------------------- + + rc = ESMF_SUCCESS + + shrlogunit = 6 + + if (mastertask) then + call NUOPC_CompAttributeGet(gcomp, name="diro", value=diro, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call NUOPC_CompAttributeGet(gcomp, name="logfile", value=logfile, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + open(newunit=logunit,file=trim(diro)//"/"//trim(logfile)) + else + logUnit = 6 + endif + +#ifdef CESMCOUPLED + call shr_log_setLogUnit (logunit) +#endif + + end subroutine set_component_logging + +!=============================================================================== + + subroutine log_clock_advance(clock, component, logunit, rc) + + ! input/output variables + type(ESMF_Clock) :: clock + character(len=*) , intent(in) :: component + integer , intent(in) :: logunit + integer , intent(out) :: rc + + ! local variables + character(len=CL) :: cvalue, prestring + character(len=*), parameter :: subname='(log_clock_advance)' + !----------------------------------------------------------------------- + + rc = ESMF_SUCCESS + + write(prestring, *) "------>Advancing ",trim(component)," from: " + call ESMF_ClockPrint(clock, options="currTime", unit=cvalue, preString=trim(prestring), rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + write(logunit, *) trim(cvalue) + + call ESMF_ClockPrint(clock, options="stopTime", unit=cvalue, & + preString="--------------------------------> to: ", rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + write(logunit, *) trim(cvalue) + + end subroutine log_clock_advance + +!=============================================================================== + + subroutine state_getscalar(state, scalar_id, scalar_value, flds_scalar_name, flds_scalar_num, rc) + + ! ---------------------------------------------- + ! 
Get scalar data from State for a particular name and broadcast it to all other pets + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_State), intent(in) :: state + integer, intent(in) :: scalar_id + real(r8), intent(out) :: scalar_value + character(len=*), intent(in) :: flds_scalar_name + integer, intent(in) :: flds_scalar_num + integer, intent(inout) :: rc + + ! local variables + integer :: mytask, ierr, len + type(ESMF_VM) :: vm + type(ESMF_Field) :: field + real(r8), pointer :: farrayptr(:,:) + real(r8) :: tmp(1) + character(len=*), parameter :: subname='(state_getscalar)' + ! ---------------------------------------------- + + rc = ESMF_SUCCESS + + call ESMF_VMGetCurrent(vm, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + call ESMF_VMGet(vm, localPet=mytask, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + call ESMF_StateGet(State, itemName=trim(flds_scalar_name), field=field, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + if (mytask == 0) then + call ESMF_FieldGet(field, farrayPtr = farrayptr, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + if (scalar_id < 0 .or. scalar_id > flds_scalar_num) then + call ESMF_LogWrite(trim(subname)//": ERROR in scalar_id", ESMF_LOGMSG_INFO, line=__LINE__, file=u_FILE_u) + rc = ESMF_FAILURE + if (ESMF_LogFoundError(rcToCheck=rc, msg=ESMF_LOGERR_PASSTHRU, line=__LINE__, file=u_FILE_u)) return + endif + tmp(:) = farrayptr(scalar_id,:) + endif + call ESMF_VMBroadCast(vm, tmp, 1, 0, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + scalar_value = tmp(1) + + end subroutine state_getscalar + +!================================================================================ + + subroutine state_setscalar(scalar_value, scalar_id, State, flds_scalar_name, flds_scalar_num, rc) + + ! ---------------------------------------------- + ! Set scalar data from State for a particular name + ! ---------------------------------------------- + + ! 
input/output arguments
+    real(r8), intent(in) :: scalar_value
+    integer, intent(in) :: scalar_id
+    type(ESMF_State), intent(inout) :: State
+    character(len=*), intent(in) :: flds_scalar_name
+    integer, intent(in) :: flds_scalar_num
+    integer, intent(inout) :: rc
+
+    ! local variables
+    integer :: mytask
+    type(ESMF_Field) :: lfield
+    type(ESMF_VM) :: vm
+    real(r8), pointer :: farrayptr(:,:)
+    character(len=*), parameter :: subname='(state_setscalar)'
+    ! ----------------------------------------------
+
+    rc = ESMF_SUCCESS
+
+    call ESMF_VMGetCurrent(vm, rc=rc)
+    if (chkerr(rc,__LINE__,u_FILE_u)) return
+
+    call ESMF_VMGet(vm, localPet=mytask, rc=rc)
+    if (chkerr(rc,__LINE__,u_FILE_u)) return
+
+    call ESMF_StateGet(State, itemName=trim(flds_scalar_name), field=lfield, rc=rc)
+    if (chkerr(rc,__LINE__,u_FILE_u)) return
+
+    if (mytask == 0) then
+       call ESMF_FieldGet(lfield, farrayPtr = farrayptr, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       if (scalar_id < 0 .or. scalar_id > flds_scalar_num) then
+          call ESMF_LogWrite(trim(subname)//": ERROR in scalar_id", ESMF_LOGMSG_INFO)
+          rc = ESMF_FAILURE
+          return
+       endif
+       farrayptr(scalar_id,1) = scalar_value
+    endif
+
+  end subroutine state_setscalar
+
+!===============================================================================
+
+  subroutine state_reset(State, reset_value, rc)
+
+    ! ----------------------------------------------
+    ! Set all fields in State to reset_value
+    ! ----------------------------------------------
+
+    ! input/output variables
+    type(ESMF_State) , intent(inout) :: State
+    real(R8) , intent(in) :: reset_value
+    integer , intent(out) :: rc
+
+    ! local variables
+    integer :: i,j,n
+    type(ESMF_Field) :: lfield
+    integer :: fieldCount
+    integer :: lrank
+    character(ESMF_MAXSTR), allocatable :: lfieldnamelist(:)
+    real(R8), pointer :: fldptr1(:)
+    real(R8), pointer :: fldptr2(:,:)
+    real(R8), parameter :: czero = 0.0_R8
+    character(len=*),parameter :: subname='(state_reset)'
+    ! 
---------------------------------------------- + + rc = ESMF_SUCCESS + + call ESMF_StateGet(State, itemCount=fieldCount, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + allocate(lfieldnamelist(fieldCount)) + call ESMF_StateGet(State, itemNameList=lfieldnamelist, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + do n = 1, fieldCount + call ESMF_StateGet(State, itemName=trim(lfieldnamelist(n)), field=lfield, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + call field_getfldptr(lfield, fldptr1=fldptr1, fldptr2=fldptr2, rank=lrank, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if (lrank == 0) then + ! no local data + elseif (lrank == 1) then + fldptr1 = reset_value + elseif (lrank == 2) then + fldptr2 = reset_value + else + call ESMF_LogWrite(trim(subname)//": ERROR in rank "//trim(lfieldnamelist(n)), ESMF_LOGMSG_ERROR) + rc = ESMF_FAILURE + return + endif + enddo + + deallocate(lfieldnamelist) + + end subroutine state_reset + +!=============================================================================== + + subroutine state_flddebug(state, flds_scalar_name, prefix, ymd, tod, logunit, rc) + + ! input/output variables + type(ESMF_State) :: state + character(len=*) , intent(in) :: flds_scalar_name + character(len=*) , intent(in) :: prefix + integer , intent(in) :: ymd + integer , intent(in) :: tod + integer , intent(in) :: logunit + integer , intent(out) :: rc + + ! local variables + integer :: n, nfld, ungridded_index + integer :: lsize + real(R8), pointer :: dataPtr1d(:) + real(R8), pointer :: dataPtr2d(:,:) + integer :: fieldCount + integer :: ungriddedUBound(1) + integer :: gridToFieldMap(1) + character(len=ESMF_MAXSTR) :: string + type(ESMF_Field) , allocatable :: lfields(:) + integer , allocatable :: dimCounts(:) + character(len=ESMF_MAXSTR) , allocatable :: fieldNameList(:) + character(len=*), parameter :: subname='(state_flddebug)' + !----------------------------------------------------- + + ! 
Determine the list of fields and the dimension count for each field + call ESMF_StateGet(state, itemCount=fieldCount, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + allocate(fieldNameList(fieldCount)) + allocate(lfields(fieldCount)) + allocate(dimCounts(fieldCount)) + + call ESMF_StateGet(state, itemNameList=fieldNameList, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + do nfld=1, fieldCount + call ESMF_StateGet(state, itemName=trim(fieldNameList(nfld)), field=lfields(nfld), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldGet(lfields(nfld), dimCount=dimCounts(nfld), rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + end do + + ! Determine local size of field + do nfld=1, fieldCount + if (dimCounts(nfld) == 1) then + call ESMF_FieldGet(lfields(nfld), farrayPtr=dataPtr1d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + lsize = size(dataPtr1d) + exit + end if + end do + + ! Write out debug output + do n = 1,lsize + do nfld=1, fieldCount + if (dimCounts(nfld) == 1) then + call ESMF_FieldGet(lfields(nfld), farrayPtr=dataPtr1d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (trim(fieldNameList(nfld)) /= flds_scalar_name .and. dataPtr1d(n) /= 0.) then + string = trim(prefix) // ' ymd, tod, index, '// trim(fieldNameList(nfld)) //' = ' + write(logunit,100) trim(string), ymd, tod, n, dataPtr1d(n) + end if + else if (dimCounts(nfld) == 2) then + call ESMF_FieldGet(lfields(nfld), ungriddedUBound=ungriddedUBound, gridtoFieldMap=gridToFieldMap, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldGet(lfields(nfld), farrayPtr=dataPtr2d, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + do ungridded_index = 1,ungriddedUBound(1) + if (trim(fieldNameList(nfld)) /= flds_scalar_name) then + string = trim(prefix) // ' ymd, tod, lev, index, '// trim(fieldNameList(nfld)) //' = ' + if (gridToFieldMap(1) == 1) then + if (dataPtr2d(n,ungridded_index) /= 0.) 
then + write(logunit,101) trim(string), ymd, tod, ungridded_index, n, dataPtr2d(n,ungridded_index) + end if + else if (gridToFieldMap(1) == 2) then + if (dataPtr2d(ungridded_index,n) /= 0.) then + write(logunit,101) trim(string), ymd, tod, ungridded_index, n, dataPtr2d(ungridded_index,n) + end if + end if + end if + end do + end if + end do + end do +100 format(a60,3(i8,2x),d21.14) +101 format(a60,4(i8,2x),d21.14) + + deallocate(fieldNameList) + deallocate(lfields) + deallocate(dimCounts) + + end subroutine state_flddebug + +!=============================================================================== + + subroutine state_diagnose(State, string, rc) + + ! ---------------------------------------------- + ! Diagnose status of State + ! ---------------------------------------------- + + type(ESMF_State), intent(in) :: state + character(len=*), intent(in) :: string + integer , intent(out) :: rc + + ! local variables + integer :: i,j,n + type(ESMf_Field) :: lfield + integer :: fieldCount, lrank + character(ESMF_MAXSTR) ,pointer :: lfieldnamelist(:) + real(r8), pointer :: dataPtr1d(:) + real(r8), pointer :: dataPtr2d(:,:) + character(len=*),parameter :: subname='(state_diagnose)' + ! ---------------------------------------------- + + call ESMF_StateGet(state, itemCount=fieldCount, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + allocate(lfieldnamelist(fieldCount)) + + call ESMF_StateGet(state, itemNameList=lfieldnamelist, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + do n = 1, fieldCount + + call ESMF_StateGet(state, itemName=lfieldnamelist(n), field=lfield, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + call field_getfldptr(lfield, fldptr1=dataPtr1d, fldptr2=dataPtr2d, rank=lrank, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + if (lrank == 0) then + ! 
no local data + elseif (lrank == 1) then + if (size(dataPtr1d) > 0) then + write(msgString,'(A,3g14.7,i8)') trim(string)//': '//trim(lfieldnamelist(n)), & + minval(dataPtr1d), maxval(dataPtr1d), sum(dataPtr1d), size(dataPtr1d) + else + write(msgString,'(A,a)') trim(string)//': '//trim(lfieldnamelist(n))," no data" + endif + elseif (lrank == 2) then + if (size(dataPtr2d) > 0) then + write(msgString,'(A,3g14.7,i8)') trim(string)//': '//trim(lfieldnamelist(n)), & + minval(dataPtr2d), maxval(dataPtr2d), sum(dataPtr2d), size(dataPtr2d) + else + write(msgString,'(A,a)') trim(string)//': '//trim(lfieldnamelist(n))," no data" + endif + else + call ESMF_LogWrite(trim(subname)//": ERROR rank not supported ", ESMF_LOGMSG_ERROR) + rc = ESMF_FAILURE + return + endif + call ESMF_LogWrite(trim(msgString), ESMF_LOGMSG_INFO) + enddo + + deallocate(lfieldnamelist) + + end subroutine state_diagnose + +!=============================================================================== + + subroutine field_getfldptr(field, fldptr1, fldptr2, rank, abort, rc) + + ! ---------------------------------------------- + ! for a field, determine rank and return fldptr1 or fldptr2 + ! abort is true by default and will abort if fldptr is not yet allocated in field + ! rank returns 0, 1, or 2. 0 means fldptr not allocated and abort=false + ! ---------------------------------------------- + + ! input/output variables + type(ESMF_Field) , intent(in) :: field + real(r8), pointer , intent(inout), optional :: fldptr1(:) + real(r8), pointer , intent(inout), optional :: fldptr2(:,:) + integer , intent(out) , optional :: rank + logical , intent(in) , optional :: abort + integer , intent(out) , optional :: rc + + ! local variables + type(ESMF_GeomType_Flag) :: geomtype + type(ESMF_FieldStatus_Flag) :: status + type(ESMF_Mesh) :: lmesh + integer :: lrank, nnodes, nelements + logical :: labort + character(len=*), parameter :: subname='(field_getfldptr)' + ! 
---------------------------------------------- + + if (.not.present(rc)) then + call ESMF_LogWrite(trim(subname)//": ERROR rc not present ", & + ESMF_LOGMSG_ERROR, line=__LINE__, file=u_FILE_u) + rc = ESMF_FAILURE + return + endif + + rc = ESMF_SUCCESS + + labort = .true. + if (present(abort)) then + labort = abort + endif + lrank = -99 + + call ESMF_FieldGet(field, status=status, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + if (status /= ESMF_FIELDSTATUS_COMPLETE) then + lrank = 0 + if (labort) then + call ESMF_LogWrite(trim(subname)//": ERROR data not allocated ", ESMF_LOGMSG_INFO, rc=rc) + rc = ESMF_FAILURE + return + else + call ESMF_LogWrite(trim(subname)//": WARNING data not allocated ", ESMF_LOGMSG_INFO, rc=rc) + endif + else + + call ESMF_FieldGet(field, geomtype=geomtype, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + + if (geomtype == ESMF_GEOMTYPE_GRID) then + call ESMF_FieldGet(field, rank=lrank, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + elseif (geomtype == ESMF_GEOMTYPE_MESH) then + call ESMF_FieldGet(field, rank=lrank, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldGet(field, mesh=lmesh, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + call ESMF_MeshGet(lmesh, numOwnedNodes=nnodes, numOwnedElements=nelements, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + if (nnodes == 0 .and. nelements == 0) lrank = 0 + else + call ESMF_LogWrite(trim(subname)//": ERROR geomtype not supported ", & + ESMF_LOGMSG_INFO, rc=rc) + rc = ESMF_FAILURE + return + endif ! 
geomtype + + if (lrank == 0) then + call ESMF_LogWrite(trim(subname)//": no local nodes or elements ", & + ESMF_LOGMSG_INFO) + elseif (lrank == 1) then + if (.not.present(fldptr1)) then + call ESMF_LogWrite(trim(subname)//": ERROR missing rank=1 array ", & + ESMF_LOGMSG_ERROR, line=__LINE__, file=u_FILE_u) + rc = ESMF_FAILURE + return + endif + call ESMF_FieldGet(field, farrayPtr=fldptr1, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + elseif (lrank == 2) then + if (.not.present(fldptr2)) then + call ESMF_LogWrite(trim(subname)//": ERROR missing rank=2 array ", & + ESMF_LOGMSG_ERROR, line=__LINE__, file=u_FILE_u) + rc = ESMF_FAILURE + return + endif + call ESMF_FieldGet(field, farrayPtr=fldptr2, rc=rc) + if (chkerr(rc,__LINE__,u_FILE_u)) return + else + call ESMF_LogWrite(trim(subname)//": ERROR in rank ", & + ESMF_LOGMSG_ERROR, line=__LINE__, file=u_FILE_u) + rc = ESMF_FAILURE + return + endif + + endif ! status + + if (present(rank)) then + rank = lrank + endif + + end subroutine field_getfldptr + +!=============================================================================== + + subroutine alarmInit( clock, alarm, option, & + opt_n, opt_ymd, opt_tod, RefTime, alarmname, rc) + + ! Setup an alarm in a clock + ! Notes: The ringtime sent to AlarmCreate MUST be the next alarm + ! time. If you send an arbitrary but proper ringtime from the + ! past and the ring interval, the alarm will always go off on the + ! next clock advance and this will cause serious problems. Even + ! if it makes sense to initialize an alarm with some reference + ! time and the alarm interval, that reference time has to be + ! advance forward to be >= the current time. In the logic below + ! we set an appropriate "NextAlarm" and then we make sure to + ! advance it properly based on the ring interval. + + ! input/output variables + type(ESMF_Clock) , intent(inout) :: clock ! clock + type(ESMF_Alarm) , intent(inout) :: alarm ! alarm + character(len=*) , intent(in) :: option ! 
alarm option
+    integer          , optional , intent(in)    :: opt_n     ! alarm freq
+    integer          , optional , intent(in)    :: opt_ymd   ! alarm ymd
+    integer          , optional , intent(in)    :: opt_tod   ! alarm tod (sec)
+    type(ESMF_Time)  , optional , intent(in)    :: RefTime   ! ref time
+    character(len=*) , optional , intent(in)    :: alarmname ! alarm name
+    integer          , intent(inout)            :: rc        ! Return code
+
+    ! local variables
+    type(ESMF_Calendar)     :: cal              ! calendar
+    integer                 :: lymd             ! local ymd
+    integer                 :: ltod             ! local tod
+    integer                 :: cyy,cmm,cdd,csec ! time info
+    character(len=64)       :: lalarmname       ! local alarm name
+    logical                 :: update_nextalarm ! update next alarm
+    type(ESMF_Time)         :: CurrTime         ! Current Time
+    type(ESMF_Time)         :: NextAlarm        ! Next restart alarm time
+    type(ESMF_TimeInterval) :: AlarmInterval    ! Alarm interval
+    character(len=*), parameter :: subname = '(alarmInit): '
+    !-------------------------------------------------------------------------------
+
+    rc = ESMF_SUCCESS
+
+    ! resolve optional arguments to local defaults
+    lalarmname = 'alarm_unknown'
+    if (present(alarmname)) lalarmname = trim(alarmname)
+    ltod = 0
+    if (present(opt_tod)) ltod = opt_tod
+    lymd = -1
+    if (present(opt_ymd)) lymd = opt_ymd
+
+    call ESMF_ClockGet(clock, CurrTime=CurrTime, rc=rc)
+    if (chkerr(rc,__LINE__,u_FILE_u)) return
+
+    call ESMF_TimeGet(CurrTime, yy=cyy, mm=cmm, dd=cdd, s=csec, rc=rc )
+    if (chkerr(rc,__LINE__,u_FILE_u)) return
+
+    ! initial guess of next alarm, this will be updated below
+    if (present(RefTime)) then
+       NextAlarm = RefTime
+    else
+       NextAlarm = CurrTime
+    endif
+
+    ! Determine calendar
+    call ESMF_ClockGet(clock, calendar=cal)
+
+    ! Determine inputs for call to create alarm.  The singular/plural
+    ! spellings of each interval option are behaviorally identical, so
+    ! they share one case block; each block validates its inputs, sets
+    ! AlarmInterval (and possibly NextAlarm), and records whether the
+    ! ring time must be advanced past the current time further below.
+    select case (trim(option))
+
+    case (optNONE, optNever)
+       call ESMF_TimeIntervalSet(AlarmInterval, yy=9999, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       call ESMF_TimeSet( NextAlarm, yy=9999, mm=12, dd=1, s=0, calendar=cal, rc=rc )
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       update_nextalarm = .false.
+
+    case (optDate)
+       if (.not. present(opt_ymd)) then
+          call abort_ice(subname//trim(option)//' requires opt_ymd')
+       end if
+       if (lymd < 0 .or. ltod < 0) then
+          call abort_ice(subname//trim(option)//'opt_ymd, opt_tod invalid')
+       end if
+       call ESMF_TimeIntervalSet(AlarmInterval, yy=9999, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       call timeInit(NextAlarm, lymd, cal, ltod, rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       update_nextalarm = .false.
+
+    case (optIfdays0)
+       ! ring on day-of-month opt_n every month
+       if (.not. present(opt_ymd)) then
+          call abort_ice(subname//trim(option)//' requires opt_ymd')
+       end if
+       if (.not. present(opt_n)) then
+          call abort_ice(subname//trim(option)//' requires opt_n')
+       end if
+       if (opt_n <= 0) then
+          call abort_ice(subname//trim(option)//' invalid opt_n')
+       end if
+       call ESMF_TimeIntervalSet(AlarmInterval, mm=1, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       call ESMF_TimeSet( NextAlarm, yy=cyy, mm=cmm, dd=opt_n, s=0, calendar=cal, rc=rc )
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       update_nextalarm = .true.
+
+    case (optNSteps, optNStep)
+       ! every opt_n coupling timesteps
+       if (.not. present(opt_n)) then
+          call abort_ice(subname//trim(option)//' requires opt_n')
+       end if
+       if (opt_n <= 0) then
+          call abort_ice(subname//trim(option)//' invalid opt_n')
+       end if
+       call ESMF_ClockGet(clock, TimeStep=AlarmInterval, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       AlarmInterval = AlarmInterval * opt_n
+       update_nextalarm = .true.
+
+    case (optNSeconds, optNSecond)
+       if (.not. present(opt_n)) then
+          call abort_ice(subname//trim(option)//' requires opt_n')
+       end if
+       if (opt_n <= 0) then
+          call abort_ice(subname//trim(option)//' invalid opt_n')
+       end if
+       call ESMF_TimeIntervalSet(AlarmInterval, s=1, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       AlarmInterval = AlarmInterval * opt_n
+       update_nextalarm = .true.
+
+    case (optNMinutes, optNMinute)
+       ! bugfix: optNMinutes previously set the interval before
+       ! validating opt_n and omitted the chkerr return check
+       if (.not. present(opt_n)) then
+          call abort_ice(subname//trim(option)//' requires opt_n')
+       end if
+       if (opt_n <= 0) then
+          call abort_ice(subname//trim(option)//' invalid opt_n')
+       end if
+       call ESMF_TimeIntervalSet(AlarmInterval, s=60, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       AlarmInterval = AlarmInterval * opt_n
+       update_nextalarm = .true.
+
+    case (optNHours, optNHour)
+       if (.not. present(opt_n)) then
+          call abort_ice(subname//trim(option)//' requires opt_n')
+       end if
+       if (opt_n <= 0) then
+          call abort_ice(subname//trim(option)//' invalid opt_n')
+       end if
+       call ESMF_TimeIntervalSet(AlarmInterval, s=3600, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       AlarmInterval = AlarmInterval * opt_n
+       update_nextalarm = .true.
+
+    case (optNDays, optNDay)
+       if (.not. present(opt_n)) then
+          call abort_ice(subname//trim(option)//' requires opt_n')
+       end if
+       if (opt_n <= 0) then
+          call abort_ice(subname//trim(option)//' invalid opt_n')
+       end if
+       call ESMF_TimeIntervalSet(AlarmInterval, d=1, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       AlarmInterval = AlarmInterval * opt_n
+       update_nextalarm = .true.
+
+    case (optNMonths, optNMonth)
+       if (.not. present(opt_n)) then
+          call abort_ice(subname//trim(option)//' requires opt_n')
+       end if
+       if (opt_n <= 0) then
+          call abort_ice(subname//trim(option)//' invalid opt_n')
+       end if
+       call ESMF_TimeIntervalSet(AlarmInterval, mm=1, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       AlarmInterval = AlarmInterval * opt_n
+       update_nextalarm = .true.
+
+    case (optMonthly)
+       ! first of every month
+       call ESMF_TimeIntervalSet(AlarmInterval, mm=1, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       call ESMF_TimeSet( NextAlarm, yy=cyy, mm=cmm, dd=1, s=0, calendar=cal, rc=rc )
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       update_nextalarm = .true.
+
+    case (optNYears, optNYear)
+       if (.not. present(opt_n)) then
+          call abort_ice(subname//trim(option)//' requires opt_n')
+       end if
+       if (opt_n <= 0) then
+          call abort_ice(subname//trim(option)//' invalid opt_n')
+       end if
+       call ESMF_TimeIntervalSet(AlarmInterval, yy=1, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       AlarmInterval = AlarmInterval * opt_n
+       update_nextalarm = .true.
+
+    case (optYearly)
+       ! January first of every year
+       call ESMF_TimeIntervalSet(AlarmInterval, yy=1, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       call ESMF_TimeSet( NextAlarm, yy=cyy, mm=1, dd=1, s=0, calendar=cal, rc=rc )
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       update_nextalarm = .true.
+
+    case (optEnd)
+       call ESMF_TimeIntervalSet(AlarmInterval, yy=9999, rc=rc)
+       if (chkerr(rc,__LINE__,u_FILE_u)) return
+       ! ring exactly when the driver's stop alarm rings
+       call ESMF_ClockGetAlarm(clock, alarmname="alarm_stop", alarm=alarm, rc=rc)
+       if (ChkErr(rc,__LINE__,u_FILE_u)) return
+       call ESMF_AlarmGet(alarm, ringTime=NextAlarm, rc=rc)
+       if (ChkErr(rc,__LINE__,u_FILE_u)) return
+       ! bugfix: update_nextalarm was never assigned for this option, so
+       ! the "if (update_nextalarm)" test below read an undefined logical.
+       ! The stop time must be used as-is, so do not advance it.
+       update_nextalarm = .false.
+
+    case default
+       call abort_ice(subname//'unknown option '//trim(option))
+
+    end select
+
+    ! --------------------------------------------------------------------------------
+    ! --- AlarmInterval and NextAlarm should be set ---
+    ! --------------------------------------------------------------------------------
+
+    ! --- advance Next Alarm so it won't ring on first timestep for
+    ! --- most options above. go back one alarminterval just to be careful
+
+    if (update_nextalarm) then
+       NextAlarm = NextAlarm - AlarmInterval
+       do while (NextAlarm <= CurrTime)
+          NextAlarm = NextAlarm + AlarmInterval
+       enddo
+    endif
+
+    alarm = ESMF_AlarmCreate( name=lalarmname, clock=clock, ringTime=NextAlarm, &
+         ringInterval=AlarmInterval, rc=rc)
+    if (chkerr(rc,__LINE__,u_FILE_u)) return
+
+  end subroutine alarmInit
+
+!===============================================================================
+
+  subroutine timeInit( Time, ymd, cal, tod, rc)
+
+    ! Create the ESMF_Time object corresponding to the given input time,
+    ! given in YMD (Year Month Day) and TOD (Time-of-day) format.
+    ! Set the time by an integer as YYYYMMDD and integer seconds in the day
+
+    ! input/output parameters:
+    type(ESMF_Time)     , intent(inout) :: Time ! ESMF time
+    integer             , intent(in)    :: ymd  ! year, month, day YYYYMMDD
+    type(ESMF_Calendar) , intent(in)    :: cal  ! ESMF calendar
+    integer             , intent(in)    :: tod  ! time of day in seconds
+    integer             , intent(out)   :: rc
+
+    ! local variables
+    integer :: year, mon, day ! year, month, day as integers
+    integer :: tdate          ! temporary date
+    integer :: date           ! 
coded-date (yyyymmdd)
+    character(len=*), parameter :: subname='(timeInit)'
+    !-------------------------------------------------------------------------------
+
+    rc = ESMF_SUCCESS
+
+    ! reject negative dates and out-of-range time-of-day up front
+    if ( (ymd < 0) .or. (tod < 0) .or. (tod > SecPerDay) )then
+       call abort_ice( subname//'ERROR yymmdd is a negative number or time-of-day out of bounds' )
+    end if
+
+    ! bugfix: decode the ymd dummy argument; the previous code read the
+    ! local variable "date" before it was ever assigned (undefined value)
+    date  = ymd
+    tdate = abs(date)
+    year  = int(tdate/10000)
+    if (date < 0) year = -year
+    mon = int( mod(tdate,10000)/ 100)
+    day = mod(tdate, 100)
+
+    call ESMF_TimeSet( Time, yy=year, mm=mon, dd=day, s=tod, calendar=cal, rc=rc )
+    if (chkerr(rc,__LINE__,u_FILE_u)) return
+
+  end subroutine timeInit
+
+!===============================================================================
+
+  logical function chkerr(rc, line, file)
+
+    ! Return .true. (and log via ESMF) when rc holds an ESMF error code.
+    integer, intent(in) :: rc
+    integer, intent(in) :: line
+    character(len=*), intent(in) :: file
+
+    integer :: lrc
+    character(len=*), parameter :: subname='(chkerr)'
+
+    chkerr = .false.
+    lrc = rc
+    if (ESMF_LogFoundError(rcToCheck=lrc, msg=ESMF_LOGERR_PASSTHRU, line=line, file=file)) then
+       chkerr = .true.
+    endif
+  end function chkerr
+
+end module ice_shr_methods
diff --git a/icepack b/icepack
index 43ead5638..31305ce7e 160000
--- a/icepack
+++ b/icepack
@@ -1 +1 @@
-Subproject commit 43ead56380bf11ecad66f165dcd736ed0c278763
+Subproject commit 31305ce7ebd30688bfb5d6929bc09a31f89e6c67