diff --git a/COMPILATION/CMakeLists.txt b/COMPILATION/CMakeLists.txt
index 7b89f49440..72b461de2b 100644
--- a/COMPILATION/CMakeLists.txt
+++ b/COMPILATION/CMakeLists.txt
@@ -67,7 +67,8 @@ set(STELLA_SOURCES_f90
   ../STELLA_CODE/diagnostics/diagnostics_fluxes_fluxtube.f90
   ../STELLA_CODE/diagnostics/diagnostics_fluxes_fullfluxsurface.f90
   ../STELLA_CODE/diagnostics/diagnostics_fluxes_radialvariation.f90
-  ../STELLA_CODE/diagnostics/diagnostics_fluxes.f90
+  ../STELLA_CODE/diagnostics/diagnostics_fluxes.f90
+  ../STELLA_CODE/diagnostics/diagnostics_stresses.f90
   ../STELLA_CODE/diagnostics/diagnostics_moments.f90
   ../STELLA_CODE/diagnostics/diagnostics_omega.f90
   ../STELLA_CODE/diagnostics/diagnostics_potential.f90
diff --git a/COMPILATION/Makefile.depend b/COMPILATION/Makefile.depend
index 50315d7867..65a231f0af 100644
--- a/COMPILATION/Makefile.depend
+++ b/COMPILATION/Makefile.depend
@@ -34,6 +34,7 @@ stella_mod = \
 	$(OBJ_DIR)/diagnostics_moments.o \
 	$(OBJ_DIR)/diagnostics_omega.o \
 	$(OBJ_DIR)/diagnostics_potential.o \
+	$(OBJ_DIR)/diagnostics_stresses.o \
 	$(OBJ_DIR)/diagnostics_write_radial_grid.o \
 	$(OBJ_DIR)/dissipation_and_collisions.o \
 	$(OBJ_DIR)/dissipation_hyper.o \
@@ -302,6 +303,7 @@ $(OBJ_DIR)/diagnostics.o: \
 	$(OBJ_DIR)/diagnostics_moments.o \
 	$(OBJ_DIR)/diagnostics_omega.o \
 	$(OBJ_DIR)/diagnostics_potential.o \
+	$(OBJ_DIR)/diagnostics_stresses.o \
 	$(OBJ_DIR)/dissipation_and_collisions.o \
 	$(OBJ_DIR)/field_equations.o \
 	$(OBJ_DIR)/git_version.o \
@@ -466,6 +468,30 @@ $(OBJ_DIR)/diagnostics_potential.o: \
 	$(OBJ_DIR)/parameters_diagnostics.o \
 	$(OBJ_DIR)/parameters_physics.o \
 	$(OBJ_DIR)/write_diagnostics_to_netcdf.o
+$(OBJ_DIR)/diagnostics_stresses.o: \
+	$(OBJ_DIR)/arrays.o \
+	$(OBJ_DIR)/arrays_distribution_function.o \
+	$(OBJ_DIR)/arrays_fields.o \
+	$(OBJ_DIR)/calculations_gyro_averages.o \
+	$(OBJ_DIR)/calculations_kxky.o \
+	$(OBJ_DIR)/calculations_kxky_derivatives.o \
+	$(OBJ_DIR)/calculations_tofrom_ghf.o \
+	$(OBJ_DIR)/calculations_transforms.o \
+	$(OBJ_DIR)/calculations_velocity_integrals.o \
+	$(OBJ_DIR)/constants.o \
+	$(OBJ_DIR)/geometry.o \
+	$(OBJ_DIR)/gk_flow_shear.o \
+	$(OBJ_DIR)/grids_kxky.o \
+	$(OBJ_DIR)/grids_species.o \
+	$(OBJ_DIR)/grids_velocity.o \
+	$(OBJ_DIR)/grids_z.o \
+	$(OBJ_DIR)/initialise_redistribute.o \
+	$(OBJ_DIR)/mp.o \
+	$(OBJ_DIR)/parallelisation_layouts.o \
+	$(OBJ_DIR)/parameters_diagnostics.o \
+	$(OBJ_DIR)/parameters_physics.o \
+	$(OBJ_DIR)/redistribute.o \
+	$(OBJ_DIR)/write_diagnostics_to_netcdf.o
 $(OBJ_DIR)/diagnostics_write_radial_grid.o: \
 	$(OBJ_DIR)/file_utils.o \
 	$(OBJ_DIR)/geometry.o \
diff --git a/STELLA_CODE/diagnostics/diagnostics.f90 b/STELLA_CODE/diagnostics/diagnostics.f90
index 2c51fc2429..8f332fea46 100644
--- a/STELLA_CODE/diagnostics/diagnostics.f90
+++ b/STELLA_CODE/diagnostics/diagnostics.f90
@@ -68,6 +68,10 @@
 ! write_moments = .false.
 ! write_radial_moments = .false.
 !/
+!
+!&diagnostics_stresses
+! write_stresses = .false.
+!/
 !
 !---------------------------------- Time traces --------------------------------
 !
@@ -164,6 +168,7 @@ subroutine diagnose_distribution_function_and_fields(istep)
    use diagnostics_fluxes, only: write_fluxes_to_netcdf_file
    use diagnostics_moments, only: write_moments_to_netcdf_file
    use diagnostics_distribution, only: write_distribution_to_netcdf_file
+   use diagnostics_stresses, only: write_stresses_to_netcdf_file
    use write_diagnostics_to_netcdf, only: sync_nc
 
    implicit none
@@ -222,6 +227,7 @@ subroutine diagnose_distribution_function_and_fields(istep)
       ! Write data to the netcdf files
       call write_moments_to_netcdf_file(nout, time_individual_diagnostics(:, 5))
       call write_distribution_to_netcdf_file(nout, time_individual_diagnostics(:, 6))
+      call write_stresses_to_netcdf_file(nout)
 
       ! Synchronize the disk copy of a netCDF dataset with in-memory buffers
       if (proc0) call sync_nc
diff --git a/STELLA_CODE/diagnostics/diagnostics_stresses.f90 b/STELLA_CODE/diagnostics/diagnostics_stresses.f90
new file mode 100644
index 0000000000..e3cea06631
--- /dev/null
+++ b/STELLA_CODE/diagnostics/diagnostics_stresses.f90
@@ -0,0 +1,533 @@
+module diagnostics_stresses
+
+   implicit none
+
+contains
+
+   subroutine write_stresses_to_netcdf_file(nout)
+
+      ! Knowledge of first processor
+      use mp, only: proc0
+
+      ! Dimensions
+      use grids_kxky, only: nakx
+      use grids_z, only: nzgrid
+
+      ! Flags
+      use parameters_physics, only: include_nonlinear
+      use parameters_diagnostics, only: write_stresses
+
+      ! Load data
+      use arrays_distribution_function, only: gnew
+
+      ! Write to netCDF file
+      use write_diagnostics_to_netcdf, only: write_stresses_nc
+
+      integer, intent(in) :: nout
+
+      ! We want to write stresses(kx, z) to the netcdf file
+      complex, dimension(:, :), allocatable :: phi_h_nonlin_kx, apar_h_nonlin_kx, bpar_h_nonlin_kx
+      complex, dimension(:, :), allocatable :: gint
+
+      allocate (phi_h_nonlin_kx(nakx, -nzgrid:nzgrid))
+      allocate (apar_h_nonlin_kx(nakx, -nzgrid:nzgrid))
+      allocate (bpar_h_nonlin_kx(nakx, -nzgrid:nzgrid))
+      allocate (gint(nakx, -nzgrid:nzgrid))
+
+      if (write_stresses .and. include_nonlinear) then
+         call calculate_turbulent_stresses(gnew, phi_h_nonlin_kx, apar_h_nonlin_kx, bpar_h_nonlin_kx, gint)
+      else
+         phi_h_nonlin_kx = 0.
+         apar_h_nonlin_kx = 0.
+         bpar_h_nonlin_kx = 0.
+         gint = 0.
+      end if
+
+      if (write_stresses) then
+         if (proc0) call write_stresses_nc(nout, phi_h_nonlin_kx, apar_h_nonlin_kx, bpar_h_nonlin_kx, gint)
+      end if
+
+      if (allocated(phi_h_nonlin_kx)) deallocate (phi_h_nonlin_kx)
+      if (allocated(apar_h_nonlin_kx)) deallocate (apar_h_nonlin_kx)
+      if (allocated(bpar_h_nonlin_kx)) deallocate (bpar_h_nonlin_kx)
+      if (allocated(gint)) deallocate (gint)
+
+   end subroutine write_stresses_to_netcdf_file
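+
+   !============================================================================
+   ! The routine below computes, for each piece of the generalised potential
+   ! chi (phi, apar, bpar), the ky = 0 component of the quasineutrality (QN)
+   ! operator applied to the ExB Poisson bracket with h. Schematically (a
+   ! sketch of the intent; the precise normalisation is set by the factors in
+   ! the code, not by this comment):
+   !
+   !    field_stress(kx, z) = sum_s Z_s n_s int d3v < {<field>, h} >  at ky = 0
+   !
+   ! where {.,.} is evaluated pseudo-spectrally in (y, x), exactly as in the
+   ! ExB nonlinearity, and <.> denotes a gyro-average.
+   !============================================================================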
+
+   subroutine calculate_turbulent_stresses(g, phi_h_nonlin_kx, apar_h_nonlin_kx, bpar_h_nonlin_kx, gint)
+
+      ! Constants
+      use constants, only: zi
+
+      ! Parallelisation
+      use parallelisation_layouts, only: vmu_lo
+
+      ! Dimensions
+      use grids_kxky, only: naky, nakx, naky_all, ikx_max, nx, ny
+      use grids_kxky, only: x, aky
+      use grids_z, only: nzgrid, ntubes
+
+      ! Flags
+      use parameters_physics, only: include_apar, include_bpar
+      use parameters_physics, only: include_nonlinear
+      use parameters_physics, only: fphi
+
+      ! Load data
+      use arrays_distribution_function, only: gvmu
+      use arrays_fields, only: phi, apar, bpar
+      use arrays, only: shift_state
+
+      ! Redistribute data
+      use redistribute, only: scatter
+      use initialise_redistribute, only: kxkyz2vmu
+
+      ! Calculations
+      use calculations_tofrom_ghf, only: g_to_h
+      use calculations_kxky_derivatives, only: get_dgdy, get_dgdx
+      use calculations_kxky_derivatives, only: get_dchidy
+      use calculations_kxky, only: swap_kxky, swap_kxky_back
+      use calculations_transforms, only: transform_y2ky, transform_x2kx
+      use calculations_transforms, only: transform_y2ky_xfirst, transform_x2kx_xfirst
+
+      ! Flow shear
+      use gk_flow_shear, only: prp_shear_enabled
+      use gk_flow_shear, only: hammett_flow_shear
+      use gk_flow_shear, only: g_exb, g_exbfac
+
+      ! Geometry
+      use geometry, only: exb_nonlin_fac
+
+      implicit none
+
+      ! Arguments
+      complex, dimension(:, :, -nzgrid:, :, vmu_lo%llim_proc:), intent(in out) :: g
+      complex, dimension(:, :), intent(in out) :: phi_h_nonlin_kx, apar_h_nonlin_kx, bpar_h_nonlin_kx, gint
+
+      ! Local variables
+      complex, dimension(:, :), allocatable :: g0k, g0a
+      complex, dimension(:, :), allocatable :: prefac, g0xky
+      complex, dimension(:, :), allocatable :: g0kxy
+      complex, dimension(:, :), allocatable :: g0k_swap
+      complex, dimension(:, :, :, :, :), allocatable :: phi_h_nonlin_k, apar_h_nonlin_k, bpar_h_nonlin_k
+      complex, dimension(:, :, :, :), allocatable :: temp
+      real, dimension(:, :), allocatable :: g0xy, g1xy
+      real, dimension(:, :), allocatable :: bracket_phi_h, bracket_apar_h, bracket_bpar_h
+      integer :: ivmu, iz, it
+      logical :: yfirst
+
+      !-------------------------------------------------------------------------
+
+      ! By default, prp_shear_enabled = .false. and thus yfirst = .true., and we Fourier transform y first.
+      ! If perpendicular flow shear is included, it is important to Fourier transform x first.
+      yfirst = .not. prp_shear_enabled
+
+      ! Allocate arrays
+      allocate (g0k(naky, nakx))
+      allocate (g0a(naky, nakx))
+      allocate (g0xy(ny, nx))
+      allocate (g1xy(ny, nx))
+      allocate (bracket_phi_h(ny, nx))
+      allocate (bracket_apar_h(ny, nx))
+      allocate (bracket_bpar_h(ny, nx))
+      allocate (prefac(naky, nx))
+      allocate (temp(naky, nakx, -nzgrid:nzgrid, ntubes))
+      allocate (phi_h_nonlin_k(naky, nakx, -nzgrid:nzgrid, ntubes, vmu_lo%llim_proc:vmu_lo%ulim_alloc))
+      allocate (apar_h_nonlin_k(naky, nakx, -nzgrid:nzgrid, ntubes, vmu_lo%llim_proc:vmu_lo%ulim_alloc))
+      allocate (bpar_h_nonlin_k(naky, nakx, -nzgrid:nzgrid, ntubes, vmu_lo%llim_proc:vmu_lo%ulim_alloc))
+
+      ! Here naky_all = 2*naky-1
+      if (yfirst) then
+         allocate (g0k_swap(naky_all, ikx_max))
+         allocate (g0kxy(ny, ikx_max))
+      else
+         allocate (g0xky(naky, nx))
+      end if
+
+      ! Compute the phase factor needed when running with equilibrium flow shear
+      prefac = 1.0
+      if (prp_shear_enabled .and. hammett_flow_shear) then
+         prefac = exp(-zi * g_exb * g_exbfac * spread(x, 1, naky) * spread(aky * shift_state, 2, nx))
+      end if
+
+      ! Incoming pdf is g = <f>.
+      ! For EM simulations, the pdf entering the ExB nonlinearity needs to be
+      ! the non-Boltzmann part of f (h = f + (Ze/T)*<phi>*F0)
+      if (include_apar .or. include_bpar) call g_to_h(g, phi, bpar, fphi)
+
+      if (include_nonlinear) then
+         do ivmu = vmu_lo%llim_proc, vmu_lo%ulim_proc
+            do it = 1, ntubes
+               do iz = -nzgrid, nzgrid
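+                  ! Each bracket {<field>, h} is evaluated pseudo-spectrally:
+                  ! dg/dy and d<field>/dx are formed in k-space, transformed
+                  ! to (y, x) space and multiplied there; the dg/dx*d<field>/dy
+                  ! product is then subtracted, and the result is transformed
+                  ! back to (ky, kx).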
+                  ! Compute i*ky*g
+                  call get_dgdy(g(:, :, iz, it, ivmu), g0k)
+                  ! Take the FFT to get dg/dy in (y,x) space
+                  call forward_transform(g0k, g0xy)
+                  ! Compute i*kx*<phi>, i.e. the phi piece of d<chi>/dx, in k-space
+                  call get_dfielddx(iz, ivmu, phi(:, :, iz, it), apar(:, :, iz, it), bpar(:, :, iz, it), g0k, 'phi')
+                  ! If running with equilibrium flow shear, make an adjustment
+                  ! to the term multiplying dg/dy
+                  if (prp_shear_enabled .and. hammett_flow_shear) then
+                     call get_dchidy(iz, ivmu, phi(:, :, iz, it), apar(:, :, iz, it), bpar(:, :, iz, it), g0a)
+                     g0k = g0k - g_exb * g_exbfac * spread(shift_state, 2, nakx) * g0a
+                  end if
+                  ! Take the FFT to get d<phi>/dx in (y,x) space
+                  call forward_transform(g0k, g1xy)
+                  ! Multiply by the geometric factor appearing in the Poisson bracket;
+                  ! i.e., (dx/dpsi*dy/dalpha)*0.5
+                  g1xy = g1xy * exb_nonlin_fac
+                  ! Compute the contribution to the Poisson bracket from dg/dy*d<phi>/dx
+                  bracket_phi_h = g0xy * g1xy
+
+                  ! Repeat the same calculation for the apar and bpar terms
+                  ! (NB: the flow-shear correction reuses g0a = d<chi>/dy from above)
+                  call get_dfielddx(iz, ivmu, phi(:, :, iz, it), apar(:, :, iz, it), bpar(:, :, iz, it), g0k, 'apar')
+                  if (prp_shear_enabled .and. hammett_flow_shear) then
+                     g0k = g0k - g_exb * g_exbfac * spread(shift_state, 2, nakx) * g0a
+                  end if
+                  call forward_transform(g0k, g1xy)
+                  g1xy = g1xy * exb_nonlin_fac
+                  bracket_apar_h = g0xy * g1xy
+
+                  call get_dfielddx(iz, ivmu, phi(:, :, iz, it), apar(:, :, iz, it), bpar(:, :, iz, it), g0k, 'bpar')
+                  if (prp_shear_enabled .and. hammett_flow_shear) then
+                     g0k = g0k - g_exb * g_exbfac * spread(shift_state, 2, nakx) * g0a
+                  end if
+                  call forward_transform(g0k, g1xy)
+                  g1xy = g1xy * exb_nonlin_fac
+                  bracket_bpar_h = g0xy * g1xy
+
+                  ! Compute dg/dx in k-space (= i*kx*g)
+                  call get_dgdx(g(:, :, iz, it, ivmu), g0k)
+                  ! If running with equilibrium flow shear, correct the dg/dx term
+                  if (prp_shear_enabled .and. hammett_flow_shear) then
+                     call get_dgdy(g(:, :, iz, it, ivmu), g0a)
+                     g0k = g0k - g_exb * g_exbfac * spread(shift_state, 2, nakx) * g0a
+                  end if
+                  ! Take the FFT to get dg/dx in (y,x) space
+                  call forward_transform(g0k, g0xy)
+                  ! Compute d<phi>/dy in k-space
+                  call get_dfielddy(iz, ivmu, phi(:, :, iz, it), apar(:, :, iz, it), bpar(:, :, iz, it), g0k, 'phi')
+                  ! Take the FFT to get d<phi>/dy in (y,x) space
+                  call forward_transform(g0k, g1xy)
+                  ! Multiply by the geometric factor appearing in the Poisson bracket;
+                  ! i.e., (dx/dpsi*dy/dalpha)*0.5
+                  g1xy = g1xy * exb_nonlin_fac
+                  ! Compute the contribution to the Poisson bracket from dg/dx*d<phi>/dy
+                  bracket_phi_h = bracket_phi_h - g0xy * g1xy
+
+                  ! Repeat the same calculation for apar and bpar
+                  call get_dfielddy(iz, ivmu, phi(:, :, iz, it), apar(:, :, iz, it), bpar(:, :, iz, it), g0k, 'apar')
+                  call forward_transform(g0k, g1xy)
+                  g1xy = g1xy * exb_nonlin_fac
+                  bracket_apar_h = bracket_apar_h - g0xy * g1xy
+
+                  call get_dfielddy(iz, ivmu, phi(:, :, iz, it), apar(:, :, iz, it), bpar(:, :, iz, it), g0k, 'bpar')
+                  call forward_transform(g0k, g1xy)
+                  g1xy = g1xy * exb_nonlin_fac
+                  bracket_bpar_h = bracket_bpar_h - g0xy * g1xy
+
+                  ! Transform the brackets back to Fourier space
+                  if (yfirst) then
+                     call transform_x2kx(bracket_phi_h, g0kxy)
+                     call transform_y2ky(g0kxy, g0k_swap)
+                     call swap_kxky_back(g0k_swap, phi_h_nonlin_k(:, :, iz, it, ivmu))
+
+                     call transform_x2kx(bracket_apar_h, g0kxy)
+                     call transform_y2ky(g0kxy, g0k_swap)
+                     call swap_kxky_back(g0k_swap, apar_h_nonlin_k(:, :, iz, it, ivmu))
+
+                     call transform_x2kx(bracket_bpar_h, g0kxy)
+                     call transform_y2ky(g0kxy, g0k_swap)
+                     call swap_kxky_back(g0k_swap, bpar_h_nonlin_k(:, :, iz, it, ivmu))
+                  else
+                     call transform_y2ky_xfirst(bracket_phi_h, g0xky)
+                     g0xky = g0xky / prefac
+                     call transform_x2kx_xfirst(g0xky, phi_h_nonlin_k(:, :, iz, it, ivmu))
+
+                     call transform_y2ky_xfirst(bracket_apar_h, g0xky)
+                     g0xky = g0xky / prefac
+                     call transform_x2kx_xfirst(g0xky, apar_h_nonlin_k(:, :, iz, it, ivmu))
+
+                     call transform_y2ky_xfirst(bracket_bpar_h, g0xky)
+                     g0xky = g0xky / prefac
+                     call transform_x2kx_xfirst(g0xky, bpar_h_nonlin_k(:, :, iz, it, ivmu))
+                  end if
+               end do
+            end do
+         end do
+
+         ! Apply the QN operator to the brackets: gyro-average followed by velocity integration
+         call scatter(kxkyz2vmu, phi_h_nonlin_k, gvmu)
+         call integrate(gvmu, temp)
+         ! Take the ky = 0 component
+         phi_h_nonlin_kx(:, :) = temp(1, :, :, 1)
+
+         ! Do the same for the other brackets
+         call scatter(kxkyz2vmu, apar_h_nonlin_k, gvmu)
+         call integrate(gvmu, temp)
+         apar_h_nonlin_kx(:, :) = temp(1, :, :, 1)
+
+         call scatter(kxkyz2vmu, bpar_h_nonlin_k, gvmu)
+         call integrate(gvmu, temp)
+         bpar_h_nonlin_kx(:, :) = temp(1, :, :, 1)
+      else
+         phi_h_nonlin_kx = 0.
+         apar_h_nonlin_kx = 0.
+         bpar_h_nonlin_kx = 0.
+      end if
+
+      ! Undo the earlier g -> h transformation
+      if (include_apar .or. include_bpar) call g_to_h(g, phi, bpar, -fphi)
+
+      ! Calculate the QN operator acting on g. This is mainly for testing the diagnostics.
+      call scatter(kxkyz2vmu, g, gvmu)
+      call integrate(gvmu, temp)
+      gint(:, :) = temp(1, :, :, 1)
+
+      deallocate (g0k, g0a, g0xy, g1xy, bracket_phi_h, bracket_apar_h, bracket_bpar_h)
+      deallocate (temp, prefac, phi_h_nonlin_k, apar_h_nonlin_k, bpar_h_nonlin_k)
+
+   contains
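+
+      !--------------------------------------------------------------------------
+      ! The helpers below mirror get_dchidx/get_dchidy, but return the
+      ! gyro-averaged kx (or ky) derivative of a single field piece of chi.
+      ! With the factors used here, summing the 'phi', 'apar' and 'bpar' pieces
+      ! should, by construction (not verified here), reproduce the full
+      ! d<chi>/dx of get_dchidx, e.g.
+      !
+      !    d<chi>/dx = <i*kx*fphi*phi>_J0 - <i*kx*2*vpa*stm*apar>_J0
+      !                + <i*kx*4*mu*(T/Z)*bpar>_J1
+      !
+      ! with J0 and J1 denoting the Bessel-weighted gyro-averages.
+      !--------------------------------------------------------------------------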
+
+      subroutine get_dfielddx(iz, ivmu, phi, apar, bpar, dfielddx, field_flag)
+
+         ! Constants
+         use constants, only: zi
+
+         ! Parallelisation
+         use parallelisation_layouts, only: vmu_lo
+         use parallelisation_layouts, only: is_idx, iv_idx, imu_idx
+
+         ! Calculations
+         use calculations_gyro_averages, only: gyro_average
+         use calculations_gyro_averages, only: gyro_average_j1
+
+         ! Flags
+         use parameters_physics, only: fphi
+
+         ! Grids
+         use grids_species, only: spec
+         use grids_velocity, only: vpa, mu
+         use grids_kxky, only: naky, nakx, akx
+
+         implicit none
+
+         ! Arguments
+         integer, intent(in) :: ivmu, iz
+         complex, dimension(:, :), intent(in) :: phi, apar, bpar
+         complex, dimension(:, :), intent(out) :: dfielddx
+         character(len=*), intent(in) :: field_flag
+
+         ! Local variables
+         integer :: iv, is, imu
+         complex, dimension(:, :), allocatable :: field
+
+         !-------------------------------------------------------------------------
+
+         ! Allocate temporary array
+         allocate (field(naky, nakx))
+
+         ! Get the (mu,vpa,s) point
+         is = is_idx(vmu_lo, ivmu)
+         iv = iv_idx(vmu_lo, ivmu)
+         imu = imu_idx(vmu_lo, ivmu)
+
+         ! Calculate i*kx times the requested piece of the field (ky,kx)
+         if (field_flag == 'phi') then
+            field = fphi * zi * spread(akx, 1, naky) * phi
+         else if (field_flag == 'apar') then
+            field = -zi * spread(akx, 1, naky) * 2.0 * vpa(iv) * spec(is)%stm_psi0 * apar
+         else if (field_flag == 'bpar') then
+            field = zi * spread(akx, 1, naky) * 4.0 * mu(imu) * spec(is)%tz * bpar
+         end if
+
+         ! Gyro-average: J0 average for phi and apar, J1-weighted average for bpar
+         if (field_flag == 'bpar') then
+            call gyro_average_j1(field, iz, ivmu, dfielddx)
+         else
+            call gyro_average(field, iz, ivmu, dfielddx)
+         end if
+
+         ! Deallocate temporary array
+         deallocate (field)
+
+      end subroutine get_dfielddx
+
+      subroutine get_dfielddy(iz, ivmu, phi, apar, bpar, dfielddy, field_flag)
+
+         ! Constants
+         use constants, only: zi
+
+         ! Parallelisation
+         use parallelisation_layouts, only: vmu_lo
+         use parallelisation_layouts, only: is_idx, iv_idx, imu_idx
+
+         ! Calculations
+         use calculations_gyro_averages, only: gyro_average
+         use calculations_gyro_averages, only: gyro_average_j1
+
+         ! Flags
+         use parameters_physics, only: fphi
+
+         ! Grids
+         use grids_species, only: spec
+         use grids_velocity, only: vpa, mu
+         use grids_kxky, only: nakx, naky, aky
+
+         implicit none
+
+         ! Arguments
+         integer, intent(in) :: ivmu, iz
+         complex, dimension(:, :), intent(in) :: phi, apar, bpar
+         complex, dimension(:, :), intent(out) :: dfielddy
+         character(len=*), intent(in) :: field_flag
+
+         ! Local variables
+         integer :: iv, is, imu
+         complex, dimension(:, :), allocatable :: field
+
+         !-------------------------------------------------------------------------
+
+         ! Allocate temporary array
+         allocate (field(naky, nakx))
+
+         ! Get the (mu,vpa,s) point
+         is = is_idx(vmu_lo, ivmu)
+         iv = iv_idx(vmu_lo, ivmu)
+         imu = imu_idx(vmu_lo, ivmu)
+
+         ! Calculate i*ky times the requested piece of the field (ky,kx)
+         if (field_flag == 'phi') then
+            field = zi * spread(aky, 2, nakx) * fphi * phi
+         else if (field_flag == 'apar') then
+            field = -zi * spread(aky, 2, nakx) * 2.0 * vpa(iv) * spec(is)%stm_psi0 * apar
+         else if (field_flag == 'bpar') then
+            field = zi * spread(aky, 2, nakx) * 4.0 * mu(imu) * spec(is)%tz * bpar
+         end if
+
+         ! Gyro-average: J0 average for phi and apar, J1-weighted average for bpar
+         if (field_flag == 'bpar') then
+            call gyro_average_j1(field, iz, ivmu, dfielddy)
+         else
+            call gyro_average(field, iz, ivmu, dfielddy)
+         end if
+
+         ! Deallocate temporary array
+         deallocate (field)
+
+      end subroutine get_dfielddy
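+
+      ! NB: forward_transform below deliberately works on the host-associated
+      ! scratch arrays (g0k_swap, g0kxy, g0xky) and the phase factor prefac,
+      ! so it is only valid after the allocations at the top of
+      ! calculate_turbulent_stresses; it is not a standalone routine.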
+
+      subroutine forward_transform(gk, gx)
+
+         use calculations_transforms, only: transform_ky2y, transform_kx2x
+         use calculations_transforms, only: transform_ky2y_xfirst, transform_kx2x_xfirst
+
+         implicit none
+
+         ! Arguments
+         complex, dimension(:, :), intent(in) :: gk
+         real, dimension(:, :), intent(out) :: gx
+
+         !----------------------------------------------------------------------
+
+         ! Fourier transform g(kx,ky) to g(kx,y) and then to g(x,y)
+         if (yfirst) then
+            ! We have i*ky*g(kx,ky) for ky >= 0 and all kx.
+            ! We want to do a 1D complex-to-complex transform in y,
+            ! which requires i*ky*g(kx,ky) for all ky and kx >= 0.
+            ! Use the reality condition: g(kx,-ky) = conjg(g(-kx,ky)),
+            ! so i*(-ky)*g(kx,-ky) = -i*ky*conjg(g(-kx,ky)) = conjg(i*ky*g(-kx,ky))
+            ! and i*kx*g(kx,-ky) = i*kx*conjg(g(-kx,ky)) = conjg(i*(-kx)*g(-kx,ky))
+            ! and i*(-ky)*J0(kx,-ky)*phi(kx,-ky) = conjg(i*ky*J0(-kx,ky)*phi(-kx,ky))
+            ! and i*kx*J0(kx,-ky)*phi(kx,-ky) = conjg(i*(-kx)*J0(-kx,ky)*phi(-kx,ky));
+            ! i.e., we can calculate dg/dx, dg/dy, d<chi>/dx and d<chi>/dy on the
+            ! stella (kx,ky) grid, then conjugate and flip the sign of (kx,ky).
+            ! NB: J0(kx,ky) = J0(-kx,-ky)
+            ! TODO DSO: coordinate change for shearing
+            call swap_kxky(gk, g0k_swap)
+            call transform_ky2y(g0k_swap, g0kxy)
+            call transform_kx2x(g0kxy, gx)
+
+         ! Fourier transform g(kx,ky) to g(x,ky) and then to g(x,y)
+         else
+            call transform_kx2x_xfirst(gk, g0xky)
+            g0xky = g0xky * prefac
+            call transform_ky2y_xfirst(g0xky, gx)
+         end if
+
+      end subroutine forward_transform
+
+      subroutine integrate(fv, fint)
+
+         ! Parallelisation
+         use mp, only: sum_allreduce
+         use parallelisation_layouts, only: kxkyz_lo
+         use parallelisation_layouts, only: iky_idx, ikx_idx, iz_idx, it_idx, is_idx
+
+         ! Grids
+         use grids_z, only: nzgrid
+         use grids_species, only: spec
+         use grids_velocity, only: nvpa, nmu
+
+         ! Calculations
+         use calculations_velocity_integrals, only: integrate_vmu
+         use calculations_gyro_averages, only: gyro_average
+
+         implicit none
+
+         ! Arguments
+         complex, dimension(:, :, kxkyz_lo%llim_proc:), intent(in) :: fv
+         complex, dimension(:, :, -nzgrid:, :), intent(out) :: fint
+
+         ! Local variables
+         complex :: tmp
+         real :: wgt
+         complex, dimension(:, :), allocatable :: g0
+         integer :: ikxkyz, iky, ikx, iz, it, is
+
+         !----------------------------------------------------------------------
+
+         allocate (g0(nvpa, nmu))
+
+         fint = 0.
+         do ikxkyz = kxkyz_lo%llim_proc, kxkyz_lo%ulim_proc
+            iz = iz_idx(kxkyz_lo, ikxkyz)
+            it = it_idx(kxkyz_lo, ikxkyz)
+            ikx = ikx_idx(kxkyz_lo, ikxkyz)
+            iky = iky_idx(kxkyz_lo, ikxkyz)
+            is = is_idx(kxkyz_lo, ikxkyz)
+            ! Gyro-average the integrand at fixed (ky,kx,z,tube,s)
+            call gyro_average(fv(:, :, ikxkyz), ikxkyz, g0)
+            ! Weight by Z_s*n_s and integrate over (vpa,mu)
+            wgt = spec(is)%z * spec(is)%dens_psi0
+            call integrate_vmu(g0, iz, tmp)
+            fint(iky, ikx, iz, it) = fint(iky, ikx, iz, it) + wgt * tmp
+         end do
+         call sum_allreduce(fint)
+
+         deallocate (g0)
+
+      end subroutine integrate
+
+   end subroutine calculate_turbulent_stresses
+
+end module diagnostics_stresses
diff --git a/STELLA_CODE/diagnostics/write_diagnostics_to_netcdf.fpp b/STELLA_CODE/diagnostics/write_diagnostics_to_netcdf.fpp
index 62b1c3183b..2ce9d22cc0 100644
--- a/STELLA_CODE/diagnostics/write_diagnostics_to_netcdf.fpp
+++ b/STELLA_CODE/diagnostics/write_diagnostics_to_netcdf.fpp
@@ -103,7 +103,15 @@
 ! - |f_nozonal|^2(t, z, mu, s) --> f2nozonal_vs_zmus
 ! - |f_nozonal|^2(t, kx, ky, z, s) --> f2nozonal_vs_zkykxs
 ! - |f_nozonal|^2(t, z, vpa, mu, s) --> f2nozonal_vs_zvpamus
-!
+!
+! The nonlinear turbulent stresses are written to the netCDF file:
+! - QN[{phi, h}] --> phi_stress_zkx
+! - QN[{apar, h}] --> apar_stress_zkx
+! - QN[{bpar, h}] --> bpar_stress_zkx
+! - QN[g] --> gint_zkx
+! where QN denotes the quasineutrality operator (gyro-average followed by
+! velocity-space integration), evaluated at ky = 0.
+!
 ! TODO-GA: Print input parameters to netcdf file
 !
 !###############################################################################
@@ -169,6 +176,7 @@ module write_diagnostics_to_netcdf
    public :: write_radial_moments_nc
    public :: write_fluxes_kxkyzs_nc
    public :: write_fluxes_kxkys_nc
+   public :: write_stresses_nc
    public :: get_nout
    public :: sync_nc
 
@@ -925,6 +933,40 @@ contains
 #endif
 end subroutine write_radial_moments_nc
 
+   !============================================================================
+   !=========================== NONLINEAR STRESSES ============================
+   !============================================================================
+
+   subroutine write_stresses_nc(nout, phi_h_nonlin_kx, apar_h_nonlin_kx, bpar_h_nonlin_kx, gint)
+
+      use grids_z, only: nzgrid
+
+      implicit none
+
+      integer, intent(in) :: nout
+      complex, dimension(:, -nzgrid:), intent(in) :: phi_h_nonlin_kx, apar_h_nonlin_kx, bpar_h_nonlin_kx, gint
+
+#ifdef NETCDF
+      call netcdf_write_complex(ncid, "phi_stress_zkx", phi_h_nonlin_kx, &
+                                dim_names=[character(len=4)::"ri", "kx", "zed", "t"], start=[1, 1, 1, nout], &
+                                units=" ", &
+                                long_name="Reynolds stress as a function of t, zed and kx")
+      call netcdf_write_complex(ncid, "apar_stress_zkx", apar_h_nonlin_kx, &
+                                dim_names=[character(len=4)::"ri", "kx", "zed", "t"], start=[1, 1, 1, nout], &
+                                units=" ", &
+                                long_name="Maxwell stress as a function of t, zed and kx")
+      call netcdf_write_complex(ncid, "bpar_stress_zkx", bpar_h_nonlin_kx, &
+                                dim_names=[character(len=4)::"ri", "kx", "zed", "t"], start=[1, 1, 1, nout], &
+                                units=" ", &
+                                long_name="Bpar stress as a function of t, zed and kx")
+      call netcdf_write_complex(ncid, "gint_zkx", gint, &
+                                dim_names=[character(len=4)::"ri", "kx", "zed", "t"], start=[1, 1, 1, nout], &
+                                units=" ", &
+                                long_name="ky = 0 component of the v-integral of <g>_r as a function of t, zed and kx")
+#endif
+
+   end subroutine write_stresses_nc
+
    !============================================================================
    !=============================== DISTRIBUTION ===============================
    !============================================================================
diff --git a/STELLA_CODE/parameters/parameters_diagnostics.f90 b/STELLA_CODE/parameters/parameters_diagnostics.f90
index 10874c0e00..358643bdb1 100644
--- a/STELLA_CODE/parameters/parameters_diagnostics.f90
+++ b/STELLA_CODE/parameters/parameters_diagnostics.f90
@@ -49,6 +49,9 @@ module parameters_diagnostics
    public :: write_radial_moments
    public :: write_moments
 
+   ! Write stresses in <diagnostics_stresses>
+   public :: write_stresses
+
    private
 
    ! Variables used to write diagnostics
@@ -105,6 +108,9 @@ module parameters_diagnostics
    ! Write moments in <diagnostics_moments>
    logical :: write_radial_moments
    logical :: write_moments
+
+   ! Write stresses in <diagnostics_stresses>
+   logical :: write_stresses
 
 contains
 
@@ -140,7 +146,8 @@ subroutine read_input_file
       use namelist_diagnostics, only: &
          read_namelist_diagnostics, read_namelist_diagnostics_potential, &
         read_namelist_diagnostics_omega, read_namelist_diagnostics_distribution, &
-         read_namelist_diagnostics_fluxes, read_namelist_diagnostics_moments
+         read_namelist_diagnostics_fluxes, read_namelist_diagnostics_moments, &
+         read_namelist_diagnostics_stresses
 
       use parameters_numerical, only: autostop
 
@@ -190,7 +197,9 @@
       call read_namelist_diagnostics_moments (write_all_moments, &
             write_moments, write_radial_moments)
-
+
+      call read_namelist_diagnostics_stresses (write_stresses)
+
       !-------------------------------------------------------------------
       ! If <autostop> = False then we need <navg> = -1
@@ -254,6 +264,7 @@
       call broadcast(write_radial_moments)
       call broadcast(write_fluxes_kxkyz)
      call broadcast(write_fluxes_kxky)
+      call broadcast(write_stresses)
 
    end subroutine broadcast_parameters
diff --git a/STELLA_CODE/read_namelists_from_input_file/namelist_diagnostics.f90 b/STELLA_CODE/read_namelists_from_input_file/namelist_diagnostics.f90
index 9ede87a07c..7709e1f6f6 100644
--- a/STELLA_CODE/read_namelists_from_input_file/namelist_diagnostics.f90
+++ b/STELLA_CODE/read_namelists_from_input_file/namelist_diagnostics.f90
@@ -58,6 +58,9 @@
 ! diagnostics_moments
 ! write_moments = .false.
 ! write_radial_moments = .false.
+!
+! diagnostics_stresses
+! write_stresses = .false.
 !
 ! For each namelists two (or three) routines exist:
 ! - set_default_parameters_<namelist>
@@ -80,6 +83,7 @@ module namelist_diagnostics
    public :: read_namelist_diagnostics_distribution
    public :: read_namelist_diagnostics_fluxes
    public :: read_namelist_diagnostics_moments
+   public :: read_namelist_diagnostics_stresses
 
    private
 
@@ -690,4 +694,61 @@
    end subroutine check_inputs_diagnostics_moments
 
 end subroutine read_namelist_diagnostics_moments
 
+   !****************************************************************************
+   !                           DIAGNOSTICS STRESSES                            !
+   !****************************************************************************
+   subroutine read_namelist_diagnostics_stresses (write_stresses)
+
+      use mp, only: proc0, mp_abort
+      use parameters_physics, only: initialised_parameters_physics
+
+      implicit none
+
+      ! Variables that are read from the input file
+      logical, intent(out) :: write_stresses
+
+      !-------------------------------------------------------------------------
+
+      ! Abort if the physics parameters have not been read yet
+      if (.not. initialised_parameters_physics) then
+         call mp_abort('Initialise physics parameters before reading diagnostics namelists. Aborting.')
+      end if
+
+      ! Set the default input parameters and read the input file
+      if (.not. proc0) return
+      call set_default_parameters_diagnostics_stresses
+      call read_input_file_diagnostics_stresses
+
+   contains
+
+      !------------------------ Default input parameters ------------------------
+      subroutine set_default_parameters_diagnostics_stresses
+
+         implicit none
+
+         write_stresses = .false.
+
+      end subroutine set_default_parameters_diagnostics_stresses
+
+      !---------------------------- Read input file -----------------------------
+      subroutine read_input_file_diagnostics_stresses
+
+         use file_utils, only: input_unit_exist
+
+         implicit none
+
+         ! Local variables needed to locate the namelist in the input file
+         integer :: in_file
+         logical :: dexist
+
+         namelist /diagnostics_stresses/ write_stresses
+
+         in_file = input_unit_exist('diagnostics_stresses', dexist)
+         if (dexist) read (unit=in_file, nml=diagnostics_stresses)
+
+      end subroutine read_input_file_diagnostics_stresses
+
+   end subroutine read_namelist_diagnostics_stresses
+
+
 end module namelist_diagnostics
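
Usage note: with this patch applied, the new diagnostic is switched on from the stella input file through the new namelist. A minimal sketch, assuming the usual stella namelist conventions and a nonlinear run (the brackets are only computed when the ExB nonlinearity is included):

    &diagnostics_stresses
      write_stresses = .true.
    /

The four complex outputs (phi_stress_zkx, apar_stress_zkx, bpar_stress_zkx and gint_zkx) are then written to the run's netCDF file at every diagnostic output step, each with dimensions (ri, kx, zed, t). In a linear run with write_stresses = .true., all four arrays are written as zeros.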