Commit 38b7917d authored by Espen Sollum's avatar Espen Sollum

Parallelization of domain fill option (save/restart not implemented yet)

parent db712a8f
......@@ -191,10 +191,10 @@ subroutine boundcond_domainfill(itime,loutend)
windx=(windhl(1)*dt2+windhl(2)*dt1)*dtt
rhox=(rhohl(1)*dt2+rhohl(2)*dt1)*dtt
! Calculate mass flux
!********************
! Calculate mass flux, divided by number of processes
!****************************************************
fluxofmass=windx*rhox*boundarea*real(lsynctime)
fluxofmass=windx*rhox*boundarea*real(lsynctime)/mp_partgroup_np
! If the mass flux is directed into the domain, add it to previous mass fluxes;
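The division by mp_partgroup_np means that each MPI process accumulates only its share of the boundary mass flux, so the particles released by all processes together represent the full physical flux. A minimal sketch of that idea follows (illustrative only, not the FLEXPART source; the subroutine itself, the release criterion and the names np, acc_mass and nrelease are assumptions):

subroutine accumulate_boundary_mass(windx,rhox,boundarea,lsynctime,np, &
     xmassperparticle,acc_mass,nrelease)
  implicit none
  real, intent(in)     :: windx, rhox, boundarea, xmassperparticle
  integer, intent(in)  :: lsynctime, np
  real, intent(inout)  :: acc_mass     ! mass accumulated at this boundary cell
  integer, intent(out) :: nrelease     ! particles to release on this rank
  real :: fluxofmass

! Per-process share of the mass flux through this boundary cell
  fluxofmass=windx*rhox*boundarea*real(lsynctime)/real(np)

! Inflow is accumulated, outflow resets the accumulator
  if (fluxofmass.gt.0.) then
    acc_mass=acc_mass+fluxofmass
  else
    acc_mass=0.
  end if

! Release one particle per accumulated particle mass on this rank
  nrelease=int(acc_mass/xmassperparticle)
  acc_mass=acc_mass-real(nrelease)*xmassperparticle
end subroutine accumulate_boundary_mass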
......@@ -424,10 +424,10 @@ subroutine boundcond_domainfill(itime,loutend)
windx=(windhl(1)*dt2+windhl(2)*dt1)*dtt
rhox=(rhohl(1)*dt2+rhohl(2)*dt1)*dtt
! Calculate mass flux
!********************
! Calculate mass flux, divided by number of processes
!****************************************************
fluxofmass=windx*rhox*boundarea*real(lsynctime)
fluxofmass=windx*rhox*boundarea*real(lsynctime)/mp_partgroup_np
! If the mass flux is directed into the domain, add it to previous mass fluxes;
! if it is out of the domain, set accumulated mass flux to zero
......@@ -586,6 +586,7 @@ subroutine boundcond_domainfill(itime,loutend)
! must be dumped, too, to be used for later runs
!*****************************************************************************
! :TODO: eso parallelize
if ((ipout.gt.0).and.(itime.eq.loutend)) then
open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
form='unformatted')
......
......@@ -103,10 +103,10 @@ subroutine concoutput_nest(itime,outnum)
! mind eso:added to ensure identical results between 2&3-fields versions
! Measure execution time
if (mp_measure_time) then
call cpu_time(mp_root_time_beg)
mp_root_wtime_beg = mpi_wtime()
end if
if (mp_measure_time) call mpif_mtime('iotime',0)
! call cpu_time(mp_root_time_beg)
! mp_root_wtime_beg = mpi_wtime()
! end if
if (verbosity.eq.1) then
print*,'inside concoutput_surf '
......@@ -579,12 +579,13 @@ subroutine concoutput_nest(itime,outnum)
end do
end do
if (mp_measure_time) then
call cpu_time(mp_root_time_end)
mp_root_wtime_end = mpi_wtime()
mp_root_time_total = mp_root_time_total + (mp_root_time_end - mp_root_time_beg)
mp_root_wtime_total = mp_root_wtime_total + (mp_root_wtime_end - mp_root_wtime_beg)
end if
if (mp_measure_time) call mpif_mtime('iotime',1)
! if (mp_measure_time) then
! call cpu_time(mp_root_time_end)
! mp_root_wtime_end = mpi_wtime()
! mp_root_time_total = mp_root_time_total + (mp_root_time_end - mp_root_time_beg)
! mp_root_wtime_total = mp_root_wtime_total + (mp_root_wtime_end - mp_root_wtime_beg)
! end if
end subroutine concoutput_nest
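The two hunks above replace the open-coded cpu_time/mpi_wtime bookkeeping with paired calls to mpif_mtime(tag,imode), where imode 0 starts and imode 1 stops the named timer and adds the interval to a running total (compare the 'advance' case added to mpif_mtime further down in this commit). The call-site pattern, shown here only as an illustration:

      if (mp_measure_time) call mpif_mtime('iotime',0)   ! start the 'iotime' timer
! ... write the gridded output ...
      if (mp_measure_time) call mpif_mtime('iotime',1)   ! stop and add to the running total

Centralising the begin/end logic in mpi_mod is also what allows the timing counters to be made private to that module later in this commit.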
......@@ -102,10 +102,10 @@ subroutine concoutput_surf_nest(itime,outnum)
! mind eso:added to get consistent results between 2&3-fields versions
! Measure execution time
if (mp_measure_time) then
call cpu_time(mp_root_time_beg)
mp_root_wtime_beg = mpi_wtime()
end if
if (mp_measure_time) call mpif_mtime('iotime',0)
! call cpu_time(mp_root_time_beg)
! mp_root_wtime_beg = mpi_wtime()
! end if
if (verbosity.eq.1) then
print*,'inside concoutput_surf '
......@@ -668,12 +668,13 @@ subroutine concoutput_surf_nest(itime,outnum)
end do
end do
if (mp_measure_time) then
call cpu_time(mp_root_time_end)
mp_root_wtime_end = mpi_wtime()
mp_root_time_total = mp_root_time_total + (mp_root_time_end - mp_root_time_beg)
mp_root_wtime_total = mp_root_wtime_total + (mp_root_wtime_end - mp_root_wtime_beg)
end if
if (mp_measure_time) call mpif_mtime('iotime',1)
! if (mp_measure_time) then
! call cpu_time(mp_root_time_end)
! mp_root_wtime_end = mpi_wtime()
! mp_root_time_total = mp_root_time_total + (mp_root_time_end - mp_root_time_beg)
! mp_root_wtime_total = mp_root_wtime_total + (mp_root_wtime_end - mp_root_wtime_beg)
! end if
end subroutine concoutput_surf_nest
......@@ -20,30 +20,30 @@
!**********************************************************************
subroutine init_domainfill
!
!*****************************************************************************
! *
! Initializes particles equally distributed over the first release location *
! specified in file RELEASES. This box is assumed to be the domain for doing *
! domain-filling trajectory calculations. *
! All particles carry the same amount of mass which altogether comprises the *
! mass of air within the box. *
! *
! Author: A. Stohl *
! *
! 15 October 2002 *
! *
!*****************************************************************************
! *
! Variables: *
! *
! numparticlecount consecutively counts the number of particles released *
! nx_we(2) grid indices for western and eastern boundary of domain- *
! filling trajectory calculations *
! ny_sn(2) grid indices for southern and northern boundary of domain- *
! filling trajectory calculations *
! *
!*****************************************************************************
use point_mod
use par_mod
......@@ -64,19 +64,19 @@ subroutine init_domainfill
integer :: idummy = -11
! Determine the release region (only full grid cells), over which particles
! shall be initialized
! Use 2 fields for west/east and south/north boundary
!**************************************************************************
nx_we(1)=max(int(xpoint1(1)),0)
nx_we(2)=min((int(xpoint2(1))+1),nxmin1)
ny_sn(1)=max(int(ypoint1(1)),0)
ny_sn(2)=min((int(ypoint2(1))+1),nymin1)
! For global simulations (both global wind data and global domain-filling),
! set a switch, such that no boundary conditions are used
!**************************************************************************
if (xglobal.and.sglobal.and.nglobal) then
if ((nx_we(1).eq.0).and.(nx_we(2).eq.nxmin1).and. &
(ny_sn(1).eq.0).and.(ny_sn(2).eq.nymin1)) then
......@@ -86,15 +86,15 @@ subroutine init_domainfill
endif
endif
! Do not release particles twice (i.e., not in both the leftmost and rightmost
! grid cells) for a global domain
!*****************************************************************************
if (xglobal) nx_we(2)=min(nx_we(2),nx-2)
! Calculate area of grid cell with formula M=2*pi*R*h*dx/360,
! see Netz, Formeln der Mathematik, 5. Auflage (1983), p.90
!************************************************************
do jy=ny_sn(1),ny_sn(2) ! loop about latitudes
ylat=ylat0+real(jy)*dy
......@@ -116,7 +116,7 @@ subroutine init_domainfill
gridarea(jy)=2.*pi*r_earth*hzone*dx/360.
end do
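For reference, the formula cited from Netz is Archimedes' zone theorem: the area of the spherical zone between two latitudes depends only on its height, A_zone = 2*pi*R*h with h = R*(sin(phi_north) - sin(phi_south)). A single grid cell covers the fraction dx/360 of that zone in longitude, which gives gridarea = 2*pi*r_earth*hzone*dx/360 as computed above, with hzone playing the role of h.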
! Do the same for the south pole
if (sglobal) then
ylat=ylat0
......@@ -129,7 +129,7 @@ subroutine init_domainfill
gridarea(0)=2.*pi*r_earth*hzone*dx/360.
endif
! Do the same for the north pole
if (nglobal) then
ylat=ylat0+real(nymin1)*dy
......@@ -143,8 +143,8 @@ subroutine init_domainfill
endif
! Calculate total mass of each grid column and of the whole atmosphere
!*********************************************************************
colmasstotal=0.
do jy=ny_sn(1),ny_sn(2) ! loop about latitudes
......@@ -156,13 +156,13 @@ subroutine init_domainfill
end do
end do
write(*,*) 'Atm. mass: ',colmasstotal
if (ipin.eq.0) numpart=0
! Determine the particle positions
!*********************************
numparttot=0
numcolumn=0
......@@ -174,9 +174,9 @@ subroutine init_domainfill
if (ncolumn.eq.0) goto 30
if (ncolumn.gt.numcolumn) numcolumn=ncolumn
! Calculate pressure at the altitudes of model surfaces, using the air density
! information, which is stored as a 3-d field
!*****************************************************************************
do kz=1,nz
pp(kz)=rho(ix,jy,kz,1)*r_air*tt(ix,jy,kz,1)
......@@ -190,10 +190,10 @@ subroutine init_domainfill
jj=jj+1
! For columns with many particles (i.e. around the equator), distribute
! the particles equally, for columns with few particles (i.e. around the
! poles), distribute the particles randomly
!***********************************************************************
if (ncolumn.gt.20) then
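The comment above describes the two placement strategies; here is a minimal sketch of the idea, assuming only the column's pressure bounds pbot and ptop and using the random_number intrinsic (the actual code works with the model pressure profile pp(kz) and its own ran1 generator, visible elsewhere in this diff):

! Illustrative only: choose ncolumn target pressures between ptop and pbot.
! Dense columns are spaced evenly in pressure, sparse ones randomly.
subroutine column_pressures(ncolumn,pbot,ptop,pnew)
  implicit none
  integer, intent(in) :: ncolumn
  real, intent(in)    :: pbot, ptop      ! bottom and top pressure of the column
  real, intent(out)   :: pnew(ncolumn)
  real :: deltacol, r
  integer :: j

  if (ncolumn.gt.20) then                ! many particles: equal spacing
    deltacol=(pbot-ptop)/real(ncolumn)
    do j=1,ncolumn
      pnew(j)=pbot-(real(j)-0.5)*deltacol
    end do
  else                                   ! few particles: random placement
    do j=1,ncolumn
      call random_number(r)
      pnew(j)=ptop+r*(pbot-ptop)
    end do
  end if
end subroutine column_pressures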
......@@ -208,10 +208,10 @@ subroutine init_domainfill
dz2=pnew-pp(kz+1)
dz=1./(dz1+dz2)
! Assign particle position
!*************************
! Do the following steps only if particles are not read in from previous model run
!*****************************************************************************
if (ipin.eq.0) then
xtra1(numpart+jj)=real(ix)-0.5+ran1(idummy)
if (ix.eq.0) xtra1(numpart+jj)=ran1(idummy)
......@@ -223,8 +223,8 @@ subroutine init_domainfill
ztra1(numpart+jj)=height(nz)-0.5
! Interpolate PV to the particle position
!****************************************
ixm=int(xtra1(numpart+jj))
jym=int(ytra1(numpart+jj))
ixp=ixm+1
......@@ -259,14 +259,14 @@ subroutine init_domainfill
if (ylat.lt.0.) pvpart=-1.*pvpart
! For domain-filling option 2 (stratospheric O3), do the rest only in the stratosphere
!*****************************************************************************
if (((ztra1(numpart+jj).gt.3000.).and. &
(pvpart.gt.pvcrit)).or.(mdomainfill.eq.1)) then
! Assign certain properties to the particle
!******************************************
nclass(numpart+jj)=min(int(ran1(idummy)* &
real(nclassunc))+1,nclassunc)
numparticlecount=numparticlecount+1
......@@ -292,10 +292,16 @@ subroutine init_domainfill
end do
end do
write(*,*) 'init_domainfill> ncolumn: ', ncolumn
write(*,*) 'init_domainfill> numcolumn: ', numcolumn
write(*,*) 'init_domainfill> ny_sn(1),ny_sn(2): ', ny_sn(1),ny_sn(2)
write(*,*) 'init_domainfill> nx_we(1),nx_we(2): ', nx_we(1),nx_we(2)
! Check whether numpart is really smaller than maxpart
!*****************************************************
! ESO :TODO: this warning needs to be moved further up, otherwise an out-of-bounds error occurs earlier
if (numpart.gt.maxpart) then
write(*,*) 'numpart too large: change source in init_atm_mass.f'
write(*,*) 'numpart: ',numpart,' maxpart: ',maxpart
......@@ -305,8 +311,8 @@ subroutine init_domainfill
xmassperparticle=colmasstotal/real(numparttot)
! Make sure that all particles are within domain
!***********************************************
do j=1,numpart
if ((xtra1(j).lt.0.).or.(xtra1(j).ge.real(nxmin1)).or. &
......@@ -318,15 +324,15 @@ subroutine init_domainfill
! For boundary conditions, we need fewer particle release heights per column,
! because otherwise it takes too long until enough mass has accumulated to
! release a particle at the boundary (would take dx/u seconds), leading to
! relatively large position errors of the order of one grid distance.
! It's better to release fewer particles per column, but to do so more often.
! Thus, use on the order of nz starting heights per column.
! We thus repeat the above to determine fewer starting heights, which are
! used further on in subroutine boundcond_domainfill.f.
!****************************************************************************
fractus=real(numcolumn)/real(nz)
write(*,*) 'Total number of particles at model start: ',numpart
......@@ -342,26 +348,26 @@ subroutine init_domainfill
if (ncolumn.eq.0) goto 80
! Memorize how many particles per column shall be used for all boundaries
! This is further used in subroutine boundcond_domainfill.f
! Use 2 fields for west/east and south/north boundary
!************************************************************************
if (ix.eq.nx_we(1)) numcolumn_we(1,jy)=ncolumn
if (ix.eq.nx_we(2)) numcolumn_we(2,jy)=ncolumn
if (jy.eq.ny_sn(1)) numcolumn_sn(1,ix)=ncolumn
if (jy.eq.ny_sn(2)) numcolumn_sn(2,ix)=ncolumn
! Calculate pressure at the altitudes of model surfaces, using the air density
! information, which is stored as a 3-d field
!*****************************************************************************
do kz=1,nz
pp(kz)=rho(ix,jy,kz,1)*r_air*tt(ix,jy,kz,1)
end do
! Determine the reference starting altitudes
!*******************************************
deltacol=(pp(1)-pp(nz))/real(ncolumn)
pnew=pp(1)+deltacol/2.
......@@ -373,19 +379,19 @@ subroutine init_domainfill
dz2=pnew-pp(kz+1)
dz=1./(dz1+dz2)
zposition=(height(kz)*dz2+height(kz+1)*dz1)*dz
if (zposition.gt.height(nz)-0.5) zposition=height(nz)-0.5
! Memorize vertical positions where particles are introduced
! This is further used in subroutine boundcond_domainfill.f
!***********************************************************
if (ix.eq.nx_we(1)) zcolumn_we(1,jy,j)=zposition
if (ix.eq.nx_we(2)) zcolumn_we(2,jy,j)=zposition
if (jy.eq.ny_sn(1)) zcolumn_sn(1,ix,j)=zposition
if (jy.eq.ny_sn(2)) zcolumn_sn(2,ix,j)=zposition
! Initialize mass that has accumulated at boundary to zero
!*********************************************************
acc_mass_we(1,jy,j)=0.
acc_mass_we(2,jy,j)=0.
......@@ -398,10 +404,10 @@ subroutine init_domainfill
end do
end do
! If particles shall be read in to continue an existing run,
! then the accumulated masses at the domain boundaries must be read in, too.
! This overrides any previous calculations.
!***************************************************************************
if (ipin.eq.1) then
open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
......
......@@ -162,12 +162,14 @@ subroutine init_domainfill
do ix=nx_we(1),nx_we(2) ! loop about longitudes
pp(1)=rho(ix,jy,1,1)*r_air*tt(ix,jy,1,1)
pp(nz)=rho(ix,jy,nz,1)*r_air*tt(ix,jy,nz,1)
colmass(ix,jy)=(pp(1)-pp(nz))/ga*gridarea(jy)
! Each MPI process is assigned an equal share of particles
colmass(ix,jy)=(pp(1)-pp(nz))/ga*gridarea(jy)/mp_partgroup_np
colmasstotal=colmasstotal+colmass(ix,jy)
end do
end do
write(*,*) 'Atm. mass: ',colmasstotal
if (lroot) write(*,*) 'Atm. mass: ',colmasstotal
if (ipin.eq.0) numpart=0
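Guarding the diagnostic with lroot makes only the root process print it, instead of every rank writing the same line. A self-contained sketch of the pattern with plain MPI (lroot is assumed to be set once during MPI initialisation, as FLEXPART's mpi_mod presumably does):

program root_print_demo
  use mpi
  implicit none
  integer :: ierr, myrank
  logical :: lroot

  call MPI_Init(ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD, myrank, ierr)
  lroot = (myrank.eq.0)                 ! true only on the root rank

  if (lroot) write(*,*) 'printed once, by rank 0 only'

  call MPI_Finalize(ierr)
end program root_print_demo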
......@@ -414,6 +416,7 @@ subroutine init_domainfill
! This overrides any previous calculations.
!***************************************************************************
! :TODO: eso: parallelize
if (ipin.eq.1) then
open(unitboundcond,file=path(2)(1:length(2))//'boundcond.bin', &
form='unformatted')
......
......@@ -119,31 +119,31 @@ module mpi_mod
logical, parameter :: mp_dev_mode = .false.
logical, parameter :: mp_dbg_out = .false.
logical, parameter :: mp_time_barrier=.true.
logical, parameter :: mp_measure_time=.false.
logical, parameter :: mp_measure_time=.true.
logical, parameter :: mp_exact_numpart=.true.
! for measuring CPU/Wall time
real(sp) :: mp_comm_time_beg, mp_comm_time_end, mp_comm_time_total=0.
real(dp) :: mp_comm_wtime_beg, mp_comm_wtime_end, mp_comm_wtime_total=0.
real(sp) :: mp_root_time_beg, mp_root_time_end, mp_root_time_total=0.
real(dp) :: mp_root_wtime_beg, mp_root_wtime_end, mp_root_wtime_total=0.
real(sp) :: mp_barrier_time_beg, mp_barrier_time_end, mp_barrier_time_total=0.
real(dp) :: mp_barrier_wtime_beg, mp_barrier_wtime_end, mp_barrier_wtime_total=0.
real(sp) :: tm_nploop_beg, tm_nploop_end, tm_nploop_total=0.
real(sp) :: tm_tot_beg, tm_tot_end, tm_tot_total=0.
real(dp) :: mp_getfields_wtime_beg, mp_getfields_wtime_end, mp_getfields_wtime_total=0.
real(sp) :: mp_getfields_time_beg, mp_getfields_time_end, mp_getfields_time_total=0.
real(dp) :: mp_readwind_wtime_beg, mp_readwind_wtime_end, mp_readwind_wtime_total=0.
real(sp) :: mp_readwind_time_beg, mp_readwind_time_end, mp_readwind_time_total=0.
real(dp) :: mp_io_wtime_beg, mp_io_wtime_end, mp_io_wtime_total=0.
real(sp) :: mp_io_time_beg, mp_io_time_end, mp_io_time_total=0.
real(dp) :: mp_wetdepo_wtime_beg, mp_wetdepo_wtime_end, mp_wetdepo_wtime_total=0.
real(sp) :: mp_wetdepo_time_beg, mp_wetdepo_time_end, mp_wetdepo_time_total=0.
real(dp) :: mp_advance_wtime_beg, mp_advance_wtime_end, mp_advance_wtime_total=0.
real(dp) :: mp_conccalc_time_beg, mp_conccalc_time_end, mp_conccalc_time_total=0.
real(dp) :: mp_total_wtime_beg, mp_total_wtime_end, mp_total_wtime_total=0.
real(dp) :: mp_vt_wtime_beg, mp_vt_wtime_end, mp_vt_wtime_total
real(sp) :: mp_vt_time_beg, mp_vt_time_end, mp_vt_time_total
real(sp),private :: mp_comm_time_beg, mp_comm_time_end, mp_comm_time_total=0.
real(dp),private :: mp_comm_wtime_beg, mp_comm_wtime_end, mp_comm_wtime_total=0.
real(sp),private :: mp_root_time_beg, mp_root_time_end, mp_root_time_total=0.
real(dp),private :: mp_root_wtime_beg, mp_root_wtime_end, mp_root_wtime_total=0.
real(sp),private :: mp_barrier_time_beg, mp_barrier_time_end, mp_barrier_time_total=0.
real(dp),private :: mp_barrier_wtime_beg, mp_barrier_wtime_end, mp_barrier_wtime_total=0.
real(sp),private :: tm_nploop_beg, tm_nploop_end, tm_nploop_total=0.
real(sp),private :: tm_tot_beg, tm_tot_end, tm_tot_total=0.
real(dp),private :: mp_getfields_wtime_beg, mp_getfields_wtime_end, mp_getfields_wtime_total=0.
real(sp),private :: mp_getfields_time_beg, mp_getfields_time_end, mp_getfields_time_total=0.
real(dp),private :: mp_readwind_wtime_beg, mp_readwind_wtime_end, mp_readwind_wtime_total=0.
real(sp),private :: mp_readwind_time_beg, mp_readwind_time_end, mp_readwind_time_total=0.
real(dp),private :: mp_io_wtime_beg, mp_io_wtime_end, mp_io_wtime_total=0.
real(sp),private :: mp_io_time_beg, mp_io_time_end, mp_io_time_total=0.
real(dp),private :: mp_wetdepo_wtime_beg, mp_wetdepo_wtime_end, mp_wetdepo_wtime_total=0.
real(sp),private :: mp_wetdepo_time_beg, mp_wetdepo_time_end, mp_wetdepo_time_total=0.
real(dp),private :: mp_advance_wtime_beg, mp_advance_wtime_end, mp_advance_wtime_total=0.
real(dp),private :: mp_conccalc_time_beg, mp_conccalc_time_end, mp_conccalc_time_total=0.
real(dp),private :: mp_total_wtime_beg, mp_total_wtime_end, mp_total_wtime_total=0.
real(dp),private :: mp_vt_wtime_beg, mp_vt_wtime_end, mp_vt_wtime_total
real(sp),private :: mp_vt_time_beg, mp_vt_time_end, mp_vt_time_total
! dat_lun logical unit number for i/o
integer, private :: dat_lun
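Making the timing counters private means call sites can no longer touch them directly and must go through mpif_mtime. A minimal sketch of that encapsulation pattern (illustrative module and names, not the FLEXPART code; cpu_time stands in for mpi_wtime so the sketch has no MPI dependency):

module timing_sketch
  implicit none
  private
  public :: mtime
  real :: t_beg = 0., t_total = 0.       ! accumulators, private to the module

contains

  subroutine mtime(imode)
    integer, intent(in) :: imode
    real :: t
    call cpu_time(t)
    if (imode.eq.0) then
      t_beg = t                          ! start the interval
    else
      t_total = t_total + (t - t_beg)    ! stop and accumulate
    end if
  end subroutine mtime

end module timing_sketch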
......@@ -1823,6 +1823,16 @@ contains
&(mp_wetdepo_time_end - mp_wetdepo_time_beg)
end if
case ('advance')
if (imode.eq.0) then
mp_advance_wtime_beg = mpi_wtime()
else
mp_advance_wtime_end = mpi_wtime()
mp_advance_wtime_total = mp_advance_wtime_total + &
&(mp_advance_wtime_end - mp_advance_wtime_beg)
end if
case ('getfields')
if (imode.eq.0) then
mp_getfields_wtime_beg = mpi_wtime()
......@@ -1853,6 +1863,7 @@ contains
mp_conccalc_time_total = mp_conccalc_time_total + mp_conccalc_time_end - &
&mp_conccalc_time_beg
end if
case ('rootonly')
if (imode.eq.0) then
call cpu_time(mp_root_time_beg)
......
......@@ -213,8 +213,8 @@ module par_mod
! Maximum number of particles, species, and similar
!**************************************************
integer,parameter :: maxpart=400000
integer,parameter :: maxspec=6
integer,parameter :: maxpart=40000000
integer,parameter :: maxspec=1
real,parameter :: minmass=0.0 !0.0001
! maxpart Maximum number of particles
......
......@@ -694,15 +694,18 @@ subroutine timemanager
! Integrate Langevin equation for lsynctime seconds
!*************************************************
mp_advance_wtime_beg = mpi_wtime()
if (mp_measure_time) call mpif_mtime('advance',0)
!mp_advance_wtime_beg = mpi_wtime()
call advance(itime,npoint(j),idt(j),uap(j),ucp(j),uzp(j), &
us(j),vs(j),ws(j),nstop,xtra1(j),ytra1(j),ztra1(j),prob, &
cbt(j))
mp_advance_wtime_end = mpi_wtime()
mp_advance_wtime_total = mp_advance_wtime_total + (mp_advance_wtime_end - &
& mp_advance_wtime_beg)
if (mp_measure_time) call mpif_mtime('advance',1)
! mp_advance_wtime_end = mpi_wtime()
! mp_advance_wtime_total = mp_advance_wtime_total + (mp_advance_wtime_end - &
! & mp_advance_wtime_beg)
! Calculate the gross fluxes across layer interfaces
......