Commit d9f0585f authored by Sabine

Merge branch 'dev' of git.nilu.no:flexpart/flexpart into dev

parents d404d981 c8fc7249
@@ -66,9 +66,10 @@ program flexpart
end do
call gasdev1(idummy,rannumb(maxrand),rannumb(maxrand-1))
! FLEXPART version string
flexversion_major = '10' ! Major version number, also used for species file names
flexversion='Version '//trim(flexversion_major)//'.0beta (2015-05-01)'
flexversion='Version '//trim(flexversion_major)//'.1beta (2016-11-02)'
verbosity=0
! Read the pathnames where input/output files are stored
@@ -383,6 +384,17 @@ program flexpart
end do
end do
! Inform whether output kernel is used or not
!*********************************************
if (lroot) then
if (lnokernel) then
write(*,*) "Concentrations are calculated without using kernel"
else
write(*,*) "Concentrations are calculated using kernel"
end if
end if
! Calculate particle trajectories
!********************************
@@ -401,10 +413,15 @@ program flexpart
call timemanager
! NIK 16.02.2005
do i=1,nspec
write(*,*) '**********************************************'
write(*,*) 'Total number of occurrences of below-cloud scavenging', tot_blc_count
write(*,*) 'Total number of occurrences of in-cloud scavenging', tot_inc_count
write(*,*) 'Scavenging statistics for species ', species(i), ':'
write(*,*) 'Total number of occurrences of below-cloud scavenging', &
& tot_blc_count(i)
write(*,*) 'Total number of occurrences of in-cloud scavenging', &
& tot_inc_count(i)
write(*,*) '**********************************************'
end do
write(*,*) 'CONGRATULATIONS: YOU HAVE SUCCESSFULLY COMPLETED A FLE&
&XPART MODEL RUN!'
......
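Editorial note on the hunk above: the scavenging tallies change from scalars to per-species arrays so the final statistics can be reported per species (tot_blc_count(i), tot_inc_count(i)). A minimal standalone sketch of that pattern; every _demo name is hypothetical, and the real counters presumably live in a shared module:

program scav_count_demo
  implicit none
  integer, parameter :: nspec_demo = 2
  ! one 8-byte tally per species, as in tot_blc_count(i)/tot_inc_count(i)
  integer(kind=8) :: blc_demo(nspec_demo), inc_demo(nspec_demo)
  integer :: i
  blc_demo = 0_8
  inc_demo = 0_8
  blc_demo(1) = blc_demo(1) + 1   ! e.g. one below-cloud event for species 1
  do i = 1, nspec_demo
    write(*,*) 'species', i, ': below-cloud', blc_demo(i), &
         ', in-cloud', inc_demo(i)
  end do
end program scav_count_demo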
@@ -54,15 +54,17 @@ program flexpart
character(len=256) :: inline_options !pathfile, flexversion, arg2
! Initialize mpi
!*********************
! Initialize mpi
!*********************
call mpif_init
if (mp_measure_time) call mpif_mtime('flexpart',0)
! Initialize arrays in com_mod
!*****************************
call com_mod_allocate_part(maxpart_mpi)
! Initialize arrays in com_mod
!*****************************
if(.not.(lmpreader.and.lmp_use_reader)) call com_mod_allocate_part(maxpart_mpi)
! Generate a large number of random numbers
!******************************************
@@ -78,7 +80,7 @@ program flexpart
! FLEXPART version string
flexversion_major = '10' ! Major version number, also used for species file names
! flexversion='Ver. 10 Beta MPI (2015-05-01)'
flexversion='Ver. '//trim(flexversion_major)//' Beta MPI (2015-05-01)'
flexversion='Ver. '//trim(flexversion_major)//'.1beta MPI (2016-11-02)'
verbosity=0
! Read the pathnames where input/output files are stored
@@ -305,9 +307,11 @@ program flexpart
print*,'Initialize all particles to non-existent'
endif
if (.not.(lmpreader.and.lmp_use_reader)) then
do j=1, size(itra1) ! maxpart_mpi
itra1(j)=-999999999
end do
end if
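! Editorial note: lmpreader/lmp_use_reader mark a process dedicated to
! reading wind fields; it holds no particles, which is why the com_mod
! allocation above and this initialization are both skipped for it.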
! For continuation of previous run, read in particle positions
!*************************************************************
@@ -317,7 +321,7 @@ program flexpart
print*,'call readpartpositions'
endif
! readwind process skips this step
if (lmp_use_reader.and..not.lmpreader) call readpartpositions
if (.not.(lmpreader.and.lmp_use_reader)) call readpartpositions
else
if (verbosity.gt.0 .and. lroot) then
print*,'numpart=0, numparticlecount=0'
@@ -424,6 +428,16 @@ program flexpart
end do
end do
! Inform whether output kernel is used or not
!*********************************************
if (lroot) then
if (lnokernel) then
write(*,*) "Concentrations are calculated without using kernel"
else
write(*,*) "Concentrations are calculated using kernel"
end if
end if
! Calculate particle trajectories
!********************************
@@ -447,24 +461,29 @@ program flexpart
! NIK 16.02.2005
if (lroot) then
call MPI_Reduce(MPI_IN_PLACE, tot_blc_count, 1, MPI_INTEGER8, MPI_SUM, id_root, &
call MPI_Reduce(MPI_IN_PLACE, tot_blc_count, nspec, MPI_INTEGER8, MPI_SUM, id_root, &
& mp_comm_used, mp_ierr)
call MPI_Reduce(MPI_IN_PLACE, tot_inc_count, 1, MPI_INTEGER8, MPI_SUM, id_root, &
call MPI_Reduce(MPI_IN_PLACE, tot_inc_count, nspec, MPI_INTEGER8, MPI_SUM, id_root, &
& mp_comm_used, mp_ierr)
else
if (mp_partgroup_pid.ge.0) then ! Skip for readwind process
call MPI_Reduce(tot_blc_count, 0, 1, MPI_INTEGER8, MPI_SUM, id_root, &
call MPI_Reduce(tot_blc_count, 0, nspec, MPI_INTEGER8, MPI_SUM, id_root, &
& mp_comm_used, mp_ierr)
call MPI_Reduce(tot_inc_count, 0, 1, MPI_INTEGER8, MPI_SUM, id_root, &
call MPI_Reduce(tot_inc_count, 0, nspec, MPI_INTEGER8, MPI_SUM, id_root, &
& mp_comm_used, mp_ierr)
end if
end if
if (lroot) then
do i=1,nspec
write(*,*) '**********************************************'
write(*,*) 'Total number of occurrences of below-cloud scavenging', tot_blc_count
write(*,*) 'Total number of occurrences of in-cloud scavenging', tot_inc_count
write(*,*) 'Scavenging statistics for species ', species(i), ':'
write(*,*) 'Total number of occurrences of below-cloud scavenging', &
& tot_blc_count(i)
write(*,*) 'Total number of occurrences of in-cloud scavenging', &
& tot_inc_count(i)
write(*,*) '**********************************************'
end do
write(*,*) 'CONGRATULATIONS: YOU HAVE SUCCESSFULLY COMPLETED A FLE&
&XPART MODEL RUN!'
......
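Editorial note on the MPI_Reduce hunk above: the count argument changes from 1 to nspec so the whole counter array is summed elementwise onto the root. The root passes MPI_IN_PLACE as the send buffer (its own values join the sum in place), while the other ranks send their arrays and supply a receive buffer that is ignored. A standalone sketch of this pattern, assuming an MPI installation providing the mpi module (all _demo names are illustrative):

program inplace_reduce_demo
  use mpi
  implicit none
  integer, parameter :: nspec_demo = 3
  integer(kind=8) :: counts(nspec_demo), dummy(nspec_demo)
  integer :: rank, ierr
  call MPI_Init(ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)
  counts = int(rank+1, 8)          ! this rank's per-species tallies
  if (rank == 0) then
    ! root: MPI_IN_PLACE means counts is both input and result
    call MPI_Reduce(MPI_IN_PLACE, counts, nspec_demo, MPI_INTEGER8, &
         MPI_SUM, 0, MPI_COMM_WORLD, ierr)
    write(*,*) 'summed per-species counts:', counts
  else
    ! non-root: result buffer is ignored (the code above passes a 0 there)
    call MPI_Reduce(counts, dummy, nspec_demo, MPI_INTEGER8, &
         MPI_SUM, 0, MPI_COMM_WORLD, ierr)
  end if
  call MPI_Finalize(ierr)
end program inplace_reduce_demo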
@@ -20,28 +20,39 @@
!**********************************************************************
subroutine boundcond_domainfill(itime,loutend)
! i i
!*****************************************************************************
! *
! Particles are created by this subroutine continuously throughout the *
! simulation at the boundaries of the domain-filling box. *
! All particles carry the same amount of mass which altogether comprises the *
! mass of air within the box, which remains (more or less) constant. *
! *
! Author: A. Stohl *
! *
! 16 October 2002 *
! *
!*****************************************************************************
! *
! Variables: *
! *
! nx_we(2) grid indices for western and eastern boundary of domain- *
! filling trajectory calculations *
! ny_sn(2) grid indices for southern and northern boundary of domain- *
! filling trajectory calculations *
! *
!*****************************************************************************
! i i
!*****************************************************************************
! *
! Particles are created by this subroutine continuously throughout the *
! simulation at the boundaries of the domain-filling box. *
! All particles carry the same amount of mass which altogether comprises the *
! mass of air within the box, which remains (more or less) constant. *
! *
! Author: A. Stohl *
! *
! 16 October 2002 *
! *
!*****************************************************************************
! *
! Variables: *
! *
! nx_we(2) grid indices for western and eastern boundary of domain- *
! filling trajectory calculations *
! ny_sn(2) grid indices for southern and northern boundary of domain- *
! filling trajectory calculations *
! *
!*****************************************************************************
! CHANGES
! 08/2016 eso: MPI version:
!
! -Root process releases particles and distributes them to the other processes.
! Temporary arrays are used, also for the non-root (receiving) processes.
! -The scheme can be improved by having all processes report numpart
! (keeping track of how many particles have left the domain), so that
! a proportional number of new particles can be distributed (however,
! a separate function called from timemanager will already redistribute
! particles among processes if there are imbalances)
!*****************************************************************************
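! Editorial sketch of the scheme above, in commented form (istart() is a
! hypothetical offset array; the other names are the routine's own):
!   if (lroot) then
!     ! fill itra1_tmp etc. with all new releases; numrel_mpi(j) = share of j
!     do j=1,mp_partgroup_np-1
!       call MPI_Send(itra1_tmp(istart(j)), numrel_mpi(j), MPI_INTEGER, &
!            j, mtag, mp_comm_used, mp_ierr)
!     end do
!   else
!     call MPI_Recv(itra1_tmp, numrel_mpi(mp_partid), MPI_INTEGER, &
!          id_root, mtag, mp_comm_used, MPI_STATUS_IGNORE, mp_ierr)
!   end if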
use point_mod
use par_mod
@@ -54,7 +65,8 @@ subroutine boundcond_domainfill(itime,loutend)
real :: dz,dz1,dz2,dt1,dt2,dtt,ylat,xm,cosfact,accmasst
integer :: itime,in,indz,indzp,i,loutend
integer :: j,k,ix,jy,m,indzh,indexh,minpart,ipart,mmass
integer :: numactiveparticles
integer :: numactiveparticles, numpart_total, rel_counter
integer,allocatable,dimension(:) :: numrel_mpi !, numactiveparticles_mpi
real :: windl(2),rhol(2)
real :: windhl(2),rhohl(2)
@@ -65,26 +77,37 @@ subroutine boundcond_domainfill(itime,loutend)
real :: pvpart,ddx,ddy,rddx,rddy,p1,p2,p3,p4,y1(2),yh1(2)
integer :: idummy = -11
integer :: mtag
logical :: first_call=.true.
! Sizes of temporary arrays are maxpartfract*maxpart. Increase maxpartfract if
! needed.
real,parameter :: maxpartfract=0.1
integer :: tmp_size = int(maxpartfract*maxpart)
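! Editorial note, with hypothetical figures: for maxpart = 1e7 and
! maxpartfract = 0.1, each temporary array gets int(0.1*1e7) = 1e6
! elements; increase maxpartfract if a run releases more per step.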
! Use different seed for each process
! Use different seed for each process
if (first_call) then
idummy=idummy+mp_seed
first_call=.false.
end if
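! Editorial note: the one-time mp_seed offset (different on every rank)
! keeps the ran1 streams of the processes from duplicating each other.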
! If domain-filling is global, no boundary conditions are needed
!***************************************************************
! If domain-filling is global, no boundary conditions are needed
!***************************************************************
if (gdomainfill) return
accmasst=0.
numactiveparticles=0
! Keep track of active particles on each process
allocate(numrel_mpi(0:mp_partgroup_np-1))
! numactiveparticles_mpi(0:mp_partgroup_np-1)
! Terminate trajectories that have left the domain, if domain-filling
! trajectory calculation domain is not global
!********************************************************************
! New particles to be released on each process
numrel_mpi(:)=0
! Terminate trajectories that have left the domain, if domain-filling
! trajectory calculation domain is not global. Done for all processes
!********************************************************************
do i=1,numpart
if (itra1(i).eq.itime) then
@@ -97,49 +120,75 @@ subroutine boundcond_domainfill(itime,loutend)
if (itra1(i).ne.-999999999) numactiveparticles= &
numactiveparticles+1
end do
! numactiveparticles_mpi(mp_partid) = numactiveparticles
! Collect number of active particles from all processes
! call MPI_Allgather(numactiveparticles, 1, MPI_INTEGER, &
! &numactiveparticles_mpi, 1, MPI_INTEGER, mp_comm_used, mp_ierr)
! Total number of new releases
numpart_total = 0
! This section only done by root process
!***************************************
if (lroot) then
! Use separate arrays for newly released particles
!*************************************************
allocate(itra1_tmp(tmp_size),npoint_tmp(tmp_size),nclass_tmp(tmp_size),&
& idt_tmp(tmp_size),itramem_tmp(tmp_size),itrasplit_tmp(tmp_size),&
& xtra1_tmp(tmp_size),ytra1_tmp(tmp_size),ztra1_tmp(tmp_size),&
& xmass1_tmp(tmp_size, maxspec))
! Initialize all particles as non-existent
itra1_tmp(:)=-999999999
! Determine auxiliary variables for time interpolation
!*****************************************************
! Determine auxiliary variables for time interpolation
!*****************************************************
dt1=real(itime-memtime(1))
dt2=real(memtime(2)-itime)
dtt=1./(dt1+dt2)
! Initialize auxiliary variable used to search for vacant storage space
!**********************************************************************
! Initialize auxiliary variable used to search for vacant storage space
!**********************************************************************
minpart=1
!***************************************
! Western and eastern boundary condition
!***************************************
!***************************************
! Western and eastern boundary condition
!***************************************
! Loop from south to north
!*************************
! Loop from south to north
!*************************
do jy=ny_sn(1),ny_sn(2)
! Loop over western (index 1) and eastern (index 2) boundary
!***********************************************************
! Loop over western (index 1) and eastern (index 2) boundary
!***********************************************************
do k=1,2
! Loop over all release locations in a column
!********************************************
! Loop over all release locations in a column
!********************************************
do j=1,numcolumn_we(k,jy)
! Determine, for each release location, the area of the corresponding boundary
!*****************************************************************************
! Determine, for each release location, the area of the corresponding boundary
!*****************************************************************************
if (j.eq.1) then
deltaz=(zcolumn_we(k,jy,2)+zcolumn_we(k,jy,1))/2.
else if (j.eq.numcolumn_we(k,jy)) then
! deltaz=height(nz)-(zcolumn_we(k,jy,j-1)+
! + zcolumn_we(k,jy,j))/2.
! In order to avoid taking a very high column for very many particles,
! use the deltaz from one particle below instead
! deltaz=height(nz)-(zcolumn_we(k,jy,j-1)+
! + zcolumn_we(k,jy,j))/2.
! In order to avoid taking a very high column for very many particles,
! use the deltaz from one particle below instead
deltaz=(zcolumn_we(k,jy,j)-zcolumn_we(k,jy,j-2))/2.
else
deltaz=(zcolumn_we(k,jy,j+1)-zcolumn_we(k,jy,j-1))/2.
@@ -151,11 +200,11 @@ subroutine boundcond_domainfill(itime,loutend)
endif
! Interpolate the wind velocity and density to the release location
!******************************************************************
! Interpolate the wind velocity and density to the release location
!******************************************************************
! Determine the model level below the release position
!*****************************************************
! Determine the model level below the release position
!*****************************************************
do i=2,nz
if (height(i).gt.zcolumn_we(k,jy,j)) then
@@ -166,15 +215,15 @@ subroutine boundcond_domainfill(itime,loutend)
end do
6 continue
! Vertical distance to the level below and above current position
!****************************************************************
! Vertical distance to the level below and above current position
!****************************************************************
dz1=zcolumn_we(k,jy,j)-height(indz)
dz2=height(indzp)-zcolumn_we(k,jy,j)
dz=1./(dz1+dz2)
! Vertical and temporal interpolation
!************************************
! Vertical and temporal interpolation
!************************************
do m=1,2
indexh=memind(m)
@@ -191,15 +240,15 @@ subroutine boundcond_domainfill(itime,loutend)
windx=(windhl(1)*dt2+windhl(2)*dt1)*dtt
rhox=(rhohl(1)*dt2+rhohl(2)*dt1)*dtt
! Calculate mass flux, divided by number of processes
!****************************************************
! Calculate mass flux
!********************
fluxofmass=windx*rhox*boundarea*real(lsynctime)/mp_partgroup_np
fluxofmass=windx*rhox*boundarea*real(lsynctime)
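! Editorial note: the old version divided the flux by mp_partgroup_np
! because every process released its own share; now the root computes
! releases for the whole boundary and distributes them (see CHANGES
! above), so the undivided mass flux is the correct quantity here.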
! If the mass flux is directed into the domain, add it to previous mass fluxes;
! if it is out of the domain, set accumulated mass flux to zero
!******************************************************************************
! If the mass flux is directed into the domain, add it to previous mass fluxes;
! if it is out of the domain, set accumulated mass flux to zero
!******************************************************************************
if (k.eq.1) then
if (fluxofmass.ge.0.) then
@@ -216,10 +265,10 @@ subroutine boundcond_domainfill(itime,loutend)
endif
accmasst=accmasst+acc_mass_we(k,jy,j)
! If the accumulated mass exceeds half the mass that each particle shall carry,
! one (or more) particle(s) is (are) released and the accumulated mass is
! reduced by the mass of this (these) particle(s)
!******************************************************************************
! If the accumulated mass exceeds half the mass that each particle shall carry,
! one (or more) particle(s) is (are) released and the accumulated mass is
! reduced by the mass of this (these) particle(s)
!******************************************************************************
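! Editorial worked example: if acc_mass_we has reached 2.7*xmassperparticle,
! then mmass = int(2.7 + 0.5) = 3 particles are released below and the
! accumulated mass is reduced by 3*xmassperparticle.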
if (acc_mass_we(k,jy,j).ge.xmassperparticle/2.) then
mmass=int((acc_mass_we(k,jy,j)+xmassperparticle/2.)/ &
@@ -231,43 +280,45 @@ subroutine boundcond_domainfill(itime,loutend)
endif
do m=1,mmass
do ipart=minpart,maxpart_mpi
do ipart=minpart,maxpart
! If a vacant storage space is found, attribute everything to this array element
!*****************************************************************************
! If a vacant storage space is found, attribute everything to this array element
! TODO: for the MPI version this test can be removed, as
! itra1_tmp is initialized to non-existent (-999999999)
!*****************************************************************************
if (itra1(ipart).ne.itime) then
if (itra1_tmp(ipart).ne.itime) then
! Assign particle positions
!**************************
! Assign particle positions
!**************************
xtra1(ipart)=real(nx_we(k))
xtra1_tmp(ipart)=real(nx_we(k))
if (jy.eq.ny_sn(1)) then
ytra1(ipart)=real(jy)+0.5*ran1(idummy)
ytra1_tmp(ipart)=real(jy)+0.5*ran1(idummy)
else if (jy.eq.ny_sn(2)) then
ytra1(ipart)=real(jy)-0.5*ran1(idummy)
ytra1_tmp(ipart)=real(jy)-0.5*ran1(idummy)
else
ytra1(ipart)=real(jy)+(ran1(idummy)-.5)
ytra1_tmp(ipart)=real(jy)+(ran1(idummy)-.5)
endif
if (j.eq.1) then
ztra1(ipart)=zcolumn_we(k,jy,1)+(zcolumn_we(k,jy,2)- &
ztra1_tmp(ipart)=zcolumn_we(k,jy,1)+(zcolumn_we(k,jy,2)- &
zcolumn_we(k,jy,1))/4.
else if (j.eq.numcolumn_we(k,jy)) then
ztra1(ipart)=(2.*zcolumn_we(k,jy,j)+ &
ztra1_tmp(ipart)=(2.*zcolumn_we(k,jy,j)+ &
zcolumn_we(k,jy,j-1)+height(nz))/4.
else
ztra1(ipart)=zcolumn_we(k,jy,j-1)+ran1(idummy)* &
ztra1_tmp(ipart)=zcolumn_we(k,jy,j-1)+ran1(idummy)* &
(zcolumn_we(k,jy,j+1)-zcolumn_we(k,jy,j-1))
endif
! Interpolate PV to the particle position
!****************************************
ixm=int(xtra1(ipart))
jym=int(ytra1(ipart))
! Interpolate PV to the particle position
!****************************************
ixm=int(xtra1_tmp(ipart))
jym=int(ytra1_tmp(ipart))
ixp=ixm+1
jyp=jym+1
ddx=xtra1(ipart)-real(ixm)
ddy=ytra1(ipart)-real(jym)
ddx=xtra1_tmp(ipart)-real(ixm)
ddy=ytra1_tmp(ipart)-real(jym)
rddx=1.-ddx
rddy=1.-ddy
p1=rddx*rddy
@@ -275,15 +326,15 @@ subroutine boundcond_domainfill(itime,loutend)
p3=rddx*ddy
p4=ddx*ddy
do i=2,nz
if (height(i).gt.ztra1(ipart)) then
if (height(i).gt.ztra1_tmp(ipart)) then
indzm=i-1
indzp=i
goto 26
endif
end do
26 continue
dz1=ztra1(ipart)-height(indzm)
dz2=height(indzp)-ztra1(ipart)
dz1=ztra1_tmp(ipart)-height(indzm)
dz2=height(indzp)-ztra1_tmp(ipart)
dz=1./(dz1+dz2)
do mm=1,2
indexh=memind(mm)
@@ -297,41 +348,41 @@ subroutine boundcond_domainfill(itime,loutend)
yh1(mm)=(dz2*y1(1)+dz1*y1(2))*dz
end do
pvpart=(yh1(1)*dt2+yh1(2)*dt1)*dtt
ylat=ylat0+ytra1(ipart)*dy
ylat=ylat0+ytra1_tmp(ipart)*dy
if (ylat.lt.0.) pvpart=-1.*pvpart
! For domain-filling option 2 (stratospheric O3), do the rest only in the stratosphere
!*****************************************************************************
! For domain-filling option 2 (stratospheric O3), do the rest only in the stratosphere
!*****************************************************************************
if (((ztra1(ipart).gt.3000.).and. &
if (((ztra1_tmp(ipart).gt.3000.).and. &
(pvpart.gt.pvcrit)).or.(mdomainfill.eq.1)) then
nclass(ipart)=min(int(ran1(idummy)* &
nclass_tmp(ipart)=min(int(ran1(idummy)* &
real(nclassunc))+1,nclassunc)
numactiveparticles=numactiveparticles+1
numparticlecount=numparticlecount+1
npoint(ipart)=numparticlecount
idt(ipart)=mintime
itra1(ipart)=itime
itramem(ipart)=itra1(ipart)
itrasplit(ipart)=itra1(ipart)+ldirect*itsplit
xmass1(ipart,1)=xmassperparticle
if (mdomainfill.eq.2) xmass1(ipart,1)= &
xmass1(ipart,1)*pvpart*48./29.*ozonescale/10.**9
npoint_tmp(ipart)=numparticlecount
idt_tmp(ipart)=mintime
itra1_tmp(ipart)=itime
itramem_tmp(ipart)=itra1_tmp(ipart)
itrasplit_tmp(ipart)=itra1_tmp(ipart)+ldirect*itsplit
xmass1_tmp(ipart,1)=xmassperparticle
if (mdomainfill.eq.2) xmass1_tmp(ipart,1)= &
xmass1_tmp(ipart,1)*pvpart*48./29.*ozonescale/10.**9
else
goto 71
endif
! Increase numpart, if necessary
!*******************************
! Increase numpart, if necessary
!*******************************
numpart=max(numpart,ipart)
numpart_total=max(numpart_total,ipart)
goto 73 ! Storage space has been found, stop searching
endif
end do
if (ipart.gt.maxpart_mpi) &
stop 'boundcond_domainfill.f: too many particles required'
if (ipart.gt.tmp_size) &
stop 'boundcond_domainfill_mpi.f90: too many particles required'
73 minpart=ipart+1
71 continue
end do
@@ -342,37 +393,37 @@ subroutine boundcond_domainfill(itime,loutend)
end do
!*****************************************
! Southern and northern boundary condition
!*****************************************
!*****************************************
! Southern and northern boundary condition
!*****************************************
! Loop from west to east
!***********************
! Loop from west to east
!***********************
do ix=nx_we(1),nx_we(2)
! Loop over southern (index 1) and northern (index 2) boundary
!*************************************************************
! Loop over southern (index 1) and northern (index 2) boundary
!*************************************************************
do k=1,2
ylat=ylat0+real(ny_sn(k))*dy
cosfact=cos(ylat*