flexpart / flexpart · Commits

Commit b5127f90, authored Aug 04, 2017 by Espen Sollum
Fixed an inconsistency (serial vs parallel) with domain-filling option
Parent: 5184a7c7
Changes: 7 files
src/init_domainfill.f90

@@ -399,6 +399,15 @@ subroutine init_domainfill
   end do
   end do

+! Reduce numpart if invalid particles at end of arrays
+  do i=numpart, 1, -1
+    if (itra1(i).eq.-999999999) then
+      numpart=numpart-1
+    else
+      exit
+    end if
+  end do
+
 ! If particles shall be read in to continue an existing run,
 ! then the accumulated masses at the domain boundaries must be read in, too.
 ! This overrides any previous calculations.

@@ -413,6 +422,4 @@ subroutine init_domainfill
   endif

 end subroutine init_domainfill
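The hunk above brings the serial initialisation in line with the MPI version: particles flagged as invalid (itra1 set to -999999999) at the end of the arrays no longer inflate numpart. A minimal standalone sketch of that trimming pattern follows; it is not FLEXPART code, and the array size and flagged positions are made up for illustration.

program trim_invalid_particles
  implicit none
  integer, parameter :: maxp = 10
  integer :: itra1(maxp), numpart, i

  numpart = maxp
  itra1(:) = 0                ! pretend all particles are valid to begin with
  itra1(8:10) = -999999999    ! flag the last three as invalid (e.g. outside the domain)

! Reduce numpart if invalid particles at end of arrays (same pattern as the hunk above)
  do i=numpart, 1, -1
    if (itra1(i).eq.-999999999) then
      numpart = numpart-1
    else
      exit
    end if
  end do

  write(*,*) 'numpart after trimming:', numpart   ! prints 7
end program trim_invalid_particles

The backwards loop stops at the first valid entry, so only invalid particles sitting at the end of the arrays are dropped.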
src/init_domainfill_mpi.f90

@@ -369,8 +369,7 @@ subroutine init_domainfill
! Make sure that all particles are within domain
!***********************************************
-! do j=1,numpart
-  do j=1,npoint(1)
+  do j=1,numpart
    if ((xtra1_tmp(j).lt.0.).or.(xtra1_tmp(j).ge.real(nxmin1)).or.&
         (ytra1_tmp(j).lt.0.).or.(ytra1_tmp(j).ge.real(nymin1))) then
      itra1_tmp(j)=-999999999

@@ -474,7 +473,7 @@ subroutine init_domainfill
      close(unitboundcond)
    endif

-    numpart = npart(1)/mp_partgroup_np
+    numpart = numpart/mp_partgroup_np
    if (mod(numpart,mp_partgroup_np).ne.0) numpart=numpart+1

  else ! Allocate dummy arrays for receiving processes

@@ -487,13 +486,13 @@ subroutine init_domainfill
! Distribute particles to other processes (numpart is 'per-process', not total)
    call MPI_Bcast(numpart, 1, MPI_INTEGER, id_root, mp_comm_used, mp_ierr)

! eso TODO: xmassperparticle: not necessary to send
    call MPI_Bcast(xmassperparticle, 1, mp_sp, id_root, mp_comm_used, mp_ierr)
    call mpif_send_part_properties(numpart)

! Deallocate the temporary arrays used for all particles
    deallocate(itra1_tmp,npoint_tmp,nclass_tmp,idt_tmp,itramem_tmp,&
         & itrasplit_tmp,xtra1_tmp,ytra1_tmp,ztra1_tmp,xmass1_tmp)
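In the MPI version the root process now divides the (possibly reduced) numpart by mp_partgroup_np rather than the requested total npart(1), and the per-process count is broadcast before the particle properties are sent. A minimal sketch of that split-and-broadcast step follows; names are illustrative, and the round-up uses the standard remainder test on the total rather than FLEXPART's exact expression.

program bcast_per_process_count
  use mpi
  implicit none
  integer :: ierr, my_rank, n_procs, num_total, num_per_proc

  call MPI_Init(ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD, my_rank, ierr)
  call MPI_Comm_size(MPI_COMM_WORLD, n_procs, ierr)

  if (my_rank.eq.0) then
    num_total = 100000                     ! e.g. particles still valid after trimming
    num_per_proc = num_total/n_procs       ! integer division
    if (mod(num_total,n_procs).ne.0) num_per_proc = num_per_proc+1   ! round up so no particle is lost
  end if

! Every rank needs the per-process count before sizing its local arrays
  call MPI_Bcast(num_per_proc, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)

  write(*,'(a,i4,a,i8)') 'rank ', my_rank, ' will hold up to ', num_per_proc
  call MPI_Finalize(ierr)
end program bcast_per_process_count

Broadcasting the count first lets every rank allocate consistently sized arrays before the actual particle data arrives.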
src/mean_mod.f90

@@ -179,8 +179,8 @@ contains
    xl=0._sp
    xq=0._sp
    do i=1,number
-      xl=xl+x_dp(i)
-      xq=xq+x_dp(i)*x_dp(i)
+      xl=xl+real(x_dp(i),kind=sp)
+      xq=xq+real(x_dp(i),kind=sp)*real(x_dp(i),kind=sp)
    end do
    xm=xl/real(number,kind=sp)

@@ -236,7 +236,7 @@ contains
      xq=xq+x_dp(i)*x_dp(i)
    end do
-    xm=xl/real(number,kind=sp)
+    xm=real(xl,kind=sp)/real(number,kind=sp)
    xaux=xq-xl*xl/real(number,kind=dp)
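The mean_mod.f90 hunks replace implicit double-to-single assignments with explicit real(..., kind=sp) conversions when double-precision samples are accumulated into single-precision sums. A small sketch of that idiom, with kind parameters and data invented for illustration and not taken from FLEXPART:

program explicit_kind_conversion
  implicit none
  integer, parameter :: sp = kind(1.0), dp = kind(1.0d0)
  real(dp) :: x_dp(3) = (/ 1.5_dp, 2.5_dp, 4.0_dp /)
  real(sp) :: xl, xm
  integer :: i, number

  number = size(x_dp)
  xl = 0._sp
  do i=1,number
    xl = xl + real(x_dp(i), kind=sp)   ! explicit dp -> sp conversion
  end do
  xm = xl/real(number, kind=sp)        ! mean accumulated and formed in single precision

  write(*,*) 'mean =', xm              ! prints approximately 2.6667
end program explicit_kind_conversion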
src/mpi_mod.f90

@@ -331,6 +331,7 @@ contains
! Set maxpart per process
! eso 08/2016: Increase maxpart per process, in case of unbalanced distribution
    maxpart_mpi=int(mp_maxpart_factor*real(maxpart)/real(mp_partgroup_np))
+    if (mp_np == 1) maxpart_mpi = maxpart

! Do not allocate particle data arrays for readwind process
    if (lmpreader.and.lmp_use_reader) then

@@ -549,6 +550,7 @@ contains
! After the transfer of particles it it possible that the value of
! "numpart" is larger than the actual used, so we reduce it if there are
! invalid particles at the end of the arrays
601 do i=num_part, 1, -1
      if (itra1(i).eq.-999999999) then
        numpart=numpart-1

@@ -558,7 +560,6 @@ contains
    end do

!601 end subroutine mpif_send_part_properties
  end subroutine mpif_send_part_properties
src/readavailable.f90

@@ -62,6 +62,7 @@ subroutine readavailable
  integer :: i,idiff,ldat,ltim,wftime1(maxwf),numbwfn(maxnests),k
  integer :: wftime1n(maxnests,maxwf),wftimen(maxnests,maxwf)
+  logical :: lwarntd=.true.
  real(kind=dp) :: juldate,jul,beg,end
  character(len=255) :: fname,spec,wfname1(maxwf),wfspec1(maxwf)
  character(len=255) :: wfname1n(maxnests,maxwf)

@@ -240,10 +241,11 @@ subroutine readavailable
        write(*,*) 'WIND FIELDS IS TOO BIG FOR TRANSPORT CALCULATION.&
             &'
        write(*,*) 'THEREFORE, TRAJECTORIES HAVE TO BE SKIPPED.'
-      else if (idiff.gt.idiffnorm.and.lroot) then
+      else if (idiff.gt.idiffnorm.and.lroot.and.lwarntd) then
        write(*,*) 'FLEXPART WARNING: TIME DIFFERENCE BETWEEN TWO'
        write(*,*) 'WIND FIELDS IS BIG. THIS MAY CAUSE A DEGRADATION'
        write(*,*) 'OF SIMULATION QUALITY.'
+        lwarntd=.false. ! only issue this warning once
      endif
  end do
src/readcommand.f90

@@ -145,6 +145,10 @@ subroutine readcommand
  cblflag=0 ! if using old-style COMMAND file, set to 1 here to use mc cbl routine
  ohfields_path="../../flexin/"

+!Af set release-switch
+  WETBKDEP=.false.
+  DRYBKDEP=.false.
+
! Open the command file and read user options
! Namelist input first: try to read as namelist file
!**************************************************************************

@@ -320,9 +324,6 @@ subroutine readcommand
  else ! mass mix
    ind_samp=0
  endif

-!Af set release-switch
-  WETBKDEP=.false.
-  DRYBKDEP=.false.

  select case (ind_receptor)
  case (1)  ! 1 .. concentration at receptor
    ind_rel = 1
src/timemanager_mpi.f90

@@ -772,7 +772,7 @@ subroutine timemanager
        endif
      else
-        xmassfract=1.
+        xmassfract=1.0
      endif
    end do

@@ -780,6 +780,9 @@ subroutine timemanager
! print*,'terminated particle ',j,' for small mass (', sum(real(npart(npoint(j)))* &
!    xmass1(j,:)), ' of ', sum(xmass(npoint(j),:)),')'
        itra1(j)=-999999999
+        if (verbosity.gt.0) then
+          print*,'terminated particle ',j,' for small mass'
+        endif
      endif

! Sabine Eckhardt, June 2008