!**********************************************************************
! Copyright 1998,1999,2000,2001,2002,2005,2007,2008,2009,2010         *
! Andreas Stohl, Petra Seibert, A. Frank, Gerhard Wotawa,             *
! Caroline Forster, Sabine Eckhardt, John Burkhart, Harald Sodemann   *
!                                                                     *
! This file is part of FLEXPART.                                      *
!                                                                     *
! FLEXPART is free software: you can redistribute it and/or modify    *
! it under the terms of the GNU General Public License as published by*
! the Free Software Foundation, either version 3 of the License, or   *
! (at your option) any later version.                                 *
!                                                                     *
! FLEXPART is distributed in the hope that it will be useful,         *
! but WITHOUT ANY WARRANTY; without even the implied warranty of      *
! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the       *
! GNU General Public License for more details.                        *
!                                                                     *
! You should have received a copy of the GNU General Public License   *
! along with FLEXPART.  If not, see <http://www.gnu.org/licenses/>.   *
!**********************************************************************

subroutine timemanager(metdata_format)

!*****************************************************************************
!                                                                            *
! Handles the computation of trajectories, i.e. determines which             *
! trajectories have to be computed at what time.                             *
! Manages dry+wet deposition routines, radioactive decay and the computation *
! of concentrations.                                                         *
!                                                                            *
!     Author: A. Stohl                                                       *
!                                                                            *
!     20 May 1996                                                            *
!                                                                            *
!*****************************************************************************
!  Changes, Bernd C. Krueger, Feb. 2001:                                     *
!        Call of convmix when new windfield is read                          *
!------------------------------------                                        *
!  Changes Petra Seibert, Sept 2002                                          *
!     fix wet scavenging problem                                             *
!     Code may not be correct for decay of deposition!                       *
!  Changes Petra Seibert, Nov 2002                                           *
!     call convection BEFORE new fields are read in BWD mode                 *
!  Changes Caroline Forster, Feb 2005                                        *
!   new interface between flexpart and convection scheme                     *
!   Emanuel's latest subroutine convect43c.f is used                         *
!  Changes Stefan Henne, Harald Sodemann, 2013-2014                          *
!   added netcdf output code                                                 *
!  Changes Espen Sollum 2014                                                 *
!   MPI version                                                              *
!   Variables uap,ucp,uzp,us,vs,ws,cbt now in module com_mod                 *
!  Unified ECMWF and GFS builds                                              *
!   Marian Harustak, 12.5.2017                                               *
!   - Added passing of metdata_format as it was needed by called routines    *
!*****************************************************************************
!                                                                            *
! Variables:                                                                 *
! dep                .true. if either wet or dry deposition is switched on   *
! decay(maxspec) [1/s] decay constant for radioactive decay                  *
! drydep             .true. if dry deposition is switched on                 *
! ideltas [s]        modelling period                                        *
! itime [s]          actual temporal position of calculation                 *
! ldeltat [s]        time since computation of radioact. decay of depositions*
! loutaver [s]       averaging period for concentration calculations         *
! loutend [s]        end of averaging for concentration calculations         *
! loutnext [s]       next time at which output fields shall be centered      *
! loutsample [s]     sampling interval for averaging of concentrations       *
! loutstart [s]      start of averaging for concentration calculations       *
! loutstep [s]       time interval for which concentrations shall be         *
!                    calculated                                              *
! npoint(maxpart)    index, which starting point the trajectory has          *
!                    starting positions of trajectories                      *
! nstop              serves as indicator for fate of particles               *
!                    in the particle loop                                    *
! nstop1             serves as indicator for wind fields (see getfields)     *
! memstat            additional indicator for wind fields (see getfields)    *
! outnum             number of samples for each concentration calculation    *
! prob               probability of absorption at ground due to dry          *
!                    deposition                                              *
! wetdep             .true. if wet deposition is switched on                 *
! weight             weight for each concentration sample (1/2 or 1)         *
! uap(maxpart),ucp(maxpart),uzp(maxpart) = random velocities due to          *
!                    turbulence                                              *
! us(maxpart),vs(maxpart),ws(maxpart) = random velocities due to inter-      *
!                    polation                                                *
! xtra1(maxpart), ytra1(maxpart), ztra1(maxpart) =                           *
!                    spatial positions of trajectories                       *
! metdata_format     format of metdata (ecmwf/gfs)                           *
!                                                                            *
! Constants:                                                                 *
! maxpart            maximum number of trajectories                          *
!                                                                            *
!*****************************************************************************

  use unc_mod
  use point_mod
  use xmass_mod
  use flux_mod
  use outg_mod
  use oh_mod
  use par_mod
  use com_mod
  use mpi_mod
  use netcdf_output_mod, only: concoutput_netcdf,concoutput_nest_netcdf,&
       &concoutput_surf_netcdf,concoutput_surf_nest_netcdf

  implicit none

  integer :: metdata_format
  logical :: reqv_state=.false. ! .true. if waiting for a MPI_Irecv to complete
  integer :: j,ks,kp,l,n,itime=0,nstop,nstop1,memstat=0
! integer :: ksp
  integer :: ip
  integer :: loutnext,loutstart,loutend
  integer :: ix,jy,ldeltat,itage,nage
  integer :: i_nan=0,ii_nan,total_nan_intl=0  !added by mc to check instability in CBL scheme 
  integer :: numpart_tot_mpi ! for summing particles on all processes
  real :: outnum,weight,prob(maxspec)
  real :: decfact

  real(sp) :: gridtotalunc
  real(dep_prec) :: drygridtotalunc=0_dep_prec,wetgridtotalunc=0_dep_prec,&
       & drydeposit(maxspec)=0_dep_prec
  real :: xold,yold,zold,xmassfract
  real, parameter :: e_inv = 1.0/exp(1.0)
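! Note (comment only): e_inv = 1/e is the mass-fraction threshold used by the
! (currently commented-out) CGZ lifetime diagnostic further down in the
! particle loop.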

! Measure time spent in timemanager
  if (mp_measure_time) call mpif_mtime('timemanager',0)

! First output for time 0
!************************

  loutnext=loutstep/2
  outnum=0.
  loutstart=loutnext-loutaver/2
  loutend=loutnext+loutaver/2
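! Worked example (comment only): with loutstep=3600 s and loutaver=3600 s the
! first output is centered at loutnext=1800 s and sampled over
! [loutstart,loutend] = [0 s, 3600 s]; later windows follow from the updates
! of loutnext/loutstart/loutend at the end of each output interval.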


!**********************************************************************
! Loop over the whole modelling period in time steps of mintime seconds
!**********************************************************************


  if (lroot.or.mp_dev_mode) then
  !  write(*,45) itime,numpart*mp_partgroup_np,gridtotalunc,wetgridtotalunc,drygridtotalunc
    write(*,46) float(itime)/3600,itime,numpart*mp_partgroup_np
    
    if (verbosity.gt.0) then
      write (*,*) 'timemanager> starting simulation'
    end if
  end if ! (lroot)

!CGZ-lifetime: set lifetime to 0
  ! checklifetime(:,:)=0
  ! species_lifetime(:,:)=0
  ! print*, 'Initialized lifetime'
!CGZ-lifetime: set lifetime to 0



  do itime=0,ideltas,lsynctime
    

! Computation of wet deposition, OH reaction and mass transfer
! between two species every lsynctime seconds
! maybe the wet depo frequency can be relaxed later, but better to stay on the safe side
! wetdepo must be called BEFORE new fields are read in but should not
! be called in the very beginning before any fields are loaded, or
! before particles are in the system
! Code may not be correct for decay of deposition
! changed by Petra Seibert 9/02
!********************************************************************

    if (mp_dbg_mode) write(*,*) 'myid, itime: ',mp_pid,itime
    
    if (WETDEP .and. itime .ne. 0 .and. numpart .gt. 0) then
      if (verbosity.gt.0) then
        write (*,*) 'timemanager> call wetdepo'
      endif

      if (mp_measure_time) call mpif_mtime('wetdepo',0)
        
! readwind process skips this step
      if (.not.(lmpreader.and.lmp_use_reader)) then
        call wetdepo(itime,lsynctime,loutnext)
      end if

      if (mp_measure_time) call mpif_mtime('wetdepo',1)
    end if

    if (OHREA .and. itime .ne. 0 .and. numpart .gt. 0) then
! readwind process skips this step
      if (.not.(lmpreader.and.lmp_use_reader)) then
        call ohreaction(itime,lsynctime,loutnext)
      endif
    end if

    if (assspec .and. itime .ne. 0 .and. numpart .gt. 0) then
      stop 'associated species not yet implemented!'
!     call transferspec(itime,lsynctime,loutnext)
    endif


! compute convection for backward runs
!*************************************

    if ((ldirect.eq.-1).and.(lconvection.eq.1).and.(itime.lt.0)) then
      if (verbosity.gt.0) then
        write (*,*) 'timemanager> call convmix -- backward'
      endif
! readwind process skips this step
      if (.not.(lmpreader.and.lmp_use_reader)) call convmix(itime,metdata_format)
    endif

! Get necessary wind fields if not available
!*******************************************
    if (verbosity.gt.0 .and. lmpreader) then
      write (*,*) 'timemanager> call getfields'
    endif

! This time measure includes reading/MPI communication (for the reader process),
! or MPI communication time only (for other processes)
    if (mp_measure_time) call mpif_mtime('getfields',0)

    call getfields(itime,nstop1,memstat,metdata_format)

    if (mp_measure_time) call mpif_mtime('getfields',1)


! Broadcast fields to all MPI processes 
! Skip if all processes have called getfields or if no new fields
!*****************************************************************

    if (mp_measure_time.and..not.(lmpreader.and.lmp_use_reader)) call mpif_mtime('getfields',0)

! Two approaches to MPI getfields are implemented:
! Version 1 (lmp_sync=.true.) uses a read-ahead process where send/recv is done
! in sync at start of each new field time interval
!
! Version 2 (lmp_sync=.false.) is for holding three fields in memory. Uses a
! read-ahead process where sending/receiving of the 3rd field is done in
! the background, in parallel with computations on fields 1 & 2
!********************************************************************************
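!
! As used in the branches below (interpretation inferred from this file):
! memstat.gt.0 signals that getfields provided new data this step, and
! memstat.ge.32 signals that two fields were read at once (e.g. at the first
! time step), in which case the synchronous transfer is used even when
! lmp_sync=.false.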

    if (lmp_sync.and.lmp_use_reader.and.memstat.gt.0) then
      call mpif_gf_send_vars(memstat)
      if (numbnests>0) call mpif_gf_send_vars_nest(memstat)
! Version 2  (lmp_sync=.false.) is also used whenever 2 new fields are
! read (as at first time step), in which case async send/recv is impossible.
    else if (.not.lmp_sync.and.lmp_use_reader.and.memstat.ge.32) then
      call mpif_gf_send_vars(memstat)
      if (numbnests>0) call mpif_gf_send_vars_nest(memstat)
    end if

    if (.not.lmp_sync) then
    
! Reader process:
      if (memstat.gt.0.and.memstat.lt.32.and.lmp_use_reader.and.lmpreader) then
        if (mp_dev_mode) write(*,*) 'Reader process: calling mpif_gf_send_vars_async' 
        call mpif_gf_send_vars_async(memstat)
        if (numbnests>0) call mpif_gf_send_vars_nest_async(memstat)
      end if

! Completion check:
! Issued at start of each new field period. 
      if (memstat.ne.0.and.memstat.lt.32.and.lmp_use_reader) then
        call mpif_gf_request
      end if

! Receiving process(es):
! eso TODO: at this point we do not know if clwc/ciwc will be available
! at next time step. Issue receive request anyway, cancel at mpif_gf_request
      if (memstat.gt.0.and.lmp_use_reader.and..not.lmpreader) then
        if (mp_dev_mode) write(*,*) 'Receiving process: calling mpif_gf_send_vars_async. PID: ', mp_pid
        call mpif_gf_recv_vars_async(memstat)
        if (numbnests>0) call mpif_gf_recv_vars_nest_async(memstat)
      end if

    end if

    if (mp_measure_time.and..not.(lmpreader.and.lmp_use_reader)) call mpif_mtime('getfields',1)

! For validation and tests: call the function below to set all fields to simple
! homogeneous values
!    if (mp_dev_mode) call set_fields_synthetic

!*******************************************************************************

    if (lmpreader.and.nstop1.gt.1) stop 'NO METEO FIELDS AVAILABLE'

! Reader process goes back to top of time loop (unless simulation end)
!*********************************************************************

    if (lmpreader.and.lmp_use_reader) then
      if (itime.lt.ideltas*ldirect) then
        cycle
      else
        goto 999
      end if
    end if


! Get hourly OH fields if not available 
!****************************************************
    if (OHREA) then
      if (verbosity.gt.0) then
        write (*,*) 'timemanager> call gethourlyOH'
      endif
      call gethourlyOH(itime)
    endif


! Release particles
!******************

    if (verbosity.gt.0.and.lroot) then
      write (*,*) 'timemanager>  Release particles'
    endif

    if (mdomainfill.ge.1) then
      if (itime.eq.0) then
        if (verbosity.gt.0) then
          write (*,*) 'timemanager>  call init_domainfill'
        endif
        call init_domainfill
      else
        if (verbosity.gt.0.and.lroot) then
          write (*,*) 'timemanager>  call boundcond_domainfill'
        endif
        call boundcond_domainfill(itime,loutend)
      endif
    else
      if (verbosity.gt.0.and.lroot) then
        print*,'call releaseparticles'  
      endif
      call releaseparticles(itime)
    endif


! Check if particles should be redistributed among processes
!***********************************************************
    call mpif_calculate_part_redist(itime)
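
! (Comment only; the transfer logic itself lives in mpi_mod.f90)
! mpif_calculate_part_redist decides whether particles need to be moved
! between the worker processes, presumably to keep the per-process particle
! counts balanced, and performs the transfer if so.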


! Compute convective mixing for forward runs
! for backward runs it is done before next windfield is read in
!**************************************************************

    if ((ldirect.eq.1).and.(lconvection.eq.1)) then
      if (verbosity.gt.0) then
        write (*,*) 'timemanager> call convmix -- forward'
      endif
      call convmix(itime,metdata_format)
    endif

! If middle of averaging period of output fields is reached, accumulated
! deposited mass radioactively decays
!***********************************************************************

    if (DEP.and.(itime.eq.loutnext).and.(ldirect.gt.0)) then
      do ks=1,nspec
        do kp=1,maxpointspec_act
          if (decay(ks).gt.0.) then ! TODO move this statement up 2 levels
            do nage=1,nageclass
              do l=1,nclassunc
! Mother output grid
                do jy=0,numygrid-1
                  do ix=0,numxgrid-1
                    wetgridunc(ix,jy,ks,kp,l,nage)= &
                         wetgridunc(ix,jy,ks,kp,l,nage)* &
                         exp(-1.*outstep*decay(ks))
                    drygridunc(ix,jy,ks,kp,l,nage)= &
                         drygridunc(ix,jy,ks,kp,l,nage)* &
                         exp(-1.*outstep*decay(ks))
                  end do
                end do
! Nested output grid
                if (nested_output.eq.1) then
                  do jy=0,numygridn-1
                    do ix=0,numxgridn-1
                      wetgriduncn(ix,jy,ks,kp,l,nage)= &
                           wetgriduncn(ix,jy,ks,kp,l,nage)* &
                           exp(-1.*outstep*decay(ks))
                      drygriduncn(ix,jy,ks,kp,l,nage)= &
                           drygriduncn(ix,jy,ks,kp,l,nage)* &
                           exp(-1.*outstep*decay(ks))
                    end do
                  end do
                endif
              end do
            end do
          endif
        end do
      end do
    endif
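
! (Comment only) At each output midpoint the accumulated wet/dry deposition
! fields are multiplied by exp(-outstep*decay(ks)), i.e. decayed by one output
! interval; the per-particle deposition added between outputs is corrected for
! this with the exp(+ldeltat*decay(ks)) factors below and in wetdepo.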


!!! CHANGE: These lines may be switched on to check the conservation
!!! of mass within FLEXPART
!   if (itime.eq.loutnext) then
!   do 247 ksp=1, nspec
!   do 247 kp=1, maxpointspec_act
!47         xm(ksp,kp)=0.

!   do 249 ksp=1, nspec
!     do 249 j=1,numpart
!          if (ioutputforeachrelease.eq.1) then
!            kp=npoint(j)
!          else
!            kp=1
!          endif
!       if (itra1(j).eq.itime) then
!          xm(ksp,kp)=xm(ksp,kp)+xmass1(j,ksp)
!         write(*,*) 'xmass: ',xmass1(j,ksp),j,ksp,nspec
!       endif
!49     continue
!  do 248 ksp=1,nspec
!  do 248 kp=1,maxpointspec_act
!  xm_depw(ksp,kp)=0.
!  xm_depd(ksp,kp)=0.
!     do 248 nage=1,nageclass
!       do 248 ix=0,numxgrid-1
!         do 248 jy=0,numygrid-1
!           do 248 l=1,nclassunc
!              xm_depw(ksp,kp)=xm_depw(ksp,kp)
!    +                  +wetgridunc(ix,jy,ksp,kp,l,nage)
!48                 xm_depd(ksp,kp)=xm_depd(ksp,kp)
!    +                  +drygridunc(ix,jy,ksp,kp,l,nage)
!             do 246 ksp=1,nspec
!46                    write(88,'(2i10,3e12.3)')
!    +              itime,ksp,(xm(ksp,kp),kp=1,maxpointspec_act),
!    +                (xm_depw(ksp,kp),kp=1,maxpointspec_act),
!    +                (xm_depd(ksp,kp),kp=1,maxpointspec_act)
!  endif
!!! CHANGE




! Check whether concentrations are to be calculated
!**************************************************

    if ((ldirect*itime.ge.ldirect*loutstart).and. &
         (ldirect*itime.le.ldirect*loutend)) then ! add to grid
      if (mod(itime-loutstart,loutsample).eq.0) then

! If we are exactly at the start or end of the concentration averaging interval,
! give only half the weight to this sample
!*****************************************************************************

        if ((itime.eq.loutstart).or.(itime.eq.loutend)) then
          weight=0.5
        else
          weight=1.0
        endif
        outnum=outnum+weight
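
! (Comment only) Samples taken exactly at loutstart/loutend are shared with the
! neighbouring averaging interval, so the half weight turns the time average
! into a trapezoidal rule; outnum accumulates the sum of sample weights and is
! passed to the concentration output routines for normalisation.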

        call conccalc(itime,weight)

      endif

! :TODO: MPI output of particle positions;  each process sequentially
!   access the same file
      if ((mquasilag.eq.1).and.(itime.eq.(loutstart+loutend)/2)) &
           call partoutput_short(itime)    ! dump particle positions in extremely compressed format


! Output and reinitialization of grid
! If necessary, first sample of new grid is also taken
!*****************************************************

      if ((itime.eq.loutend).and.(outnum.gt.0.)) then
        if ((iout.le.3).or.(iout.eq.5)) then

! MPI: Root process collects/sums grids
!**************************************
          call mpif_tm_reduce_grid

          if (mp_measure_time) call mpif_mtime('iotime',0)
          if (surf_only.ne.1) then
            if (lroot) then
              if (lnetcdfout.eq.1) then 
                call concoutput_netcdf(itime,outnum,gridtotalunc,wetgridtotalunc,&
                     &drygridtotalunc)
              else 
                call concoutput(itime,outnum,gridtotalunc,wetgridtotalunc,drygridtotalunc)
              endif
            else
! zero arrays on non-root processes
              gridunc(:,:,:,:,:,:,:)=0.
              creceptor(:,:)=0.
            end if
          else 
            if (lroot) then
              if (lnetcdfout.eq.1) then
                call concoutput_surf_netcdf(itime,outnum,gridtotalunc,wetgridtotalunc,&
                     &drygridtotalunc)
              else
                call concoutput_surf(itime,outnum,gridtotalunc,wetgridtotalunc,drygridtotalunc)
              end if
            else
! zero arrays on non-root processes
              gridunc(:,:,:,:,:,:,:)=0.
              creceptor(:,:)=0.
            endif
          endif
          if (mp_measure_time) call mpif_mtime('iotime',1)

          if (nested_output.eq.1) then

! MPI: Root process collects/sums nested grids
!*********************************************
            call mpif_tm_reduce_grid_nest
 
            if (mp_measure_time) call mpif_mtime('iotime',0)

            if (lnetcdfout.eq.0) then
              if (surf_only.ne.1) then

                if (lroot) then
                  call concoutput_nest(itime,outnum)
                else
                  griduncn(:,:,:,:,:,:,:)=0.
                end if

              else  ! :TODO: check for zeroing in the netcdf module
                call concoutput_surf_nest(itime,outnum)

              end if

            else

              if (surf_only.ne.1) then
                if (lroot) then              
                  call concoutput_nest_netcdf(itime,outnum)
                else
                  griduncn(:,:,:,:,:,:,:)=0.
                end if
              else 
                if (lroot) then
                  call concoutput_surf_nest_netcdf(itime,outnum)
                else
                  griduncn(:,:,:,:,:,:,:)=0.
                end if
              endif


            end if
          end if
          

          outnum=0.
        endif
        if ((iout.eq.4).or.(iout.eq.5)) call plumetraj(itime)
        if (iflux.eq.1) call fluxoutput(itime)
        if (mp_measure_time) call mpif_mtime('iotime',1)


! Decide whether to write an estimate of the number of particles released,
! or the exact number (requires an MPI reduce operation)
        if (mp_dbg_mode) then
          numpart_tot_mpi = numpart
        else
          numpart_tot_mpi = numpart*mp_partgroup_np
        end if

        if (mp_exact_numpart.and..not.(lmpreader.and.lmp_use_reader).and.&
             &.not.mp_dbg_mode) then
          call MPI_Reduce(numpart, numpart_tot_mpi, 1, MPI_INTEGER, MPI_SUM, id_root, &
               & mp_comm_used, mp_ierr)
        endif
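
! (Comment only) numpart is the per-process particle count; with
! mp_exact_numpart the reduce above sums it over mp_comm_used into
! numpart_tot_mpi on the root rank, otherwise the total is only estimated
! as numpart*mp_partgroup_np.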
        
        !CGZ-lifetime: output species lifetime
        if (lroot.or.mp_dbg_mode) then
        !   write(*,*) 'Overview species lifetime in days', &
        !        real((species_lifetime(:,1)/species_lifetime(:,2))/real(3600.0*24.0))
        !   write(*,*) 'all info:',species_lifetime
          write(*,45) itime,numpart_tot_mpi,gridtotalunc,&
               &wetgridtotalunc,drygridtotalunc
        !   if (verbosity.gt.0) then
        !     write (*,*) 'timemanager> starting simulation'
        !   end if
        end if

        ! Write number of particles for all processes
        if (mp_dev_mode) write(*,*) "PID, itime, numpart", mp_pid,itime,numpart


45      format(i13,' SECONDS SIMULATED: ',i13, ' PARTICLES:    Uncertainty: ',3f7.3)
46      format(' Simulated ',f7.1,' hours (',i13,' s), ',i13, ' particles')
        if (ipout.ge.1) then
          do ip=0, mp_partgroup_np-1
            if (ip.eq.mp_partid) call partoutput(itime) ! dump particle positions
            call mpif_mpi_barrier
          end do
        end if

        loutnext=loutnext+loutstep
        loutstart=loutnext-loutaver/2
        loutend=loutnext+loutaver/2
        if (itime.eq.loutstart) then
          weight=0.5
          outnum=outnum+weight
          call conccalc(itime,weight)
        endif


! Check, whether particles are to be split:
! If so, create new particles and attribute all information from the old
! particles also to the new ones; old and new particles both get half the
! mass of the old ones
!************************************************************************
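!
! (Comment only) The update itrasplit(j)=itramem(j)+2*(itrasplit(j)-itramem(j))
! below doubles the particle age at which the next split is due, so a given
! particle splits at ages T, 2T, 4T, ... after its release.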
        
        if (ldirect*itime.ge.ldirect*itsplit) then
          n=numpart
          do j=1,numpart
            if (ldirect*itime.ge.ldirect*itrasplit(j)) then
!                if (n.lt.maxpart) then
              if (n.lt.maxpart_mpi) then
                n=n+1
                itrasplit(j)=2*(itrasplit(j)-itramem(j))+itramem(j)
                itrasplit(n)=itrasplit(j)
                itramem(n)=itramem(j)
                itra1(n)=itra1(j)
                idt(n)=idt(j)
                npoint(n)=npoint(j)
                nclass(n)=nclass(j)
                xtra1(n)=xtra1(j)
                ytra1(n)=ytra1(j)
                ztra1(n)=ztra1(j)
                uap(n)=uap(j)
                ucp(n)=ucp(j)
                uzp(n)=uzp(j)
                us(n)=us(j)
                vs(n)=vs(j)
                ws(n)=ws(j)
                cbt(n)=cbt(j)
                do ks=1,nspec
                  xmass1(j,ks)=xmass1(j,ks)/2.
                  xmass1(n,ks)=xmass1(j,ks)
                end do
              endif
            endif
          end do
          numpart=n
        endif
      endif
    endif
    

    if (itime.eq.ideltas) exit         ! almost finished

! Compute interval since radioactive decay of deposited mass was computed
!************************************************************************

    if (itime.lt.loutnext) then
      ldeltat=itime-(loutnext-loutstep)
    else                                  ! first half of next interval
      ldeltat=itime-loutnext
    endif


! Loop over all particles
!************************
    if (mp_measure_time) call mpif_mtime('partloop1',0)

!--------------------------------------------------------------------------------
! various variables for testing reason of CBL scheme, by mc
    well_mixed_vector=0. !erase vector to test well mixed condition: modified by mc
    well_mixed_norm=0.   !erase normalization to test well mixed condition: modified by mc
    avg_ol=0.
    avg_wst=0.
    avg_h=0.
    avg_air_dens=0.  !erase vector to obtain air density at particle positions: modified by mc
!--------------------------------------------------------------------------------

    do j=1,numpart

! If integration step is due, do it
!**********************************

      if (itra1(j).eq.itime) then

        if (ioutputforeachrelease.eq.1) then
          kp=npoint(j)
        else
          kp=1
        endif
! Determine age class of the particle
        itage=abs(itra1(j)-itramem(j))
        do nage=1,nageclass
          if (itage.lt.lage(nage)) exit
        end do
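
! (Comment only) nage now indexes the youngest age class whose upper bound
! lage(nage) exceeds the particle age itage; it selects the age dimension of
! the deposition and flux fields below.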

! Initialize newly released particle
!***********************************

        if ((itramem(j).eq.itime).or.(itime.eq.0)) &
             call initialize(itime,idt(j),uap(j),ucp(j),uzp(j), &
             us(j),vs(j),ws(j),xtra1(j),ytra1(j),ztra1(j),cbt(j))

! Memorize particle positions
!****************************

        xold=xtra1(j)
        yold=ytra1(j)
        zold=ztra1(j)

! Integrate Langevin equation for lsynctime seconds
!*************************************************

        if (mp_measure_time) call mpif_mtime('advance',0)

        call advance(itime,npoint(j),idt(j),uap(j),ucp(j),uzp(j), &
             us(j),vs(j),ws(j),nstop,xtra1(j),ytra1(j),ztra1(j),prob, &
             cbt(j))

        if (mp_measure_time) call mpif_mtime('advance',1)


! Calculate the gross fluxes across layer interfaces
!***************************************************

        if (iflux.eq.1) call calcfluxes(nage,j,xold,yold,zold)


! Determine, when next time step is due
! If trajectory is terminated, mark it
!**************************************

        if (nstop.gt.1) then
          if (linit_cond.ge.1) call initial_cond_calc(itime,j)
          itra1(j)=-999999999
        else
          itra1(j)=itime+lsynctime


! Dry deposition and radioactive decay for each species
! Also check maximum (of all species) of initial mass remaining on the particle;
! if it is below a threshold value, terminate particle
!*****************************************************************************

          xmassfract=0.
          do ks=1,nspec
            if (decay(ks).gt.0.) then             ! radioactive decay
              decfact=exp(-real(abs(lsynctime))*decay(ks))
            else
              decfact=1.
            endif
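
! (Comment only) decfact = exp(-abs(lsynctime)*decay(ks)) is the first-order
! decay over one synchronisation interval; for a species with half-life T1/2
! the decay constant is decay(ks) = ln(2)/T1/2 (illustrative relation, the
! value itself is set elsewhere in FLEXPART).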

            if (DRYDEPSPEC(ks)) then        ! dry deposition
              drydeposit(ks)=xmass1(j,ks)*prob(ks)*decfact
              xmass1(j,ks)=xmass1(j,ks)*(1.-prob(ks))*decfact
              if (decay(ks).gt.0.) then   ! correct for decay (see wetdepo)
                drydeposit(ks)=drydeposit(ks)* &
                     exp(real(abs(ldeltat))*decay(ks))
              endif
            else                           ! no dry deposition
              xmass1(j,ks)=xmass1(j,ks)*decfact
            endif
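
! (Comment only) A fraction prob(ks) of the decayed particle mass goes to
! drydeposit(ks) and the rest stays on the particle; the extra
! exp(+abs(ldeltat)*decay(ks)) factor pre-compensates for the decay applied
! to the accumulated deposition fields at output time (see wetdepo).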

! Skip check on mass fraction when npoint represents particle number
            if (mdomainfill.eq.0.and.mquasilag.eq.0) then
              if (xmass(npoint(j),ks).gt.0.)then 
                   xmassfract=max(xmassfract,real(npart(npoint(j)))* &
                   xmass1(j,ks)/xmass(npoint(j),ks))
                   
                   !CGZ-lifetime: Check mass fraction left/save lifetime
                   ! if(lroot.and.real(npart(npoint(j)))*xmass1(j,ks)/xmass(npoint(j),ks).lt.e_inv.and.checklifetime(j,ks).eq.0.)then
                       !Mass below 1% of initial >register lifetime
                   !     checklifetime(j,ks)=abs(itra1(j)-itramem(j))

                   !     species_lifetime(ks,1)=species_lifetime(ks,1)+abs(itra1(j)-itramem(j))
                   !     species_lifetime(ks,2)= species_lifetime(ks,2)+1
                   ! endif
                   !CGZ-lifetime: Check mass fraction left/save lifetime
                   
              endif
            else
              xmassfract=1.0
            endif
          end do

          if (xmassfract.lt.minmass) then ! .and. sum(real(npart(npoint(j)))*xmass1(j,:)).lt.1.0) then   ! terminate all particles carrying less mass
          !            print*,'terminated particle ',j,' for small mass (', sum(real(npart(npoint(j)))* &
          !         xmass1(j,:)), ' of ', sum(xmass(npoint(j),:)),')'
            itra1(j)=-999999999
            if (verbosity.gt.0) then
              print*,'terminated particle ',j,' for small mass'
            endif
          endif

!        Sabine Eckhardt, June 2008
!        don't create depofield for backward runs
          if (DRYDEP.AND.(ldirect.eq.1)) then
            call drydepokernel(nclass(j),drydeposit,real(xtra1(j)), &
                 real(ytra1(j)),nage,kp)
            if (nested_output.eq.1) call drydepokernel_nest( &
                 nclass(j),drydeposit,real(xtra1(j)),real(ytra1(j)), &
                 nage,kp)
          endif

! Terminate trajectories that are older than maximum allowed age
!***************************************************************

          if (abs(itra1(j)-itramem(j)).ge.lage(nageclass)) then
            if (linit_cond.ge.1) &
                 call initial_cond_calc(itime+lsynctime,j)
            itra1(j)=-999999999
            if (verbosity.gt.0) then
              print*, 'terminated particle ',j,' for age'
            endif
          endif
        endif

      endif

    end do ! j=1, numpart

    if(mp_measure_time) call mpif_mtime('partloop1',1)


! Counter of "unstable" particle velocities during a time scale
! of maxtl=20 minutes (defined in com_mod)
!************************************************************
    total_nan_intl=0
    i_nan=i_nan+1 ! added by mc to count nan during a time of maxtl (i.e. maximum tl fixed here to 20 minutes, see com_mod)
    sum_nan_count(i_nan)=nan_count
    if (i_nan > maxtl/lsynctime) i_nan=1 !lsynctime must be <= maxtl
    do ii_nan=1, (maxtl/lsynctime) 
      total_nan_intl=total_nan_intl+sum_nan_count(ii_nan)
    end do
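
! (Comment only) sum_nan_count is used as a ring buffer with maxtl/lsynctime
! slots (i_nan wraps around), so total_nan_intl is the number of "unstable"
! velocities encountered during the last maxtl seconds (nominally 20 minutes).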

! Output to keep track of the numerical instabilities in CBL simulation
! and if they are compromising the final result (or not):
    if (cblflag.eq.1) print *,j,itime,'nan_synctime',nan_count,'nan_tl',total_nan_intl  


  end do ! itime=0,ideltas,lsynctime


! Complete the calculation of initial conditions for particles not yet terminated
!*****************************************************************************

! eso :TODO: this is not implemented yet (transfer particles to PID 0 or rewrite)
! the tools to do this are already in mpi_mod.f90
  if (lroot) then 
    do j=1,numpart
      if (linit_cond.ge.1) call initial_cond_calc(itime,j)
    end do
  end if


  if (ipout.eq.2) then
! MPI process 0 creates the file, the other processes append to it
    do ip=0, mp_partgroup_np-1
      if (ip.eq.mp_partid) then 
        !if (mp_dbg_mode) write(*,*) 'call partoutput(itime), proc, mp_partid',ip,mp_partid
        call partoutput(itime)    ! dump particle positions
      end if
      call mpif_mpi_barrier
    end do
  end if

! eso :TODO: MPI
  if (linit_cond.ge.1.and.lroot) call initial_cond_output(itime)   ! dump initial cond. field


! De-allocate memory and end
!***************************

999 if (iflux.eq.1) then
    deallocate(flux)
  endif
  if (OHREA) then
    deallocate(OH_field,OH_hourly,lonOH,latOH,altOH)
  endif
  if (ldirect.gt.0) then
    deallocate(drygridunc,wetgridunc)
  endif
  deallocate(gridunc)
  deallocate(xpoint1,xpoint2,ypoint1,ypoint2,zpoint1,zpoint2,xmass)
  if (allocated(checklifetime)) deallocate(checklifetime)
  deallocate(ireleasestart,ireleaseend,npart,kindz)
  deallocate(xmasssave)
  if (nested_output.eq.1) then
    deallocate(orooutn, arean, volumen)
    if (ldirect.gt.0) then
      deallocate(griduncn,drygriduncn,wetgriduncn)
    endif
  endif
  deallocate(outheight,outheighthalf)
  deallocate(oroout, area, volume)

  if (mp_measure_time) call mpif_mtime('timemanager',1)

end subroutine timemanager