source: palm/trunk/SOURCE/lpm.f90 @ 2912

Last change on this file since 2912 was 2801, checked in by thiele, 7 years ago

Introduce particle transfer in nested models

  • Property svn:keywords set to Id
File size: 18.6 KB
MODULE lpm_mod

!> @file lpm.f90
!------------------------------------------------------------------------------!
! This file is part of the PALM model system.
!
! PALM is free software: you can redistribute it and/or modify it under the
! terms of the GNU General Public License as published by the Free Software
! Foundation, either version 3 of the License, or (at your option) any later
! version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2018 Leibniz Universitaet Hannover
!------------------------------------------------------------------------------!
!
! Current revisions:
! ------------------
!
!
! Former revisions:
! -----------------
! $Id: lpm.f90 2801 2018-02-14 16:01:55Z knoop $
! Changed lpm from subroutine to module.
! Introduce particle transfer in nested models.
!
! 2718 2018-01-02 08:49:38Z maronga
! Corrected "Former revisions" section
!
! 2701 2017-12-15 15:40:50Z suehring
! Changes from last commit documented
!
! 2698 2017-12-14 18:46:24Z suehring
! Grid indices passed to lpm_boundary_conds. (responsible Philipp Thiele)
!
! 2696 2017-12-14 17:12:51Z kanani
! Change in file header (GPL part)
!
! 2606 2017-11-10 10:36:31Z schwenkel
! Changed particle box locations: center of particle box now coincides
! with scalar grid point of same index.
! Renamed module and subroutines: lpm_pack_arrays_mod -> lpm_pack_and_sort_mod
! lpm_pack_all_arrays -> lpm_sort_in_subboxes, lpm_pack_arrays -> lpm_pack
! lpm_sort -> lpm_sort_timeloop_done
!
! 2418 2017-09-06 15:24:24Z suehring
! Major bugfixes in modeling SGS particle speeds (since revision 1359).
! Particle sorting added to distinguish between already completed and
! non-completed particles.
!
! 2263 2017-06-08 14:59:01Z schwenkel
! Implemented splitting and merging algorithm
!
! 2233 2017-05-30 18:08:54Z suehring
!
! 2232 2017-05-30 17:47:52Z suehring
! Adjustments to new topography concept
!
! 2000 2016-08-20 18:09:15Z knoop
! Forced header and separation lines into 80 columns
!
! 1936 2016-06-13 13:37:44Z suehring
! Call routine for deallocation of unused memory.
! Formatting adjustments
!
! 1929 2016-06-09 16:25:25Z suehring
! Call wall boundary conditions only if particles are in the vertical range of
! topography.
!
! 1822 2016-04-07 07:49:42Z hoffmann
! Tails removed.
!
! Initialization of sgs model not necessary for the use of cloud_droplets and
! use_sgs_for_particles.
!
! lpm_release_set integrated.
!
! Unused variables removed.
!
! 1682 2015-10-07 23:56:08Z knoop
! Code annotations made doxygen readable
!
! 1416 2014-06-04 16:04:03Z suehring
! user_lpm_advec is called for each gridpoint.
! Bugfix: in order to prevent an infinite loop, time_loop_done is set .TRUE.
! at the head of the do-loop.
!
! 1359 2014-04-11 17:15:14Z hoffmann
! New particle structure integrated.
! Kind definition added to all floating point numbers.
!
! 1320 2014-03-20 08:40:49Z raasch
! ONLY-attribute added to USE-statements,
! kind-parameters added to all INTEGER and REAL declaration statements,
! kinds are defined in new module kinds,
! revision history before 2012 removed,
! comment fields (!:) to be used for variable explanations added to
! all variable declaration statements
!
! 1318 2014-03-17 13:35:16Z raasch
! module interfaces removed
!
! 1036 2012-10-22 13:43:42Z raasch
! code put under GPL (PALM 3.9)
!
! 851 2012-03-15 14:32:58Z raasch
! Bugfix: resetting of particle_mask and tail mask moved from routine
! lpm_exchange_horiz to here (end of sub-timestep loop)
!
! 849 2012-03-15 10:35:09Z raasch
! original routine advec_particles split into several subroutines and renamed
! lpm
!
! 831 2012-02-22 00:29:39Z raasch
! thermal_conductivity_l and diff_coeff_l now depend on temperature and
! pressure
!
! 828 2012-02-21 12:00:36Z raasch
! fast hall/wang kernels with fixed radius/dissipation classes added,
! particle feature color renamed class, routine colker renamed
! recalculate_kernel,
! lower limit for droplet radius changed from 1E-7 to 1E-8
!
! Bugfix: transformation factor for dissipation changed from 1E5 to 1E4
!
! 825 2012-02-19 03:03:44Z raasch
! droplet growth by condensation may include curvature and solution effects,
! initialisation of temporary particle array for resorting removed,
! particle attributes speed_x|y|z_sgs renamed rvar1|2|3,
! module wang_kernel_mod renamed lpm_collision_kernels_mod,
! wang_collision_kernel renamed wang_kernel
!
!
! Revision 1.1  1999/11/25 16:16:06  raasch
! Initial revision
!
!
! Description:
! ------------
!> Particle advection
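!> Driver of the Lagrangian particle model (LPM). It is typically called once
!> per LES timestep from the model's time integration; the sub-timestep loop
!> below then advances every particle until it has reached the full LES
!> timestep dt_3d.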
!------------------------------------------------------------------------------!


    USE arrays_3d,                                                             &
        ONLY:  ql_c, ql_v, ql_vp

    USE control_parameters,                                                    &
        ONLY:  cloud_droplets, dt_3d, dt_3d_reached, dt_3d_reached_l,          &
               molecular_viscosity, simulated_time, topography

    USE cpulog,                                                                &
        ONLY:  cpu_log, log_point, log_point_s

    USE indices,                                                               &
        ONLY:  nxl, nxr, nys, nyn, nzb, nzb_max, nzt

    USE kinds

    USE lpm_exchange_horiz_mod,                                                &
        ONLY:  dealloc_particles_array, lpm_exchange_horiz, lpm_move_particle

    USE lpm_init_mod,                                                          &
        ONLY:  lpm_create_particle, PHASE_RELEASE

    USE lpm_pack_and_sort_mod

    USE particle_attributes,                                                   &
        ONLY:  collision_kernel, deleted_particles, deallocate_memory,         &
               dt_write_particle_data, dt_prel, end_time_prel,                 &
               grid_particles, merging, number_of_particles,                   &
               number_of_particle_groups, particles, particle_groups,          &
               prt_count, splitting, step_dealloc, time_prel,                  &
               time_write_particle_data, trlp_count_sum, trlp_count_recv_sum,  &
               trnp_count_sum, trnp_count_recv_sum, trrp_count_sum,            &
               trrp_count_recv_sum, trsp_count_sum, trsp_count_recv_sum,       &
               use_sgs_for_particles, write_particle_statistics

    USE pegrid

    USE pmc_particle_interface,                                                &
        ONLY:  pmcp_c_get_particle_from_parent, pmcp_p_fill_particle_win,      &
               pmcp_c_send_particle_to_parent, pmcp_p_empty_particle_win,      &
               pmcp_p_delete_particles_in_fine_grid_area

    USE pmc_interface,                                                         &
        ONLY:  nested_run

 IMPLICIT NONE
 PRIVATE
 SAVE

 INTERFACE lpm
    MODULE PROCEDURE lpm
 END INTERFACE lpm

 PUBLIC lpm

CONTAINS
 SUBROUTINE lpm
    IMPLICIT NONE

    INTEGER(iwp)       ::  i                  !< grid index (x) of the current particle box
    INTEGER(iwp)       ::  ie                 !< upper x-index of neighbor boxes to be re-flagged
    INTEGER(iwp)       ::  is                 !< lower x-index of neighbor boxes to be re-flagged
    INTEGER(iwp)       ::  j                  !< grid index (y) of the current particle box
    INTEGER(iwp)       ::  je                 !< upper y-index of neighbor boxes to be re-flagged
    INTEGER(iwp)       ::  js                 !< lower y-index of neighbor boxes to be re-flagged
    INTEGER(iwp), SAVE ::  lpm_count = 0      !< number of lpm calls since last memory deallocation
    INTEGER(iwp)       ::  k                  !< grid index (z) of the current particle box
    INTEGER(iwp)       ::  ke                 !< upper z-index of neighbor boxes to be re-flagged
    INTEGER(iwp)       ::  ks                 !< lower z-index of neighbor boxes to be re-flagged
    INTEGER(iwp)       ::  m                  !< particle group index
    INTEGER(iwp), SAVE ::  steps = 0          !< number of sub-timestep loop iterations

    LOGICAL            ::  first_loop_stride  !< flag for the first iteration of the sub-timestep loop

    CALL cpu_log( log_point(25), 'lpm', 'start' )

!
!-- Write particle data at current time on file.
!-- This has to be done here, before particles are further processed,
!-- because they may be deleted within this timestep (in case that
!-- dt_write_particle_data = dt_prel = particle_maximum_age).
    time_write_particle_data = time_write_particle_data + dt_3d
    IF ( time_write_particle_data >= dt_write_particle_data )  THEN

       CALL lpm_data_output_particles
!
!--    The MOD function allows for changes in the output interval with restart
!--    runs.
       time_write_particle_data = MOD( time_write_particle_data, &
                                  MAX( dt_write_particle_data, dt_3d ) )
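!--    (Illustrative example: with dt_write_particle_data = 100.0 s and
!--    dt_3d < 100.0 s, a counter value of 100.5 s is reset to
!--    MOD( 100.5, 100.0 ) = 0.5 s, so the overshoot is carried over into the
!--    next output interval rather than being discarded.)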
    ENDIF

!
!-- Initialize arrays for marking those particles to be deleted after the
!-- (sub-) timestep
    deleted_particles = 0

!
!-- Initialize variables used for accumulating the number of particles
!-- exchanged between the subdomains during all sub-timesteps (if sgs
!-- velocities are included). These data are output further below on the
!-- particle statistics file.
    trlp_count_sum      = 0
    trlp_count_recv_sum = 0
    trrp_count_sum      = 0
    trrp_count_recv_sum = 0
    trsp_count_sum      = 0
    trsp_count_recv_sum = 0
    trnp_count_sum      = 0
    trnp_count_recv_sum = 0


!
!-- Calculate exponential term used in case of particle inertia for each
!-- of the particle groups
    DO  m = 1, number_of_particle_groups
       IF ( particle_groups(m)%density_ratio /= 0.0_wp )  THEN
          particle_groups(m)%exp_arg  =                                        &
                    4.5_wp * particle_groups(m)%density_ratio *                &
                    molecular_viscosity / ( particle_groups(m)%radius )**2

          particle_groups(m)%exp_term = EXP( -particle_groups(m)%exp_arg *     &
                    dt_3d )
       ENDIF
    ENDDO
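!
!-- (Note: exp_arg can be read as the inverse of a Stokes-type response time,
!-- 1/tau_p = 4.5 * density_ratio * nu / r**2, assuming density_ratio denotes
!-- the fluid-to-particle density ratio; exp_term = EXP( -dt_3d / tau_p ) is
!-- then the decay factor of the analytic solution of the linear drag equation
!-- over one LES timestep.)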

!
!-- If necessary, release a new set of particles
    IF ( time_prel >= dt_prel  .AND.  end_time_prel > simulated_time )  THEN

       CALL lpm_create_particle(PHASE_RELEASE)
!
!--    The MOD function allows for changes in the release interval with
!--    restart runs.
       time_prel = MOD( time_prel, MAX( dt_prel, dt_3d ) )

    ENDIF
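!
!-- (time_prel itself is advanced further below, inside the sub-timestep loop,
!-- but only once dt_3d_reached is .TRUE., i.e. once per LES timestep.)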
!
!-- Reset summation arrays
    IF ( cloud_droplets )  THEN
       ql_c  = 0.0_wp
       ql_v  = 0.0_wp
       ql_vp = 0.0_wp
    ENDIF

    first_loop_stride = .TRUE.
    grid_particles(:,:,:)%time_loop_done = .TRUE.
!
!-- Timestep loop for particle advection.
!-- This loop has to be repeated until the advection time of every particle
!-- (within the total domain!) has reached the LES timestep (dt_3d).
!-- If SGS velocities are included, the particle timestep may be smaller than
!-- the LES timestep (because of the Lagrangian timescale restriction), and
!-- particles may need to carry out several particle timesteps before the LES
!-- timestep is reached. Because the number of these particle timesteps is
!-- unknown beforehand, the steps are carried out in the following infinite
!-- loop with an exit condition.
    DO
       CALL cpu_log( log_point_s(44), 'lpm_advec', 'start' )
       CALL cpu_log( log_point_s(44), 'lpm_advec', 'pause' )

!
!--    If particle advection includes SGS velocity components, calculate the
!--    required SGS quantities (i.e. gradients of the TKE, as well as
!--    horizontally averaged profiles of the SGS TKE and the resolved-scale
!--    velocity variances)
       IF ( use_sgs_for_particles  .AND.  .NOT. cloud_droplets )  THEN
          CALL lpm_init_sgs_tke
       ENDIF

!
!--    If the SGS particle speed is considered, particles may carry out several
!--    particle timesteps. In order to prevent unnecessary treatment of
!--    particles that have already reached the final time level, particles are
!--    sorted into contiguous blocks of finished and not-finished particles,
!--    in addition to their sorting into sub-boxes.
       IF ( .NOT. first_loop_stride  .AND.  use_sgs_for_particles )            &
          CALL lpm_sort_timeloop_done

       DO  i = nxl, nxr
          DO  j = nys, nyn
             DO  k = nzb+1, nzt

                number_of_particles = prt_count(k,j,i)
!
!--             If the grid cell is empty, the flag must be set true
                IF ( number_of_particles <= 0 )  THEN
                   grid_particles(k,j,i)%time_loop_done = .TRUE.
                   CYCLE
                ENDIF

                IF ( .NOT. first_loop_stride  .AND.  &
                     grid_particles(k,j,i)%time_loop_done )  CYCLE

                particles => grid_particles(k,j,i)%particles(1:number_of_particles)

                particles(1:number_of_particles)%particle_mask = .TRUE.
!
!--             Initialize the variable storing the total time that a particle
!--             has advanced within the timestep procedure
                IF ( first_loop_stride )  THEN
                   particles(1:number_of_particles)%dt_sum = 0.0_wp
                ENDIF
!
!--             Particle (droplet) growth by condensation/evaporation and
!--             collision
                IF ( cloud_droplets  .AND.  first_loop_stride )  THEN
!
!--                Droplet growth by condensation / evaporation
                   CALL lpm_droplet_condensation(i,j,k)
!
!--                Particle growth by collision
                   IF ( collision_kernel /= 'none' )  THEN
                      CALL lpm_droplet_collision(i,j,k)
                   ENDIF

                ENDIF
!
!--             Initialize the switch used for the loop exit condition checked
!--             at the end of this loop. If at least one particle has failed to
!--             reach the LES timestep, this switch will be set false in
!--             lpm_advec.
                dt_3d_reached_l = .TRUE.

!
!--             Particle advection
                CALL lpm_advec(i,j,k)
!
!--             Particle reflection from walls. Only applied if the particles
!--             are in the vertical range of the topography. (Here, some
!--             optimization is still possible.)
                IF ( topography /= 'flat'  .AND.  k < nzb_max + 2 )  THEN
                   CALL lpm_boundary_conds( 'walls', i, j, k )
                ENDIF
!
!--             User-defined actions after the calculation of the new particle
!--             position
                CALL user_lpm_advec(i,j,k)
!
!--             Apply boundary conditions to those particles that have crossed
!--             the top or bottom boundary, and delete those particles that are
!--             older than allowed
                CALL lpm_boundary_conds( 'bottom/top', i, j, k )
!
!--             If not all particles of the current grid cell have reached the
!--             LES timestep, this cell has to do another loop iteration. Since
!--             particles can move into neighboring grid cells, these neighbor
!--             cells also have to perform another loop iteration.
!--             Please note that this approach does not work properly if
!--             particles move into another subdomain.
                IF ( .NOT. dt_3d_reached_l )  THEN
                   ks = MAX( nzb+1, k-1 )
                   ke = MIN( nzt, k+1 )
                   js = MAX( nys, j-1 )
                   je = MIN( nyn, j+1 )
                   is = MAX( nxl, i-1 )
                   ie = MIN( nxr, i+1 )
                   grid_particles(ks:ke,js:je,is:ie)%time_loop_done = .FALSE.
                ELSE
                   grid_particles(k,j,i)%time_loop_done = .TRUE.
                ENDIF

             ENDDO
          ENDDO
       ENDDO

       steps = steps + 1
       dt_3d_reached_l = ALL( grid_particles(:,:,:)%time_loop_done )
!
!--    Find out whether all particles on every PE have completed the LES
!--    timestep and set the switch correspondingly
#if defined( __parallel )
       IF ( collective_wait )  CALL MPI_BARRIER( comm2d, ierr )
       CALL MPI_ALLREDUCE( dt_3d_reached_l, dt_3d_reached, 1, MPI_LOGICAL, &
                           MPI_LAND, comm2d, ierr )
#else
       dt_3d_reached = dt_3d_reached_l
#endif
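!
!--    (MPI_LAND is a logical AND reduction, i.e. dt_3d_reached becomes .TRUE.
!--    only after every subdomain has finished its particle sub-timesteps.)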

       CALL cpu_log( log_point_s(44), 'lpm_advec', 'stop' )

!
!--    Increment time since last release
       IF ( dt_3d_reached )  time_prel = time_prel + dt_3d
!
!--    Apply splitting and merging algorithm
       IF ( cloud_droplets )  THEN
          IF ( splitting )  THEN
             CALL lpm_splitting
          ENDIF
          IF ( merging )  THEN
             CALL lpm_merging
          ENDIF
       ENDIF
!
!--    Move particles that are local to the PE to a different grid cell
       CALL lpm_move_particle

!
!--    Horizontal boundary conditions including exchange between subdomains
       CALL lpm_exchange_horiz

       IF ( .NOT. dt_3d_reached  .OR.  .NOT. nested_run )  THEN    ! IF .FALSE., lpm_sort_in_subboxes is done inside pmcp
!
!--       Pack particles (eliminate those marked for deletion),
!--       determine new number of particles
          CALL lpm_sort_in_subboxes
!
!--       Initialize variables for the next (sub-) timestep, i.e., for marking
!--       those particles to be deleted after the timestep
          deleted_particles = 0
       ENDIF

       IF ( dt_3d_reached )  EXIT

       first_loop_stride = .FALSE.
    ENDDO   ! timestep loop
!
!-- In case of nested runs, transfer particles after every full model timestep
    IF ( nested_run )  THEN
       CALL particles_from_parent_to_child
       CALL particles_from_child_to_parent
       CALL pmcp_p_delete_particles_in_fine_grid_area

       CALL lpm_sort_in_subboxes

       deleted_particles = 0
    ENDIF

!
!-- Calculate the new liquid water content for each grid box
    IF ( cloud_droplets )  CALL lpm_calc_liquid_water_content
!
!-- Deallocate unused memory
    IF ( deallocate_memory  .AND.  lpm_count == step_dealloc )  THEN
       CALL dealloc_particles_array
       lpm_count = 0
    ELSEIF ( deallocate_memory )  THEN
       lpm_count = lpm_count + 1
    ENDIF

!
!-- Set particle attributes.
!-- Feature is not available if collision is activated, because the respective
!-- particle attribute (class) is then used for storing the particle radius
!-- class.
    IF ( collision_kernel == 'none' )  CALL lpm_set_attributes

!
!-- Set particle attributes defined by the user
    CALL user_lpm_set_attributes

!
!-- Write particle statistics (in particular the number of particles
!-- exchanged between the subdomains) on file
    IF ( write_particle_statistics )  CALL lpm_write_exchange_statistics

    CALL cpu_log( log_point(25), 'lpm', 'stop' )

 END SUBROUTINE lpm

 SUBROUTINE particles_from_parent_to_child
    IMPLICIT NONE

    CALL pmcp_c_get_particle_from_parent                         ! Child actions
    CALL pmcp_p_fill_particle_win                                ! Parent actions

    RETURN
 END SUBROUTINE particles_from_parent_to_child

 SUBROUTINE particles_from_child_to_parent
    IMPLICIT NONE

    CALL pmcp_c_send_particle_to_parent                          ! Child actions
    CALL pmcp_p_empty_particle_win                               ! Parent actions

    RETURN
 END SUBROUTINE particles_from_child_to_parent


END MODULE lpm_mod