Changeset 4649 for palm


Timestamp:
Aug 25, 2020 12:11:17 PM
Author:
raasch
Message:

files re-formatted to follow the PALM coding standard

Location:
palm/trunk/SOURCE
Files:
16 edited

  • palm/trunk/SOURCE/pmc_child_mod.f90

    r4629 r4649  
    11MODULE pmc_child
    22
    3 !------------------------------------------------------------------------------!
     3!--------------------------------------------------------------------------------------------------!
    44! This file is part of the PALM model system.
    55!
    6 ! PALM is free software: you can redistribute it and/or modify it under the
    7 ! terms of the GNU General Public License as published by the Free Software
    8 ! Foundation, either version 3 of the License, or (at your option) any later
    9 ! version.
    10 !
    11 ! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
    12 ! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
    13 ! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
    14 !
    15 ! You should have received a copy of the GNU General Public License along with
    16 ! PALM. If not, see <http://www.gnu.org/licenses/>.
     6! PALM is free software: you can redistribute it and/or modify it under the terms of the GNU General
     7! Public License as published by the Free Software Foundation, either version 3 of the License, or
     8! (at your option) any later version.
     9!
     10! PALM is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
     11! implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
     12! Public License for more details.
     13!
     14! You should have received a copy of the GNU General Public License along with PALM. If not, see
     15! <http://www.gnu.org/licenses/>.
    1716!
    1817! Copyright 1997-2020 Leibniz Universitaet Hannover
    19 !------------------------------------------------------------------------------!
     18!--------------------------------------------------------------------------------------------------!
     19!
    2020!
    2121! Current revisions:
    22 ! ------------------
     22! -----------------
    2323!
    2424!
     
    2626! -----------------
    2727! $Id$
    28 ! support for MPI Fortran77 interface (mpif.h) removed
    29 !
     28! File re-formatted to follow the PALM coding standard
     29!
     30!
     31! 4629 2020-07-29 09:37:56Z raasch
     32! Support for MPI Fortran77 interface (mpif.h) removed
     33!
    3034! 4360 2020-01-07 11:25:50Z suehring
    31 ! 
    32 ! 
     35!
     36!
    3337! 4182 2019-08-22 15:20:23Z scharf
    3438! Corrected "Former revisions" section
    35 ! 
     39!
    3640! 3964 2019-05-09 09:48:32Z suehring
    3741! Remove unused variable
    38 ! 
     42!
    3943! 3963 2019-05-08 20:09:11Z suehring
    40 ! Bugfixes in initial settings of child and parent communication patterns. 
     44! Bugfixes in initial settings of child and parent communication patterns.
    4145!
    4246! 3945 2019-05-02 11:29:27Z raasch
    4347!
    4448! 3932 2019-04-24 17:31:34Z suehring
    45 ! typo removed
     49! Typo removed
    4650!
    4751! 2019-02-25 15:31:42Z raasch
    48 ! statement added to avoid compiler warning
    49 ! 
     52! Statement added to avoid compiler warning
     53!
    5054! 3655 2019-01-07 16:51:22Z knoop
    51 ! explicit kind settings
     55! Explicit kind settings
    5256!
    5357! 1762 2016-02-25 12:31:13Z hellstea
    5458! Initial revision by K. Ketelsen
    5559!
     60!--------------------------------------------------------------------------------------------------!
    5661! Description:
    5762! ------------
    5863!> Child part of Palm Model Coupler
    59 !------------------------------------------------------------------------------!
     64!--------------------------------------------------------------------------------------------------!
    6065
    6166#if defined( __parallel )
     
    6772    USE kinds
    6873
    69     USE pmc_general,                                                           &
    70         ONLY:  arraydef, childdef, da_desclen, da_namedef, da_namelen, pedef,  &
    71                pmc_da_name_err,  pmc_g_setname, pmc_max_array, pmc_status_ok
    72 
    73     USE pmc_handle_communicator,                                               &
    74         ONLY:  m_model_comm, m_model_npes, m_model_rank, m_to_parent_comm
    75 
    76     USE pmc_mpi_wrapper,                                                       &
    77         ONLY:  pmc_alloc_mem, pmc_bcast, pmc_inter_bcast, pmc_time
     74    USE pmc_general,                                                                               &
     75        ONLY:  arraydef,                                                                           &
     76               childdef,                                                                           &
     77               da_desclen,                                                                         &
     78               da_namedef,                                                                         &
     79               da_namelen,                                                                         &
     80               pedef,                                                                              &
     81               pmc_da_name_err,                                                                    &
     82               pmc_g_setname,                                                                      &
     83               pmc_max_array,                                                                      &
     84               pmc_status_ok
     85
     86    USE pmc_handle_communicator,                                                                   &
     87        ONLY:  m_model_comm,                                                                       &
     88               m_model_npes,                                                                       &
     89               m_model_rank,                                                                       &
     90               m_to_parent_comm
     91
     92    USE pmc_mpi_wrapper,                                                                           &
     93        ONLY:  pmc_alloc_mem,                                                                      &
     94               pmc_bcast,                                                                          &
     95               pmc_inter_bcast,                                                                    &
     96               pmc_time
    7897
    7998    IMPLICIT NONE
     
    83102    SAVE
    84103
    85     TYPE(childdef), PUBLIC ::  me   !<
    86 
    87     INTEGER(iwp) ::  myindex = 0         !< counter and unique number for data arrays
    88     INTEGER(iwp) ::  next_array_in_list = 0   !<
     104    INTEGER(iwp) ::  myindex = 0             !< counter and unique number for data arrays
     105    INTEGER(iwp) ::  next_array_in_list = 0  !<
     106
     107    TYPE(childdef), PUBLIC ::  me  !<
    89108
    90109
     
    129148
    130149
    131     PUBLIC pmc_childinit, pmc_c_clear_next_array_list, pmc_c_getbuffer,        &
    132            pmc_c_getnextarray, pmc_c_putbuffer, pmc_c_setind_and_allocmem,     &
    133            pmc_c_set_dataarray, pmc_set_dataarray_name, pmc_c_get_2d_index_list
     150    PUBLIC pmc_childinit,                                                                          &
     151           pmc_c_clear_next_array_list,                                                            &
     152           pmc_c_getbuffer,                                                                        &
     153           pmc_c_getnextarray,                                                                     &
     154           pmc_c_putbuffer,                                                                        &
     155           pmc_c_setind_and_allocmem,                                                              &
     156           pmc_c_set_dataarray,                                                                    &
     157           pmc_set_dataarray_name,                                                                 &
     158           pmc_c_get_2d_index_list
    134159
    135160 CONTAINS
    136161
    137162
    138 
     163!--------------------------------------------------------------------------------------------------!
     164! Description:
     165! ------------
     166!> @Todo: Missing subroutine description.
     167!--------------------------------------------------------------------------------------------------!
    139168 SUBROUTINE pmc_childinit
    140169
    141      IMPLICIT NONE
    142 
    143      INTEGER(iwp) ::  i        !<
    144      INTEGER(iwp) ::  istat    !<
    145 
    146 !
    147 !--  Get / define the MPI environment
    148      me%model_comm = m_model_comm
    149      me%inter_comm = m_to_parent_comm
    150 
    151      CALL MPI_COMM_RANK( me%model_comm, me%model_rank, istat )
    152      CALL MPI_COMM_SIZE( me%model_comm, me%model_npes, istat )
    153      CALL MPI_COMM_REMOTE_SIZE( me%inter_comm, me%inter_npes, istat )
    154 !
    155 !--  Intra-communicator is used for MPI_GET
    156      CALL MPI_INTERCOMM_MERGE( me%inter_comm, .TRUE., me%intra_comm, istat )
    157      CALL MPI_COMM_RANK( me%intra_comm, me%intra_rank, istat )
    158 
    159      ALLOCATE( me%pes(me%inter_npes) )
    160 !
    161 !--  Allocate an array of type arraydef for all parent processes to store
    162 !--  information of then transfer array
    163      DO  i = 1, me%inter_npes
    164         ALLOCATE( me%pes(i)%array_list(pmc_max_array) )
    165      ENDDO
     170    IMPLICIT NONE
     171
     172    INTEGER(iwp) ::  i      !<
     173    INTEGER(iwp) ::  istat  !<
     174
     175!
     176!-- Get / define the MPI environment.
     177    me%model_comm = m_model_comm
     178    me%inter_comm = m_to_parent_comm
     179
     180    CALL MPI_COMM_RANK( me%model_comm, me%model_rank, istat )
     181    CALL MPI_COMM_SIZE( me%model_comm, me%model_npes, istat )
     182    CALL MPI_COMM_REMOTE_SIZE( me%inter_comm, me%inter_npes, istat )
     183!
     184!-- Intra-communicator is used for MPI_GET.
     185    CALL MPI_INTERCOMM_MERGE( me%inter_comm, .TRUE., me%intra_comm, istat )
     186    CALL MPI_COMM_RANK( me%intra_comm, me%intra_rank, istat )
     187
     188    ALLOCATE( me%pes(me%inter_npes) )
     189!
      190!-- Allocate an array of type arraydef for all parent processes to store information on the
      191!-- transfer arrays.
     192    DO  i = 1, me%inter_npes
     193       ALLOCATE( me%pes(i)%array_list(pmc_max_array) )
     194    ENDDO
    166195
    167196 END SUBROUTINE pmc_childinit
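
Note on the communicator setup above: pmc_childinit follows the usual MPI pattern of querying the
local and remote group sizes of an inter-communicator and merging it into a single
intra-communicator, so that later one-sided (RMA) calls can address parent and child ranks
together. Below is a minimal, self-contained sketch of that pattern, for illustration only: PALM
obtains m_to_parent_comm in pmc_handle_communicator, while the sketch uses MPI_COMM_GET_PARENT
purely as a stand-in, and all other names are invented.

     PROGRAM intercomm_merge_sketch
!
!--     Query local/remote group sizes of an inter-communicator and merge it
!--     into an intra-communicator, as done in pmc_childinit.
        USE MPI
        IMPLICIT NONE
        INTEGER ::  ierr, to_parent_comm, intra_comm
        INTEGER ::  model_rank, model_npes, parent_npes, intra_rank

        CALL MPI_INIT( ierr )
        CALL MPI_COMM_GET_PARENT( to_parent_comm, ierr )

        IF ( to_parent_comm /= MPI_COMM_NULL )  THEN
           CALL MPI_COMM_RANK( MPI_COMM_WORLD, model_rank, ierr )
           CALL MPI_COMM_SIZE( MPI_COMM_WORLD, model_npes, ierr )
!
!--        Number of processes in the remote (parent) group
           CALL MPI_COMM_REMOTE_SIZE( to_parent_comm, parent_npes, ierr )
!
!--        high = .TRUE. orders the child ranks after the parent ranks
           CALL MPI_INTERCOMM_MERGE( to_parent_comm, .TRUE., intra_comm, ierr )
           CALL MPI_COMM_RANK( intra_comm, intra_rank, ierr )
        ENDIF

        CALL MPI_FINALIZE( ierr )
     END PROGRAM intercomm_merge_sketch
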
    168197
    169198
    170 
    171  SUBROUTINE pmc_set_dataarray_name( parentarraydesc, parentarrayname,          &
    172                                     childarraydesc, childarrayname, istat )
    173 
    174     IMPLICIT NONE
    175 
     199!--------------------------------------------------------------------------------------------------!
     200! Description:
     201! ------------
     202!> @Todo: Missing subroutine description.
     203!--------------------------------------------------------------------------------------------------!
     204 SUBROUTINE pmc_set_dataarray_name( parentarraydesc, parentarrayname, childarraydesc,              &
     205                                    childarrayname, istat )
     206
     207    IMPLICIT NONE
     208
     209    CHARACTER(LEN=*), INTENT(IN) ::  childarraydesc   !<
     210    CHARACTER(LEN=*), INTENT(IN) ::  childarrayname   !<
     211    CHARACTER(LEN=*), INTENT(IN) ::  parentarraydesc  !<
    176212    CHARACTER(LEN=*), INTENT(IN) ::  parentarrayname  !<
    177     CHARACTER(LEN=*), INTENT(IN) ::  parentarraydesc  !<
    178     CHARACTER(LEN=*), INTENT(IN) ::  childarrayname   !<
    179     CHARACTER(LEN=*), INTENT(IN) ::  childarraydesc   !<
    180213
    181214    INTEGER(iwp), INTENT(OUT) ::  istat  !<
    182215!
    183216!-- Local variables
     217    INTEGER(iwp) ::  mype  !<
     218
    184219    TYPE(da_namedef) ::  myname  !<
    185220
    186     INTEGER(iwp) ::  mype  !<
    187 
    188221
    189222    istat = pmc_status_ok
    190223!
    191 !-- Check length of array names
    192     IF ( LEN( TRIM( parentarrayname) ) > da_namelen  .OR.                      &
    193          LEN( TRIM( childarrayname) ) > da_namelen )  THEN
     224!-- Check length of array names.
     225    IF ( LEN( TRIM( parentarrayname) ) > da_namelen  .OR.                                          &
     226         LEN( TRIM( childarrayname ) ) > da_namelen )  THEN
    194227       istat = pmc_da_name_err
    195228    ENDIF
     
    205238
    206239!
    207 !-- Broadcast to all child processes
    208 !
    209 !-- The complete description of an transfer names array is broadcasted
    210 
     240!-- Broadcast to all child processes.
     241!-- The complete description of a transfer array is broadcasted.
    211242    CALL pmc_bcast( myname%couple_index, 0, comm=m_model_comm )
    212243    CALL pmc_bcast( myname%parentdesc,   0, comm=m_model_comm )
     
    214245    CALL pmc_bcast( myname%childdesc,    0, comm=m_model_comm )
    215246    CALL pmc_bcast( myname%nameonchild,  0, comm=m_model_comm )
    216 !
    217 !-- Broadcast to all parent processes
    218 !-- The complete description of an transfer array names is broadcasted als to all parent processe
    219 !   Only the root PE of the broadcasts to parent using intra communicator
    220 
     247
     248!
     249!-- Broadcast to all parent processes.
      250!-- The complete description of the transfer array names is broadcasted also to all parent processes.
      251!-- Only the root PE broadcasts to the parent, using the intra communicator.
    221252    IF ( m_model_rank == 0 )  THEN
    222253        mype = MPI_ROOT
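
Note: mype = MPI_ROOT above follows the MPI convention for collective calls on an
inter-communicator. In the group that provides the data, the root process passes MPI_ROOT, all
other processes of that group pass MPI_PROC_NULL, and the processes of the receiving group pass
the rank of the root within the sending group. A hedged sketch of this pattern (a plain MPI_BCAST
with invented names, not the PALM wrapper pmc_bcast):

!
!-- Sketch: rank 0 of the child group sends one integer to all parent
!-- processes across the inter-communicator 'to_parent_comm'.
    SUBROUTINE bcast_to_parent_sketch( value, model_rank, to_parent_comm )
       USE MPI
       IMPLICIT NONE
       INTEGER, INTENT(INOUT) ::  value
       INTEGER, INTENT(IN)    ::  model_rank, to_parent_comm
       INTEGER ::  ierr, root

       IF ( model_rank == 0 )  THEN
          root = MPI_ROOT       ! this rank provides the data
       ELSE
          root = MPI_PROC_NULL  ! remaining ranks of the sending group
       ENDIF
       CALL MPI_BCAST( value, 1, MPI_INTEGER, root, to_parent_comm, ierr )
    END SUBROUTINE bcast_to_parent_sketch

On the receiving (parent) side the matching MPI_BCAST passes, as root argument, the rank of the
sending process within the child group.
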
     
    236267
    237268
    238 
     269!--------------------------------------------------------------------------------------------------!
     270! Description:
     271! ------------
     272!> @Todo: Missing subroutine description.
     273!--------------------------------------------------------------------------------------------------!
    239274 SUBROUTINE pmc_set_dataarray_name_lastentry( lastentry )
    240275
     
    246281    INTEGER ::  idum  !<
    247282    INTEGER ::  mype  !<
     283
    248284    TYPE(dA_namedef) ::  myname  !<
    249285
     
    265301
    266302
    267 
     303!--------------------------------------------------------------------------------------------------!
     304! Description:
     305! ------------
     306!> @Todo: Missing subroutine description.
     307!--------------------------------------------------------------------------------------------------!
    268308 SUBROUTINE pmc_c_get_2d_index_list
    269309
    270310    IMPLICIT NONE
    271311
    272     INTEGER(iwp) :: dummy               !<
    273     INTEGER(iwp) :: i, ierr, i2, j, nr  !<
    274     INTEGER(iwp) :: indwin              !< MPI window object
    275     INTEGER(iwp) :: indwin2             !< MPI window object
    276 
    277     INTEGER(KIND=MPI_ADDRESS_KIND) :: win_size !< Size of MPI window 1 (in bytes)
    278     INTEGER(KIND=MPI_ADDRESS_KIND) :: disp     !< Displacement unit (Integer = 4, floating poit = 8
    279     INTEGER(KIND=MPI_ADDRESS_KIND) :: winsize  !< Size of MPI window 2 (in bytes)
    280 
    281     INTEGER, DIMENSION(me%inter_npes*2) :: nrele  !< Number of Elements of a
    282                                                   !< horizontal slice
     312    INTEGER(iwp) ::  dummy               !<
     313    INTEGER(iwp) ::  i, ierr, i2, j, nr  !<
     314    INTEGER(iwp) ::  indwin              !< MPI window object
     315    INTEGER(iwp) ::  indwin2             !< MPI window object
     316
      317    INTEGER(KIND=MPI_ADDRESS_KIND) ::  disp      !< Displacement unit (integer = 4, floating point = 8)
     318    INTEGER(KIND=MPI_ADDRESS_KIND) ::  winsize   !< Size of MPI window 2 (in bytes)
     319    INTEGER(KIND=MPI_ADDRESS_KIND) ::  win_size  !< Size of MPI window 1 (in bytes)
     320
     321    INTEGER, DIMENSION(me%inter_npes*2) ::  nrele  !< Number of Elements of a horizontal slice
     322
    283323    INTEGER, DIMENSION(:), POINTER ::  myind  !<
    284324
     
    286326
    287327
    288     win_size = STORAGE_SIZE( dummy )/8
    289     CALL MPI_WIN_CREATE( dummy, win_size, iwp, MPI_INFO_NULL, me%intra_comm,   &
    290                          indwin, ierr )
     328    win_size = STORAGE_SIZE( dummy ) / 8
     329    CALL MPI_WIN_CREATE( dummy, win_size, iwp, MPI_INFO_NULL, me%intra_comm, indwin, ierr )
    291330!
    292331!-- Close window on child side and open on parent side
    293332    CALL MPI_WIN_FENCE( 0, indwin, ierr )
    294 
    295 !   Between the two MPI_WIN_FENCE calls, the parent can fill the RMA window
    296 
    297 !-- Close window on parent side and open on child side
     333!
     334!-- Between the two MPI_WIN_FENCE calls, the parent can fill the RMA window.
     335!-- Close window on parent side and open on child side.
    298336
    299337    CALL MPI_WIN_FENCE( 0, indwin, ierr )
     
    301339    DO  i = 1, me%inter_npes
    302340       disp = me%model_rank * 2
    303        CALL MPI_GET( nrele((i-1)*2+1), 2, MPI_INTEGER, i-1, disp, 2,           &
    304                      MPI_INTEGER, indwin, ierr )
    305     ENDDO
    306 !
    307 !-- MPI_GET is non-blocking -> data in nrele is not available until MPI_FENCE is
    308 !-- called
     341       CALL MPI_GET( nrele((i-1)*2+1), 2, MPI_INTEGER, i-1, disp, 2, MPI_INTEGER, indwin, ierr )
     342    ENDDO
     343!
     344!-- MPI_GET is non-blocking -> data in nrele is not available until MPI_FENCE is called.
    309345    CALL MPI_WIN_FENCE( 0, indwin, ierr )
    310346!
     
    327363!
    328364!-- Local buffer used in MPI_GET can but must not be inside the MPI Window.
    329 !-- Here, we use a dummy for the MPI window because the parent processes do
    330 !-- not access the RMA window via MPI_GET or MPI_PUT
    331     CALL MPI_WIN_CREATE( dummy, winsize, iwp, MPI_INFO_NULL, me%intra_comm,    &
    332                          indwin2, ierr )
    333 !
    334 !-- MPI_GET is non-blocking -> data in nrele is not available until MPI_FENCE is
    335 !-- called
     365!-- Here, we use a dummy for the MPI window because the parent processes do not access the RMA
     366!-- window via MPI_GET or MPI_PUT.
     367    CALL MPI_WIN_CREATE( dummy, winsize, iwp, MPI_INFO_NULL, me%intra_comm, indwin2, ierr )
     368!
     369!-- MPI_GET is non-blocking -> data in nrele is not available until MPI_FENCE is called.
    336370
    337371    CALL MPI_WIN_FENCE( 0, indwin2, ierr )
    338 
    339 !  Between the two MPI_WIN_FENCE calls, the parent can fill the RMA window
     372!
     373!-- Between the two MPI_WIN_FENCE calls, the parent can fill the RMA window
    340374
    341375    CALL MPI_WIN_FENCE( 0, indwin2, ierr )
     
    347381          disp = nrele(2*(i-1)+1)
    348382          CALL MPI_WIN_LOCK( MPI_LOCK_SHARED , i-1, 0, indwin2, ierr )
    349           CALL MPI_GET( myind, 2*nr, MPI_INTEGER, i-1, disp, 2*nr,             &
    350                         MPI_INTEGER, indwin2, ierr )
     383          CALL MPI_GET( myind, 2*nr, MPI_INTEGER, i-1, disp, 2*nr, MPI_INTEGER, indwin2, ierr )
    351384          CALL MPI_WIN_UNLOCK( i-1, indwin2, ierr )
    352385          DO  j = 1, nr
     
    360393    ENDDO
    361394!
    362 !-- Don't know why, but this barrier is necessary before we can free the windows
     395!-- Don't know why, but this barrier is necessary before we can free the windows.
    363396    CALL MPI_BARRIER( me%intra_comm, ierr )
    364397
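
Note on the fence/get/fence sequence above: this is the standard active-target RMA pattern. Data
requested with MPI_GET is only guaranteed to have arrived after the MPI_WIN_FENCE that closes the
access epoch. A minimal sketch of the pattern, independent of the PALM data structures (window
contents, sizes and names are illustrative only):

!
!-- Sketch: every rank exposes one integer in an RMA window and fetches the
!-- value of rank 0. 'remote_val' may only be used after the second fence.
    SUBROUTINE fence_get_sketch( comm )
       USE MPI
       IMPLICIT NONE
       INTEGER, INTENT(IN) ::  comm
       INTEGER ::  ierr, my_rank, win, disp_unit
       INTEGER ::  local_val, remote_val
       INTEGER(KIND=MPI_ADDRESS_KIND) ::  win_size, disp

       CALL MPI_COMM_RANK( comm, my_rank, ierr )
       local_val = my_rank + 100
       disp_unit = STORAGE_SIZE( local_val ) / 8
       win_size  = disp_unit

       CALL MPI_WIN_CREATE( local_val, win_size, disp_unit, MPI_INFO_NULL, comm, win, ierr )

       CALL MPI_WIN_FENCE( 0, win, ierr )     ! open the access epoch
       disp = 0
       CALL MPI_GET( remote_val, 1, MPI_INTEGER, 0, disp, 1, MPI_INTEGER, win, ierr )
       CALL MPI_WIN_FENCE( 0, win, ierr )     ! close the epoch, remote_val is now valid

       CALL MPI_WIN_FREE( win, ierr )
    END SUBROUTINE fence_get_sketch
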
     
    384417
    385418!
    386 !--  List handling is still required to get minimal interaction with
    387 !--  pmc_interface
     419!--  List handling is still required to get minimal interaction with pmc_interface.
    388420     CHARACTER(LEN=*), INTENT(OUT) ::  myname  !<
    389421!
    390422!-- Local variables
    391     TYPE(pedef), POINTER    :: ape
    392     TYPE(arraydef), POINTER :: ar
     423    TYPE(pedef), POINTER    :: ape  !<
     424    TYPE(arraydef), POINTER :: ar   !<
    393425
    394426
    395427    next_array_in_list = next_array_in_list + 1
    396428!
    397 !-- Array names are the same on all child PEs, so take first process to
    398 !-- get the name   
     429!-- Array names are the same on all child PEs, so take first process to get the name.
    399430    ape => me%pes(1)
    400431!
    401 !-- Check if all arrays have been processed
     432!-- Check if all arrays have been processed.
    402433    IF ( next_array_in_list > ape%nr_arrays )  THEN
    403434       pmc_c_getnextarray = .FALSE.
     
    409440    myname = ar%name
    410441!
    411 !-- Return true if annother array
    412 !-- If all array have been processed, the RETURN statement a couple of lines above is active
      442!-- Return TRUE if another array follows.
      443!-- If all arrays have been processed, the RETURN statement a couple of lines above is active.
    413444
    414445    pmc_c_getnextarray = .TRUE.
     
    417448
    418449
    419 
     450!--------------------------------------------------------------------------------------------------!
     451! Description:
     452! ------------
     453!> @Todo: Missing subroutine description.
     454!--------------------------------------------------------------------------------------------------!
    420455 SUBROUTINE pmc_c_set_dataarray_2d( array )
    421456
    422457    IMPLICIT NONE
    423458
     459    INTEGER(iwp) ::  i       !<
     460    INTEGER(iwp) ::  nrdims  !<
     461
     462    INTEGER(iwp), DIMENSION(4) ::  dims  !<
     463
    424464    REAL(wp), INTENT(IN) , DIMENSION(:,:), POINTER ::  array  !<
    425465
    426     INTEGER(iwp)               ::  i       !<
    427     INTEGER(iwp)               ::  nrdims  !<
    428     INTEGER(iwp), DIMENSION(4) ::  dims    !<
    429 
    430     TYPE(C_PTR)             ::  array_adr
    431     TYPE(arraydef), POINTER ::  ar
    432     TYPE(pedef), POINTER    ::  ape
     466    TYPE(C_PTR)             ::  array_adr  !<
     467    TYPE(arraydef), POINTER ::  ar         !<
     468    TYPE(pedef), POINTER    ::  ape        !<
    433469
    434470
     
    451487 END SUBROUTINE pmc_c_set_dataarray_2d
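
Note: the pmc_c_set_dataarray_* routines register a coupled array by storing its address as a
TYPE(C_PTR) (via C_LOC) together with its dimensions in the corresponding arraydef entry; the
buffer routines later rebuild a Fortran pointer from that address with C_F_POINTER. A minimal
sketch of this round trip with an invented 2-D array (kinds and names are illustrative only):

!
!-- Sketch: keep only the address and the shape of an array, then recover a
!-- Fortran pointer to the same data, as done with ar%data in this module.
    SUBROUTINE c_ptr_roundtrip_sketch
       USE, INTRINSIC ::  ISO_C_BINDING, ONLY:  C_PTR, C_LOC, C_F_POINTER
       IMPLICIT NONE
       REAL(KIND=8), DIMENSION(:,:), POINTER ::  array    !< original data
       REAL(KIND=8), DIMENSION(:,:), POINTER ::  data_2d  !< view rebuilt from the address
       TYPE(C_PTR) ::  array_adr
       INTEGER, DIMENSION(2) ::  dims

       ALLOCATE( array(10,20) )
       array     = 0.0_8
       dims      = SHAPE( array )
       array_adr = C_LOC( array )            ! register: address + dimensions
!
!--    Later (e.g. in pmc_c_getbuffer): rebuild a Fortran pointer
       CALL C_F_POINTER( array_adr, data_2d, dims )
       data_2d(1,1) = 42.0_8                 ! writes into 'array'

       DEALLOCATE( array )
    END SUBROUTINE c_ptr_roundtrip_sketch
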
    452488
     489
     490!--------------------------------------------------------------------------------------------------!
     491! Description:
     492! ------------
     493!> @Todo: Missing subroutine description.
     494!--------------------------------------------------------------------------------------------------!
    453495 SUBROUTINE pmc_c_set_dataarray_ip2d( array )
    454496
     
    457499    INTEGER(idp), INTENT(IN) , DIMENSION(:,:), POINTER ::  array  !<
    458500
    459     INTEGER(iwp)               ::  i       !<
    460     INTEGER(iwp)               ::  nrdims  !<
    461     INTEGER(iwp), DIMENSION(4) ::  dims    !<
    462 
    463     TYPE(C_PTR)             ::  array_adr
    464     TYPE(arraydef), POINTER ::  ar
    465     TYPE(pedef), POINTER    ::  ape
     501    INTEGER(iwp) ::  i       !<
     502    INTEGER(iwp) ::  nrdims  !<
     503
     504    INTEGER(iwp), DIMENSION(4) ::  dims  !<
     505
     506    TYPE(C_PTR)             ::  array_adr  !<
     507    TYPE(arraydef), POINTER ::  ar         !<
     508    TYPE(pedef), POINTER    ::  ape        !<
    466509
    467510    dims    = 1
     
    483526 END SUBROUTINE pmc_c_set_dataarray_ip2d
    484527
     528
     529!--------------------------------------------------------------------------------------------------!
     530! Description:
     531! ------------
     532!> @Todo: Missing subroutine description.
     533!--------------------------------------------------------------------------------------------------!
    485534 SUBROUTINE pmc_c_set_dataarray_3d (array)
    486535
    487536    IMPLICIT NONE
    488537
     538    INTEGER(iwp) ::  i       !<
     539    INTEGER(iwp) ::  nrdims  !<
     540
     541    INTEGER(iwp), DIMENSION (4) ::  dims  !<
     542
    489543    REAL(wp), INTENT(IN), DIMENSION(:,:,:), POINTER ::  array  !<
    490544
    491     INTEGER(iwp)                ::  i
    492     INTEGER(iwp)                ::  nrdims
    493     INTEGER(iwp), DIMENSION (4) ::  dims
    494    
    495     TYPE(C_PTR)             ::  array_adr
    496     TYPE(pedef), POINTER    ::  ape
    497     TYPE(arraydef), POINTER ::  ar
     545    TYPE(C_PTR)             ::  array_adr  !<
     546    TYPE(pedef), POINTER    ::  ape        !<
     547    TYPE(arraydef), POINTER ::  ar         !<
    498548
    499549
     
    519569
    520570
     571!--------------------------------------------------------------------------------------------------!
     572! Description:
     573! ------------
     574!> @Todo: Missing subroutine description.
     575!--------------------------------------------------------------------------------------------------!
    521576 SUBROUTINE pmc_c_setind_and_allocmem
    522577
     
    528583!--                                    recv -> parent to child transfer
    529584!--                                    send -> child to parent transfer
     585
     586    INTEGER(iwp), PARAMETER ::  noindex = -1  !<
     587
    530588    INTEGER(iwp) ::  arlen        !<
    531     INTEGER(iwp) ::  myindex      !<
    532589    INTEGER(iwp) ::  i            !<
    533590    INTEGER(iwp) ::  ierr         !<
     
    535592    INTEGER(iwp) ::  j            !<
    536593    INTEGER(iwp) ::  lo_nr_arrays !<
     594    INTEGER(iwp) ::  myindex      !<
    537595    INTEGER(iwp) ::  rcount       !<
    538596    INTEGER(iwp) ::  tag          !<
    539597    INTEGER(iwp) ::  total_npes   !<
    540598
    541     INTEGER(iwp), PARAMETER ::  noindex = -1  !<
    542 
    543599    INTEGER(idp)                   ::  bufsize  !< size of MPI data window
     600
    544601    INTEGER(KIND=MPI_ADDRESS_KIND) ::  winsize  !<
    545    
    546     INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE ::  myindex_s
    547     INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE ::  myindex_r
    548 
     602
     603    INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE ::  myindex_s  !<
     604    INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE ::  myindex_r  !<
     605
     606    REAL(wp), DIMENSION(:), POINTER, SAVE ::  base_array_cp  !< base array
    549607    REAL(wp), DIMENSION(:), POINTER, SAVE ::  base_array_pc  !< base array
    550     REAL(wp), DIMENSION(:), POINTER, SAVE ::  base_array_cp  !< base array
    551608
    552609    TYPE(pedef), POINTER    ::  ape       !<
    553610    TYPE(arraydef), POINTER ::  ar        !<
     611
    554612    Type(C_PTR)             ::  base_ptr  !<
    555613
    556  
     614
    557615    CALL MPI_COMM_SIZE (me%intra_comm, total_npes, ierr)
    558616
    559617    lo_nr_arrays = me%pes(1)%nr_arrays
    560618
    561     ALLOCATE(myindex_s(lo_nr_arrays,0:total_npes-1))
    562     ALLOCATE(myindex_r(lo_nr_arrays,0:total_npes-1))
     619    ALLOCATE( myindex_s(lo_nr_arrays,0:total_npes-1) )
     620    ALLOCATE( myindex_r(lo_nr_arrays,0:total_npes-1) )
    563621
    564622    myindex_s = 0
    565623
    566624!
    567 !-- Receive indices from child
    568     CALL MPI_ALLTOALL( myindex_s, lo_nr_arrays, MPI_INTEGER,                   &
    569                        myindex_r, lo_nr_arrays, MPI_INTEGER,                   &
     625!-- Receive indices from child.
     626    CALL MPI_ALLTOALL( myindex_s, lo_nr_arrays, MPI_INTEGER, myindex_r, lo_nr_arrays, MPI_INTEGER, &
    570627                       me%intra_comm, ierr )
    571628
     
    574631!
    575632!-- Parent to child direction.
    576 !-- First stride: compute size and set index
     633!-- First stride: compute size and set index.
    577634    DO  i = 1, me%inter_npes
    578635       ape => me%pes(i)
     
    581638          ar%recvindex = myindex_r(j,i-1)
    582639!
    583 !--       Determine max, because child buffer is allocated only once
    584 !--       All 2D and 3d arrays use the same buffer
     640!--       Determine max, because child buffer is allocated only once.
     641!--       All 2D and 3d arrays use the same buffer.
    585642
    586643          IF ( ar%nrdims == 3 )  THEN
    587              bufsize = MAX( bufsize,                                           &
    588                             INT( ar%a_dim(1)*ar%a_dim(2)*ar%a_dim(3),          &
    589                                  MPI_ADDRESS_KIND ) )
     644             bufsize = MAX( bufsize, INT( ar%a_dim(1)*ar%a_dim(2)*ar%a_dim(3), MPI_ADDRESS_KIND ) )
    590645          ELSE
    591              bufsize = MAX( bufsize,                                           &
    592                             INT( ar%a_dim(1)*ar%a_dim(2), MPI_ADDRESS_KIND ) )
     646             bufsize = MAX( bufsize, INT( ar%a_dim(1)*ar%a_dim(2), MPI_ADDRESS_KIND ) )
    593647          ENDIF
    594648       ENDDO
     
    597651!
    598652!-- Create RMA (one sided communication) data buffer.
    599 !-- The buffer for MPI_GET can be PE local, i.e. it can but must not be part of
    600 !-- the MPI RMA window
     653!-- The buffer for MPI_GET can be PE local, i.e. it can but must not be part of the MPI RMA window.
    601654    CALL pmc_alloc_mem( base_array_pc, bufsize, base_ptr )
    602     me%totalbuffersize = bufsize*wp  ! total buffer size in byte
    603 !
    604 !-- Second stride: set buffer pointer
      655    me%totalbuffersize = bufsize*wp  ! Total buffer size in bytes
     656!
     657!-- Second stride: set buffer pointer.
    605658    DO  i = 1, me%inter_npes
    606659       ape => me%pes(i)
     
    611664    ENDDO
    612665!
    613 !-- Child to parent direction
     666!-- Child to parent direction.
    614667    myindex = 1
    615668    rcount  = 0
     
    648701    ENDDO
    649702!
    650 !-- Send indices to parent
    651 
    652     CALL MPI_ALLTOALL( myindex_s, lo_nr_arrays, MPI_INTEGER,                   &
    653                        myindex_r, lo_nr_arrays, MPI_INTEGER,                   &
     703!-- Send indices to parent.
     704
     705    CALL MPI_ALLTOALL( myindex_s, lo_nr_arrays, MPI_INTEGER, myindex_r, lo_nr_arrays, MPI_INTEGER, &
    654706                       me%intra_comm, ierr)
    655707
     
    658710
    659711!
    660 !-- Create RMA (one sided communication) window for data buffer child to parent
    661 !-- transfer.
    662 !-- The buffer of MPI_GET (counter part of transfer) can be PE-local, i.e. it
    663 !-- can but must not be part of the MPI RMA window. Only one RMA window is
    664 !-- required to prepare the data
     712!-- Create RMA (one sided communication) window for data buffer child to parent transfer.
     713!-- The buffer of MPI_GET (counter part of transfer) can be PE-local, i.e. it can but must not be
     714!-- part of the MPI RMA window. Only one RMA window is required to prepare the data:
    665715!--        for parent -> child transfer on the parent side
    666716!-- and
    667717!--        for child -> parent transfer on the child side
    668718    CALL pmc_alloc_mem( base_array_cp, bufsize )
    669     me%totalbuffersize = bufsize * wp  ! total buffer size in byte
      719    me%totalbuffersize = bufsize * wp  ! Total buffer size in bytes
    670720
    671721    winSize = me%totalbuffersize
    672722
    673     CALL MPI_WIN_CREATE( base_array_cp, winsize, wp, MPI_INFO_NULL,            &
    674                          me%intra_comm, me%win_parent_child, ierr )
     723    CALL MPI_WIN_CREATE( base_array_cp, winsize, wp, MPI_INFO_NULL, me%intra_comm,                 &
     724                         me%win_parent_child, ierr )
    675725    CALL MPI_WIN_FENCE( 0, me%win_parent_child, ierr )
    676726    CALL MPI_BARRIER( me%intra_comm, ierr )
    677727!
    678 !-- Second stride: set buffer pointer
     728!-- Second stride: set buffer pointer.
    679729    DO  i = 1, me%inter_npes
    680730       ape => me%pes(i)
    681731       DO  j = 1, ape%nr_arrays
    682           ar => ape%array_list(j)         
     732          ar => ape%array_list(j)
    683733          IF ( ape%nrele > 0 )  THEN
    684734             ar%sendbuf = C_LOC( base_array_cp(ar%sendindex) )
    685735!
    686 !--          TODO: if this is an error to be really expected, replace the
    687 !--                following message by a meaningful standard PALM message using
    688 !--                the message-routine
      736!--          TODO: If this error is really to be expected, replace the following message by a
     737!--                meaningful standard PALM message using the message-routine.
    689738             IF ( ar%sendindex+ar%sendsize > bufsize )  THEN
    690                 WRITE( 0,'(a,i4,4i7,1x,a)') 'Child buffer too small ', i,      &
    691                           ar%sendindex, ar%sendsize, ar%sendindex+ar%sendsize, &
    692                           bufsize, TRIM( ar%name )
     739                WRITE( 0,'(a,i4,4i7,1x,a)') 'Child buffer too small ', i, ar%sendindex,            &
     740                       ar%sendsize, ar%sendindex+ar%sendsize, bufsize, TRIM( ar%name )
    693741                CALL MPI_ABORT( MPI_COMM_WORLD, istat, ierr )
    694742             ENDIF
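
Note on the index exchange in this subroutine: the two MPI_ALLTOALL calls above send, for every
coupled array, one start index to every partner rank and receive the corresponding indices back,
so that afterwards myindex_r(j,p) holds the buffer index that rank p assigned to array j. A
reduced sketch of that exchange (buffer contents are invented for illustration):

!
!-- Sketch: every rank exchanges 'nr_arrays' integers with every other rank
!-- of 'comm', mirroring the myindex_s/myindex_r exchange above.
    SUBROUTINE index_exchange_sketch( nr_arrays, comm )
       USE MPI
       IMPLICIT NONE
       INTEGER, INTENT(IN) ::  nr_arrays, comm
       INTEGER ::  ierr, npes, j, p
       INTEGER, DIMENSION(:,:), ALLOCATABLE ::  myindex_s, myindex_r

       CALL MPI_COMM_SIZE( comm, npes, ierr )
       ALLOCATE( myindex_s(nr_arrays,0:npes-1) )
       ALLOCATE( myindex_r(nr_arrays,0:npes-1) )
!
!--    Fill the block that is sent to partner rank p (here simply 1, 2, 3, ...)
       DO  p = 0, npes-1
          DO  j = 1, nr_arrays
             myindex_s(j,p) = j
          ENDDO
       ENDDO

       CALL MPI_ALLTOALL( myindex_s, nr_arrays, MPI_INTEGER,                                       &
                          myindex_r, nr_arrays, MPI_INTEGER, comm, ierr )

       DEALLOCATE( myindex_s, myindex_r )
    END SUBROUTINE index_exchange_sketch
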
     
    700748
    701749
    702 
     750!--------------------------------------------------------------------------------------------------!
     751! Description:
     752! ------------
     753!> @Todo: Missing subroutine description.
     754!--------------------------------------------------------------------------------------------------!
    703755 SUBROUTINE pmc_c_getbuffer( waittime, particle_transfer )
    704756
    705757    IMPLICIT NONE
    706758
     759    INTEGER(iwp) ::  ierr     !<
     760    INTEGER(iwp) ::  ij       !<
     761    INTEGER(iwp) ::  ip       !<
     762    INTEGER(iwp) ::  j        !<
     763    INTEGER(iwp) ::  myindex  !<
     764    INTEGER(iwp) ::  nr       !< number of elements to get from parent
     765
     766    INTEGER(KIND=MPI_ADDRESS_KIND) ::  target_disp  !<
     767    INTEGER,DIMENSION(1)           ::  buf_shape    !<
     768
     769    INTEGER(idp), POINTER, DIMENSION(:)   ::  ibuf      !<
     770    INTEGER(idp), POINTER, DIMENSION(:,:) ::  idata_2d  !<
     771
     772    LOGICAL ::  lo_ptrans  !<
     773
     774    LOGICAL, INTENT(IN), OPTIONAL ::  particle_transfer  !<
     775
     776    REAL(wp)                            ::  t1  !<
     777    REAL(wp)                            ::  t2  !<
     778
     779    REAL(wp), POINTER, DIMENSION(:)     ::  buf      !<
     780    REAL(wp), POINTER, DIMENSION(:,:)   ::  data_2d  !<
     781    REAL(wp), POINTER, DIMENSION(:,:,:) ::  data_3d  !<
     782
    707783    REAL(wp), INTENT(OUT), OPTIONAL ::  waittime  !<
    708     LOGICAL, INTENT(IN), OPTIONAL   ::  particle_transfer  !<
    709 
    710     LOGICAL                        ::  lo_ptrans!<
    711    
    712     INTEGER(iwp)                        ::  ierr    !<
    713     INTEGER(iwp)                        ::  ij      !<
    714     INTEGER(iwp)                        ::  ip      !<
    715     INTEGER(iwp)                        ::  j       !<
    716     INTEGER(iwp)                        ::  myindex !<
    717     INTEGER(iwp)                        ::  nr      !< number of elements to get
    718                                                     !< from parent
    719     INTEGER(KIND=MPI_ADDRESS_KIND) ::  target_disp
    720     INTEGER,DIMENSION(1)           ::  buf_shape
    721 
    722     REAL(wp)                            ::  t1
    723     REAL(wp)                            ::  t2
    724 
    725     REAL(wp), POINTER, DIMENSION(:)     ::  buf
    726     REAL(wp), POINTER, DIMENSION(:,:)   ::  data_2d
    727     REAL(wp), POINTER, DIMENSION(:,:,:) ::  data_3d
    728     TYPE(pedef), POINTER                ::  ape
    729     TYPE(arraydef), POINTER             ::  ar
    730     INTEGER(idp), POINTER, DIMENSION(:)     ::  ibuf      !<
    731     INTEGER(idp), POINTER, DIMENSION(:,:)   ::  idata_2d  !<
    732 
    733 !
    734 !-- Synchronization of the model is done in pmci_synchronize.
    735 !-- Therefore the RMA window can be filled without
    736 !-- sychronization at this point and a barrier is not necessary.
    737 
    738 !-- In case waittime is present, the following barrier is necessary to
    739 !-- insure the same number of barrier calls on parent and child
    740 !-- This means, that here on child side two barriers are call successively
    741 !-- The parent is filling its buffer between the two barrier calls
    742 
    743 !-- Please note that waittime has to be set in pmc_s_fillbuffer AND
    744 !-- pmc_c_getbuffer
     784
     785    TYPE(pedef), POINTER    ::  ape  !<
     786    TYPE(arraydef), POINTER ::  ar   !<
     787
     788
     789
     790!
     791!-- Synchronization of the model is done in pmci_synchronize. Therefore the RMA window can be filled
      792!-- without synchronization at this point and a barrier is not necessary.
     793
      794!-- In case waittime is present, the following barrier is necessary to ensure the same number of
      795!-- barrier calls on parent and child. This means that here, on the child side, two barriers are called
     796!-- successively. The parent is filling its buffer between the two barrier calls.
     797
     798!-- Please note that waittime has to be set in pmc_s_fillbuffer AND pmc_c_getbuffer.
    745799    IF ( PRESENT( waittime ) )  THEN
    746800       t1 = pmc_time()
     
    751805
    752806    lo_ptrans = .FALSE.
    753     IF ( PRESENT( particle_transfer))    lo_ptrans = particle_transfer
    754 
    755 !
    756 !-- Wait for buffer is filled.
    757 !
    758 !-- The parent side (in pmc_s_fillbuffer) is filling the buffer in the MPI RMA window
    759 !-- When the filling is complet, a MPI_BARRIER is called.
    760 !-- The child is not allowd to access the parent-buffer before it is completely filled
    761 !-- therefore the following barrier is required.
     807    IF ( PRESENT( particle_transfer) )  lo_ptrans = particle_transfer
     808
     809!
     810!-- Wait for buffer to be filled.
     811!
     812!-- The parent side (in pmc_s_fillbuffer) is filling the buffer in the MPI RMA window. When the
      813!-- filling is complete, an MPI_BARRIER is called. The child is not allowed to access the parent buffer
     814!-- before it is completely filled. Therefore the following barrier is required.
    762815
    763816    CALL MPI_BARRIER( me%intra_comm, ierr )
     
    768821          ar => ape%array_list(j)
    769822
    770           IF ( ar%dimkey == 2 .AND. .NOT.lo_ptrans)  THEN
     823          IF ( ar%dimkey == 2  .AND.  .NOT.  lo_ptrans)  THEN
    771824             nr = ape%nrele
    772           ELSEIF ( ar%dimkey == 3 .AND. .NOT.lo_ptrans)  THEN
     825          ELSEIF ( ar%dimkey == 3  .AND.  .NOT.  lo_ptrans)  THEN
    773826             nr = ape%nrele * ar%a_dim(1)
    774827          ELSE IF ( ar%dimkey == 22 .AND. lo_ptrans)  THEN
    775828             nr = ape%nrele
    776829          ELSE
    777              CYCLE                    ! Particle array ar not transferd here
      830             CYCLE   ! Particle arrays are not transferred here
    778831          ENDIF
    779832          buf_shape(1) = nr
    780           IF ( lo_ptrans )   THEN
     833          IF ( lo_ptrans )  THEN
    781834             CALL C_F_POINTER( ar%recvbuf, ibuf, buf_shape )
    782835          ELSE
     
    785838!
    786839!--       MPI passive target RMA
    787 !--       One data array is fetcht from MPI RMA window on parent
      840!--       One data array is fetched from the MPI RMA window on the parent
    788841
    789842          IF ( nr > 0 )  THEN
    790843             target_disp = ar%recvindex - 1
    791              CALL MPI_WIN_LOCK( MPI_LOCK_SHARED , ip-1, 0,                     &
    792                                 me%win_parent_child, ierr )
    793              IF ( lo_ptrans )   THEN
    794                 CALL MPI_GET( ibuf, nr*8, MPI_BYTE, ip-1, target_disp, nr*8, MPI_BYTE,  &               !There is no MPI_INTEGER8 datatype
    795                                    me%win_parent_child, ierr )
     844             CALL MPI_WIN_LOCK( MPI_LOCK_SHARED , ip-1, 0, me%win_parent_child, ierr )
     845             IF ( lo_ptrans )  THEN
     846                CALL MPI_GET( ibuf, nr*8, MPI_BYTE, ip-1, target_disp, nr*8, MPI_BYTE, &  !There is no MPI_INTEGER8 datatype
     847                              me%win_parent_child, ierr )
    796848             ELSE
    797                 CALL MPI_GET( buf, nr, MPI_REAL, ip-1, target_disp, nr,        &
    798                               MPI_REAL, me%win_parent_child, ierr )
     849                CALL MPI_GET( buf, nr, MPI_REAL, ip-1, target_disp, nr, MPI_REAL,                  &
     850                              me%win_parent_child, ierr )
    799851             ENDIF
    800852             CALL MPI_WIN_UNLOCK( ip-1, me%win_parent_child, ierr )
    801853          ENDIF
    802854          myindex = 1
    803           IF ( ar%dimkey == 2 .AND. .NOT.lo_ptrans)  THEN
     855          IF ( ar%dimkey == 2  .AND.  .NOT.  lo_ptrans)  THEN
    804856
    805857             CALL C_F_POINTER( ar%data, data_2d, ar%a_dim(1:2) )
     
    809861             ENDDO
    810862
    811           ELSEIF ( ar%dimkey == 3 .AND. .NOT.lo_ptrans)  THEN
     863          ELSEIF ( ar%dimkey == 3  .AND.  .NOT.  lo_ptrans)  THEN
    812864
    813865             CALL C_F_POINTER( ar%data, data_3d, ar%a_dim(1:3) )
    814866             DO  ij = 1, ape%nrele
    815                 data_3d(:,ape%locind(ij)%j,ape%locind(ij)%i) =                 &
    816                                               buf(myindex:myindex+ar%a_dim(1)-1)
     867                data_3d(:,ape%locind(ij)%j,ape%locind(ij)%i) = buf(myindex:myindex+ar%a_dim(1)-1)
    817868                myindex = myindex+ar%a_dim(1)
    818869             ENDDO
     
    833884
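
Note on the transfer loop above: pmc_c_getbuffer uses passive-target RMA. The child locks the
parent's window with MPI_WIN_LOCK( MPI_LOCK_SHARED, ... ), fetches its block with MPI_GET, and
MPI_WIN_UNLOCK completes the transfer, so the parent does not take part in the synchronisation of
the individual gets. A minimal sketch of that access pattern (kinds, names and the surrounding
barrier logic are illustrative only):

!
!-- Sketch: fetch 'nr' values from the window of rank 'target_rank', starting
!-- at element 'recvindex'. The data in 'buf' are valid after MPI_WIN_UNLOCK.
    SUBROUTINE passive_get_sketch( win, target_rank, recvindex, nr, buf )
       USE MPI
       IMPLICIT NONE
       INTEGER, INTENT(IN) ::  win, target_rank, recvindex, nr
       DOUBLE PRECISION, DIMENSION(nr), INTENT(OUT) ::  buf
       INTEGER ::  ierr
       INTEGER(KIND=MPI_ADDRESS_KIND) ::  target_disp

       target_disp = recvindex - 1
       CALL MPI_WIN_LOCK( MPI_LOCK_SHARED, target_rank, 0, win, ierr )
       CALL MPI_GET( buf, nr, MPI_DOUBLE_PRECISION, target_rank, target_disp, nr,                  &
                     MPI_DOUBLE_PRECISION, win, ierr )
       CALL MPI_WIN_UNLOCK( target_rank, win, ierr )
    END SUBROUTINE passive_get_sketch
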
    834885
    835 
     886!--------------------------------------------------------------------------------------------------!
     887! Description:
     888! ------------
     889!> @Todo: Missing subroutine description.
     890!--------------------------------------------------------------------------------------------------!
    836891 SUBROUTINE pmc_c_putbuffer( waittime , particle_transfer )
    837892
    838893    IMPLICIT NONE
    839894
    840     REAL(wp), INTENT(OUT), OPTIONAL ::  waittime  !<
     895    INTEGER(iwp) ::  ierr     !<
     896    INTEGER(iwp) ::  ij       !<
     897    INTEGER(iwp) ::  ip       !<
     898    INTEGER(iwp) ::  j        !<
     899    INTEGER(iwp) ::  myindex  !<
     900
     901    INTEGER(iwp), DIMENSION(1) ::  buf_shape  !<
     902
     903    INTEGER(idp), POINTER, DIMENSION(:)   ::  ibuf      !<
     904    INTEGER(idp), POINTER, DIMENSION(:,:) ::  idata_2d  !<
     905
    841906    LOGICAL, INTENT(IN), OPTIONAL   ::  particle_transfer  !<
    842907
    843     LOGICAL ::  lo_ptrans!<
    844    
    845     INTEGER(iwp) ::  ierr         !<
    846     INTEGER(iwp) ::  ij           !<
    847     INTEGER(iwp) ::  ip           !<
    848     INTEGER(iwp) ::  j            !<
    849     INTEGER(iwp) ::  myindex      !<
    850 
    851     INTEGER(iwp), DIMENSION(1) ::  buf_shape    !<
     908    LOGICAL ::  lo_ptrans  !<
    852909
    853910    REAL(wp) ::  t1  !<
    854911    REAL(wp) ::  t2  !<
    855912
    856     REAL(wp), POINTER, DIMENSION(:)         ::  buf      !<
    857     REAL(wp), POINTER, DIMENSION(:,:)       ::  data_2d  !<
    858     REAL(wp), POINTER, DIMENSION(:,:,:)     ::  data_3d  !<
    859    
    860     INTEGER(idp), POINTER, DIMENSION(:)     ::  ibuf      !<
    861     INTEGER(idp), POINTER, DIMENSION(:,:)   ::  idata_2d  !<
    862 
    863     TYPE(pedef), POINTER                    ::  ape  !<
    864     TYPE(arraydef), POINTER                 ::  ar   !<
    865 
    866 !
    867 !-- Wait for empty buffer
    868 !-- Switch RMA epoche
     913    REAL(wp), INTENT(OUT), OPTIONAL ::  waittime  !<
     914
     915    REAL(wp), POINTER, DIMENSION(:)     ::  buf      !<
     916    REAL(wp), POINTER, DIMENSION(:,:)   ::  data_2d  !<
     917    REAL(wp), POINTER, DIMENSION(:,:,:) ::  data_3d  !<
     918
     919    TYPE(pedef), POINTER    ::  ape  !<
     920    TYPE(arraydef), POINTER ::  ar   !<
     921
     922!
     923!-- Wait for empty buffer.
      924!-- Switch RMA epoch.
    869925
    870926    t1 = pmc_time()
     
    874930
    875931    lo_ptrans = .FALSE.
    876     IF ( PRESENT( particle_transfer)  lo_ptrans = particle_transfer
     932    IF ( PRESENT( particle_transfer) )  lo_ptrans = particle_transfer
    877933
    878934    DO  ip = 1, me%inter_npes
     
    882938          myindex = 1
    883939
    884           IF ( ar%dimkey == 2 .AND. .NOT.lo_ptrans )  THEN
     940          IF ( ar%dimkey == 2  .AND.  .NOT.  lo_ptrans )  THEN
    885941
    886942             buf_shape(1) = ape%nrele
     
    892948             ENDDO
    893949
    894           ELSEIF ( ar%dimkey == 3 .AND. .NOT.lo_ptrans )  THEN
     950          ELSEIF ( ar%dimkey == 3  .AND.  .NOT.  lo_ptrans )  THEN
    895951
    896952             buf_shape(1) = ape%nrele*ar%a_dim(1)
     
    898954             CALL C_F_POINTER( ar%data,    data_3d, ar%a_dim(1:3) )
    899955             DO  ij = 1, ape%nrele
    900                 buf(myindex:myindex+ar%a_dim(1)-1) =                            &
    901                                     data_3d(:,ape%locind(ij)%j,ape%locind(ij)%i)
     956                buf(myindex:myindex+ar%a_dim(1)-1) = data_3d(:,ape%locind(ij)%j,ape%locind(ij)%i)
    902957                myindex = myindex + ar%a_dim(1)
    903958             ENDDO
     
    918973    ENDDO
    919974!
    920 !-- Buffer is filled
    921 !-- Switch RMA epoche
     975!-- Buffer is filled.
      976!-- Switch RMA epoch.
    922977
    923978    CALL MPI_Barrier(me%intra_comm, ierr)
  • palm/trunk/SOURCE/pmc_general_mod.f90

    r4629 r4649  
    11 MODULE pmc_general
    22
    3 !------------------------------------------------------------------------------!
     3!--------------------------------------------------------------------------------------------------!
    44! This file is part of the PALM model system.
    55!
    6 ! PALM is free software: you can redistribute it and/or modify it under the
    7 ! terms of the GNU General Public License as published by the Free Software
    8 ! Foundation, either version 3 of the License, or (at your option) any later
    9 ! version.
    10 !
    11 ! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
    12 ! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
    13 ! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
    14 !
    15 ! You should have received a copy of the GNU General Public License along with
    16 ! PALM. If not, see <http://www.gnu.org/licenses/>.
     6! PALM is free software: you can redistribute it and/or modify it under the terms of the GNU General
     7! Public License as published by the Free Software Foundation, either version 3 of the License, or
     8! (at your option) any later version.
     9!
     10! PALM is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
     11! implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
     12! Public License for more details.
     13!
     14! You should have received a copy of the GNU General Public License along with PALM. If not, see
     15! <http://www.gnu.org/licenses/>.
    1716!
    1817! Copyright 1997-2020 Leibniz Universitaet Hannover
    19 !------------------------------------------------------------------------------!
     18!--------------------------------------------------------------------------------------------------!
     19!
    2020!
    2121! Current revisions:
    22 ! ------------------
     22! -----------------
    2323!
    2424!
     
    2626! -----------------
    2727! $Id$
    28 ! support for MPI Fortran77 interface (mpif.h) removed
    29 !
     28! File re-formatted to follow the PALM coding standard
     29!
     30! 4629 2020-07-29 09:37:56Z raasch
     31! Support for MPI Fortran77 interface (mpif.h) removed
     32!
    3033! 4360 2020-01-07 11:25:50Z suehring
    3134! Corrected "Former revisions" section
     
    3841! 3655 2019-01-07 16:51:22Z knoop
    3942! Determine number of coupled arrays dynamically.
    40 ! 
     43!
    4144! 1762 2016-02-25 12:31:13Z hellstea
    4245! Initial revision by K. Ketelsen
     
    4649!
    4750! Structure definition and utilities of Palm Model Coupler
    48 !------------------------------------------------------------------------------!
     51!--------------------------------------------------------------------------------------------------!
    4952
    5053#if defined( __parallel )
     
    6164    SAVE
    6265
    63     INTEGER(iwp), PUBLIC            :: pmc_max_array          !< max # of arrays which can be coupled - will be determined dynamically in pmc_interface
    64    
    6566    INTEGER(iwp), PARAMETER, PUBLIC :: da_desclen       =  8  !<
    6667    INTEGER(iwp), PARAMETER, PUBLIC :: da_namelen       = 16  !<
     
    7071    INTEGER(iwp), PARAMETER, PUBLIC :: pmc_status_error = -1  !<
    7172
     73    INTEGER(iwp), PUBLIC ::  pmc_max_array  !< max # of arrays which can be coupled
     74                                            !< - will be determined dynamically in pmc_interface
     75
    7276
    7377    TYPE, PUBLIC :: xy_ind  !< pair of indices in horizontal plane
     
    7781
    7882    TYPE, PUBLIC ::  arraydef
    79        INTEGER(iwp)                   :: coupleindex  !<
    80        INTEGER(iwp)                   :: nrdims       !< number of dimensions
    81        INTEGER(iwp)                   :: dimkey       !< key for NR dimensions and array type
    82        INTEGER(iwp), DIMENSION(4)     :: a_dim        !< size of dimensions
    83        TYPE(C_PTR)               :: data         !< pointer of data in parent space
    84        TYPE(C_PTR), DIMENSION(2) :: po_data      !< base pointers,
    85                                                  !< pmc_s_set_active_data_array
    86                                                  !< sets active pointer
    87        INTEGER(idp)              :: SendIndex    !< index in send buffer
    88        INTEGER(idp)              :: RecvIndex    !< index in receive buffer
    89        INTEGER(iwp)              :: SendSize     !< size in send buffer
    90        INTEGER(iwp)              :: RecvSize     !< size in receive buffer
    91        TYPE(C_PTR)               :: SendBuf      !< data pointer in send buffer
    92        TYPE(C_PTR)               :: RecvBuf      !< data pointer in receive buffer
    93        CHARACTER(LEN=da_namelen) :: Name         !< name of array
    94        TYPE(arraydef), POINTER   :: next
     83       CHARACTER(LEN=da_namelen) ::  Name  !< name of array
     84
     85       INTEGER(iwp) ::  coupleindex  !<
     86       INTEGER(iwp) ::  dimkey       !< key for NR dimensions and array type
     87       INTEGER(iwp) ::  nrdims       !< number of dimensions
     88       INTEGER(iwp) ::  RecvSize     !< size in receive buffer
     89       INTEGER(iwp) ::  SendSize     !< size in send buffer
     90
     91       INTEGER(idp) ::  RecvIndex  !< index in receive buffer
     92       INTEGER(idp) ::  SendIndex  !< index in send buffer
     93
     94       INTEGER(iwp), DIMENSION(4) ::  a_dim  !< size of dimensions
     95
     96       TYPE(C_PTR) ::  data     !< pointer of data in parent space
     97       TYPE(C_PTR) ::  SendBuf  !< data pointer in send buffer
     98       TYPE(C_PTR) ::  RecvBuf  !< data pointer in receive buffer
     99
     100       TYPE(arraydef), POINTER ::  next  !<
     101
     102       TYPE(C_PTR), DIMENSION(2) ::  po_data  !< base pointers, pmc_s_set_active_data_array
     103                                              !< sets active pointer
    95104    END TYPE arraydef
    96105
    97     TYPE(arraydef), PUBLIC, POINTER  :: next
     106
     107    TYPE(arraydef), PUBLIC, POINTER  :: next  !<
     108
    98109
    99110    TYPE, PUBLIC ::  pedef
     100111       INTEGER(iwp) :: nr_arrays = 0  !< number of arrays which will be transferred
    101112       INTEGER(iwp) :: nrele          !< number of elements, same for all arrays
     113
      114       TYPE(arraydef), POINTER, DIMENSION(:) ::  array_list  !< list of data arrays to be transferred
    102115       TYPE(xy_ind), POINTER, DIMENSION(:)   ::  locInd      !< xy index local array for remote PE
    103        TYPE(arraydef), POINTER, DIMENSION(:) ::  array_list  !< list of data arrays to be transfered
    104116    END TYPE pedef
    105117
     118
    106119    TYPE, PUBLIC ::  childdef
     120       INTEGER(iwp) ::  inter_comm         !< inter communicator model and child
     121       INTEGER(iwp) ::  inter_npes         !< number of PEs child model
     122       INTEGER(iwp) ::  intra_comm         !< intra communicator model and child
     123       INTEGER(iwp) ::  intra_rank         !< rank within intra_comm
     124       INTEGER(iwp) ::  model_comm         !< communicator of this model
     125       INTEGER(iwp) ::  model_npes         !< number of PEs this model
     126       INTEGER(iwp) ::  model_rank         !< rank of this model
    107127       INTEGER(idp) ::  totalbuffersize    !<
    108        INTEGER(iwp) ::  model_comm         !< communicator of this model
    109        INTEGER(iwp) ::  inter_comm         !< inter communicator model and child
    110        INTEGER(iwp) ::  intra_comm         !< intra communicator model and child
    111        INTEGER(iwp) ::  model_rank         !< rank of this model
    112        INTEGER(iwp) ::  model_npes         !< number of PEs this model
    113        INTEGER(iwp) ::  inter_npes         !< number of PEs child model
    114        INTEGER(iwp) ::  intra_rank         !< rank within intra_comm
    115128       INTEGER(iwp) ::  win_parent_child   !< MPI RMA for preparing data on parent AND child side
    116129       TYPE(pedef), DIMENSION(:), POINTER ::  pes  !< list of all child PEs
    117130    END TYPE childdef
    118131
     132
    119133    TYPE, PUBLIC ::  da_namedef  !< data array name definition
    120        INTEGER(iwp)              ::  couple_index  !< unique number of array
    121        CHARACTER(LEN=da_desclen) ::  parentdesc    !< parent array description
    122        CHARACTER(LEN=da_namelen) ::  nameonparent  !< name of array within parent
    123134       CHARACTER(LEN=da_desclen) ::  childdesc     !< child array description
    124135       CHARACTER(LEN=da_namelen) ::  nameonchild   !< name of array within child
     136       CHARACTER(LEN=da_namelen) ::  nameonparent  !< name of array within parent
     137       CHARACTER(LEN=da_desclen) ::  parentdesc    !< parent array description
     138       INTEGER(iwp)              ::  couple_index  !< unique number of array
    125139    END TYPE da_namedef
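
Note on the derived types above: they form a simple hierarchy. One childdef describes the coupling
to one remote model and holds a pedef for every remote process; each pedef in turn holds an
array_list of arraydef entries, one per coupled data array. A reduced sketch of that nesting
(fields trimmed to the structural ones, names prefixed with sk_ to mark them as invented):

!
!-- Sketch of the nesting used by the coupler data structures:
!-- childdef -> pes(:) (one pedef per remote PE) -> array_list(:) (arraydef).
    MODULE pmc_types_sketch
       IMPLICIT NONE

       TYPE sk_arraydef
          CHARACTER(LEN=16) ::  name    !< name of the coupled array
          INTEGER           ::  nrdims  !< number of dimensions
       END TYPE sk_arraydef

       TYPE sk_pedef
          INTEGER ::  nr_arrays = 0                                !< arrays to be transferred
          TYPE(sk_arraydef), POINTER, DIMENSION(:) ::  array_list  !< one entry per array
       END TYPE sk_pedef

       TYPE sk_childdef
          INTEGER ::  inter_npes                         !< number of remote PEs
          TYPE(sk_pedef), DIMENSION(:), POINTER ::  pes  !< one entry per remote PE
       END TYPE sk_childdef
    END MODULE pmc_types_sketch
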
    126140
     
    137151 CONTAINS
    138152
    139 
    140    
     153!--------------------------------------------------------------------------------------------------!
     154! Description:
     155! ------------
     156!> @Todo: Missing subroutine description.
     157!--------------------------------------------------------------------------------------------------!
    141158 SUBROUTINE pmc_g_setname( mychild, couple_index, aname )
    142159
     
    144161
    145162    CHARACTER(LEN=*)              ::  aname         !<
     163
    146164    INTEGER(iwp), INTENT(IN)      ::  couple_index  !<
     165
     166    INTEGER(iwp) ::  i  !<
     167
    147168    TYPE(childdef), INTENT(INOUT) ::  mychild       !<
    148 
    149     INTEGER(iwp) ::  i  !<
    150169
    151170    TYPE(pedef), POINTER    ::  ape  !<
     
    164183
    165184
    166 
     185!--------------------------------------------------------------------------------------------------!
     186! Description:
     187! ------------
     188!> @Todo: Missing subroutine description.
     189!--------------------------------------------------------------------------------------------------!
    167190 SUBROUTINE sort_2d_i( array, sort_ind )
    168191
     
    178201    INTEGER(iwp), DIMENSION(SIZE(array,1)) ::  tmp  !<
    179202
    180     n = SIZE(array,2)
     203    n = SIZE( array, 2 )
    181204    DO  j = 1, n-1
    182205       DO  i = j+1, n
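The body of sort_2d_i is cut off by the diff context at this point. The following stand-alone program merely illustrates the O(n**2) exchange-sort pattern suggested by the visible loop header, under the assumption that the columns of the 2-d array are ordered by one selected row; it is a sketch, not the routine's actual implementation.

 PROGRAM sort_2d_demo

    IMPLICIT NONE

    INTEGER ::  i  !< inner loop index
    INTEGER ::  j  !< outer loop index

    INTEGER, PARAMETER ::  sort_ind = 1  !< row by which the columns are ordered (assumed meaning)

    INTEGER, DIMENSION(2)   ::  tmp  !< swap buffer holding one full column
    INTEGER, DIMENSION(2,4) ::  a    !< demo array: 2 rows, 4 columns

    a(1,:) = (/ 3, 1, 4, 2 /)
    a(2,:) = (/ 30, 10, 40, 20 /)
!
!-- Simple exchange sort over the second dimension, mirroring the nested j/i loops above
    DO  j = 1, SIZE( a, 2 ) - 1
       DO  i = j+1, SIZE( a, 2 )
          IF ( a(sort_ind,i) < a(sort_ind,j) )  THEN
             tmp    = a(:,j)
             a(:,j) = a(:,i)
             a(:,i) = tmp
          ENDIF
       ENDDO
    ENDDO

    WRITE( *, * )  a(1,:)   ! prints: 1 2 3 4

 END PROGRAM sort_2d_demo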
  • palm/trunk/SOURCE/pmc_handle_communicator_mod.f90

    r4629 r4649  
    11!> @file pmc_handle_communicator_mod.f90
    2 !------------------------------------------------------------------------------!
     2!--------------------------------------------------------------------------------------------------!
    33! This file is part of the PALM model system.
    44!
    5 ! PALM is free software: you can redistribute it and/or modify it under the
    6 ! terms of the GNU General Public License as published by the Free Software
    7 ! Foundation, either version 3 of the License, or (at your option) any later
    8 ! version.
    9 !
    10 ! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
    11 ! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
    12 ! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
    13 !
    14 ! You should have received a copy of the GNU General Public License along with
    15 ! PALM. If not, see <http://www.gnu.org/licenses/>.
     5! PALM is free software: you can redistribute it and/or modify it under the terms of the GNU General
     6! Public License as published by the Free Software Foundation, either version 3 of the License, or
     7! (at your option) any later version.
     8!
     9! PALM is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
     10! implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
     11! Public License for more details.
     12!
     13! You should have received a copy of the GNU General Public License along with PALM. If not, see
     14! <http://www.gnu.org/licenses/>.
    1615!
    1716! Copyright 1997-2020 Leibniz Universitaet Hannover
    18 !------------------------------------------------------------------------------!
     17!--------------------------------------------------------------------------------------------------!
     18!
    1919!
    2020! Current revisions:
    21 ! ------------------
     21! -----------------
    2222!
    2323!
     
    2525! -----------------
    2626! $Id$
    27 ! support for MPI Fortran77 interface (mpif.h) removed
    28 !
     27! File re-formatted to follow the PALM coding standard
     28!
     29!
     30! 4629 2020-07-29 09:37:56Z raasch
     31! Support for MPI Fortran77 interface (mpif.h) removed
     32!
    2933! 4360 2020-01-07 11:25:50Z suehring
    3034! Corrected "Former revisions" section
    31 ! 
     35!
    3236! 3888 2019-04-12 09:18:10Z hellstea
    3337! Missing MPI_BCAST of anterpolation_buffer_width added.
    34 ! 
     38!
    3539! 3885 2019-04-11 11:29:34Z kanani
    36 ! Changes related to global restructuring of location messages and introduction
    37 ! of additional debug messages
    38 ! 
     40! Changes related to global restructuring of location messages and introduction of additional debug
     41! messages
     42!
    3943! 3819 2019-03-27 11:01:36Z hellstea
    40 ! Adjustable anterpolation buffer introduced on all nest boundaries, it is controlled
    41 ! by the new nesting_parameters parameter anterpolation_buffer_width.
    42 ! 
     44! Adjustable anterpolation buffer introduced on all nest boundaries, it is controlled by the new
     45! nesting_parameters parameter anterpolation_buffer_width.
     46!
    4347! 3655 2019-01-07 16:51:22Z knoop
    4448! nestpar renamed to nesting_parameters
    45 ! 
     49!
    4650! 1762 2016-02-25 12:31:13Z hellstea
    4751! Initial revision by K. Ketelsen
     
    5054! ------------
    5155! Handle MPI communicator in PALM model coupler
    52 !-------------------------------------------------------------------------------!
     56!--------------------------------------------------------------------------------------------------!
    5357 MODULE PMC_handle_communicator
    5458#if defined( __parallel )
     
    5761    USE MPI
    5862
    59     USE pmc_general,                                                            &
    60         ONLY: pmc_status_ok, pmc_status_error, pmc_max_models
    61     USE control_parameters,                                                     &
     63    USE pmc_general,                                                                               &
     64        ONLY: pmc_max_models,                                                                      &
     65              pmc_status_error,                                                                    &
     66              pmc_status_ok
     67
     68
     69    USE control_parameters,                                                                        &
    6270        ONLY: message_string
    6371
     
    6775    TYPE pmc_layout
    6876
    69        CHARACTER(LEN=32) ::  name
    70 
    71        INTEGER  ::  id            !<
    72        INTEGER  ::  parent_id     !<
    73        INTEGER  ::  npe_total     !<
     77       CHARACTER(LEN=32) ::  name  !<
     78
     79       INTEGER ::  id            !<
     80       INTEGER ::  npe_total     !<
     81       INTEGER ::  parent_id     !<
    7482
    7583       REAL(wp) ::  lower_left_x  !<
     
    7886    END TYPE pmc_layout
    7987
    80     PUBLIC  pmc_status_ok, pmc_status_error
     88    PUBLIC  pmc_status_ok, pmc_status_error  !<
    8189
    8290    INTEGER, PARAMETER, PUBLIC ::  pmc_error_npes        = 1  !< illegal number of processes
     
    8492    INTEGER, PARAMETER, PUBLIC ::  pmc_no_namelist_found = 3  !< no couple layout namelist found
    8593
     94    INTEGER ::  m_my_cpl_id   !< coupler id of this model
     95    INTEGER ::  m_ncpl        !< number of couplers given in nesting_parameters namelist
     96    INTEGER ::  m_parent_id   !< coupler id of parent of this model
    8697    INTEGER ::  m_world_comm  !< global nesting communicator
    87     INTEGER ::  m_my_cpl_id   !< coupler id of this model
    88     INTEGER ::  m_parent_id   !< coupler id of parent of this model
    89     INTEGER ::  m_ncpl        !< number of couplers given in nesting_parameters namelist
    9098
    9199    TYPE(pmc_layout), PUBLIC, DIMENSION(pmc_max_models) ::  m_couplers  !< information of all couplers
    92100
    93101    INTEGER, PUBLIC ::  m_model_comm          !< communicator of this model
     102    INTEGER, PUBLIC ::  m_model_npes          !<
     103    INTEGER, PUBLIC ::  m_model_rank          !<
    94104    INTEGER, PUBLIC ::  m_to_parent_comm      !< communicator to the parent
    95105    INTEGER, PUBLIC ::  m_world_rank          !<
     106    INTEGER         ::  m_parent_remote_size  !< number of processes in the parent model
    96107    INTEGER         ::  m_world_npes          !<
    97     INTEGER, PUBLIC ::  m_model_rank          !<
    98     INTEGER, PUBLIC ::  m_model_npes          !<
    99     INTEGER         ::  m_parent_remote_size  !< number of processes in the parent model
    100108    INTEGER         ::  peer_comm             !< peer_communicator for inter communicators
    101109
    102     INTEGER, DIMENSION(pmc_max_models), PUBLIC ::  m_to_child_comm    !< communicator to the child(ren)
    103     INTEGER, DIMENSION(:), POINTER, PUBLIC ::  pmc_parent_for_child   !<
    104 
     110    INTEGER, DIMENSION(pmc_max_models), PUBLIC ::  m_to_child_comm   !< communicator to the child(ren)
     111    INTEGER, DIMENSION(:), POINTER, PUBLIC ::  pmc_parent_for_child  !<
     112
     113
     114    INTERFACE pmc_get_model_info
     115       MODULE PROCEDURE pmc_get_model_info
     116    END INTERFACE pmc_get_model_info
    105117
    106118    INTERFACE pmc_is_rootmodel
     
    108120    END INTERFACE pmc_is_rootmodel
    109121
    110     INTERFACE pmc_get_model_info
    111        MODULE PROCEDURE pmc_get_model_info
    112     END INTERFACE pmc_get_model_info
    113 
    114     PUBLIC pmc_get_model_info, pmc_init_model, pmc_is_rootmodel
     122    PUBLIC pmc_get_model_info, pmc_init_model, pmc_is_rootmodel   !<
     123
     124
    115125
    116126 CONTAINS
    117127
    118  SUBROUTINE pmc_init_model( comm, nesting_datatransfer_mode, nesting_mode,      &
     128
     129!--------------------------------------------------------------------------------------------------!
     130! Description:
     131! ------------
     132!> @Todo: Missing subroutine description.
     133!--------------------------------------------------------------------------------------------------!
     134 SUBROUTINE pmc_init_model( comm, nesting_datatransfer_mode, nesting_mode,                         &
    119135                            anterpolation_buffer_width, pmc_status )
    120136
    121     USE control_parameters,                                                     &
     137    USE control_parameters,                                                                        &
    122138        ONLY:  message_string
    123139
    124     USE pegrid,                                                                 &
     140    USE pegrid,                                                                                    &
    125141        ONLY:  myid
    126142
    127143      IMPLICIT NONE
    128144
     145    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_datatransfer_mode  !<
    129146    CHARACTER(LEN=8), INTENT(INOUT) ::  nesting_mode               !<
    130     CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_datatransfer_mode  !<
    131 
    132     INTEGER, INTENT(INOUT) ::  anterpolation_buffer_width          !< Boundary buffer width for anterpolation
    133     INTEGER, INTENT(INOUT) ::  comm        !<
    134     INTEGER, INTENT(INOUT) ::  pmc_status  !<
     147
     148    INTEGER, INTENT(INOUT) ::  anterpolation_buffer_width  !< Boundary buffer width for anterpolation
     149    INTEGER, INTENT(INOUT) ::  comm                        !<
     150    INTEGER, INTENT(INOUT) ::  pmc_status                  !<
    135151
    136152    INTEGER ::  childcount     !<
     
    141157    INTEGER ::  tag            !<
    142158
    143     INTEGER, DIMENSION(pmc_max_models)   ::  activeparent  ! I am active parent for this child ID
    144     INTEGER, DIMENSION(pmc_max_models+1) ::  start_pe
     159    INTEGER, DIMENSION(pmc_max_models)   ::  activeparent  !< I am active parent for this child ID
     160    INTEGER, DIMENSION(pmc_max_models+1) ::  start_pe      !<
    145161
    146162    pmc_status   = pmc_status_ok
     
    158174    IF ( m_world_rank == 0 )  THEN
    159175
    160        CALL read_coupling_layout( nesting_datatransfer_mode, nesting_mode,      &
     176       CALL read_coupling_layout( nesting_datatransfer_mode, nesting_mode,                         &
    161177                                  anterpolation_buffer_width, pmc_status )
    162178
    163        IF ( pmc_status /= pmc_no_namelist_found  .AND.                          &
    164             pmc_status /= pmc_namelist_error )                                  &
     179       IF ( pmc_status /= pmc_no_namelist_found  .AND.                                             &
     180            pmc_status /= pmc_namelist_error )                                                     &
    165181       THEN
    166182!
     
    172188
    173189!
    174 !--       The sum of numbers of processes requested by all the domains
    175 !--       must be equal to the total number of processes of the run
     190!--       The sum of numbers of processes requested by all the domains must be equal to the total
     191!--       number of processes of the run
    176192          IF ( start_pe(m_ncpl+1) /= m_world_npes )  THEN
    177              WRITE ( message_string, '(2A,I6,2A,I6,A)' )                        &
    178                              'nesting-setup requires different number of ',     &
    179                              'MPI procs (', start_pe(m_ncpl+1), ') than ',      &
    180                              'provided (', m_world_npes,')'
     193             WRITE( message_string, '(2A,I6,2A,I6,A)' )                                            &
     194                                                'nesting-setup requires different number of ',     &
     195                                                'MPI procs (', start_pe(m_ncpl+1), ') than ',      &
     196                                                'provided (', m_world_npes,')'
    181197             CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
    182198          ENDIF
     
    186202    ENDIF
    187203!
    188 !-- Broadcast the read status. This synchronises all other processes with
    189 !-- process 0 of the root model. Without synchronisation, they would not
    190 !-- behave in the correct way (e.g. they would not return in case of a
    191 !-- missing NAMELIST).
     204!-- Broadcast the read status. This synchronises all other processes with process 0 of the root
     205!-- model. Without synchronisation, they would not behave in the correct way (e.g. they would not
     206!-- return in case of a missing NAMELIST).
    192207    CALL MPI_BCAST( pmc_status, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
    193208
     
    200215    ELSEIF ( pmc_status == pmc_namelist_error )  THEN
    201216!
    202 !--    Only the root model gives the error message. Others are aborted by the
    203 !--    message-routine with MPI_ABORT. Must be done this way since myid and
    204 !--    comm2d have not yet been assigned at this point.
     217!--    Only the root model gives the error message. Others are aborted by the message-routine with
     218!--    MPI_ABORT. Must be done this way since myid and comm2d have not yet been assigned at this
     219!--    point.
    205220       IF ( m_world_rank == 0 )  THEN
    206221          message_string = 'errors in \$nesting_parameters'
     
    215230!-- Broadcast coupling layout
    216231    DO  i = 1, m_ncpl
    217        CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ),           &
     232       CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ),                              &
    218233                       MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
    219        CALL MPI_BCAST( m_couplers(i)%id,           1, MPI_INTEGER, 0,           &
     234       CALL MPI_BCAST( m_couplers(i)%id,           1, MPI_INTEGER, 0,                              &
    220235                       MPI_COMM_WORLD, istat )
    221        CALL MPI_BCAST( m_couplers(i)%Parent_id,    1, MPI_INTEGER, 0,           &
     236       CALL MPI_BCAST( m_couplers(i)%Parent_id,    1, MPI_INTEGER, 0,                              &
    222237                       MPI_COMM_WORLD, istat )
    223        CALL MPI_BCAST( m_couplers(i)%npe_total,    1, MPI_INTEGER, 0,           &
     238       CALL MPI_BCAST( m_couplers(i)%npe_total,    1, MPI_INTEGER, 0,                              &
    224239                       MPI_COMM_WORLD, istat )
    225        CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL,    0,           &
     240       CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL,    0,                              &
    226241                       MPI_COMM_WORLD, istat )
    227        CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL,    0,           &
     242       CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL,    0,                              &
    228243                       MPI_COMM_WORLD, istat )
    229244    ENDDO
    230     CALL MPI_BCAST( nesting_mode, LEN( nesting_mode ), MPI_CHARACTER, 0,        &
     245    CALL MPI_BCAST( nesting_mode, LEN( nesting_mode ), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
     246    CALL MPI_BCAST( nesting_datatransfer_mode, LEN(nesting_datatransfer_mode), MPI_CHARACTER, 0,   &
    231247                    MPI_COMM_WORLD, istat )
    232     CALL MPI_BCAST( nesting_datatransfer_mode, LEN(nesting_datatransfer_mode),  &
    233                     MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
    234     CALL MPI_BCAST( anterpolation_buffer_width, 1, MPI_INT, 0, MPI_COMM_WORLD,  &
    235                     istat )
     248    CALL MPI_BCAST( anterpolation_buffer_width, 1, MPI_INT, 0, MPI_COMM_WORLD, istat )
    236249!
    237250!-- Assign global MPI processes to individual models by setting the couple id
    238251    DO  i = 1, m_ncpl
    239        IF ( m_world_rank >= start_pe(i)  .AND.  m_world_rank < start_pe(i+1) )  &
    240        THEN
     252       IF ( m_world_rank >= start_pe(i)  .AND.  m_world_rank < start_pe(i+1) )  THEN
    241253          m_my_cpl_id = i
    242254          EXIT
     
    245257    m_my_cpl_rank = m_world_rank - start_pe(i)
    246258!
    247 !-- MPI_COMM_WORLD is the communicator for ALL models (MPI-1 approach).
    248 !-- The communictors for the individual models as created by MPI_COMM_SPLIT.
    249 !-- The color of the model is represented by the coupler id
    250     CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm,      &
    251                          istat )
      259!-- MPI_COMM_WORLD is the communicator for ALL models (MPI-1 approach). The communicators for the
      260!-- individual models are created by MPI_COMM_SPLIT. The color of the model is represented by the
     261!-- coupler id
     262    CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm, istat )
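A stripped-down, stand-alone sketch of this split, with two models instead of pmc_max_models and a hard-coded rank layout (an assumption made only for the demo): the colour plays the role of the coupler id and the key merely fixes the ordering of the ranks inside the new communicator.

 PROGRAM comm_split_demo

    USE MPI

    IMPLICIT NONE

    INTEGER ::  color       !< model id, used as colour for the split
    INTEGER ::  comm_model  !< communicator of "my" model
    INTEGER ::  ierr        !< MPI error code
    INTEGER ::  model_rank  !< rank within the model communicator
    INTEGER ::  world_rank  !< rank within MPI_COMM_WORLD

    CALL MPI_INIT( ierr )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, world_rank, ierr )
!
!-- Assumed layout for this demo: world ranks 0-1 form model 1, all remaining ranks form model 2
    color = 1
    IF ( world_rank >= 2 )  color = 2
!
!-- All ranks with the same colour end up in the same (model) communicator
    CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, color, world_rank, comm_model, ierr )
    CALL MPI_COMM_RANK( comm_model, model_rank, ierr )
    WRITE( *, * )  'world rank', world_rank, '-> model', color, 'rank', model_rank

    CALL MPI_FINALIZE( ierr )

 END PROGRAM comm_split_demo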
    252263!
    253264!-- Get size and rank of the model running on this process
     
    257268!-- Broadcast (from process 0) the parent id and id of every model
    258269    DO  i = 1, m_ncpl
    259        CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0,              &
    260                        MPI_COMM_WORLD, istat )
    261        CALL MPI_BCAST( m_couplers(i)%id,        1, MPI_INTEGER, 0,              &
    262                        MPI_COMM_WORLD, istat )
     270       CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
     271       CALL MPI_BCAST( m_couplers(i)%id,        1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
    263272    ENDDO
    264273!
     
    268277!
    269278!-- Create intercommunicator between the parent and children.
    270 !-- MPI_INTERCOMM_CREATE creates an intercommunicator between 2 groups of
    271 !-- different colors.
    272 !-- The grouping was done above with MPI_COMM_SPLIT.
    273 !-- A duplicate of MPI_COMM_WORLD is created and used as peer communicator
    274 !-- (peer_comm) for MPI_INTERCOMM_CREATE.
    275     CALL MPI_COMM_DUP( MPI_COMM_WORLD, peer_comm, ierr )
     279!-- MPI_INTERCOMM_CREATE creates an intercommunicator between 2 groups of different colors. The
     280!-- grouping was done above with MPI_COMM_SPLIT. A duplicate of MPI_COMM_WORLD is created and used
     281!-- as peer communicator (peer_comm) for MPI_INTERCOMM_CREATE.
     282    CALL MPI_COMM_DUP( MPI_COMM_WORLD, peer_comm, ierr )
    276283    DO  i = 2, m_ncpl
    277284       IF ( m_couplers(i)%parent_id == m_my_cpl_id )  THEN
    278285!
    279 !--       Identify all children models of the current model and create
    280 !--       inter-communicators to connect between the current model and its
    281 !--       children models.
      286!--       Identify all child models of the current model and create inter-communicators to
      287!--       connect the current model with its child models.
    282288          tag = 500 + i
    283           CALL MPI_INTERCOMM_CREATE( comm, 0, peer_comm, start_pe(i),           &
    284                                      tag, m_to_child_comm(i), istat)
     289          CALL MPI_INTERCOMM_CREATE( comm, 0, peer_comm, start_pe(i), tag, m_to_child_comm(i),     &
     290                                     istat )
    285291          childcount = childcount + 1
    286292          activeparent(i) = 1
    287293       ELSEIF ( i == m_my_cpl_id)  THEN
    288294!
    289 !--       Create an inter-communicator to connect between the current
    290 !--       model and its parent model.   
      295!--       Create an inter-communicator to connect the current model with its parent model.
    291296          tag = 500 + i
    292           CALL MPI_INTERCOMM_CREATE( comm, 0, peer_comm,                        &
    293                                      start_pe(m_couplers(i)%parent_id),         &
    294                                      tag, m_to_parent_comm, istat )
     297          CALL MPI_INTERCOMM_CREATE( comm, 0, peer_comm, start_pe(m_couplers(i)%parent_id), tag,   &
     298                                     m_to_parent_comm, istat )
    295299       ENDIF
    296300    ENDDO
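The loop above can be condensed into the following stand-alone sketch with one parent group (world rank 0) and one child group (all remaining ranks); as in the code above, a duplicate of MPI_COMM_WORLD acts as peer communicator and the remote leader is the first world rank of the opposite group, which is the role start_pe(i) plays here. Group sizes and the tag value are illustrative assumptions; run with at least two processes.

 PROGRAM intercomm_demo

    USE MPI

    IMPLICIT NONE

    INTEGER ::  color          !< 1 = parent group, 2 = child group
    INTEGER ::  comm_model     !< intra-communicator of my group
    INTEGER ::  ierr           !< MPI error code
    INTEGER ::  inter_comm     !< inter-communicator between the two groups
    INTEGER ::  peer_comm      !< duplicate of MPI_COMM_WORLD used as peer communicator
    INTEGER ::  remote_leader  !< world rank of the leader of the opposite group
    INTEGER ::  remote_size    !< number of processes in the opposite group
    INTEGER ::  world_rank     !< rank in MPI_COMM_WORLD

    INTEGER, PARAMETER ::  tag = 500  !< must be identical on both sides

    CALL MPI_INIT( ierr )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, world_rank, ierr )
!
!-- Split into a parent group (world rank 0) and a child group (all remaining ranks)
    color = 1
    IF ( world_rank > 0 )  color = 2
    CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, color, world_rank, comm_model, ierr )
!
!-- Duplicate of MPI_COMM_WORLD as peer communicator; the remote leader is the first world rank
!-- of the opposite group (1 as seen from the parent, 0 as seen from the children)
    CALL MPI_COMM_DUP( MPI_COMM_WORLD, peer_comm, ierr )
    remote_leader = 1
    IF ( color == 2 )  remote_leader = 0
    CALL MPI_INTERCOMM_CREATE( comm_model, 0, peer_comm, remote_leader, tag, inter_comm, ierr )
!
!-- On an inter-communicator MPI_COMM_REMOTE_SIZE returns the size of the opposite group
    CALL MPI_COMM_REMOTE_SIZE( inter_comm, remote_size, ierr )
    WRITE( *, * )  'rank', world_rank, ': remote group has', remote_size, 'processes'

    CALL MPI_FINALIZE( ierr )

 END PROGRAM intercomm_demo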
    297301!
    298 !-- If I am a parent, count the number of children I have.
    299 !-- Although this loop is symmetric on all processes, the "activeparent" flag
    300 !-- is true (==1) on the respective individual process only.
     302!-- If I am a parent, count the number of children I have. Although this loop is symmetric on all
     303!-- processes, the "activeparent" flag is true (==1) on the respective individual process only.
    301304    ALLOCATE( pmc_parent_for_child(childcount+1) )
    302305
     
    311314!-- Get the size of the parent model
    312315    IF ( m_my_cpl_id > 1 )  THEN
    313        CALL MPI_COMM_REMOTE_SIZE( m_to_parent_comm, m_parent_remote_size,       &
    314                                   istat )
     316       CALL MPI_COMM_REMOTE_SIZE( m_to_parent_comm, m_parent_remote_size, istat )
    315317    ELSE
    316318!
     
    319321    ENDIF
    320322!
    321 !-- Set myid to non-zero value except for the root domain. This is a setting
    322 !-- for the message routine which is called at the end of pmci_init. That
    323 !-- routine outputs messages for myid = 0, only. However, myid has not been
    324 !-- assigened so far, so that all processes of the root model would output a
    325 !-- message. To avoid this, set myid to some other value except for process 0
    326 !-- of the root domain.
     323!-- Set myid to non-zero value except for the root domain. This is a setting for the message routine
     324!-- which is called at the end of pmci_init. That routine outputs messages for myid = 0, only.
      325!-- However, myid has not been assigned so far, so that all processes of the root model would
     326!-- output a message. To avoid this, set myid to some other value except for process 0 of the root
     327!-- domain.
    327328    IF ( m_world_rank /= 0 )  myid = 1
    328329
     
    330331
    331332
    332 
    333  SUBROUTINE pmc_get_model_info( comm_world_nesting, cpl_id, cpl_name,           &
    334                                 cpl_parent_id, lower_left_x, lower_left_y,      &
    335                                 ncpl, npe_total, request_for_cpl_id )
     333!--------------------------------------------------------------------------------------------------!
     334! Description:
     335! ------------
     336!> @Todo: Missing subroutine description.
     337!--------------------------------------------------------------------------------------------------!
     338 SUBROUTINE pmc_get_model_info( comm_world_nesting, cpl_id, cpl_name, cpl_parent_id, lower_left_x, &
     339                                lower_left_y, ncpl, npe_total, request_for_cpl_id )
    336340!
    337341!-- Provide module private variables of the pmc for PALM
     
    407411
    408412
    409  SUBROUTINE read_coupling_layout( nesting_datatransfer_mode, nesting_mode,      &
    410       anterpolation_buffer_width, pmc_status )
     413!--------------------------------------------------------------------------------------------------!
     414! Description:
     415! ------------
     416!> @Todo: Missing subroutine description.
     417!--------------------------------------------------------------------------------------------------!
     418 SUBROUTINE read_coupling_layout( nesting_datatransfer_mode, nesting_mode,                         &
     419                                  anterpolation_buffer_width, pmc_status )
    411420
    412421    IMPLICIT NONE
    413422
    414     CHARACTER(LEN=8), INTENT(INOUT) ::  nesting_mode
    415     CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_datatransfer_mode
    416    
    417     INTEGER, INTENT(INOUT)      ::  anterpolation_buffer_width     !< Boundary buffer width for anterpolation
    418     INTEGER(iwp), INTENT(INOUT) ::  pmc_status
    419     INTEGER(iwp)                ::  bad_llcorner
    420     INTEGER(iwp)                ::  i
    421     INTEGER(iwp)                ::  istat
    422 
    423     TYPE(pmc_layout), DIMENSION(pmc_max_models) ::  domain_layouts
    424 
    425     NAMELIST /nesting_parameters/  domain_layouts, nesting_datatransfer_mode,  &
    426                                    nesting_mode, anterpolation_buffer_width
    427    
     423    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_datatransfer_mode  !<
     424    CHARACTER(LEN=8), INTENT(INOUT) ::  nesting_mode               !<
     425
     426    INTEGER, INTENT(INOUT)      ::  anterpolation_buffer_width  !< Boundary buffer width for anterpolation
     427    INTEGER(iwp), INTENT(INOUT) ::  pmc_status                  !<
     428
     429    INTEGER(iwp) ::  bad_llcorner  !<
     430    INTEGER(iwp) ::  i             !<
     431    INTEGER(iwp) ::  istat         !<
     432
     433    TYPE(pmc_layout), DIMENSION(pmc_max_models) ::  domain_layouts  !<
     434
     435    NAMELIST /nesting_parameters/  domain_layouts,                                                 &
     436                                   nesting_datatransfer_mode,                                      &
     437                                   nesting_mode,                                                   &
     438                                   anterpolation_buffer_width
     439
    428440!
    429441!-- Initialize some coupling variables
     
    435447!-- Open the NAMELIST-file and read the nesting layout
    436448    CALL check_open( 11 )
    437     READ ( 11, nesting_parameters, IOSTAT=istat )
    438 !
    439 !-- Set filepointer to the beginning of the file. Otherwise process 0 will later
    440 !-- be unable to read the inipar-NAMELIST
     449    READ ( 11, nesting_parameters, IOSTAT = istat )
     450!
     451!-- Set filepointer to the beginning of the file. Otherwise process 0 will later be unable to read
     452!-- the inipar-NAMELIST
    441453    REWIND ( 11 )
    442454
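For readers unfamiliar with the nesting setup, an illustrative example of how the nesting_parameters NAMELIST read here could look in the parameter file: the group and variable names are taken from the declarations above, whereas all values (ids, parent ids, PE counts, positions, mode strings) are purely illustrative placeholders and not prescribed by this changeset.

 &nesting_parameters
    nesting_mode                   = 'two-way',
    nesting_datatransfer_mode      = 'mixed',
    anterpolation_buffer_width     = 2,
    domain_layouts(1)%name         = 'coarse',
    domain_layouts(1)%id           = 1,
    domain_layouts(1)%parent_id    = -1,
    domain_layouts(1)%npe_total    = 8,
    domain_layouts(2)%name         = 'fine',
    domain_layouts(2)%id           = 2,
    domain_layouts(2)%parent_id    = 1,
    domain_layouts(2)%npe_total    = 8,
    domain_layouts(2)%lower_left_x = 800.0,
    domain_layouts(2)%lower_left_y = 800.0,
 /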
     
    473485    ENDDO
    474486!
    475 !-- Make sure that all domains have equal lower left corner in case of vertical
    476 !-- nesting
     487!-- Make sure that all domains have equal lower left corner in case of vertical nesting
    477488    IF ( nesting_mode == 'vertical' )  THEN
    478489       bad_llcorner = 0
    479490       DO  i = 1, m_ncpl
    480           IF ( domain_layouts(i)%lower_left_x /= 0.0_wp .OR.                    &
     491          IF ( domain_layouts(i)%lower_left_x /= 0.0_wp .OR.                                       &
    481492               domain_layouts(i)%lower_left_y /= 0.0_wp )  THEN
    482493             bad_llcorner = bad_llcorner + 1
     
    486497       ENDDO
    487498       IF ( bad_llcorner /= 0)  THEN
    488           WRITE ( message_string, *)  'at least one dimension of lower ',       &
    489                                       'left corner of one domain is not 0. ',   &
    490                                       'All lower left corners were set to (0, 0)'
     499          WRITE( message_string, *)  'at least one dimension of lower ',                           &
     500                                     'left corner of one domain is not 0. ',                       &
     501                                     'All lower left corners were set to (0, 0)'
    491502          CALL message( 'read_coupling_layout', 'PA0427', 0, 0, 0, 6, 0 )
    492503       ENDIF
  • palm/trunk/SOURCE/pmc_interface_mod.f90

    r4629 r4649  
    11!> @file pmc_interface_mod.f90
    2 !------------------------------------------------------------------------------!
     2!--------------------------------------------------------------------------------------------------!
    33! This file is part of the PALM model system.
    44!
    5 ! PALM is free software: you can redistribute it and/or modify it under the
    6 ! terms of the GNU General Public License as published by the Free Software
    7 ! Foundation, either version 3 of the License, or (at your option) any later
    8 ! version.
    9 !
    10 ! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
    11 ! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
    12 ! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
    13 !
    14 ! You should have received a copy of the GNU General Public License along with
    15 ! PALM. If not, see <http://www.gnu.org/licenses/>.
     5! PALM is free software: you can redistribute it and/or modify it under the terms of the GNU General
     6! Public License as published by the Free Software Foundation, either version 3 of the License, or
     7! (at your option) any later version.
     8!
     9! PALM is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
     10! implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
     11! Public License for more details.
     12!
     13! You should have received a copy of the GNU General Public License along with PALM. If not, see
     14! <http://www.gnu.org/licenses/>.
    1615!
    1716! Copyright 1997-2020 Leibniz Universitaet Hannover
    18 !------------------------------------------------------------------------------!
     17!--------------------------------------------------------------------------------------------------!
     18!
    1919!
    2020! Current revisions:
    21 ! ------------------
     21! -----------------
    2222!
    2323!
     
    2525! -----------------
    2626! $Id$
     27! File re-formatted to follow the PALM coding standard
     28!
     29!
     30! 4629 2020-07-29 09:37:56Z raasch
    2731! support for MPI Fortran77 interface (mpif.h) removed
    28 ! 
     32!
    2933! 4508 2020-04-24 13:32:20Z raasch
    30 ! salsa variable name changed
    31 ! 
     34! Salsa variable name changed
     35!
    3236! 4444 2020-03-05 15:59:50Z raasch
    33 ! bugfix: cpp-directives and variable declarations for serial mode added
    34 ! 
     37! Bugfix: cpp-directives and variable declarations for serial mode added
     38!
    3539! 4413 2020-02-19 15:52:19Z hellstea
    3640! All the USE-statements within subroutines moved up to the module declaration section.
    37 ! 
     41!
    3842! 4385 2020-01-27 08:37:37Z hellstea
    3943! Error messages PA0425 and PA0426 made more specific
    40 ! 
     44!
    4145! 4360 2020-01-07 11:25:50Z suehring
    42 ! Introduction of wall_flags_total_0, which currently sets bits based on static
    43 ! topography information used in wall_flags_static_0
    44 ! 
     46! Introduction of wall_flags_total_0, which currently sets bits based on static topography
     47! information used in wall_flags_static_0
     48!
    4549! 4329 2019-12-10 15:46:36Z motisi
    4650! Renamed wall_flags_0 to wall_flags_static_0
    47 ! 
     51!
    4852! 4273 2019-10-24 13:40:54Z monakurppa
    4953! Add a logical switch nesting_chem and rename nest_salsa to nesting_salsa
    50 ! 
     54!
    5155! 4260 2019-10-09 14:04:03Z hellstea
    52 ! Rest of the possibly round-off-error sensitive grid-line matching tests
    53 ! changed to round-off-error tolerant forms throughout the module.
    54 ! 
     56! Rest of the possibly round-off-error sensitive grid-line matching tests changed to round-off-error
     57! tolerant forms throughout the module.
     58!
    5559! 4249 2019-10-01 12:27:47Z hellstea
    56 ! Several grid-line matching tests changed to a round-off-error tolerant form
    57 ! in pmci_setup_parent, pmci_define_index_mapping and pmci_check_grid_matching.
    58 ! 
     60! Several grid-line matching tests changed to a round-off-error tolerant form in pmci_setup_parent,
     61! pmci_define_index_mapping and pmci_check_grid_matching.
     62!
    5963! 4182 2019-08-22 15:20:23Z scharf
    6064! Corrected "Former revisions" section
    61 ! 
     65!
    6266! 4168 2019-08-16 13:50:17Z suehring
    6367! Replace function get_topography_top_index by topo_top_ind
    64 ! 
     68!
    6569! 4029 2019-06-14 14:04:35Z raasch
    6670! nest_chemistry switch removed
    67 ! 
     71!
    6872! 4026 2019-06-12 16:50:15Z suehring
    69 ! Masked topography at boundary grid points in mass conservation, in order to 
     73! Masked topography at boundary grid points in mass conservation, in order to
    7074! avoid that mean velocities within topography are imposed
    71 ! 
     75!
    7276! 4011 2019-05-31 14:34:03Z hellstea
    7377! Mass (volume) flux correction included to ensure global mass conservation for child domains.
    74 ! 
     78!
    7579! 3987 2019-05-22 09:52:13Z kanani
    7680! Introduce alternative switch for debug output during timestepping
    77 ! 
     81!
    7882! 3984 2019-05-16 15:17:03Z hellstea
    7983! Commenting improved, pmci_map_fine_to_coarse_grid renamed as pmci_map_child_grid_to_parent_grid,
    80 ! set_child_edge_coords renamed as pmci_set_child_edge_coords, some variables renamed, etc. 
    81 ! 
     84! set_child_edge_coords renamed as pmci_set_child_edge_coords, some variables renamed, etc.
     85!
    8286! 3979 2019-05-15 13:54:29Z hellstea
    83 ! Bugfix in pmc_interp_1sto_sn. This bug had effect only in case of 1-d domain
    84 ! decomposition with npex = 1.
    85 ! 
     87! Bugfix in pmc_interp_1sto_sn. This bug had effect only in case of 1-d domain decomposition with
     88! npex = 1.
     89!
    8690! 3976 2019-05-15 11:02:34Z hellstea
    87 ! Child initialization also for the redundant ghost points behind the nested
    88 ! boundaries added (2nd and 3rd ghost-point layers and corners).
    89 ! 
     91! Child initialization also for the redundant ghost points behind the nested boundaries added
     92! (2nd and 3rd ghost-point layers and corners).
     93!
    9094! 3948 2019-05-03 14:49:57Z hellstea
    91 ! Some variables renamed, a little cleaning up and some commenting improvements 
    92 ! 
     95! Some variables renamed, a little cleaning up and some commenting improvements
     96!
    9397! 3947 2019-05-03 07:56:44Z hellstea
    94 ! The checks included in 3946 are extended for the z-direction and moved into its
    95 ! own subroutine called from pmci_define_index_mapping.
    96 ! 
     98! The checks included in 3946 are extended for the z-direction and moved into its own subroutine
     99! called from pmci_define_index_mapping.
     100!
    97101! 3946 2019-05-02 14:18:59Z hellstea
    98 ! Check added for child domains too small in terms of number of parent-grid cells so
    99 ! that anterpolation is not possible. Checks added for too wide anterpolation buffer
    100 ! for the same reason. Some minor code reformatting done.
     102! Check added for child domains too small in terms of number of parent-grid cells so that
     103! anterpolation is not possible. Checks added for too wide anterpolation buffer for the same reason.
     104! Some minor code reformatting done.
    101105!
    102106! 3945 2019-05-02 11:29:27Z raasch
    103107!
    104108! 3932 2019-04-24 17:31:34Z suehring
    105 ! Add missing if statements for call of pmc_set_dataarray_name for TKE and
    106 ! dissipation.
     109! Add missing if statements for call of pmc_set_dataarray_name for TKE and dissipation.
    107110!
    108111! 3888 2019-04-12 09:18:10Z hellstea
    109112! Variables renamed, commenting improved etc.
    110 ! 
     113!
    111114! 3885 2019-04-11 11:29:34Z kanani
    112 ! Changes related to global restructuring of location messages and introduction
    113 ! of additional debug messages
    114 ! 
     115! Changes related to global restructuring of location messages and introduction of additional debug
     116! messages
     117!
    115118! 3883 2019-04-10 12:51:50Z hellstea
    116 ! Checks and error messages improved and extended. All the child index bounds in the
    117 ! parent-grid index space are made module variables. Function get_number_of_childs
    118 ! renamed get_number_of_children. A number of variables renamed
    119 ! and qite a lot of other code reshaping made all around the module.
    120 ! 
     119! Checks and error messages improved and extended. All the child index bounds in the parent-grid
     120! index space are made module variables. Function get_number_of_childs renamed
     121! get_number_of_children. A number of variables renamed and qite a lot of other code reshaping made
     122! all around the module.
     123!
    121124! 3876 2019-04-08 18:41:49Z knoop
    122125! Implemented nesting for salsa variables.
    123 ! 
     126!
    124127! 3833 2019-03-28 15:04:04Z forkel
    125 ! replaced USE chem_modules by USE chem_gasphase_mod 
    126 ! 
     128! replaced USE chem_modules by USE chem_gasphase_mod
     129!
    127130! 3822 2019-03-27 13:10:23Z hellstea
    128 ! Temporary increase of the vertical dimension of the parent-grid arrays and
    129 ! workarrc_t is cancelled as unnecessary.
    130 ! 
     131! Temporary increase of the vertical dimension of the parent-grid arrays and workarrc_t is cancelled
     132! as unnecessary.
     133!
    131134! 3819 2019-03-27 11:01:36Z hellstea
    132 ! Adjustable anterpolation buffer introduced on all nest boundaries, it is controlled
    133 ! by the new nesting_parameters parameter anterpolation_buffer_width.
    134 ! 
     135! Adjustable anterpolation buffer introduced on all nest boundaries, it is controlled by the new
     136! nesting_parameters parameter anterpolation_buffer_width.
     137!
    135138! 3804 2019-03-19 13:46:20Z hellstea
    136 ! Anterpolation domain is lowered from kct-1 to kct-3 to avoid exessive      
    137 ! kinetic energy from building up in CBL flows.
    138 ! 
      139! Anterpolation domain is lowered from kct-1 to kct-3 to avoid excessive kinetic energy from building
     140! up in CBL flows.
     141!
    139142! 3803 2019-03-19 13:44:40Z hellstea
    140 ! A bug fixed in lateral boundary interpolations. Dimension of val changed from  
    141 ! 5 to 3 in pmci_setup_parent and pmci_setup_child.
    142 ! 
     143! A bug fixed in lateral boundary interpolations. Dimension of val changed from 5 to 3 in
     144! pmci_setup_parent and pmci_setup_child.
     145!
    143146! 3794 2019-03-15 09:36:33Z raasch
    144 ! two remaining unused variables removed
    145 ! 
     147! Two remaining unused variables removed
     148!
    146149! 3792 2019-03-14 16:50:07Z hellstea
    147150! Interpolations improved. Large number of obsolete subroutines removed.
    148 ! All unused variables removed. 
    149 ! 
     151! All unused variables removed.
     152!
    150153! 3741 2019-02-13 16:24:49Z hellstea
    151 ! Interpolations and child initialization adjusted to handle set ups with child
    152 ! pe-subdomain dimension not integer divisible by the grid-spacing ratio in the
    153 ! respective direction. Set ups with pe-subdomain dimension smaller than the
    154 ! grid-spacing ratio in the respective direction are now forbidden.
    155 ! 
     154! Interpolations and child initialization adjusted to handle set ups with child pe-subdomain
     155! dimension not integer divisible by the grid-spacing ratio in the respective direction. Set ups
     156! with pe-subdomain dimension smaller than the grid-spacing ratio in the respective direction are
     157! now forbidden.
     158!
    156159! 3708 2019-01-30 12:58:13Z hellstea
    157160! Checks for parent / child grid line matching introduced.
    158161! Interpolation of nest-boundary-tangential velocity components revised.
    159 ! 
     162!
    160163! 3697 2019-01-24 17:16:13Z hellstea
    161 ! Bugfix: upper k-bound in the child initialization interpolation
    162 ! pmci_interp_1sto_all corrected.
    163 ! Copying of the nest boundary values into the redundant 2nd and 3rd ghost-node
    164 ! layers is added to the pmci_interp_1sto_*-routines.
    165 !
     164! Bugfix: upper k-bound in the child initialization interpolation pmci_interp_1sto_all corrected.
     165! Copying of the nest boundary values into the redundant 2nd and 3rd ghost-node layers is added to
     166! the pmci_interp_1sto_*-routines.
     167!
    166168! 3681 2019-01-18 15:06:05Z hellstea
    167 ! Linear interpolations are replaced by first order interpolations. The linear
    168 ! interpolation routines are still included but not called. In the child
    169 ! inititialization the interpolation is also changed to 1st order and the linear
    170 ! interpolation is not kept.
     169! Linear interpolations are replaced by first order interpolations. The linear interpolation
      170! routines are still included but not called. In the child initialization the interpolation is
     171! also changed to 1st order and the linear interpolation is not kept.
    171172! Subroutine pmci_map_fine_to_coarse_grid is rewritten.
    172173! Several changes in pmci_init_anterp_tophat.
    173 ! Child's parent-grid arrays (uc, vc,...) are made non-overlapping on the PE-
    174 ! subdomain boundaries in order to allow grid-spacing ratios higher than nbgp.
    175 ! Subroutine pmci_init_tkefactor is removed as unnecessary.
    176 ! 
     174! Child's parent-grid arrays (uc, vc,...) are made non-overlapping on the PE-subdomain boundaries in
     175! order to allow grid-spacing ratios higher than nbgp. Subroutine pmci_init_tkefactor is removed as
     176! unnecessary.
     177!
    177178! 3655 2019-01-07 16:51:22Z knoop
    178179! Remove unused variable simulated_time
    179 ! 
     180!
    180181! 1762 2016-02-25 12:31:13Z hellstea
    181182! Initial revision by A. Hellsten
     
    183184! Description:
    184185! ------------
    185 ! Domain nesting interface routines. The low-level inter-domain communication   
    186 ! is conducted by the PMC-library routines.
    187 !
    188 ! @todo Remove array_3d variables from USE statements thate not used in the
    189 !       routine
     186! Domain nesting interface routines. The low-level inter-domain communication is conducted by the
     187! PMC-library routines.
     188!
      189! @todo Remove array_3d variables from USE statements that are not used in the routine
    190190! @todo Data transfer of qc and nc is prepared but not activated
    191 !------------------------------------------------------------------------------!
     191!--------------------------------------------------------------------------------------------------!
    192192 MODULE pmc_interface
    193193
    194194#if ! defined( __parallel )
    195195!
    196 !-- Serial mode does not allow nesting, but requires the following variables as steering
    197 !-- quantities
     196!-- Serial mode does not allow nesting, but requires the following variables as steering quantities
    198197    USE kinds
    199198
     
    202201    PUBLIC
    203202
    204     CHARACTER(LEN=8), SAVE ::  nesting_mode = 'none'   !< steering parameter for 1- or 2-way nesting
    205 
    206     INTEGER(iwp), SAVE     ::  comm_world_nesting    !< Global nesting communicator
    207     INTEGER(iwp), SAVE     ::  cpl_id  = 1           !<
     203    CHARACTER(LEN=8), SAVE ::  nesting_mode = 'none'  !< steering parameter for 1- or 2-way nesting
     204
     205    INTEGER(iwp), SAVE ::  comm_world_nesting  !< Global nesting communicator
     206    INTEGER(iwp), SAVE ::  cpl_id  = 1         !<
    208207
    209208    LOGICAL, SAVE ::  nested_run = .FALSE.        !< general switch
     
    215214
    216215
    217     USE arrays_3d,                                                             &
    218         ONLY:  diss, diss_2, dzu, dzw, e, e_p, e_2, nc, nc_2, nc_p, nr, nr_2,  &
    219                pt, pt_2, q, q_2, qc, qc_2, qr, qr_2, s, s_2,                   &
    220                u, u_p, u_2, v, v_p, v_2, w, w_p, w_2, zu, zw
    221    
    222     USE chem_gasphase_mod,                                                     &
     216    USE arrays_3d,                                                                                 &
     217        ONLY:  diss,                                                                               &
     218               diss_2,                                                                             &
     219               dzu,                                                                                &
     220               dzw,                                                                                &
     221               e,                                                                                  &
     222               e_p,                                                                                &
     223               e_2,                                                                                &
     224               nc,                                                                                 &
     225               nc_2,                                                                               &
     226               nc_p,                                                                               &
     227               nr,                                                                                 &
     228               nr_2,                                                                               &
     229               pt,                                                                                 &
     230               pt_2,                                                                               &
     231               q,                                                                                  &
     232               q_2,                                                                                &
     233               qc,                                                                                 &
     234               qc_2,                                                                               &
     235               qr,                                                                                 &
     236               qr_2,                                                                               &
     237               s,                                                                                  &
     238               s_2,                                                                                &
     239               u,                                                                                  &
     240               u_p,                                                                                &
     241               u_2,                                                                                &
     242               v,                                                                                  &
     243               v_p,                                                                                &
     244               v_2,                                                                                &
     245               w,                                                                                  &
     246               w_p,                                                                                &
     247               w_2,                                                                                &
     248               zu,                                                                                 &
     249               zw
     250
     251    USE chem_gasphase_mod,                                                                         &
    223252        ONLY:  nspec
    224253
    225     USE chem_modules,                                                          &
    226         ONLY:  chem_species, ibc_cs_b, nesting_chem
    227 
    228     USE chemistry_model_mod,                                                   &
     254    USE chem_modules,                                                                              &
     255        ONLY:  chem_species,                                                                       &
     256               ibc_cs_b,                                                                           &
     257               nesting_chem
     258
     259    USE chemistry_model_mod,                                                                       &
    229260        ONLY:  spec_conc_2
    230    
    231     USE control_parameters,                                                    &
    232         ONLY:  air_chemistry, bc_dirichlet_l, bc_dirichlet_n, bc_dirichlet_r,  &
    233                bc_dirichlet_s, child_domain,                                   &
    234                constant_diffusion, constant_flux_layer,                        &
    235                coupling_char, end_time,                                        &
    236                debug_output_timestep,                                          &
    237                dt_restart, dt_3d, dz, humidity,                                &
    238                ibc_pt_b, ibc_q_b, ibc_s_b, ibc_uv_b,                           &
    239                message_string, neutral, passive_scalar, rans_mode, rans_tke_e, &
    240                restart_time,                                                   &
    241                roughness_length, salsa, topography, volume_flow, time_restart
    242    
    243     USE cpulog,                                                                &
    244         ONLY:  cpu_log, log_point_s
    245 
    246     USE grid_variables,                                                        &
    247         ONLY:  dx, dy
    248 
    249     USE indices,                                                               &
    250         ONLY:  nbgp, nx, nxl, nxlg, nxlu, nxr, nxrg, ny, nyn, nyng, nys, nysg, &
    251                nysv, nz, nzb, nzt, topo_top_ind, wall_flags_total_0
    252 
    253     USE bulk_cloud_model_mod,                                                  &
    254         ONLY: bulk_cloud_model, microphysics_morrison, microphysics_seifert
    255 
    256     USE particle_attributes,                                                   &
     261
     262    USE control_parameters,                                                                        &
     263        ONLY:  air_chemistry,                                                                      &
     264               bc_dirichlet_l,                                                                     &
     265               bc_dirichlet_n,                                                                     &
     266               bc_dirichlet_r,                                                                     &
     267               bc_dirichlet_s,                                                                     &
     268               child_domain,                                                                       &
     269               constant_diffusion,                                                                 &
     270               constant_flux_layer,                                                                &
     271               coupling_char,                                                                      &
     272               debug_output_timestep,                                                              &
     273               dt_restart,                                                                         &
     274               dt_3d,                                                                              &
     275               dz,                                                                                 &
     276               end_time,                                                                           &
     277               humidity,                                                                           &
     278               ibc_pt_b,                                                                           &
     279               ibc_q_b,                                                                            &
     280               ibc_s_b,                                                                            &
     281               ibc_uv_b,                                                                           &
     282               message_string,                                                                     &
     283               neutral,                                                                            &
     284               passive_scalar,                                                                     &
     285               rans_mode,                                                                          &
     286               rans_tke_e,                                                                         &
     287               restart_time,                                                                       &
     288               roughness_length,                                                                   &
     289               salsa,                                                                              &
     290               time_restart,                                                                       &
     291               topography,                                                                         &
     292               volume_flow
     293
     294
     295    USE cpulog,                                                                                    &
     296        ONLY:  cpu_log,                                                                            &
     297               log_point_s
     298
     299    USE grid_variables,                                                                            &
     300        ONLY:  dx,                                                                                 &
     301               dy
     302
     303    USE indices,                                                                                   &
     304        ONLY:  nbgp,                                                                               &
     305               nx,                                                                                 &
     306               nxl,                                                                                &
     307               nxlg,                                                                               &
     308               nxlu,                                                                               &
     309               nxr,                                                                                &
     310               nxrg,                                                                               &
     311               ny,                                                                                 &
     312               nyn,                                                                                &
     313               nyng,                                                                               &
     314               nys,                                                                                &
     315               nysg,                                                                               &
     316               nysv,                                                                               &
     317               nz,                                                                                 &
     318               nzb,                                                                                &
     319               nzt,                                                                                &
     320               topo_top_ind,                                                                       &
     321               wall_flags_total_0
     322
     323    USE bulk_cloud_model_mod,                                                                      &
     324        ONLY:  bulk_cloud_model,                                                                   &
     325               microphysics_morrison,                                                              &
     326               microphysics_seifert
     327
     328    USE particle_attributes,                                                                       &
    257329        ONLY:  particle_advection
    258330
     
    262334    USE MPI
    263335
    264     USE pegrid,                                                                &
    265         ONLY:  collective_wait, comm1dx, comm1dy, comm2d, myid, myidx, myidy,  &
    266                numprocs, pdims, pleft, pnorth, pright, psouth, status
    267 
    268     USE pmc_child,                                                             &
    269         ONLY:  pmc_childinit, pmc_c_clear_next_array_list,                     &
    270                pmc_c_getnextarray, pmc_c_get_2d_index_list, pmc_c_getbuffer,   &
    271                pmc_c_putbuffer, pmc_c_setind_and_allocmem,                     &
    272                pmc_c_set_dataarray, pmc_set_dataarray_name
    273 
    274     USE pmc_general,                                                           &
    275         ONLY:  da_namelen, pmc_max_array
    276 
    277     USE pmc_handle_communicator,                                               &
    278         ONLY:  pmc_get_model_info, pmc_init_model, pmc_is_rootmodel,           &
    279                pmc_no_namelist_found, pmc_parent_for_child, m_couplers
    280 
    281     USE pmc_mpi_wrapper,                                                       &
    282         ONLY:  pmc_bcast, pmc_recv_from_child, pmc_recv_from_parent,           &
    283                pmc_send_to_child, pmc_send_to_parent
    284 
    285     USE pmc_parent,                                                            &
    286         ONLY:  pmc_parentinit, pmc_s_clear_next_array_list, pmc_s_fillbuffer,  &
    287                pmc_s_getdata_from_buffer, pmc_s_getnextarray,                  &
    288                pmc_s_setind_and_allocmem, pmc_s_set_active_data_array,         &
    289                pmc_s_set_dataarray, pmc_s_set_2d_index_list
     336    USE pegrid,                                                                                    &
     337        ONLY:  collective_wait,                                                                    &
     338               comm1dx,                                                                            &
     339               comm1dy,                                                                            &
     340               comm2d,                                                                             &
     341               myid,                                                                               &
     342               myidx,                                                                              &
     343               myidy,                                                                              &
     344               numprocs,                                                                           &
     345               pdims,                                                                              &
     346               pleft,                                                                              &
     347               pnorth,                                                                             &
     348               pright,                                                                             &
     349               psouth,                                                                             &
     350               status
     351
     352    USE pmc_child,                                                                                 &
     353        ONLY:  pmc_childinit,                                                                      &
     354               pmc_c_clear_next_array_list,                                                        &
     355               pmc_c_getnextarray,                                                                 &
     356               pmc_c_get_2d_index_list,                                                            &
     357               pmc_c_getbuffer,                                                                    &
     358               pmc_c_putbuffer,                                                                    &
     359               pmc_c_setind_and_allocmem,                                                          &
     360               pmc_c_set_dataarray,                                                                &
     361               pmc_set_dataarray_name
     362
     363    USE pmc_general,                                                                               &
     364        ONLY:  da_namelen,                                                                         &
     365               pmc_max_array
     366
     367    USE pmc_handle_communicator,                                                                   &
     368        ONLY:  pmc_get_model_info,                                                                 &
     369               pmc_init_model,                                                                     &
     370               pmc_is_rootmodel,                                                                   &
     371               pmc_no_namelist_found,                                                              &
     372               pmc_parent_for_child,                                                               &
     373               m_couplers
     374
     375    USE pmc_mpi_wrapper,                                                                           &
     376        ONLY:  pmc_bcast,                                                                          &
     377               pmc_recv_from_child,                                                                &
     378               pmc_recv_from_parent,                                                               &
     379               pmc_send_to_child,                                                                  &
     380               pmc_send_to_parent
     381
     382    USE pmc_parent,                                                                                &
     383        ONLY:  pmc_parentinit,                                                                     &
     384               pmc_s_clear_next_array_list,                                                        &
     385               pmc_s_fillbuffer,                                                                   &
     386               pmc_s_getdata_from_buffer,                                                          &
     387               pmc_s_getnextarray,                                                                 &
     388               pmc_s_setind_and_allocmem,                                                          &
     389               pmc_s_set_active_data_array,                                                        &
     390               pmc_s_set_dataarray,                                                                &
     391               pmc_s_set_2d_index_list
    290392
    291393#endif
    292394
    293     USE salsa_mod,                                                             &
    294         ONLY:  aerosol_mass, aerosol_number, gconc_2, ibc_aer_b,               &
    295                mconc_2, nbins_aerosol,                                         &
    296                ncomponents_mass, nconc_2, nesting_salsa, ngases_salsa,         &
    297                salsa_gas, salsa_gases_from_chem
    298 
    299     USE surface_mod,                                                           &
    300         ONLY:  bc_h, surf_def_h, surf_lsm_h, surf_usm_h
     395    USE salsa_mod,                                                                                 &
     396        ONLY:  aerosol_mass,                                                                       &
     397               aerosol_number,                                                                     &
     398               gconc_2,                                                                            &
     399               ibc_aer_b,                                                                          &
     400               mconc_2,                                                                            &
     401               nbins_aerosol,                                                                      &
     402               ncomponents_mass,                                                                   &
     403               nconc_2,                                                                            &
     404               nesting_salsa,                                                                      &
     405               ngases_salsa,                                                                       &
     406               salsa_gas,                                                                          &
     407               salsa_gases_from_chem
     408
     409    USE surface_mod,                                                                               &
     410        ONLY:  bc_h,                                                                               &
     411               surf_def_h,                                                                         &
     412               surf_lsm_h,                                                                         &
     413               surf_usm_h
    301414
    302415    IMPLICIT NONE
     
    306419!
    307420!-- Constants
    308     INTEGER(iwp), PARAMETER ::  child_to_parent = 2   !< Parameter for pmci_parent_datatrans indicating the direction of transfer
    309     INTEGER(iwp), PARAMETER ::  parent_to_child = 1   !< Parameter for pmci_parent_datatrans indicating the direction of transfer
    310     INTEGER(iwp), PARAMETER ::  interpolation_scheme_lrsn  = 2  !< Interpolation scheme to be used on lateral boundaries
    311     INTEGER(iwp), PARAMETER ::  interpolation_scheme_t     = 3  !< Interpolation scheme to be used on top boundary
    312 
    313     REAL(wp), PARAMETER ::  tolefac = 1.0E-6_wp                 !< Relative tolerance for grid-line matching tests and comparisons
     421    INTEGER(iwp), PARAMETER ::  child_to_parent = 2            !< Parameter for pmci_parent_datatrans indicating the direction of
     422                                                               !< transfer
     423    INTEGER(iwp), PARAMETER ::  interpolation_scheme_lrsn = 2  !< Interpolation scheme to be used on lateral boundaries
     424    INTEGER(iwp), PARAMETER ::  interpolation_scheme_t = 3     !< Interpolation scheme to be used on top boundary
     425    INTEGER(iwp), PARAMETER ::  parent_to_child = 1            !< Parameter for pmci_parent_datatrans indicating the direction of
     426                                                               !< transfer
     427
     428    REAL(wp), PARAMETER ::  tolefac = 1.0E-6_wp  !< Relative tolerance for grid-line matching tests and comparisons
    314429!
    315430!-- Coupler setup
    316     INTEGER(iwp), SAVE      ::  comm_world_nesting    !< Global nesting communicator
    317     INTEGER(iwp), SAVE      ::  cpl_id  = 1           !<
    318     INTEGER(iwp), SAVE      ::  cpl_npe_total         !<
    319     INTEGER(iwp), SAVE      ::  cpl_parent_id         !<
    320    
    321     CHARACTER(LEN=32), SAVE ::  cpl_name              !<
     431    CHARACTER(LEN=32), SAVE ::  cpl_name  !<
     432
     433    INTEGER(iwp), SAVE ::  comm_world_nesting  !< Global nesting communicator
     434    INTEGER(iwp), SAVE ::  cpl_id  = 1         !<
     435    INTEGER(iwp), SAVE ::  cpl_npe_total       !<
     436    INTEGER(iwp), SAVE ::  cpl_parent_id       !<
    322437
    323438!
    324439!-- Control parameters
    325     INTEGER(iwp),     SAVE ::  anterpolation_buffer_width = 2       !< Boundary buffer width for anterpolation
    326440    CHARACTER(LEN=7), SAVE ::  nesting_datatransfer_mode = 'mixed'  !< steering parameter for data-transfer mode
    327441    CHARACTER(LEN=8), SAVE ::  nesting_mode = 'two-way'             !< steering parameter for 1- or 2-way nesting
    328    
    329     LOGICAL, SAVE ::  nested_run = .FALSE.  !< general switch
    330     LOGICAL, SAVE ::  rans_mode_parent = .FALSE. !< mode of parent model (.F. - LES mode, .T. - RANS mode)
     442
     443    INTEGER(iwp), SAVE ::  anterpolation_buffer_width = 2  !< Boundary buffer width for anterpolation
     444
     445    LOGICAL, SAVE ::  nested_run = .FALSE.        !< general switch
     446    LOGICAL, SAVE ::  rans_mode_parent = .FALSE.  !< mode of parent model (.F. - LES mode, .T. - RANS mode)
    331447!
    332448!-- Geometry
    333     REAL(wp), SAVE, DIMENSION(:), ALLOCATABLE, PUBLIC ::  coord_x            !< Array for the absolute x-coordinates
    334     REAL(wp), SAVE, DIMENSION(:), ALLOCATABLE, PUBLIC ::  coord_y            !< Array for the absolute y-coordinates
    335     REAL(wp), SAVE, PUBLIC                            ::  lower_left_coord_x !< x-coordinate of the lower left corner of the domain
    336     REAL(wp), SAVE, PUBLIC                            ::  lower_left_coord_y !< y-coordinate of the lower left corner of the domain
     449    REAL(wp), SAVE, DIMENSION(:), ALLOCATABLE, PUBLIC ::  coord_x             !< Array for the absolute x-coordinates
     450    REAL(wp), SAVE, DIMENSION(:), ALLOCATABLE, PUBLIC ::  coord_y             !< Array for the absolute y-coordinates
     451    REAL(wp), SAVE, PUBLIC                            ::  lower_left_coord_x  !< x-coordinate of the lower left corner of the domain
     452    REAL(wp), SAVE, PUBLIC                            ::  lower_left_coord_y  !< y-coordinate of the lower left corner of the domain
    337453!
    338454!-- Children's parent-grid arrays
    339     INTEGER(iwp), SAVE, DIMENSION(5), PUBLIC    ::  parent_bound        !< subdomain index bounds for children's parent-grid arrays
    340 
    341     REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  dissc !< Parent-grid array on child domain - dissipation rate
    342     REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  ec    !< Parent-grid array on child domain - SGS TKE
    343     REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  ptc   !< Parent-grid array on child domain - potential temperature
    344     REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  uc    !< Parent-grid array on child domain - velocity component u
    345     REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  vc    !< Parent-grid array on child domain - velocity component v
    346     REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  wc    !< Parent-grid array on child domain - velocity component w
    347     REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  q_c   !< Parent-grid array on child domain -
    348     REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  qcc   !< Parent-grid array on child domain -
    349     REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  qrc   !< Parent-grid array on child domain -
    350     REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  nrc   !< Parent-grid array on child domain -
    351     REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  ncc   !< Parent-grid array on child domain -
    352     REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  sc    !< Parent-grid array on child domain -
    353     INTEGER(idp), SAVE, DIMENSION(:,:), ALLOCATABLE, TARGET, PUBLIC ::  nr_partc    !<
    354     INTEGER(idp), SAVE, DIMENSION(:,:), ALLOCATABLE, TARGET, PUBLIC ::  part_adrc   !<
    355 
    356     REAL(wp), SAVE, DIMENSION(:,:,:,:), ALLOCATABLE, TARGET ::  chem_spec_c  !< Parent-grid array on child domain - chemical species
     455    INTEGER(iwp), SAVE, DIMENSION(5), PUBLIC ::  parent_bound  !< subdomain index bounds for children's parent-grid arrays
     456
     457    INTEGER(idp), SAVE, DIMENSION(:,:), ALLOCATABLE, TARGET, PUBLIC ::  nr_partc   !<
     458    INTEGER(idp), SAVE, DIMENSION(:,:), ALLOCATABLE, TARGET, PUBLIC ::  part_adrc  !<
     459
     460    REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  dissc  !< Parent-grid array on child domain - dissipation rate
     461    REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  ec     !< Parent-grid array on child domain - SGS TKE
     462    REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  nrc    !< Parent-grid array on child domain -
     463    REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  ncc    !< Parent-grid array on child domain -
     464    REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  ptc    !< Parent-grid array on child domain - potential temperature
     465    REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  q_c    !< Parent-grid array on child domain -
     466    REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  qcc    !< Parent-grid array on child domain -
     467    REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  qrc    !< Parent-grid array on child domain -
     468    REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  sc     !< Parent-grid array on child domain -
     469    REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  uc     !< Parent-grid array on child domain - velocity component u
     470    REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  vc     !< Parent-grid array on child domain - velocity component v
     471    REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET ::  wc     !< Parent-grid array on child domain - velocity component w
    357472
    358473    REAL(wp), SAVE, DIMENSION(:,:,:,:), ALLOCATABLE, TARGET ::  aerosol_mass_c    !< Aerosol mass
    359     REAL(wp), SAVE, DIMENSION(:,:,:,:), ALLOCATABLE, TARGET ::  aerosol_number_c  !< Aerosol number
     474    REAL(wp), SAVE, DIMENSION(:,:,:,:), ALLOCATABLE, TARGET ::  aerosol_number_c  !< Aerosol number
     475    REAL(wp), SAVE, DIMENSION(:,:,:,:), ALLOCATABLE, TARGET ::  chem_spec_c       !< Parent-grid array on child domain
     476                                                                                  !< - chemical species
    360477    REAL(wp), SAVE, DIMENSION(:,:,:,:), ALLOCATABLE, TARGET ::  salsa_gas_c       !< SALSA gases
    361478!
    362479!-- Grid-spacing ratios.
    363     INTEGER(iwp), SAVE ::  igsr    !< Integer grid-spacing ratio in i-direction
    364     INTEGER(iwp), SAVE ::  jgsr    !< Integer grid-spacing ratio in j-direction
    365     INTEGER(iwp), SAVE ::  kgsr    !< Integer grid-spacing ratio in k-direction
     480    INTEGER(iwp), SAVE ::  igsr  !< Integer grid-spacing ratio in i-direction
     481    INTEGER(iwp), SAVE ::  jgsr  !< Integer grid-spacing ratio in j-direction
     482    INTEGER(iwp), SAVE ::  kgsr  !< Integer grid-spacing ratio in k-direction
    366483!
    367484!-- Global parent-grid index bounds
    368     INTEGER(iwp), SAVE ::  iplg    !< Leftmost parent-grid array ip index of the whole child domain
    369     INTEGER(iwp), SAVE ::  iprg    !< Rightmost parent-grid array ip index of the whole child domain
    370     INTEGER(iwp), SAVE ::  jpsg    !< Southmost parent-grid array jp index of the whole child domain
    371     INTEGER(iwp), SAVE ::  jpng    !< Northmost parent-grid array jp index of the whole child domain
    372 !
    373 !-- Local parent-grid index bounds. Different sets of index bounds are needed for parent-grid arrays (uc, etc),
    374 !-- for index mapping arrays (iflu, etc) and for work arrays (workarr_lr, etc). This is because these arrays
    375 !-- have different dimensions depending on the location of the subdomain relative to boundaries and corners.
    376     INTEGER(iwp), SAVE ::  ipl     !< Left index limit for children's parent-grid arrays
    377     INTEGER(iwp), SAVE ::  ipla    !< Left index limit for allocation of index-mapping and other auxiliary arrays
    378     INTEGER(iwp), SAVE ::  iplw    !< Left index limit for children's parent-grid work arrays
    379     INTEGER(iwp), SAVE ::  ipr     !< Right index limit for children's parent-grid arrays
    380     INTEGER(iwp), SAVE ::  ipra    !< Right index limit for allocation of index-mapping and other auxiliary arrays
    381     INTEGER(iwp), SAVE ::  iprw    !< Right index limit for children's parent-grid work arrays
    382     INTEGER(iwp), SAVE ::  jpn     !< North index limit for children's parent-grid arrays
    383     INTEGER(iwp), SAVE ::  jpna    !< North index limit for allocation of index-mapping and other auxiliary arrays
    384     INTEGER(iwp), SAVE ::  jpnw    !< North index limit for children's parent-grid work arrays
    385     INTEGER(iwp), SAVE ::  jps     !< South index limit for children's parent-grid arrays
    386     INTEGER(iwp), SAVE ::  jpsa    !< South index limit for allocation of index-mapping and other auxiliary arrays
    387     INTEGER(iwp), SAVE ::  jpsw    !< South index limit for children's parent-grid work arrays
     485    INTEGER(iwp), SAVE ::  iplg  !< Leftmost parent-grid array ip index of the whole child domain
     486    INTEGER(iwp), SAVE ::  iprg  !< Rightmost parent-grid array ip index of the whole child domain
     487    INTEGER(iwp), SAVE ::  jpsg  !< Southmost parent-grid array jp index of the whole child domain
     488    INTEGER(iwp), SAVE ::  jpng  !< Northmost parent-grid array jp index of the whole child domain
     489!
     490!-- Local parent-grid index bounds. Different sets of index bounds are needed for parent-grid arrays
     491!-- (uc, etc), for index mapping arrays (iflu, etc) and for work arrays (workarr_lr, etc). This is
     492!-- because these arrays have different dimensions depending on the location of the subdomain
     493!-- relative to boundaries and corners.
     494    INTEGER(iwp), SAVE ::  ipl   !< Left index limit for children's parent-grid arrays
     495    INTEGER(iwp), SAVE ::  ipla  !< Left index limit for allocation of index-mapping and other auxiliary arrays
     496    INTEGER(iwp), SAVE ::  iplw  !< Left index limit for children's parent-grid work arrays
     497    INTEGER(iwp), SAVE ::  ipr   !< Right index limit for children's parent-grid arrays
     498    INTEGER(iwp), SAVE ::  ipra  !< Right index limit for allocation of index-mapping and other auxiliary arrays
     499    INTEGER(iwp), SAVE ::  iprw  !< Right index limit for children's parent-grid work arrays
     500    INTEGER(iwp), SAVE ::  jpn   !< North index limit for children's parent-grid arrays
     501    INTEGER(iwp), SAVE ::  jpna  !< North index limit for allocation of index-mapping and other auxiliary arrays
     502    INTEGER(iwp), SAVE ::  jpnw  !< North index limit for children's parent-grid work arrays
     503    INTEGER(iwp), SAVE ::  jps   !< South index limit for children's parent-grid arrays
     504    INTEGER(iwp), SAVE ::  jpsa  !< South index limit for allocation of index-mapping and other auxiliary arrays
     505    INTEGER(iwp), SAVE ::  jpsw  !< South index limit for children's parent-grid work arrays
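The three families of index bounds above (ipl/ipr, ipla/ipra, iplw/iprw and their j-direction counterparts) exist because parent-grid arrays (uc, etc.), index-mapping arrays (iflu, etc.) and work arrays (workarr_lr, etc.) span slightly different extents near boundaries and corners. A minimal, self-contained Fortran sketch of that idea follows; all numerical bounds and array names are assumptions chosen for illustration only, not the module's actual allocations.

 PROGRAM bounds_sketch
!-- Hypothetical bound values, chosen only to show three differently sized index ranges
    IMPLICIT NONE
    INTEGER, PARAMETER ::  ipl  = 2, ipr  = 9    ! bounds as used for parent-grid arrays (e.g. uc)
    INTEGER, PARAMETER ::  ipla = 1, ipra = 10   ! bounds as used for index-mapping arrays (e.g. iflu)
    INTEGER, PARAMETER ::  iplw = 2, iprw = 10   ! bounds as used for work arrays (e.g. workarr_lr)
    REAL,    ALLOCATABLE ::  parent_like(:)      ! stands in for a parent-grid array
    INTEGER, ALLOCATABLE ::  mapping_like(:)     ! stands in for an index-mapping array
    REAL,    ALLOCATABLE ::  work_like(:)        ! stands in for a work array

    ALLOCATE( parent_like(ipl:ipr), mapping_like(ipla:ipra), work_like(iplw:iprw) )
!-- Although all three refer to the same subdomain, their extents differ: 8, 10 and 9
    PRINT*, SIZE( parent_like ), SIZE( mapping_like ), SIZE( work_like )
 END PROGRAM bounds_sketch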
    388506!
    389507!-- Highest prognostic parent-grid k-indices.
    390     INTEGER(iwp), SAVE ::  kcto     !< Upper bound for k in anterpolation of variables other than w.
    391     INTEGER(iwp), SAVE ::  kctw     !< Upper bound for k in anterpolation of w.
     508    INTEGER(iwp), SAVE ::  kcto  !< Upper bound for k in anterpolation of variables other than w.
     509    INTEGER(iwp), SAVE ::  kctw  !< Upper bound for k in anterpolation of w.
    392510!
    393511!-- Child-array indices to be precomputed and stored for anterpolation.
     
    406524!
    407525!-- Number of child-grid nodes within anterpolation cells to be precomputed for anterpolation.
     526    INTEGER(iwp), SAVE, ALLOCATABLE, DIMENSION(:,:,:) ::  ijkfc_s  !< number of child grid points contributing to a parent grid
     527                                                                   !< node in anterpolation, scalar-grid
    408528    INTEGER(iwp), SAVE, ALLOCATABLE, DIMENSION(:,:,:) ::  ijkfc_u  !< number of child grid points contributing to a parent grid
    409529                                                                   !< node in anterpolation, u-grid
     
    412532    INTEGER(iwp), SAVE, ALLOCATABLE, DIMENSION(:,:,:) ::  ijkfc_w  !< number of child grid points contributing to a parent grid
    413533                                                                   !< node in anterpolation, w-grid
    414     INTEGER(iwp), SAVE, ALLOCATABLE, DIMENSION(:,:,:) ::  ijkfc_s  !< number of child grid points contributing to a parent grid
    415                                                                    !< node in anterpolation, scalar-grid
    416 !   
    417 !-- Work arrays for interpolation and user-defined type definitions for horizontal work-array exchange   
    418     REAL(wp), DIMENSION(:,:,:), ALLOCATABLE :: workarr_lr
    419     REAL(wp), DIMENSION(:,:,:), ALLOCATABLE :: workarr_sn
    420     REAL(wp), DIMENSION(:,:,:), ALLOCATABLE :: workarr_t
    421 
    422     INTEGER(iwp) :: workarr_lr_exchange_type
    423     INTEGER(iwp) :: workarr_sn_exchange_type
    424     INTEGER(iwp) :: workarr_t_exchange_type_x
    425     INTEGER(iwp) :: workarr_t_exchange_type_y
    426  
    427     INTEGER(iwp), DIMENSION(3)          ::  parent_grid_info_int    !< Array for communicating the parent-grid dimensions
    428                                                                     !< to its children.
    429 
    430     REAL(wp), DIMENSION(6)              ::  face_area               !< Surface area of each boundary face
    431     REAL(wp), DIMENSION(7)              ::  parent_grid_info_real   !< Array for communicating the real-type parent-grid
    432                                                                     !< parameters to its children.
     534!
     535!-- Work arrays for interpolation and user-defined type definitions for horizontal work-array exchange
     536    INTEGER(iwp) ::  workarr_lr_exchange_type   !<
     537    INTEGER(iwp) ::  workarr_sn_exchange_type   !<
     538    INTEGER(iwp) ::  workarr_t_exchange_type_x  !<
     539    INTEGER(iwp) ::  workarr_t_exchange_type_y  !<
     540
     541    INTEGER(iwp), DIMENSION(3) ::  parent_grid_info_int  !< Array for communicating the parent-grid dimensions to its children.
     542
     543    REAL(wp), DIMENSION(6) ::  face_area              !< Surface area of each boundary face
     544    REAL(wp), DIMENSION(7) ::  parent_grid_info_real  !< Array for communicating the real-type parent-grid parameters to its
     545                                                      !< children.
     546
     547    REAL(wp), DIMENSION(:,:,:), ALLOCATABLE ::  workarr_lr  !<
     548    REAL(wp), DIMENSION(:,:,:), ALLOCATABLE ::  workarr_sn  !<
     549    REAL(wp), DIMENSION(:,:,:), ALLOCATABLE ::  workarr_t   !<
    433550
    434551    TYPE parentgrid_def
    435        INTEGER(iwp)                        ::  nx                 !<
    436        INTEGER(iwp)                        ::  ny                 !<
    437        INTEGER(iwp)                        ::  nz                 !<
    438        REAL(wp)                            ::  dx                 !<
    439        REAL(wp)                            ::  dy                 !<
    440        REAL(wp)                            ::  dz                 !<
    441        REAL(wp)                            ::  lower_left_coord_x !<
    442        REAL(wp)                            ::  lower_left_coord_y !<
    443        REAL(wp)                            ::  xend               !<
    444        REAL(wp)                            ::  yend               !<
    445        REAL(wp), DIMENSION(:), ALLOCATABLE ::  coord_x            !<
    446        REAL(wp), DIMENSION(:), ALLOCATABLE ::  coord_y            !<
    447        REAL(wp), DIMENSION(:), ALLOCATABLE ::  dzu                !<
    448        REAL(wp), DIMENSION(:), ALLOCATABLE ::  dzw                !<
    449        REAL(wp), DIMENSION(:), ALLOCATABLE ::  zu                 !<
    450        REAL(wp), DIMENSION(:), ALLOCATABLE ::  zw                 !<
     552       INTEGER(iwp) ::  nx  !<
     553       INTEGER(iwp) ::  ny  !<
     554       INTEGER(iwp) ::  nz  !<
     555       REAL(wp) ::  dx                  !<
     556       REAL(wp) ::  dy                  !<
     557       REAL(wp) ::  dz                  !<
     558       REAL(wp) ::  lower_left_coord_x !<
     559       REAL(wp) ::  lower_left_coord_y !<
     560       REAL(wp) ::  xend                !<
     561       REAL(wp) ::  yend                !<
     562       REAL(wp), DIMENSION(:), ALLOCATABLE ::  coord_x  !<
     563       REAL(wp), DIMENSION(:), ALLOCATABLE ::  coord_y  !<
     564       REAL(wp), DIMENSION(:), ALLOCATABLE ::  dzu      !<
     565       REAL(wp), DIMENSION(:), ALLOCATABLE ::  dzw      !<
     566       REAL(wp), DIMENSION(:), ALLOCATABLE ::  zu       !<
     567       REAL(wp), DIMENSION(:), ALLOCATABLE ::  zw       !<
    451568    END TYPE parentgrid_def
    452569
    453     TYPE(parentgrid_def), SAVE, PUBLIC     ::  pg                 !< Parent-grid information package of type parentgrid_def
     570    TYPE(parentgrid_def), SAVE, PUBLIC ::  pg  !< Parent-grid information package of type parentgrid_def
    454571!
    455572!-- Variables for particle coupling
    456573    TYPE, PUBLIC :: childgrid_def
    457        INTEGER(iwp)                        ::  nx                   !<
    458        INTEGER(iwp)                        ::  ny                   !<
    459        INTEGER(iwp)                        ::  nz                   !<
    460        REAL(wp)                            ::  dx                   !<
    461        REAL(wp)                            ::  dy                   !<
    462        REAL(wp)                            ::  dz                   !<
    463        REAL(wp)                            ::  lx_coord, lx_coord_b !<   ! split onto separate lines
    464        REAL(wp)                            ::  rx_coord, rx_coord_b !<
    465        REAL(wp)                            ::  sy_coord, sy_coord_b !<
    466        REAL(wp)                            ::  ny_coord, ny_coord_b !<
    467        REAL(wp)                            ::  uz_coord, uz_coord_b !<
     574       INTEGER(iwp) ::  nx  !<
     575       INTEGER(iwp) ::  ny  !<
     576       INTEGER(iwp) ::  nz  !<
     577       REAL(wp)     ::  dx  !<
     578       REAL(wp)     ::  dy  !<
     579       REAL(wp)     ::  dz  !<
     580       REAL(wp)     ::  lx_coord, lx_coord_b !<   ! split onto separate lines
     581       REAL(wp)     ::  rx_coord, rx_coord_b !<
     582       REAL(wp)     ::  sy_coord, sy_coord_b !<
     583       REAL(wp)     ::  ny_coord, ny_coord_b !<
     584       REAL(wp)     ::  uz_coord, uz_coord_b !<
    468585    END TYPE childgrid_def
    469586
    470587    TYPE(childgrid_def), SAVE, ALLOCATABLE, DIMENSION(:), PUBLIC ::  childgrid  !<
    471588
    472     INTEGER(idp), ALLOCATABLE,DIMENSION(:,:), PUBLIC,TARGET ::  nr_part  !<
    473     INTEGER(idp), ALLOCATABLE,DIMENSION(:,:), PUBLIC,TARGET ::  part_adr !<
    474 
    475    
     589    INTEGER(idp), ALLOCATABLE,DIMENSION(:,:), PUBLIC,TARGET ::  nr_part   !<
     590    INTEGER(idp), ALLOCATABLE,DIMENSION(:,:), PUBLIC,TARGET ::  part_adr  !<
     591
     592
    476593    INTERFACE pmci_boundary_conds
    477594       MODULE PROCEDURE pmci_boundary_conds
    478595    END INTERFACE pmci_boundary_conds
    479    
     596
    480597    INTERFACE pmci_check_setting_mismatches
    481598       MODULE PROCEDURE pmci_check_setting_mismatches
     
    534651    END INTERFACE pmci_set_swaplevel
    535652
    536     PUBLIC child_to_parent, comm_world_nesting, cpl_id, nested_run,                                 &
    537            nesting_datatransfer_mode, nesting_mode, parent_to_child, rans_mode_parent
     653    PUBLIC child_to_parent,                                                                        &
     654           comm_world_nesting,                                                                     &
     655           cpl_id,                                                                                 &
     656           nested_run,                                                                             &
     657           nesting_datatransfer_mode,                                                              &
     658           nesting_mode,                                                                           &
     659           parent_to_child,                                                                        &
     660           rans_mode_parent
    538661
    539662    PUBLIC pmci_boundary_conds
     
    548671    PUBLIC pmci_ensure_nest_mass_conservation
    549672    PUBLIC pmci_ensure_nest_mass_conservation_vertical
    550    
     673
    551674 CONTAINS
    552675
    553 
     676!--------------------------------------------------------------------------------------------------!
     677! Description:
     678! ------------
     679!> @Todo: Missing subroutine description.
     680!--------------------------------------------------------------------------------------------------!
    554681 SUBROUTINE pmci_init( world_comm )
    555682
    556683    IMPLICIT NONE
    557684
    558     INTEGER(iwp), INTENT(OUT) ::  world_comm   !<
     685    INTEGER(iwp), INTENT(OUT) ::  world_comm  !<
    559686
    560687#if defined( __parallel )
    561688
    562     INTEGER(iwp) ::  pmc_status   !<
    563 
    564 
    565     CALL pmc_init_model( world_comm, nesting_datatransfer_mode, nesting_mode,                       &
     689    INTEGER(iwp) ::  pmc_status  !<
     690
     691
     692    CALL pmc_init_model( world_comm, nesting_datatransfer_mode, nesting_mode,                      &
    566693                         anterpolation_buffer_width, pmc_status )
    567694
     
    578705!
    579706!-- Check steering parameter values
    580     IF ( TRIM( nesting_mode ) /= 'one-way'  .AND.                                                   &
    581          TRIM( nesting_mode ) /= 'two-way'  .AND.                                                   &
    582          TRIM( nesting_mode ) /= 'vertical' )                                                       &
     707    IF ( TRIM( nesting_mode ) /= 'one-way'  .AND.                                                  &
     708         TRIM( nesting_mode ) /= 'two-way'  .AND.                                                  &
     709         TRIM( nesting_mode ) /= 'vertical' )                                                      &
    583710    THEN
    584711       message_string = 'illegal nesting mode: ' // TRIM( nesting_mode )
     
    586713    ENDIF
    587714
    588     IF ( TRIM( nesting_datatransfer_mode ) /= 'cascade'  .AND.                                      &
    589          TRIM( nesting_datatransfer_mode ) /= 'mixed'    .AND.                                      &
    590          TRIM( nesting_datatransfer_mode ) /= 'overlap' )                                           &
     715    IF ( TRIM( nesting_datatransfer_mode ) /= 'cascade'  .AND.                                     &
     716         TRIM( nesting_datatransfer_mode ) /= 'mixed'    .AND.                                     &
     717         TRIM( nesting_datatransfer_mode ) /= 'overlap' )                                          &
    591718    THEN
    592719       message_string = 'illegal nesting datatransfer mode: ' // TRIM( nesting_datatransfer_mode )
     
    594721    ENDIF
    595722!
    596 !-- Set the general steering switch which tells PALM that its a nested run
     723!-- Set the general steering switch which tells PALM that it is a nested run
    597724    nested_run = .TRUE.
    598725!
    599 !-- Get some variables required by the pmc-interface (and in some cases in the
    600 !-- PALM code out of the pmci) out of the pmc-core
    601     CALL pmc_get_model_info( comm_world_nesting = comm_world_nesting,                               &
    602                              cpl_id = cpl_id, cpl_parent_id = cpl_parent_id,                        &
    603                              cpl_name = cpl_name, npe_total = cpl_npe_total,                        &
    604                              lower_left_x = lower_left_coord_x,                                     &
     726!-- Get some variables required by the pmc-interface (and in some cases in the PALM code out of the
     727!-- pmci) out of the pmc-core
     728    CALL pmc_get_model_info( comm_world_nesting = comm_world_nesting, cpl_id = cpl_id,             &
     729                             cpl_parent_id = cpl_parent_id, cpl_name = cpl_name,                   &
     730                             npe_total = cpl_npe_total, lower_left_x = lower_left_coord_x,         &
    605731                             lower_left_y = lower_left_coord_y )
    606732!
    607 !-- Set the steering switch which tells the models that they are nested (of
    608 !-- course the root domain is not nested)
     733!-- Set the steering switch which tells the models that they are nested (of course the root domain
     734!-- is not nested)
    609735    IF ( .NOT.  pmc_is_rootmodel() )  THEN
    610736       child_domain = .TRUE.
     
    614740!
    615741!-- Message that communicators for nesting are initialized.
    616 !-- Attention: myid has been set at the end of pmc_init_model in order to
    617 !-- guarantee that only PE0 of the root domain does the output.
     742!-- Attention: myid has been set at the end of pmc_init_model in order to guarantee that only PE0 of
     743!-- the root domain does the output.
    618744    CALL location_message( 'initialize model nesting', 'finished' )
    619745!
     
    622748#else
    623749!
    624 !-- Nesting cannot be used in serial mode. cpl_id is set to root domain (1)
    625 !-- because no location messages would be generated otherwise.
    626 !-- world_comm is given a dummy value to avoid compiler warnings (INTENT(OUT)
    627 !-- must get an explicit value).
    628 !-- Note that this branch is only to avoid compiler warnings. The actual
    629 !-- execution never reaches here because the call of this subroutine is
    630 !-- already enclosed by  #if defined( __parallel ).
     750!-- Nesting cannot be used in serial mode. cpl_id is set to root domain (1) because no location
     751!-- messages would be generated otherwise. world_comm is given a dummy value to avoid compiler
     752!-- warnings (INTENT(OUT) must get an explicit value).
     753!-- Note that this branch is only to avoid compiler warnings. The actual execution never reaches
     754!-- here because the call of this subroutine is already enclosed by  #if defined( __parallel ).
    631755    cpl_id     = 1
    632756    nested_run = .FALSE.
     
    637761
    638762
    639 
     763!--------------------------------------------------------------------------------------------------!
     764! Description:
     765! ------------
     766!> @Todo: Missing subroutine description.
     767!--------------------------------------------------------------------------------------------------!
    640768 SUBROUTINE pmci_modelconfiguration
    641769
    642770    IMPLICIT NONE
    643771
    644     INTEGER(iwp) ::  ncpl   !< number of nest domains
    645 
    646    
     772    INTEGER(iwp) ::  ncpl  !< number of nest domains
     773
     774
    647775#if defined( __parallel )
    648776    CALL location_message( 'setup the nested model configuration', 'start' )
     
    650778!
    651779!-- Compute absolute coordinates for all models
    652     CALL pmci_setup_coordinates         ! CONTAIN THIS 
     780    CALL pmci_setup_coordinates         ! CONTAIN THIS
    653781!
    654782!-- Determine the number of coupled arrays
     
    656784!
    657785!-- Initialize the child (must be called before pmc_setup_parent)
    658 !-- Klaus, extend this comment to explain why it must be called before   
     786!-- Klaus, extend this comment to explain why it must be called before
    659787    CALL pmci_setup_child               ! CONTAIN THIS
    660788!
     
    666794    CALL pmci_check_setting_mismatches  ! CONTAIN THIS
    667795!
    668 !-- Set flag file for combine_plot_fields for precessing the nest output data
    669     OPEN( 90, FILE='3DNESTING', FORM='FORMATTED' )
     796!-- Set flag file for combine_plot_fields for processing the nest output data
     797    OPEN( 90, FILE = '3DNESTING', FORM = 'FORMATTED' )
    670798    CALL pmc_get_model_info( ncpl = ncpl )
    671799    WRITE( 90, '(I2)' )  ncpl
     
    679807
    680808
    681 
     809!--------------------------------------------------------------------------------------------------!
     810! Description:
     811! ------------
     812!> @Todo: Missing subroutine description.
     813!--------------------------------------------------------------------------------------------------!
    682814 SUBROUTINE pmci_setup_parent
    683815
     
    685817    IMPLICIT NONE
    686818
    687     INTEGER(iwp) ::  child_id           !< Child id-number for the child m
    688     INTEGER(iwp) ::  ierr               !< MPI-error code
    689     INTEGER(iwp) ::  kp                 !< Parent-grid index in the z-direction
    690     INTEGER(iwp) ::  lb = 1             !< Running index for aerosol size bins
    691     INTEGER(iwp) ::  lc = 1             !< Running index for aerosol mass bins
    692     INTEGER(iwp) ::  lg = 1             !< Running index for SALSA gases
    693     INTEGER(iwp) ::  m                  !< Loop index over all children of the current parent
    694     INTEGER(iwp) ::  msib               !< Loop index over all other children than m in case of siblings (parallel children)
    695     INTEGER(iwp) ::  n = 1              !< Running index for chemical species
    696     INTEGER(iwp) ::  nx_child           !< Number of child-grid points in the x-direction
    697     INTEGER(iwp) ::  ny_child           !< Number of child-grid points in the y-direction
    698     INTEGER(iwp) ::  nz_child           !< Number of child-grid points in the z-direction
    699     INTEGER(iwp) ::  sibling_id         !< Child id-number for the child msib (sibling of child m)
    700    
     819    CHARACTER(LEN=32) ::  myname  !< String for variable name such as 'u'
     820
     821    INTEGER(iwp) ::  child_id    !< Child id-number for the child m
     822    INTEGER(iwp) ::  ierr        !< MPI-error code
     823    INTEGER(iwp) ::  kp          !< Parent-grid index in the z-direction
     824    INTEGER(iwp) ::  lb = 1      !< Running index for aerosol size bins
     825    INTEGER(iwp) ::  lc = 1      !< Running index for aerosol mass bins
     826    INTEGER(iwp) ::  lg = 1      !< Running index for SALSA gases
     827    INTEGER(iwp) ::  m           !< Loop index over all children of the current parent
     828    INTEGER(iwp) ::  msib        !< Loop index over all other children than m in case of siblings (parallel children)
     829    INTEGER(iwp) ::  n = 1       !< Running index for chemical species
     830    INTEGER(iwp) ::  nx_child    !< Number of child-grid points in the x-direction
     831    INTEGER(iwp) ::  ny_child    !< Number of child-grid points in the y-direction
     832    INTEGER(iwp) ::  nz_child    !< Number of child-grid points in the z-direction
     833    INTEGER(iwp) ::  sibling_id  !< Child id-number for the child msib (sibling of child m)
     834
    701835    INTEGER(iwp), DIMENSION(3) ::  child_grid_dim  !< Array for receiving the child-grid dimensions from the children
    702    
    703     REAL(wp), DIMENSION(:), ALLOCATABLE ::  child_x_left   !< Minimum x-coordinate of the child domain including the ghost
    704                                                            !< point layers
    705     REAL(wp), DIMENSION(:), ALLOCATABLE ::  child_x_right  !< Maximum x-coordinate of the child domain including the ghost
    706                                                            !< point layers   
    707     REAL(wp), DIMENSION(:), ALLOCATABLE ::  child_y_south  !< Minimum y-coordinate of the child domain including the ghost
    708                                                            !< point layers
    709     REAL(wp), DIMENSION(:), ALLOCATABLE ::  child_y_north  !< Maximum y-coordinate of the child domain including the ghost
    710                                                            !< point layers
    711     REAL(wp), DIMENSION(:), ALLOCATABLE ::  child_coord_x  !< Child domain x-coordinate array
    712     REAL(wp), DIMENSION(:), ALLOCATABLE ::  child_coord_y  !< Child domain y-coordinate array
    713    
    714     REAL(wp), DIMENSION(5) ::  child_grid_info  !< Array for receiving the child-grid spacings etc from the children
    715    
     836
     837    LOGICAL :: m_left_in_msib   !< Logical auxiliary parameter for the overlap test: true if the left border
     838                                !< of the child m is within the x-range of the child msib
     839    LOGICAL :: m_right_in_msib  !< Logical auxiliary parameter for the overlap test: true if the right border
     840                                !< of the child m is within the x-range of the child msib
     841    LOGICAL :: msib_left_in_m   !< Logical auxiliary parameter for the overlap test: true if the left border
     842                                !< of the child msib is within the x-range of the child m
     843    LOGICAL :: msib_right_in_m  !< Logical auxiliary parameter for the overlap test: true if the right border
     844                                !< of the child msib is within the x-range of the child m
     845    LOGICAL :: m_south_in_msib  !< Logical auxiliary parameter for the overlap test: true if the south border
     846                                !< of the child m is within the y-range of the child msib
     847    LOGICAL :: m_north_in_msib  !< Logical auxiliary parameter for the overlap test: true if the north border
     848                                !< of the child m is within the y-range of the child msib
     849    LOGICAL :: msib_south_in_m  !< Logical auxiliary parameter for the overlap test: true if the south border
     850                                !< of the child msib is within the y-range of the child m
     851    LOGICAL :: msib_north_in_m  !< Logical auxiliary parameter for the overlap test: true if the north border
     852                                !< of the child msib is within the y-range of the child m
     853
    716854    REAL(wp) ::  child_height         !< Height of the child domain defined on the child side as zw(nzt+1)
    717855    REAL(wp) ::  dx_child             !< Child-grid spacing in the x-direction
    718856    REAL(wp) ::  dy_child             !< Child-grid spacing in the y-direction
    719857    REAL(wp) ::  dz_child             !< Child-grid spacing in the z-direction
    720     REAL(wp) ::  left_limit           !< Left limit for the absolute x-coordinate of the child left boundary 
     858    REAL(wp) ::  left_limit           !< Left limit for the absolute x-coordinate of the child left boundary
    721859    REAL(wp) ::  north_limit          !< North limit for the absolute y-coordinate of the child north boundary
    722860    REAL(wp) ::  right_limit          !< Right limit for the absolute x-coordinate of the child right boundary
    723     REAL(wp) ::  south_limit          !< South limit for the absolute y-coordinate of the child south boundary 
    724     REAL(wp) ::  upper_right_coord_x  !< Absolute x-coordinate of the upper right corner of the child domain 
    725     REAL(wp) ::  upper_right_coord_y  !< Absolute y-coordinate of the upper right corner of the child domain 
     861    REAL(wp) ::  south_limit          !< South limit for the absolute y-coordinate of the child south boundary
     862    REAL(wp) ::  upper_right_coord_x  !< Absolute x-coordinate of the upper right corner of the child domain
     863    REAL(wp) ::  upper_right_coord_y  !< Absolute y-coordinate of the upper right corner of the child domain
    726864    REAL(wp) ::  xez                  !< Minimum separation in the x-direction required between the child and
    727865                                      !< parent boundaries (left or right)
    728866    REAL(wp) ::  yez                  !< Minimum separation in the y-direction required between the child and
    729867                                      !< parent boundaries (south or north)
    730     REAL(wp)     ::  tolex            !< Tolerance for grid-line matching in x-direction
    731     REAL(wp)     ::  toley            !< Tolerance for grid-line matching in y-direction
    732     REAL(wp)     ::  tolez            !< Tolerance for grid-line matching in z-direction   
    733 
    734     CHARACTER(LEN=32) ::  myname      !< String for variable name such as 'u'
    735 
    736     LOGICAL :: m_left_in_msib         !< Logical auxiliary parameter for the overlap test: true if the left border
    737                                       !< of the child m is within the x-range of the child msib
    738     LOGICAL :: m_right_in_msib        !< Logical auxiliary parameter for the overlap test: true if the right border
    739                                       !< of the child m is within the x-range of the child msib
    740     LOGICAL :: msib_left_in_m         !< Logical auxiliary parameter for the overlap test: true if the left border
    741                                       !< of the child msib is within the x-range of the child m
    742     LOGICAL :: msib_right_in_m        !< Logical auxiliary parameter for the overlap test: true if the right border
    743                                       !< of the child msib is within the x-range of the child m
    744     LOGICAL :: m_south_in_msib        !< Logical auxiliary parameter for the overlap test: true if the south border
    745                                       !< of the child m is within the y-range of the child msib
    746     LOGICAL :: m_north_in_msib        !< Logical auxiliary parameter for the overlap test: true if the north border
    747                                       !< of the child m is within the y-range of the child msib
    748     LOGICAL :: msib_south_in_m        !< Logical auxiliary parameter for the overlap test: true if the south border
    749                                       !< of the child msib is within the y-range of the child m
    750     LOGICAL :: msib_north_in_m        !< Logical auxiliary parameter for the overlap test: true if the north border
    751                                       !< of the child msib is within the y-range of the child m
     868    REAL(wp) ::  tolex                !< Tolerance for grid-line matching in x-direction
     869    REAL(wp) ::  toley                !< Tolerance for grid-line matching in y-direction
     870    REAL(wp) ::  tolez                !< Tolerance for grid-line matching in z-direction
     871
     872    REAL(wp), DIMENSION(:), ALLOCATABLE ::  child_coord_x  !< Child domain x-coordinate array
     873    REAL(wp), DIMENSION(:), ALLOCATABLE ::  child_coord_y  !< Child domain y-coordinate array
     874    REAL(wp), DIMENSION(:), ALLOCATABLE ::  child_x_left   !< Minimum x-coordinate of the child domain including the ghost
     875                                                           !< point layers
     876    REAL(wp), DIMENSION(:), ALLOCATABLE ::  child_x_right  !< Maximum x-coordinate of the child domain including the ghost
     877                                                           !< point layers
     878    REAL(wp), DIMENSION(:), ALLOCATABLE ::  child_y_north  !< Maximum y-coordinate of the child domain including the ghost
     879                                                           !< point layers
     880    REAL(wp), DIMENSION(:), ALLOCATABLE ::  child_y_south  !< Minimum y-coordinate of the child domain including the ghost
     881                                                           !< point layers
     882
     883    REAL(wp), DIMENSION(5) ::  child_grid_info  !< Array for receiving the child-grid spacings etc from the children
    752884
    753885!
     
    755887    tolex = tolefac * dx
    756888    toley = tolefac * dy
    757     tolez = tolefac * dz(1)   
     889    tolez = tolefac * dz(1)
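For a sense of the magnitudes involved (grid spacings assumed here, not taken from any particular setup): with tolefac = 1.0E-6 and dx = dy = dz(1) = 2.0 m, the tolerances evaluate to 2.0E-6 m, i.e. grid lines are treated as coincident when they differ by no more than a few micrometres. A self-contained sketch:

 PROGRAM tolerance_sketch
!-- Assumed grid spacings; only meant to show the order of magnitude of the tolerances
    IMPLICIT NONE
    REAL, PARAMETER ::  tolefac = 1.0E-6
    REAL, PARAMETER ::  dx = 2.0, dy = 2.0, dz1 = 2.0   ! grid spacings in m (assumed)
    PRINT*, 'tolex =', tolefac * dx, 'toley =', tolefac * dy, 'tolez =', tolefac * dz1
 END PROGRAM tolerance_sketch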
    758890!
    759891!-- Initialize the current pmc parent.
    760892    CALL pmc_parentinit
    761893!
    762 !-- Corners of all children of the present parent. Note that
    763 !-- SIZE( pmc_parent_for_child ) = 1 if we have no children.
    764     IF ( ( SIZE( pmc_parent_for_child ) - 1 > 0 )  .AND.  myid == 0 )  THEN 
    765        ALLOCATE( child_x_left(1:SIZE( pmc_parent_for_child ) - 1) )
     894!-- Corners of all children of the present parent. Note that SIZE( pmc_parent_for_child ) = 1 if we
     895!-- have no children.
     896    IF ( ( SIZE( pmc_parent_for_child ) - 1 > 0 )  .AND.  myid == 0 )  THEN
     897       ALLOCATE( child_x_left(1:SIZE(  pmc_parent_for_child ) - 1) )
    766898       ALLOCATE( child_x_right(1:SIZE( pmc_parent_for_child ) - 1) )
    767899       ALLOCATE( child_y_south(1:SIZE( pmc_parent_for_child ) - 1) )
     
    772904    ENDIF
    773905!
    774 !-- Get coordinates from all children and check that the children match the parent
    775 !-- domain and each other. Note that SIZE( pmc_parent_for_child ) = 1
    776 !-- if we have no children, thence the loop is not executed at all.
     906!-- Get coordinates from all children and check that the children match the parent domain and each
     907!-- other. Note that SIZE( pmc_parent_for_child ) = 1 if we have no children, hence the loop is
     908!-- not executed at all.
    777909    DO  m = 1, SIZE( pmc_parent_for_child ) - 1
    778910
     
    781913       IF ( myid == 0 )  THEN
    782914
    783           CALL pmc_recv_from_child( child_id, child_grid_dim,  SIZE(child_grid_dim), 0, 123, ierr )
    784           CALL pmc_recv_from_child( child_id, child_grid_info, SIZE(child_grid_info), 0, 124, ierr )
    785          
     915          CALL pmc_recv_from_child( child_id, child_grid_dim,  SIZE( child_grid_dim ), 0, 123,     &
     916                                    ierr )
     917          CALL pmc_recv_from_child( child_id, child_grid_info, SIZE( child_grid_info ), 0, 124,    &
     918                                    ierr )
     919
    786920          nx_child     = child_grid_dim(1)
    787921          ny_child     = child_grid_dim(2)
     
    792926!
    793927!--       Find the highest child-domain level in the parent grid for the reduced z transfer
    794           DO  kp = 1, nzt                 
    795              IF ( zw(kp) - child_height > tolez )  THEN                   
     928          DO  kp = 1, nzt
     929             IF ( zw(kp) - child_height > tolez )  THEN
    796930                nz_child = kp
    797931                EXIT
    798932             ENDIF
    799933          ENDDO
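To make the search above concrete, here is a self-contained sketch with assumed parent levels (none of the numbers come from this changeset): for zw = 0, 50, 100, 150 m and a child top at child_height = 100 m, zw(3) is the first level lying more than tolez above the child top, so nz_child = 3.

 PROGRAM nz_child_sketch
!-- Hypothetical levels; reproduces the loop logic above for one concrete case
    IMPLICIT NONE
    INTEGER ::  kp
    INTEGER ::  nz_child = -1
    REAL, PARAMETER ::  tolez = 2.0E-6
    REAL, PARAMETER ::  child_height = 100.0
    REAL, PARAMETER ::  zw(0:3) = (/ 0.0, 50.0, 100.0, 150.0 /)

    DO  kp = 1, 3
       IF ( zw(kp) - child_height > tolez )  THEN
          nz_child = kp
          EXIT
       ENDIF
    ENDDO
    PRINT*, 'nz_child =', nz_child   ! prints 3
 END PROGRAM nz_child_sketch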
    800 !   
     934!
    801935!--       Get absolute coordinates from the child
    802936          ALLOCATE( child_coord_x(-nbgp:nx_child+nbgp) )
    803937          ALLOCATE( child_coord_y(-nbgp:ny_child+nbgp) )
    804          
     938
    805939          CALL pmc_recv_from_child( child_id, child_coord_x, SIZE( child_coord_x ), 0, 11, ierr )
    806940          CALL pmc_recv_from_child( child_id, child_coord_y, SIZE( child_coord_y ), 0, 12, ierr )
    807          
     941
    808942          parent_grid_info_real(1) = lower_left_coord_x
    809943          parent_grid_info_real(2) = lower_left_coord_y
     
    821955          parent_grid_info_int(3)  = nz_child
    822956!
    823 !--       Check that the child domain matches its parent domain. 
     957!--       Check that the child domain matches its parent domain.
    824958          IF ( nesting_mode == 'vertical' )  THEN
    825959!
    826 !--          In case of vertical nesting, the lateral boundaries must match exactly. 
     960!--          In case of vertical nesting, the lateral boundaries must match exactly.
    827961             right_limit = upper_right_coord_x
    828962             north_limit = upper_right_coord_y
    829963             IF ( ABS( child_coord_x(nx_child+1) - right_limit ) > tolex )  THEN
    830                 WRITE ( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                 &
    831                      ') domain right edge does not match its parent right edge'
     964                WRITE( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                 &
     965                       ') domain right edge does not match its parent right edge'
    832966                CALL message( 'pmci_setup_parent', 'PA0425', 3, 2, 0, 6, 0 )
    833967             ENDIF
    834968             IF ( ABS( child_coord_y(ny_child+1) - north_limit ) > toley )  THEN
    835                 WRITE ( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                 &
    836                      ') domain north edge does not match its parent north edge'
     969                WRITE( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                 &
     970                       ') domain north edge does not match its parent north edge'
    837971                CALL message( 'pmci_setup_parent', 'PA0425', 3, 2, 0, 6, 0 )
    838972             ENDIF
    839           ELSE       
    840 !
    841 !--          In case of 3-D nesting, check that the child domain is completely
    842 !--          inside its parent domain.
    843              xez = ( nbgp + 1 ) * dx 
    844              yez = ( nbgp + 1 ) * dy 
     973          ELSE
     974!
     975!--          In case of 3-D nesting, check that the child domain is completely inside its parent
     976!--          domain.
     977             xez = ( nbgp + 1 ) * dx
     978             yez = ( nbgp + 1 ) * dy
    845979             left_limit  = lower_left_coord_x + xez
    846980             right_limit = upper_right_coord_x - xez
     
    848982             north_limit = upper_right_coord_y - yez
    849983             IF ( left_limit - child_coord_x(0) > tolex )  THEN
    850                 WRITE ( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                 &
    851                      ') domain does not fit in its parent domain, left edge is either too ' //      &
    852                      'close or outside its parent left edge'
     984                WRITE( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                 &
     985                       ') domain does not fit in its parent domain, left edge is either too ' //   &
     986                       'close or outside its parent left edge'
    853987                CALL message( 'pmci_setup_parent', 'PA0425', 3, 2, 0, 6, 0 )
    854988             ENDIF
    855989             IF ( child_coord_x(nx_child+1) - right_limit > tolex )  THEN
    856                 WRITE ( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                 &
    857                      ') domain does not fit in its parent domain, right edge is either too ' //     &
    858                      'close or outside its parent right edge'
     990                WRITE( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                 &
     991                       ') domain does not fit in its parent domain, right edge is either too ' //  &
     992                       'close or outside its parent right edge'
    859993                CALL message( 'pmci_setup_parent', 'PA0425', 3, 2, 0, 6, 0 )
    860994             ENDIF
    861995             IF ( south_limit - child_coord_y(0) > toley )  THEN
    862                 WRITE ( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                 &
    863                      ') domain does not fit in its parent domain, south edge is either too ' //     &
    864                      'close or outside its parent south edge'
     996                WRITE( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                 &
     997                       ') domain does not fit in its parent domain, south edge is either too ' //  &
     998                       'close or outside its parent south edge'
    865999                CALL message( 'pmci_setup_parent', 'PA0425', 3, 2, 0, 6, 0 )
    8661000             ENDIF
    8671001             IF ( child_coord_y(ny_child+1) - north_limit > toley )  THEN
    868                 WRITE ( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                 &
    869                      ') domain does not fit in its parent domain, north edge is either too ' //     &
    870                      'close or outside its parent north edge'
     1002                WRITE( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                 &
     1003                       ') domain does not fit in its parent domain, north edge is either too ' //  &
     1004                       'close or outside its parent north edge'
    8711005                CALL message( 'pmci_setup_parent', 'PA0425', 3, 2, 0, 6, 0 )
    8721006             ENDIF
    8731007          ENDIF
    874 !             
    875 !--       Child domain must be lower than the parent domain such that the top ghost
    876 !--       layer of the child grid does not exceed the parent domain top boundary.
     1008!
     1009!--       Child domain must be lower than the parent domain such that the top ghost layer of the
     1010!--       child grid does not exceed the parent domain top boundary.
    8771011          IF ( child_height - zw(nzt) > tolez ) THEN
    878              WRITE ( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                    &
    879                      ') domain does not fit in its parent domain, top edge is either too ' //       &
    880                      'close or above its parent top edge'
     1012             WRITE( message_string, "(a,i2,a)" ) 'nested child (id: ',child_id,                    &
     1013                    ') domain does not fit in its parent domain, top edge is either too ' //       &
     1014                    'close or above its parent top edge'
    8811015             CALL message( 'pmci_setup_parent', 'PA0425', 3, 2, 0, 6, 0 )
    8821016          ENDIF
    8831017!
    884 !--       If parallel child domains (siblings) do exist ( m > 1 ),
    885 !--       check that they do not overlap.
     1018!--       If parallel child domains (siblings) do exist ( m > 1 ), check that they do not overlap.
    8861019          child_x_left(m)  = child_coord_x(-nbgp)
    8871020          child_x_right(m) = child_coord_x(nx_child+nbgp)
     
    8911024          IF ( nesting_mode /= 'vertical' )  THEN
    8921025!
    893 !--          Note that the msib-loop is executed only if ( m > 1 ). 
    894 !--          Also note that the tests have to be made both ways (m vs msib and msib vs m)
    895 !--          in order to detect all the possible overlap situations.
     1026!--          Note that the msib-loop is executed only if ( m > 1 ).
     1027!--          Also note that the tests have to be done both ways (m vs msib and msib vs m) in order
     1028!--          to detect all the possible overlap situations.
    8961029             DO  msib = 1, m - 1
    8971030!
    898 !--             Set some logical auxiliary parameters to simplify the IF-condition.                 
    899                 m_left_in_msib  = ( child_x_left(m)  >= child_x_left(msib)  - tolex )  .AND.        &
     1031!--             Set some logical auxiliary parameters to simplify the IF-condition.
     1032                m_left_in_msib  = ( child_x_left(m)  >= child_x_left(msib)  - tolex )  .AND.       &
    9001033                                  ( child_x_left(m)  <= child_x_right(msib) + tolex )
    901                 m_right_in_msib = ( child_x_right(m) >= child_x_left(msib)  - tolex )  .AND.        &
     1034                m_right_in_msib = ( child_x_right(m) >= child_x_left(msib)  - tolex )  .AND.       &
    9021035                                  ( child_x_right(m) <= child_x_right(msib) + tolex )
    903                 msib_left_in_m  = ( child_x_left(msib)  >= child_x_left(m)  - tolex )  .AND.        &
     1036                msib_left_in_m  = ( child_x_left(msib)  >= child_x_left(m)  - tolex )  .AND.       &
    9041037                                  ( child_x_left(msib)  <= child_x_right(m) + tolex )
    905                 msib_right_in_m = ( child_x_right(msib) >= child_x_left(m)  - tolex )  .AND.        &
     1038                msib_right_in_m = ( child_x_right(msib) >= child_x_left(m)  - tolex )  .AND.       &
    9061039                                  ( child_x_right(msib) <= child_x_right(m) + tolex )
    907                 m_south_in_msib = ( child_y_south(m) >= child_y_south(msib) - toley )  .AND.        &
     1040                m_south_in_msib = ( child_y_south(m) >= child_y_south(msib) - toley )  .AND.       &
    9081041                                  ( child_y_south(m) <= child_y_north(msib) + toley )
    909                 m_north_in_msib = ( child_y_north(m) >= child_y_south(msib) - toley )  .AND.        &
     1042                m_north_in_msib = ( child_y_north(m) >= child_y_south(msib) - toley )  .AND.       &
    9101043                                  ( child_y_north(m) <= child_y_north(msib) + toley )
    911                 msib_south_in_m = ( child_y_south(msib) >= child_y_south(m) - toley )  .AND.        &
     1044                msib_south_in_m = ( child_y_south(msib) >= child_y_south(m) - toley )  .AND.       &
    9121045                                  ( child_y_south(msib) <= child_y_north(m) + toley )
    913                 msib_north_in_m = ( child_y_north(msib) >= child_y_south(m) - toley )  .AND.        &
     1046                msib_north_in_m = ( child_y_north(msib) >= child_y_south(m) - toley )  .AND.       &
    9141047                                  ( child_y_north(msib) <= child_y_north(m) + toley )
    915                
    916                 IF ( ( m_left_in_msib  .OR.  m_right_in_msib  .OR.                                  &
    917                        msib_left_in_m  .OR.  msib_right_in_m )                                      &
    918                      .AND.                                                                          &
    919                      ( m_south_in_msib  .OR.  m_north_in_msib  .OR.                                 &
     1048
     1049                IF ( ( m_left_in_msib  .OR.  m_right_in_msib  .OR.                                 &
     1050                       msib_left_in_m  .OR.  msib_right_in_m )  .AND.                              &
     1051                     ( m_south_in_msib  .OR.  m_north_in_msib  .OR.                                &
    9201052                       msib_south_in_m  .OR.  msib_north_in_m ) )  THEN
    9211053                   sibling_id = pmc_parent_for_child(msib)
    922                    WRITE ( message_string, "(a,i2,a,i2,a)" ) 'nested parallel child domains (ids: ',&
    923                         child_id, ' and ', sibling_id, ') overlap'
     1054                   WRITE( message_string, "(a,i2,a,i2,a)" ) 'nested parallel child domains (ids: ',&
     1055                          child_id, ' and ', sibling_id, ') overlap'
    9241056                   CALL message( 'pmci_setup_parent', 'PA0426', 3, 2, 0, 6, 0 )
    9251057                ENDIF
    9261058
    9271059             ENDDO
    928           ENDIF         
     1060          ENDIF
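The requirement that the overlap tests be made both ways can be illustrated with a small stand-alone sketch (the edge coordinates and the tolerance below are hypothetical, not taken from the code above): when one sibling lies completely inside the other, testing only the edges of the outer sibling against the inner one misses the overlap, and only the reversed test detects it.

 PROGRAM overlap_both_ways_sketch
!-- Minimal stand-alone sketch with hypothetical values: child m spans [0,1000] m and its sibling
!-- msib spans [200,400] m, i.e. msib lies completely inside m.
    IMPLICIT NONE

    REAL ::  child_x_left_m     = 0.0,    child_x_right_m    = 1000.0
    REAL ::  child_x_left_msib  = 200.0,  child_x_right_msib = 400.0
    REAL ::  tolex = 1.0E-3

    LOGICAL ::  m_edges_in_msib, msib_edges_in_m

!-- One-way test: are the edges of m inside msib? (fails here although the domains overlap)
    m_edges_in_msib = ( child_x_left_m  >= child_x_left_msib - tolex  .AND.                        &
                        child_x_left_m  <= child_x_right_msib + tolex )  .OR.                      &
                      ( child_x_right_m >= child_x_left_msib - tolex  .AND.                        &
                        child_x_right_m <= child_x_right_msib + tolex )
!-- Reversed test: are the edges of msib inside m? (detects the overlap)
    msib_edges_in_m = ( child_x_left_msib  >= child_x_left_m - tolex  .AND.                        &
                        child_x_left_msib  <= child_x_right_m + tolex )  .OR.                      &
                      ( child_x_right_msib >= child_x_left_m - tolex  .AND.                        &
                        child_x_right_msib <= child_x_right_m + tolex )

    PRINT*, 'one-way test only:    ', m_edges_in_msib                            ! .FALSE.
    PRINT*, 'tests made both ways: ', m_edges_in_msib  .OR.  msib_edges_in_m     ! .TRUE.
 END PROGRAM overlap_both_ways_sketch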
    9291061
    9301062          CALL pmci_set_child_edge_coords
     
    9331065          DEALLOCATE( child_coord_y )
    9341066!
    935 !--       Send information about operating mode (LES or RANS) to child. This will be
    936 !--       used to control TKE nesting and setting boundary conditions properly.
    937           CALL pmc_send_to_child( child_id, rans_mode, 1, 0, 19, ierr ) 
      1067!--       Send information about operating mode (LES or RANS) to child. This will be used to
      1068!--       control TKE nesting and to set the boundary conditions properly.
     1069          CALL pmc_send_to_child( child_id, rans_mode, 1, 0, 19, ierr )
    9381070!
    9391071!--       Send parent grid information to child
    940           CALL pmc_send_to_child( child_id, parent_grid_info_real,                                  &
    941                                   SIZE( parent_grid_info_real ), 0, 21,                             &
    942                                   ierr )
    943           CALL pmc_send_to_child( child_id, parent_grid_info_int,  3, 0,                            &
    944                                   22, ierr )
     1072          CALL pmc_send_to_child( child_id, parent_grid_info_real, SIZE( parent_grid_info_real ),  &
     1073                                  0, 21, ierr )
     1074          CALL pmc_send_to_child( child_id, parent_grid_info_int,  3, 0, 22, ierr )
    9451075!
    9461076!--       Send local grid to child
    947           CALL pmc_send_to_child( child_id, coord_x, nx+1+2*nbgp, 0, 24,                            &
    948                                   ierr )
    949           CALL pmc_send_to_child( child_id, coord_y, ny+1+2*nbgp, 0, 25,                            &
    950                                   ierr )
     1077          CALL pmc_send_to_child( child_id, coord_x, nx+1+2*nbgp, 0, 24, ierr )
     1078          CALL pmc_send_to_child( child_id, coord_y, ny+1+2*nbgp, 0, 25, ierr )
    9511079!
    9521080!--       Also send the dzu-, dzw-, zu- and zw-arrays here
     
    9551083          CALL pmc_send_to_child( child_id, zu,  nz_child + 2, 0, 28, ierr )
    9561084          CALL pmc_send_to_child( child_id, zw,  nz_child + 2, 0, 29, ierr )
    957          
    958        ENDIF  ! ( myid == 0 ) 
     1085
     1086       ENDIF  ! ( myid == 0 )
    9591087
    9601088       CALL MPI_BCAST( nz_child, 1, MPI_INTEGER, 0, comm2d, ierr )
    9611089
    962        CALL MPI_BCAST( childgrid(m), STORAGE_SIZE(childgrid(1))/8, MPI_BYTE, 0, comm2d, ierr )
    963 !
    964 !--    Set up the index-list which is an integer array that maps the child index space on
    965 !--    the parent index- and subdomain spaces.
     1090       CALL MPI_BCAST( childgrid(m), STORAGE_SIZE( childgrid( 1 ) ) / 8, MPI_BYTE, 0, comm2d, ierr )
     1091!
     1092!--    Set up the index-list which is an integer array that maps the child index space on the parent
     1093!--    index- and subdomain spaces.
    9661094       CALL pmci_create_index_list
    9671095!
    9681096!--    Include couple arrays into parent content.
    969 !--    The adresses of the PALM 2D or 3D array (here parent grid) which are candidates
    970 !--    for coupling are stored once into the pmc context. While data transfer, the array do not
    971 !--    have to be specified again
      1097!--    The addresses of the PALM 2D or 3D arrays (here parent grid) which are candidates for
      1098!--    coupling are stored once into the pmc context. During data transfer, the arrays do not have
      1099!--    to be specified again.
    9721100       CALL pmc_s_clear_next_array_list
    9731101       DO WHILE ( pmc_s_getnextarray( child_id, myname ) )
    974           IF ( INDEX( TRIM( myname ), 'chem_' ) /= 0 )  THEN             
     1102          IF ( INDEX( TRIM( myname ), 'chem_' ) /= 0 )  THEN
    9751103             CALL pmci_set_array_pointer( myname, child_id = child_id, nz_child = nz_child, n = n )
    976              n = n + 1 
     1104             n = n + 1
    9771105          ELSEIF ( INDEX( TRIM( myname ), 'an_' ) /= 0 )  THEN
    9781106             CALL pmci_set_array_pointer( myname, child_id = child_id, nz_child = nz_child, n = lb )
    979              lb = lb + 1 
     1107             lb = lb + 1
    9801108          ELSEIF ( INDEX( TRIM( myname ), 'am_' ) /= 0 )  THEN
    9811109             CALL pmci_set_array_pointer( myname, child_id = child_id, nz_child = nz_child, n = lc )
    982              lc = lc + 1 
     1110             lc = lc + 1
    9831111          ELSEIF ( INDEX( TRIM( myname ), 'sg_' ) /= 0  .AND.  .NOT. salsa_gases_from_chem )  THEN
    9841112             CALL pmci_set_array_pointer( myname, child_id = child_id, nz_child = nz_child, n = lg )
     
    9901118
    9911119       CALL pmc_s_setind_and_allocmem( child_id )
    992        
     1120
    9931121    ENDDO  ! m
    9941122
    995     IF ( ( SIZE( pmc_parent_for_child ) - 1 > 0 ) .AND. myid == 0 )  THEN
      1123    IF ( ( SIZE( pmc_parent_for_child ) - 1 > 0 )  .AND.  myid == 0 )  THEN
    9961124       DEALLOCATE( child_x_left )
    9971125       DEALLOCATE( child_x_right )
     
    10001128    ENDIF
    10011129
    1002    
     1130
    10031131 CONTAINS
    10041132
    1005 
    1006     SUBROUTINE pmci_create_index_list
    1007 
    1008        IMPLICIT NONE
    1009 
    1010        INTEGER(iwp) ::  ilist            !< Index-list index running over the child's parent-grid jc,ic-space
    1011        INTEGER(iwp) ::  index_list_size  !< Dimension 2 of the array index_list
    1012        INTEGER(iwp) ::  ierr             !< MPI error code
    1013        INTEGER(iwp) ::  ip               !< Running parent-grid index on the child domain in the x-direction
    1014        INTEGER(iwp) ::  jp               !< Running parent-grid index on the child domain in the y-direction
    1015        INTEGER(iwp) ::  n                !< Running index over child subdomains
    1016        INTEGER(iwp) ::  nrx              !< Parent subdomain dimension in the x-direction
    1017        INTEGER(iwp) ::  nry              !< Parent subdomain dimension in the y-direction
    1018        INTEGER(iwp) ::  pex              !< Two-dimensional subdomain (pe) index in the x-direction
    1019        INTEGER(iwp) ::  pey              !< Two-dimensional subdomain (pe) index in the y-direction
    1020        INTEGER(iwp) ::  parent_pe        !< Parent subdomain index (one-dimensional)
    1021 
    1022        INTEGER(iwp), DIMENSION(2) ::  pe_indices_2d                                  !< Array for two-dimensional subdomain (pe)
    1023                                                                                      !< indices needed for MPI_CART_RANK
    1024        INTEGER(iwp), DIMENSION(2) ::  size_of_childs_parent_grid_bounds_all          !< Dimensions of childs_parent_grid_bounds_all
    1025        INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE  ::  childs_parent_grid_bounds_all  !< Array that contains the child's
    1026                                                                                      !< parent-grid index bounds for all its
    1027                                                                                      !< subdomains (pes)
    1028        INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE  ::  index_list                     !< Array that maps the child index space on
    1029                                                                                      !< the parent index- and subdomain spaces
    1030        
    1031        IF ( myid == 0 )  THEN
    1032          
    1033           CALL pmc_recv_from_child( child_id, size_of_childs_parent_grid_bounds_all,                &
    1034                                     2, 0, 40, ierr )
    1035           ALLOCATE( childs_parent_grid_bounds_all(size_of_childs_parent_grid_bounds_all(1),         &
    1036                                                   size_of_childs_parent_grid_bounds_all(2)) )
    1037           CALL pmc_recv_from_child( child_id, childs_parent_grid_bounds_all,                        &
    1038                                     SIZE( childs_parent_grid_bounds_all ), 0, 41, ierr )
    1039 !
    1040 !--       Compute size (dimension) of the index_list.
    1041           index_list_size = 0         
    1042           DO  n = 1, size_of_childs_parent_grid_bounds_all(2)
    1043              index_list_size = index_list_size +                                                    &
    1044                   ( childs_parent_grid_bounds_all(4,n) - childs_parent_grid_bounds_all(3,n) + 1 ) * &
    1045                   ( childs_parent_grid_bounds_all(2,n) - childs_parent_grid_bounds_all(1,n) + 1 )
    1046           ENDDO
    1047 
    1048           ALLOCATE( index_list(6,index_list_size) )
    1049 
    1050           nrx = nxr - nxl + 1
    1051           nry = nyn - nys + 1
    1052           ilist = 0
    1053 !
    1054 !--       Loop over all children PEs
    1055           DO  n = 1, size_of_childs_parent_grid_bounds_all(2)           !
    1056 !
    1057 !--          Subspace along y required by actual child PE
    1058              DO  jp = childs_parent_grid_bounds_all(3,n), childs_parent_grid_bounds_all(4,n)  ! jp = jps, jpn of child PE# n
    1059 !
    1060 !--             Subspace along x required by actual child PE
    1061                 DO  ip = childs_parent_grid_bounds_all(1,n), childs_parent_grid_bounds_all(2,n)  ! ip = ipl, ipr of child PE# n
    1062 
    1063                    pex = ip / nrx
    1064                    pey = jp / nry
    1065                    pe_indices_2d(1) = pex
    1066                    pe_indices_2d(2) = pey
    1067                    CALL MPI_CART_RANK( comm2d, pe_indices_2d, parent_pe, ierr )
    1068                  
    1069                    ilist = ilist + 1
    1070 !
    1071 !--                First index in parent array  ! TO_DO: Klaus, please explain better
    1072                    index_list(1,ilist) = ip - ( pex * nrx ) + 1 + nbgp
    1073 !
    1074 !--                Second index in parent array  ! TO_DO: Klaus, please explain better
    1075                    index_list(2,ilist) = jp - ( pey * nry ) + 1 + nbgp
    1076 !
    1077 !--                x index of child's parent grid
    1078                    index_list(3,ilist) = ip - childs_parent_grid_bounds_all(1,n) + 1
    1079 !
    1080 !--                y index of child's parent grid
    1081                    index_list(4,ilist) = jp - childs_parent_grid_bounds_all(3,n) + 1
    1082 !
    1083 !--                PE number of child
    1084                    index_list(5,ilist) = n - 1
    1085 !
    1086 !--                PE number of parent
    1087                    index_list(6,ilist) = parent_pe
    1088 
    1089                 ENDDO
    1090              ENDDO
    1091           ENDDO
    1092 !
    1093 !--       TO_DO: Klaus: comment what is done here
    1094           CALL pmc_s_set_2d_index_list( child_id, index_list(:,1:ilist) )
    1095 
    1096        ELSE
    1097 !
    1098 !--       TO_DO: Klaus: comment why this dummy allocation is required
    1099           ALLOCATE( index_list(6,1) )
    1100           CALL pmc_s_set_2d_index_list( child_id, index_list )
    1101        ENDIF
    1102 
    1103        DEALLOCATE(index_list)
    1104 
    1105      END SUBROUTINE pmci_create_index_list
    1106 
    1107 
    1108 
    1109      SUBROUTINE pmci_set_child_edge_coords
    1110         IMPLICIT  NONE
    1111 
    1112         INTEGER(iwp) ::  nbgp_lpm = 1  !< Number of ghost-point layers used for lpm (Klaus, is this correct?)
    1113 
    1114        
    1115         nbgp_lpm = MIN( nbgp_lpm, nbgp )
    1116 
    1117         childgrid(m)%nx = nx_child
    1118         childgrid(m)%ny = ny_child
    1119         childgrid(m)%nz = nz_child
    1120         childgrid(m)%dx = dx_child
    1121         childgrid(m)%dy = dy_child
    1122         childgrid(m)%dz = dz_child
    1123 
    1124         childgrid(m)%lx_coord   = child_coord_x(0)
    1125         childgrid(m)%lx_coord_b = child_coord_x(-nbgp_lpm)
    1126         childgrid(m)%rx_coord   = child_coord_x(nx_child) + dx_child
    1127         childgrid(m)%rx_coord_b = child_coord_x(nx_child+nbgp_lpm) + dx_child
    1128         childgrid(m)%sy_coord   = child_coord_y(0)
    1129         childgrid(m)%sy_coord_b = child_coord_y(-nbgp_lpm)
    1130         childgrid(m)%ny_coord   = child_coord_y(ny_child) + dy_child
    1131         childgrid(m)%ny_coord_b = child_coord_y(ny_child+nbgp_lpm) + dy_child
    1132         childgrid(m)%uz_coord   = child_grid_info(2)
    1133         childgrid(m)%uz_coord_b = child_grid_info(1)
    1134 
    1135      END SUBROUTINE pmci_set_child_edge_coords
     1133!--------------------------------------------------------------------------------------------------!
     1134! Description:
     1135! ------------
     1136!> @Todo: Missing subroutine description.
     1137!--------------------------------------------------------------------------------------------------!
     1138 SUBROUTINE pmci_create_index_list
     1139
     1140    IMPLICIT NONE
     1141
     1142    INTEGER(iwp) ::  ilist            !< Index-list index running over the child's parent-grid jc,ic-space
     1143    INTEGER(iwp) ::  index_list_size  !< Dimension 2 of the array index_list
     1144    INTEGER(iwp) ::  ierr             !< MPI error code
     1145    INTEGER(iwp) ::  ip               !< Running parent-grid index on the child domain in the x-direction
     1146    INTEGER(iwp) ::  jp               !< Running parent-grid index on the child domain in the y-direction
     1147    INTEGER(iwp) ::  n                !< Running index over child subdomains
     1148    INTEGER(iwp) ::  nrx              !< Parent subdomain dimension in the x-direction
     1149    INTEGER(iwp) ::  nry              !< Parent subdomain dimension in the y-direction
     1150    INTEGER(iwp) ::  pex              !< Two-dimensional subdomain (pe) index in the x-direction
     1151    INTEGER(iwp) ::  pey              !< Two-dimensional subdomain (pe) index in the y-direction
     1152    INTEGER(iwp) ::  parent_pe        !< Parent subdomain index (one-dimensional)
     1153
     1154    INTEGER(iwp), DIMENSION(2) ::  pe_indices_2d                          !< Array for two-dimensional subdomain (pe)
     1155                                                                          !< indices needed for MPI_CART_RANK
     1156    INTEGER(iwp), DIMENSION(2) ::  size_of_childs_parent_grid_bounds_all  !< Dimensions of childs_parent_grid_bounds_all
     1157
     1158    INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE  ::  childs_parent_grid_bounds_all  !< Array that contains the child's
     1159                                                                                  !< parent-grid index
     1160                                                                                  !< bounds for all its subdomains (pes)
     1161    INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE  ::  index_list                     !< Array that maps the child index space on
     1162                                                                                  !< the parent index- and subdomain spaces
     1163
     1164    IF ( myid == 0 )  THEN
     1165
     1166       CALL pmc_recv_from_child( child_id, size_of_childs_parent_grid_bounds_all, 2, 0, 40, ierr )
     1167       ALLOCATE( childs_parent_grid_bounds_all(size_of_childs_parent_grid_bounds_all(1),           &
     1168                                               size_of_childs_parent_grid_bounds_all(2)) )
     1169       CALL pmc_recv_from_child( child_id, childs_parent_grid_bounds_all,                          &
     1170                                 SIZE( childs_parent_grid_bounds_all ), 0, 41, ierr )
     1171!
     1172!--    Compute size (dimension) of the index_list.
     1173       index_list_size = 0
     1174       DO  n = 1, size_of_childs_parent_grid_bounds_all(2)
     1175          index_list_size = index_list_size +                                                      &
     1176               ( childs_parent_grid_bounds_all(4,n) - childs_parent_grid_bounds_all(3,n) + 1 ) *   &
     1177               ( childs_parent_grid_bounds_all(2,n) - childs_parent_grid_bounds_all(1,n) + 1 )
     1178       ENDDO
     1179
     1180       ALLOCATE( index_list(6,index_list_size) )
     1181
     1182       nrx = nxr - nxl + 1
     1183       nry = nyn - nys + 1
     1184       ilist = 0
     1185!
     1186!--    Loop over all children PEs
      1187       DO  n = 1, size_of_childs_parent_grid_bounds_all(2)
     1188!
     1189!--       Subspace along y required by actual child PE
     1190          DO  jp = childs_parent_grid_bounds_all(3,n), childs_parent_grid_bounds_all(4,n)  ! jp = jps, jpn of child PE# n
     1191!
     1192!--          Subspace along x required by actual child PE
     1193             DO  ip = childs_parent_grid_bounds_all(1,n), childs_parent_grid_bounds_all(2,n)  ! ip = ipl, ipr of child PE# n
     1194
     1195                pex = ip / nrx
     1196                pey = jp / nry
     1197                pe_indices_2d(1) = pex
     1198                pe_indices_2d(2) = pey
     1199                CALL MPI_CART_RANK( comm2d, pe_indices_2d, parent_pe, ierr )
     1200
     1201                ilist = ilist + 1
     1202!
     1203!--             First index in parent array  ! TO_DO: Klaus, please explain better
     1204                index_list(1,ilist) = ip - ( pex * nrx ) + 1 + nbgp
     1205!
     1206!--             Second index in parent array  ! TO_DO: Klaus, please explain better
     1207                index_list(2,ilist) = jp - ( pey * nry ) + 1 + nbgp
     1208!
     1209!--             x index of child's parent grid
     1210                index_list(3,ilist) = ip - childs_parent_grid_bounds_all(1,n) + 1
     1211!
     1212!--             y index of child's parent grid
     1213                index_list(4,ilist) = jp - childs_parent_grid_bounds_all(3,n) + 1
     1214!
     1215!--             PE number of child
     1216                index_list(5,ilist) = n - 1
     1217!
     1218!--             PE number of parent
     1219                index_list(6,ilist) = parent_pe
     1220
     1221             ENDDO
     1222          ENDDO
     1223       ENDDO
     1224!
     1225!--    TO_DO: Klaus: comment what is done here
     1226       CALL pmc_s_set_2d_index_list( child_id, index_list(:,1:ilist) )
     1227
     1228    ELSE
     1229!
     1230!--    TO_DO: Klaus: comment why this dummy allocation is required
     1231       ALLOCATE( index_list(6,1) )
     1232       CALL pmc_s_set_2d_index_list( child_id, index_list )
     1233    ENDIF
     1234
     1235    DEALLOCATE( index_list )
     1236
     1237 END SUBROUTINE pmci_create_index_list
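The index arithmetic above (pex = ip / nrx and index_list(1,ilist) = ip - pex * nrx + 1 + nbgp) can be checked with a tiny stand-alone sketch; the values of ip, nrx and nbgp below are assumptions for illustration only:

 PROGRAM parent_index_mapping_sketch
!-- Minimal sketch with assumed values: map a global parent-grid index ip to the parent PE column
!-- that owns it and to the local array index (shifted by one plus the ghost-point layers).
    IMPLICIT NONE

    INTEGER ::  ip   = 37   !< assumed global parent-grid index in the x-direction
    INTEGER ::  nrx  = 16   !< assumed parent subdomain dimension in x (nxr - nxl + 1)
    INTEGER ::  nbgp = 3    !< assumed number of ghost-point layers
    INTEGER ::  pex, ip_local

    pex      = ip / nrx                        ! parent PE column owning index ip
    ip_local = ip - ( pex * nrx ) + 1 + nbgp   ! first index in the parent array on that PE

    PRINT*, 'parent PE column: ', pex, '   local index: ', ip_local   ! 2 and 9
 END PROGRAM parent_index_mapping_sketch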
     1238
     1239
     1240!--------------------------------------------------------------------------------------------------!
     1241! Description:
     1242! ------------
     1243!> @Todo: Missing subroutine description.
     1244!--------------------------------------------------------------------------------------------------!
     1245 SUBROUTINE pmci_set_child_edge_coords
     1246    IMPLICIT  NONE
     1247
     1248    INTEGER(iwp) ::  nbgp_lpm = 1  !< Number of ghost-point layers used for lpm (Klaus, is this correct?)
     1249
     1250
     1251    nbgp_lpm = MIN( nbgp_lpm, nbgp )
     1252
     1253    childgrid(m)%nx = nx_child
     1254    childgrid(m)%ny = ny_child
     1255    childgrid(m)%nz = nz_child
     1256    childgrid(m)%dx = dx_child
     1257    childgrid(m)%dy = dy_child
     1258    childgrid(m)%dz = dz_child
     1259
     1260    childgrid(m)%lx_coord   = child_coord_x(0)
     1261    childgrid(m)%lx_coord_b = child_coord_x(-nbgp_lpm)
     1262    childgrid(m)%rx_coord   = child_coord_x(nx_child) + dx_child
     1263    childgrid(m)%rx_coord_b = child_coord_x(nx_child+nbgp_lpm) + dx_child
     1264    childgrid(m)%sy_coord   = child_coord_y(0)
     1265    childgrid(m)%sy_coord_b = child_coord_y(-nbgp_lpm)
     1266    childgrid(m)%ny_coord   = child_coord_y(ny_child) + dy_child
     1267    childgrid(m)%ny_coord_b = child_coord_y(ny_child+nbgp_lpm) + dy_child
     1268    childgrid(m)%uz_coord   = child_grid_info(2)
     1269    childgrid(m)%uz_coord_b = child_grid_info(1)
     1270
     1271 END SUBROUTINE pmci_set_child_edge_coords
    11361272
    11371273#endif
     
    11391275
    11401276
    1141 
     1277!--------------------------------------------------------------------------------------------------!
     1278! Description:
     1279! ------------
     1280!> @Todo: Missing subroutine description.
     1281!--------------------------------------------------------------------------------------------------!
    11421282 SUBROUTINE pmci_setup_child
    11431283
     
    11451285    IMPLICIT NONE
    11461286
    1147     INTEGER(iwp) ::  ierr                          !< MPI error code
    1148     INTEGER(iwp) ::  lb                            !< Running index for aerosol size bins
    1149     INTEGER(iwp) ::  lc                            !< Running index for aerosol mass bins
    1150     INTEGER(iwp) ::  lg                            !< Running index for SALSA gases
    1151     INTEGER(iwp) ::  n                             !< Running index for number of chemical species
    1152     INTEGER(iwp), DIMENSION(3) ::  child_grid_dim  !< Array for sending the child-grid dimensions to parent
    1153 
    1154     REAL(wp), DIMENSION(5) ::  child_grid_info     !< Array for sending the child-grid spacings etc to parent
    1155          
    1156     CHARACTER( LEN=da_namelen ) ::  myname         !< Name of the variable to be coupled
    1157     CHARACTER(LEN=5) ::  salsa_char                !< Name extension for the variable name in case of SALSA variable
    1158    
     1287    CHARACTER(LEN=da_namelen) ::  myname      !< Name of the variable to be coupled
     1288    CHARACTER(LEN=5)          ::  salsa_char  !< Name extension for the variable name in case of SALSA variable
     1289
     1290    INTEGER(iwp) ::  ierr  !< MPI error code
     1291    INTEGER(iwp) ::  lb    !< Running index for aerosol size bins
     1292    INTEGER(iwp) ::  lc    !< Running index for aerosol mass bins
     1293    INTEGER(iwp) ::  lg    !< Running index for SALSA gases
     1294    INTEGER(iwp) ::  n     !< Running index for number of chemical species
     1295
     1296    INTEGER(iwp), DIMENSION(3) ::  child_grid_dim  !< Array for sending the child-grid dimensions to parent
     1297
     1298    REAL(wp), DIMENSION(5) ::  child_grid_info  !< Array for sending the child-grid spacings etc to parent
     1299
    11591300!
    11601301!-- Child setup
     
    11621303    IF ( .NOT. pmc_is_rootmodel() )  THEN
    11631304!
    1164 !--    KLaus, add a description here what pmc_childinit does       
      1305!--    Klaus, add a description here of what pmc_childinit does
    11651306       CALL pmc_childinit
    11661307!
    1167 !--    The arrays, which actually will be exchanged between child and parent
    1168 !--    are defined Here AND ONLY HERE.
    1169 !--    If a variable is removed, it only has to be removed from here.
    1170 !--    Please check, if the arrays are in the list of POSSIBLE exchange arrays
    1171 !--    in subroutines:
      1308!--    The arrays which will actually be exchanged between child and parent are defined here AND
      1309!--    ONLY HERE. If a variable is removed, it only has to be removed from here. Please check that
      1310!--    the arrays are in the list of POSSIBLE exchange arrays in the subroutines:
    11721311!--    pmci_set_array_pointer (for parent arrays)
    11731312!--    pmci_create_childs_parent_grid_arrays (for child's parent-grid arrays)
     
    11761315       CALL pmc_set_dataarray_name( 'parent', 'w', 'child', 'w', ierr )
    11771316!
    1178 !--    Set data array name for TKE. Please note, nesting of TKE is actually
    1179 !--    only done if both parent and child are in LES or in RANS mode. Due to
    1180 !--    design of model coupler, however, data array names must be already
    1181 !--    available at this point.
    1182        IF ( (        rans_mode_parent  .AND.         rans_mode )  .OR.                              &
    1183             (  .NOT. rans_mode_parent  .AND.  .NOT.  rans_mode  .AND.                               &
    1184                .NOT. constant_diffusion ) )  THEN
      1317!--    Set data array name for TKE. Please note, nesting of TKE is actually only done if both
      1318!--    parent and child are in LES or in RANS mode. Due to the design of the model coupler, however,
      1319!--    data array names must already be available at this point.
     1320       IF ( ( rans_mode_parent  .AND.  rans_mode )  .OR.                              &
     1321            ( .NOT.  rans_mode_parent  .AND.  .NOT.  rans_mode  .AND.  .NOT. constant_diffusion ) )&
     1322            THEN
    11851323          CALL pmc_set_dataarray_name( 'parent', 'e', 'child', 'e', ierr )
    11861324       ENDIF
    11871325!
    1188 !--    Nesting of dissipation rate only if both parent and child are in RANS
    1189 !--    mode and TKE-epsilon closure is applied. Please see also comment for TKE
    1190 !--    above.
     1326!--    Nesting of dissipation rate only if both parent and child are in RANS mode and TKE-epsilon
     1327!--    closure is applied. Please see also comment for TKE above.
    11911328       IF ( rans_mode_parent  .AND.  rans_mode  .AND.  rans_tke_e )  THEN
    11921329          CALL pmc_set_dataarray_name( 'parent', 'diss', 'child', 'diss', ierr )
     
    12021339
    12031340          IF ( bulk_cloud_model  .AND.  microphysics_morrison )  THEN
    1204             CALL pmc_set_dataarray_name( 'parent', 'qc', 'child', 'qc', ierr ) 
    1205             CALL pmc_set_dataarray_name( 'parent', 'nc', 'child', 'nc', ierr ) 
     1341            CALL pmc_set_dataarray_name( 'parent', 'qc', 'child', 'qc', ierr )
     1342            CALL pmc_set_dataarray_name( 'parent', 'nc', 'child', 'nc', ierr )
    12061343          ENDIF
    12071344
    12081345          IF ( bulk_cloud_model  .AND.  microphysics_seifert )  THEN
    12091346             CALL pmc_set_dataarray_name( 'parent', 'qr', 'child', 'qr', ierr )
    1210              CALL pmc_set_dataarray_name( 'parent', 'nr', 'child', 'nr', ierr ) 
     1347             CALL pmc_set_dataarray_name( 'parent', 'nr', 'child', 'nr', ierr )
    12111348          ENDIF
    1212      
     1349
    12131350       ENDIF
    12141351
     
    12211358          CALL pmc_set_dataarray_name( 'parent', 'part_adr', 'child', 'part_adr', ierr )
    12221359       ENDIF
    1223        
     1360
    12241361       IF ( air_chemistry  .AND.  nesting_chem )  THEN
    12251362          DO n = 1, nspec
    1226              CALL pmc_set_dataarray_name( 'parent', 'chem_' // TRIM( chem_species(n)%name ),        &
     1363             CALL pmc_set_dataarray_name( 'parent', 'chem_' // TRIM( chem_species(n)%name ),       &
    12271364                                          'child',  'chem_' // TRIM( chem_species(n)%name ), ierr )
    1228           ENDDO 
     1365          ENDDO
    12291366       ENDIF
    12301367
    12311368       IF ( salsa  .AND.  nesting_salsa )  THEN
    12321369          DO  lb = 1, nbins_aerosol
    1233              WRITE(salsa_char,'(i0)') lb
    1234              CALL pmc_set_dataarray_name( 'parent', 'an_' // TRIM( salsa_char ),                    &
     1370             WRITE( salsa_char,'(i0)' ) lb
     1371             CALL pmc_set_dataarray_name( 'parent', 'an_' // TRIM( salsa_char ),                   &
    12351372                                          'child',  'an_' // TRIM( salsa_char ), ierr )
    12361373          ENDDO
    12371374          DO  lc = 1, nbins_aerosol * ncomponents_mass
    1238              WRITE(salsa_char,'(i0)') lc
    1239              CALL pmc_set_dataarray_name( 'parent', 'am_' // TRIM( salsa_char ),                    &
     1375             WRITE( salsa_char,'(i0)' ) lc
     1376             CALL pmc_set_dataarray_name( 'parent', 'am_' // TRIM( salsa_char ),                   &
    12401377                                          'child',  'am_' // TRIM( salsa_char ), ierr )
    12411378          ENDDO
    12421379          IF ( .NOT. salsa_gases_from_chem )  THEN
    12431380             DO  lg = 1, ngases_salsa
    1244                 WRITE(salsa_char,'(i0)') lg
    1245                 CALL pmc_set_dataarray_name( 'parent', 'sg_' // TRIM( salsa_char ),                 &
     1381                WRITE( salsa_char,'(i0)' ) lg
     1382                CALL pmc_set_dataarray_name( 'parent', 'sg_' // TRIM( salsa_char ),                &
    12461383                                             'child',  'sg_' // TRIM( salsa_char ), ierr )
    12471384             ENDDO
     
    12711408!
    12721409!--       Receive parent-grid information.
    1273           CALL pmc_recv_from_parent( parent_grid_info_real,                    &
    1274                                      SIZE(parent_grid_info_real), 0, 21, ierr )
     1410          CALL pmc_recv_from_parent( parent_grid_info_real, SIZE( parent_grid_info_real ), 0, 21,  &
     1411                                     ierr )
    12751412          CALL pmc_recv_from_parent( parent_grid_info_int,  3, 0, 22, ierr )
    12761413
    12771414       ENDIF
    12781415
    1279        CALL MPI_BCAST( parent_grid_info_real, SIZE(parent_grid_info_real),     &
    1280                        MPI_REAL, 0, comm2d, ierr )
     1416       CALL MPI_BCAST( parent_grid_info_real, SIZE( parent_grid_info_real ), MPI_REAL, 0, comm2d,  &
     1417                       ierr )
    12811418       CALL MPI_BCAST( parent_grid_info_int, 3, MPI_INTEGER, 0, comm2d, ierr )
    12821419
     
    13131450       CALL MPI_BCAST( pg%zu, pg%nz+2,  MPI_REAL, 0, comm2d, ierr )
    13141451       CALL MPI_BCAST( pg%zw, pg%nz+2,  MPI_REAL, 0, comm2d, ierr )
    1315        CALL MPI_BCAST( rans_mode_parent, 1, MPI_LOGICAL, 0, comm2d, ierr )       
     1452       CALL MPI_BCAST( rans_mode_parent, 1, MPI_LOGICAL, 0, comm2d, ierr )
    13161453!
    13171454!--    Find the index bounds for the nest domain in the parent-grid index space
     
    13321469       DO  WHILE ( pmc_c_getnextarray( myname ) )
    13331470!
    1334 !--       Note that pg%nz is not the original nz of parent, but the highest
    1335 !--       parent-grid level needed for nesting.
    1336 !--       Note that in case of chemical species or SALSA variables an additional
    1337 !--       parameter needs to be passed. The parameter is required to set the pointer
    1338 !--       correctlyto the chemical-species or SALSA data structure. Hence, first check if
    1339 !--       the current variable is a chemical species or a SALSA variable. If so, pass
    1340 !--       index id of respective sub-variable (species or bin) and increment this subsequently.
    1341           IF ( INDEX( TRIM( myname ), 'chem_' ) /= 0 )  THEN             
     1471!--       Note that pg%nz is not the original nz of parent, but the highest parent-grid level needed
     1472!--       for nesting. Note that in case of chemical species or SALSA variables an additional
     1473!--       parameter needs to be passed. The parameter is required to set the pointer correctly to
     1474!--       the chemical-species or SALSA data structure. Hence, first check if the current variable
      1475!--       is a chemical species or a SALSA variable. If so, pass the index id of the respective
      1476!--       sub-variable (species or bin) and increment it subsequently.
     1477          IF ( INDEX( TRIM( myname ), 'chem_' ) /= 0 )  THEN
    13421478             CALL pmci_create_childs_parent_grid_arrays ( myname, ipl, ipr, jps, jpn, pg%nz, n )
    1343              n = n + 1   
     1479             n = n + 1
    13441480          ELSEIF ( INDEX( TRIM( myname ), 'an_' ) /= 0 )  THEN
    13451481             CALL pmci_create_childs_parent_grid_arrays ( myname, ipl, ipr, jps, jpn, pg%nz, lb )
     
    13601496       CALL pmci_define_index_mapping
    13611497!
    1362 !--    Check that the child and parent grid lines do match 
     1498!--    Check that the child and parent grid lines do match
    13631499       CALL pmci_check_grid_matching
    1364 !       
    1365 !--    Compute surface areas of the nest-boundary faces 
     1500!
     1501!--    Compute surface areas of the nest-boundary faces
    13661502       CALL pmci_compute_face_areas
    1367        
     1503
    13681504    ENDIF
    13691505
     
    13711507
    13721508
    1373     SUBROUTINE pmci_map_child_grid_to_parent_grid
    1374 !
    1375 !--    Determine index bounds of interpolation/anterpolation area in the parent-grid index space
    1376        IMPLICIT NONE
    1377 
    1378        INTEGER(iwp), DIMENSION(5,numprocs) ::  parent_bound_all     !< Transfer array for parent-grid index bounds
    1379 
    1380        INTEGER(iwp), DIMENSION(4)          ::  parent_bound_global  !< Transfer array for global parent-grid index bounds
    1381        INTEGER(iwp), DIMENSION(2)          ::  size_of_array        !< For sending the dimensions of parent_bound_all to parent
    1382 
    1383        INTEGER(iwp) ::  ip      !< Running parent-grid index in the x-direction
    1384        INTEGER(iwp) ::  iauxl   !< Offset between the index bound ipl and the auxiliary index bound ipla
    1385        INTEGER(iwp) ::  iauxr   !< Offset between the index bound ipr and the auxiliary index bound ipra
    1386        INTEGER(iwp) ::  ijaux   !< Temporary variable for receiving the index bound from the neighbouring subdomain
    1387        INTEGER(iwp) ::  jp      !< Running parent-grid index in the y-direction
    1388        INTEGER(iwp) ::  jauxs   !< Offset between the index bound jps and the auxiliary index bound jpsa
    1389        INTEGER(iwp) ::  jauxn   !< Offset between the index bound jpn and the auxiliary index bound jpna
    1390 
    1391        REAL(wp) ::  tolex       !< Tolerance for grid-line matching in x-direction   
    1392        REAL(wp) ::  toley       !< Tolerance for grid-line matching in y-direction   
    1393        REAL(wp) ::  xexl        !< Parent-grid array exceedance behind the left edge of the child PE subdomain
    1394        REAL(wp) ::  xexr        !< Parent-grid array exceedance behind the right edge of the child PE subdomain
    1395        REAL(wp) ::  yexs        !< Parent-grid array exceedance behind the south edge of the child PE subdomain
    1396        REAL(wp) ::  yexn        !< Parent-grid array exceedance behind the north edge of the child PE subdomain
    1397        REAL(wp) ::  xpl         !< Requested left-edge x-coordinate of the parent-grid array domain (at the internal boundaries
    1398                                 !< the real edge may differ from this in some cases as explained in the comment block below) 
    1399        REAL(wp) ::  xpr         !< Requested right-edge x-coordinate of the parent-grid array domain (at the internal boundaries
    1400                                 !< the real edge may differ from this in some cases as explained in the comment block below)
    1401        REAL(wp) ::  yps         !< Requested south-edge y-coordinate of the parent-grid array domain (at the internal boundaries
    1402                                 !< the real edge may differ from this in some cases as explained in the comment block below)
    1403        REAL(wp) ::  ypn         !< Requested south-edge y-coordinate of the parent-grid array domain (at the internal boundaries
    1404                                 !< the real edge may differ from this in some cases as explained in the comment block below)
    1405 
    1406 !
    1407 !--    Determine the index limits for the child's parent-grid arrays (such as uc for example).
    1408 !--    Note that at the outer edges of the child domain (nest boundaries) these arrays exceed
    1409 !--    the boundary by two parent-grid cells. At the internal boundaries, there are no
    1410 !--    exceedances and thus no overlaps with the neighbouring subdomain. If at least half
    1411 !--    of the parent-grid cell is within the current child sub-domain, then it is included
    1412 !--    in the current sub-domain's parent-grid array. Else the parent-grid cell is
    1413 !--    included in the neighbouring subdomain's parent-grid array, or not included at all if
    1414 !--    we are at the outer edge of the child domain. This may occur especially when a large
    1415 !--    grid-spacing ratio is used.
    1416 !
    1417 !--    Tolerances for grid-line matching.
    1418        tolex = tolefac * dx
    1419        toley = tolefac * dy
    1420 !
    1421 !--    Left boundary.
    1422 !--    Extension by two parent-grid cells behind the boundary, see the comment block above.   
    1423        IF ( bc_dirichlet_l )  THEN
    1424           xexl  = 2.0_wp * pg%dx
    1425           iauxl = 0
     1509!--------------------------------------------------------------------------------------------------!
     1510! Description:
     1511! ------------
     1512!> @Todo: Missing subroutine description.
     1513!--------------------------------------------------------------------------------------------------!
     1514 SUBROUTINE pmci_map_child_grid_to_parent_grid
     1515
     1516!
     1517!-- Determine index bounds of interpolation/anterpolation area in the parent-grid index space
     1518    IMPLICIT NONE
     1519
     1520    INTEGER(iwp) ::  ip     !< Running parent-grid index in the x-direction
     1521    INTEGER(iwp) ::  iauxl  !< Offset between the index bound ipl and the auxiliary index bound ipla
     1522    INTEGER(iwp) ::  iauxr  !< Offset between the index bound ipr and the auxiliary index bound ipra
     1523    INTEGER(iwp) ::  ijaux  !< Temporary variable for receiving the index bound from the neighbouring subdomain
     1524    INTEGER(iwp) ::  jp     !< Running parent-grid index in the y-direction
     1525    INTEGER(iwp) ::  jauxs  !< Offset between the index bound jps and the auxiliary index bound jpsa
     1526    INTEGER(iwp) ::  jauxn  !< Offset between the index bound jpn and the auxiliary index bound jpna
     1527
     1528    INTEGER(iwp), DIMENSION(4) ::  parent_bound_global  !< Transfer array for global parent-grid index bounds
     1529    INTEGER(iwp), DIMENSION(2) ::  size_of_array        !< For sending the dimensions of parent_bound_all to parent
     1530
     1531    INTEGER(iwp), DIMENSION(5,numprocs) ::  parent_bound_all  !< Transfer array for parent-grid index bounds
     1532
     1533    REAL(wp) ::  tolex  !< Tolerance for grid-line matching in x-direction
     1534    REAL(wp) ::  toley  !< Tolerance for grid-line matching in y-direction
     1535    REAL(wp) ::  xexl   !< Parent-grid array exceedance behind the left edge of the child PE subdomain
     1536    REAL(wp) ::  xexr   !< Parent-grid array exceedance behind the right edge of the child PE subdomain
     1537    REAL(wp) ::  xpl    !< Requested left-edge x-coordinate of the parent-grid array domain (at the internal boundaries
     1538                        !< the real edge may differ from this in some cases as explained in the comment block below)
     1539    REAL(wp) ::  xpr    !< Requested right-edge x-coordinate of the parent-grid array domain (at the internal boundaries
     1540                        !< the real edge may differ from this in some cases as explained in the comment block below)
     1541    REAL(wp) ::  yexs   !< Parent-grid array exceedance behind the south edge of the child PE subdomain
     1542    REAL(wp) ::  yexn   !< Parent-grid array exceedance behind the north edge of the child PE subdomain
     1543    REAL(wp) ::  yps    !< Requested south-edge y-coordinate of the parent-grid array domain (at the internal boundaries
     1544                        !< the real edge may differ from this in some cases as explained in the comment block below)
      1545    REAL(wp) ::  ypn    !< Requested north-edge y-coordinate of the parent-grid array domain (at the internal boundaries
     1546                        !< the real edge may differ from this in some cases as explained in the comment block below)
     1547
     1548!
     1549!-- Determine the index limits for the child's parent-grid arrays (such as uc for example).
     1550!-- Note that at the outer edges of the child domain (nest boundaries) these arrays exceed the
     1551!-- boundary by two parent-grid cells. At the internal boundaries, there are no exceedances and
     1552!-- thus no overlaps with the neighbouring subdomain. If at least half of the parent-grid cell is
     1553!-- within the current child sub-domain, then it is included in the current sub-domain's
     1554!-- parent-grid array. Else the parent-grid cell is included in the neighbouring subdomain's
     1555!-- parent-grid array, or not included at all if we are at the outer edge of the child domain.
     1556!-- This may occur especially when a large grid-spacing ratio is used.
     1557!
     1558!-- Tolerances for grid-line matching.
     1559    tolex = tolefac * dx
     1560    toley = tolefac * dy
     1561!
     1562!-- Left boundary.
     1563!-- Extension by two parent-grid cells behind the boundary, see the comment block above.
     1564    IF ( bc_dirichlet_l )  THEN
     1565       xexl  = 2.0_wp * pg%dx
     1566       iauxl = 0
     1567    ELSE
     1568       xexl  = 0.0_wp
     1569       iauxl = 1
     1570    ENDIF
     1571    xpl     = coord_x(nxl) - xexl
     1572    DO  ip = 0, pg%nx
     1573       IF ( pg%coord_x(ip) + 0.5_wp * pg%dx >= xpl - tolex )  THEN
     1574          ipl = MAX( 0, ip )
     1575          EXIT
     1576       ENDIF
     1577    ENDDO
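A worked example of the half-cell inclusion rule used in the search above, with assumed grid values only (parent spacing 10 m, requested left edge at x = 23 m): the parent cell is taken as soon as its centre coord_x(ip) + 0.5 * dx lies at or beyond the requested edge within the matching tolerance.

 PROGRAM half_cell_rule_sketch
!-- Minimal sketch with assumed values only: parent grid spacing 10 m, requested left edge of the
!-- child subdomain's parent-grid array at xpl = 23 m.
    IMPLICIT NONE

    INTEGER ::  ip, ipl
    REAL    ::  coord_x_parent
    REAL    ::  dx_parent = 10.0
    REAL    ::  tolex     = 1.0E-2
    REAL    ::  xpl       = 23.0

    DO  ip = 0, 10
       coord_x_parent = REAL( ip ) * dx_parent
       IF ( coord_x_parent + 0.5 * dx_parent >= xpl - tolex )  THEN
          ipl = ip   ! first parent cell whose centre falls inside the requested range
          EXIT
       ENDIF
    ENDDO

    PRINT*, 'left parent-grid index ipl = ', ipl   ! 2: cell centres at 5 m and 15 m are excluded
 END PROGRAM half_cell_rule_sketch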
     1578!
     1579!-- Right boundary.
     1580!-- Extension by two parent-grid cells behind the boundary, see the comment block above.
     1581    IF ( bc_dirichlet_r )  THEN
     1582       xexr  = 2.0_wp * pg%dx
     1583       iauxr = 0
     1584    ELSE
     1585       xexr  = 0.0_wp
     1586       iauxr = 1
     1587    ENDIF
     1588    xpr  = coord_x(nxr+1) + xexr
     1589    DO  ip = pg%nx, 0 , -1
     1590       IF ( pg%coord_x(ip) + 0.5_wp * pg%dx <= xpr + tolex )  THEN
     1591          ipr = MIN( pg%nx, MAX( ipl, ip ) )
     1592          EXIT
     1593       ENDIF
     1594    ENDDO
     1595!
     1596!-- South boundary.
     1597!-- Extension by two parent-grid cells behind the boundary, see the comment block above.
     1598    IF ( bc_dirichlet_s )  THEN
     1599       yexs  = 2.0_wp * pg%dy
     1600       jauxs = 0
     1601    ELSE
     1602       yexs  = 0.0_wp
     1603       jauxs = 1
     1604    ENDIF
     1605    yps  = coord_y(nys) - yexs
     1606    DO  jp = 0, pg%ny
     1607       IF ( pg%coord_y(jp) + 0.5_wp * pg%dy >= yps - toley )  THEN
     1608          jps = MAX( 0, jp )
     1609          EXIT
     1610       ENDIF
     1611    ENDDO
     1612!
     1613!-- North boundary.
     1614!-- Extension by two parent-grid cells behind the boundary, see the comment block above.
     1615    IF  ( bc_dirichlet_n )  THEN
     1616       yexn  = 2.0_wp * pg%dy
     1617       jauxn = 0
     1618    ELSE
     1619       yexn  = 0.0_wp
     1620       jauxn = 1
     1621    ENDIF
     1622    ypn  = coord_y(nyn+1) + yexn
     1623    DO  jp = pg%ny, 0 , -1
     1624       IF ( pg%coord_y(jp) + 0.5_wp * pg%dy <= ypn + toley )  THEN
     1625          jpn = MIN( pg%ny, MAX( jps, jp ) )
     1626          EXIT
     1627       ENDIF
     1628    ENDDO
     1629!
     1630!-- Make sure that the indexing is contiguous (no gaps, no overlaps). This is a safety measure
     1631!-- mainly for cases with high grid-spacing ratio and narrow child subdomains.
     1632    IF ( pdims(1) > 1 )  THEN
     1633       IF ( nxl == 0 )  THEN
     1634          CALL MPI_SEND( ipr, 1, MPI_INTEGER, pright, 717, comm2d, ierr )
     1635       ELSE IF ( nxr == nx )  THEN
     1636          CALL MPI_RECV( ijaux, 1, MPI_INTEGER, pleft, 717, comm2d, status, ierr )
     1637          ipl = ijaux + 1
    14261638       ELSE
    1427           xexl  = 0.0_wp
    1428           iauxl = 1
    1429        ENDIF
    1430        xpl     = coord_x(nxl) - xexl
    1431        DO  ip = 0, pg%nx
    1432           IF ( pg%coord_x(ip) + 0.5_wp * pg%dx >= xpl - tolex )  THEN
    1433              ipl = MAX( 0, ip )
    1434              EXIT
    1435           ENDIF
    1436        ENDDO
    1437 !
    1438 !--    Right boundary.
    1439 !--    Extension by two parent-grid cells behind the boundary, see the comment block above.       
    1440        IF ( bc_dirichlet_r )  THEN
    1441           xexr  = 2.0_wp * pg%dx
    1442           iauxr = 0 
     1639          CALL MPI_SEND( ipr, 1, MPI_INTEGER, pright, 717, comm2d, ierr )
     1640          CALL MPI_RECV( ijaux, 1, MPI_INTEGER, pleft, 717, comm2d, status, ierr )
     1641          ipl = ijaux + 1
     1642       ENDIF
     1643    ENDIF
     1644
     1645    IF ( pdims(2) > 1 )  THEN
     1646       IF ( nys == 0 )  THEN
     1647          CALL MPI_SEND( jpn, 1, MPI_INTEGER, pnorth, 719, comm2d, ierr )
     1648       ELSE IF ( nyn == ny )  THEN
     1649          CALL MPI_RECV( ijaux, 1, MPI_INTEGER, psouth, 719, comm2d, status, ierr )
     1650          jps = ijaux + 1
    14431651       ELSE
    1444           xexr  = 0.0_wp
    1445           iauxr = 1 
    1446        ENDIF
    1447        xpr  = coord_x(nxr+1) + xexr
    1448        DO  ip = pg%nx, 0 , -1
    1449           IF ( pg%coord_x(ip) + 0.5_wp * pg%dx <= xpr + tolex )  THEN
    1450              ipr = MIN( pg%nx, MAX( ipl, ip ) )
    1451              EXIT
    1452           ENDIF
    1453        ENDDO
    1454 !
    1455 !--    South boundary.
    1456 !--    Extension by two parent-grid cells behind the boundary, see the comment block above.   
    1457        IF ( bc_dirichlet_s )  THEN
    1458           yexs  = 2.0_wp * pg%dy
    1459           jauxs = 0 
     1652          CALL MPI_SEND( jpn, 1, MPI_INTEGER, pnorth, 719, comm2d, ierr )
     1653          CALL MPI_RECV( ijaux, 1, MPI_INTEGER, psouth, 719, comm2d, status, ierr )
     1654          jps = ijaux + 1
     1655       ENDIF
     1656    ENDIF
     1657
     1658    WRITE( 9,"('pmci_map_child_grid_to_parent_grid. Parent-grid array bounds: ',4(i4,2x))" )       &
     1659           ipl, ipr, jps, jpn
     1660    FLUSH(9)
     1661
     1662    parent_bound(1) = ipl
     1663    parent_bound(2) = ipr
     1664    parent_bound(3) = jps
     1665    parent_bound(4) = jpn
     1666    parent_bound(5) = myid
     1667!
     1668!-- The following auxiliary index bounds are used for allocating index mapping and some other
     1669!-- auxiliary arrays.
     1670    ipla = ipl - iauxl
     1671    ipra = ipr + iauxr
     1672    jpsa = jps - jauxs
     1673    jpna = jpn + jauxn
     1674!
     1675!-- The index-bounds parent_bound of all subdomains of the current child domain must be sent to the
     1676!-- parent in order for the parent to create the index list. For this reason, the parent_bound
      1677!-- arrays are packed together in a single array parent_bound_all using MPI_GATHER. Note that
      1678!-- MPI_GATHER receives data from all processes in rank order. This fact is exploited in creating
      1679!-- the index list in pmci_create_index_list.
     1680    CALL MPI_GATHER( parent_bound, 5, MPI_INTEGER, parent_bound_all, 5, MPI_INTEGER, 0, comm2d,    &
     1681                     ierr )
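!
!-- Since MPI_GATHER orders the gathered contributions by rank, the column of parent_bound_all
!-- belonging to the child process with rank n is exactly that process's parent_bound array; in
!-- particular its fifth entry equals n (cf. parent_bound(5) = myid above).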
     1682
     1683    IF ( myid == 0 )  THEN
     1684       size_of_array(1) = SIZE( parent_bound_all, 1 )
     1685       size_of_array(2) = SIZE( parent_bound_all, 2 )
     1686       CALL pmc_send_to_parent( size_of_array, 2, 0, 40, ierr )
     1687       CALL pmc_send_to_parent( parent_bound_all, SIZE( parent_bound_all ), 0, 41, ierr )
     1688!
     1689!--    Determine the global parent-grid index bounds
     1690       parent_bound_global(1) = MINVAL( parent_bound_all(1,:) )
     1691       parent_bound_global(2) = MAXVAL( parent_bound_all(2,:) )
     1692       parent_bound_global(3) = MINVAL( parent_bound_all(3,:) )
     1693       parent_bound_global(4) = MAXVAL( parent_bound_all(4,:) )
     1694    ENDIF
     1695!
     1696!-- Broadcast the global parent-grid index bounds to all current child processes
     1697    CALL MPI_BCAST( parent_bound_global, 4, MPI_INTEGER, 0, comm2d, ierr )
     1698    iplg = parent_bound_global(1)
     1699    iprg = parent_bound_global(2)
     1700    jpsg = parent_bound_global(3)
     1701    jpng = parent_bound_global(4)
     1702    WRITE( 9, "('pmci_map_child_grid_to_parent_grid. Global parent-grid index bounds iplg, iprg, jpsg, jpng: ',4(i4,2x))" ) &
     1703                iplg, iprg, jpsg, jpng
     1704    FLUSH( 9 )
     1705
     1706 END SUBROUTINE pmci_map_child_grid_to_parent_grid
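
As a minimal, self-contained illustration of the boundary search in the subroutine above: the
left-boundary loop picks the first parent-grid cell whose centre lies at or beyond the (possibly
extended) child boundary, within the grid-line matching tolerance. The sketch below uses assumed
values only (parent spacing 50 m, child spacing 10 m, tolerance factor 1.0E-6, child left boundary
at x = 120 m); none of them are taken from the file.

 PROGRAM boundary_search_sketch
    IMPLICIT NONE
    INTEGER, PARAMETER  ::  wp = KIND( 1.0D0 )
    REAL(wp), PARAMETER ::  pg_dx   = 50.0_wp      !< assumed parent-grid spacing
    REAL(wp), PARAMETER ::  dx      = 10.0_wp      !< assumed child-grid spacing
    REAL(wp), PARAMETER ::  tolefac = 1.0E-6_wp    !< assumed grid-line matching tolerance factor
    INTEGER  ::  ip, ipl
    REAL(wp) ::  pg_coord_x(0:20)                  !< assumed parent-grid x-coordinates
    REAL(wp) ::  tolex, xpl

    DO  ip = 0, 20
       pg_coord_x(ip) = REAL( ip, wp ) * pg_dx
    ENDDO
    tolex = tolefac * dx
!
!-- Assumed child left boundary at x = 120 m, extended by two parent-grid cells (bc_dirichlet_l case)
    xpl = 120.0_wp - 2.0_wp * pg_dx

    ipl = 0
    DO  ip = 0, 20
       IF ( pg_coord_x(ip) + 0.5_wp * pg_dx >= xpl - tolex )  THEN
          ipl = MAX( 0, ip )
          EXIT
       ENDIF
    ENDDO
    PRINT*, 'first parent-grid index covering the extended child boundary: ', ipl
 END PROGRAM boundary_search_sketch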
     1707
     1708
     1709!--------------------------------------------------------------------------------------------------!
     1710! Description:
     1711! ------------
     1712!> @Todo: Missing subroutine description.
     1713!--------------------------------------------------------------------------------------------------!
     1714 SUBROUTINE pmci_define_index_mapping
     1715!
     1716!-- Precomputation of the mapping of the child- and parent-grid indices.
     1717
     1718    IMPLICIT NONE
     1719
     1720    INTEGER(iwp) ::  i       !< Child-grid index in the x-direction
     1721    INTEGER(iwp) ::  ii      !< Parent-grid index in the x-direction
     1722    INTEGER(iwp) ::  istart  !<
     1723    INTEGER(iwp) ::  ir      !<
     1724    INTEGER(iwp) ::  iw      !< Child-grid index limited to -1 <= iw <= nx+1 for wall_flags_total_0
     1725    INTEGER(iwp) ::  j       !< Child-grid index in the y-direction
     1726    INTEGER(iwp) ::  jj      !< Parent-grid index in the y-direction
     1727    INTEGER(iwp) ::  jstart  !<
     1728    INTEGER(iwp) ::  jr      !<
     1729    INTEGER(iwp) ::  jw      !< Child-grid index limited to -1 <= jw <= ny+1 for wall_flags_total_0
     1730    INTEGER(iwp) ::  k       !< Child-grid index in the z-direction
     1731    INTEGER(iwp) ::  kk      !< Parent-grid index in the z-direction
     1732    INTEGER(iwp) ::  kstart  !<
     1733    INTEGER(iwp) ::  kw      !< Child-grid index limited to kw <= nzt+1 for wall_flags_total_0
     1734
     1735    REAL(wp) ::  tolex  !< Tolerance for grid-line matching in x-direction
     1736    REAL(wp) ::  toley  !< Tolerance for grid-line matching in y-direction
     1737    REAL(wp) ::  tolez  !< Tolerance for grid-line matching in z-direction
     1738
     1739!
     1740!-- Grid-line tolerances.
     1741    tolex = tolefac * dx
     1742    toley = tolefac * dy
     1743    tolez = tolefac * dz(1)
     1744!
     1745!-- Allocate child-grid work arrays for interpolation.
     1746    igsr = NINT( pg%dx / dx, iwp )
     1747    jgsr = NINT( pg%dy / dy, iwp )
     1748    kgsr = NINT( pg%dzw(1) / dzw(1), iwp )
     1749    WRITE(9,"('igsr, jgsr, kgsr: ',3(i3,2x))") igsr, jgsr, kgsr
     1750    FLUSH(9)
     1751!
     1752!-- Determine index bounds for the parent-grid work arrays for interpolation and allocate them.
     1753    CALL pmci_allocate_workarrays
     1754!
     1755!-- Define the MPI-datatypes for parent-grid work array exchange between the PE-subdomains.
     1756    CALL pmci_create_workarray_exchange_datatypes
     1757!
     1758!-- First determine kcto and kctw which refer to the uppermost parent-grid levels below the child
     1759!-- top-boundary level. Note that these comparison tests are not round-off-error sensitive and
     1760!-- therefore tolerance buffering is not needed here.
     1761    kk = 0
     1762    DO WHILE ( pg%zu(kk) <= zu(nzt) )
     1763       kk = kk + 1
     1764    ENDDO
     1765    kcto = kk - 1
     1766
     1767    kk = 0
     1768    DO WHILE ( pg%zw(kk) <= zw(nzt-1) )
     1769       kk = kk + 1
     1770    ENDDO
     1771    kctw = kk - 1
     1772
     1773    WRITE( 9, "('kcto, kctw = ', 2(i3,2x))" ) kcto, kctw
     1774    FLUSH( 9 )
     1775!
     1776!-- In case of two-way coupling, check that the child domain is sufficiently large in terms of the
     1777!-- number of parent-grid cells covered. Otherwise anterpolation is not possible.
      1778    IF ( nesting_mode == 'two-way' )  THEN
     1779       CALL pmci_check_child_domain_size
     1780    ENDIF
     1781
     1782    ALLOCATE( iflu(ipla:ipra) )
     1783    ALLOCATE( iflo(ipla:ipra) )
     1784    ALLOCATE( ifuu(ipla:ipra) )
     1785    ALLOCATE( ifuo(ipla:ipra) )
     1786    ALLOCATE( jflv(jpsa:jpna) )
     1787    ALLOCATE( jflo(jpsa:jpna) )
     1788    ALLOCATE( jfuv(jpsa:jpna) )
     1789    ALLOCATE( jfuo(jpsa:jpna) )
     1790    ALLOCATE( kflw(0:pg%nz+1) )
     1791    ALLOCATE( kflo(0:pg%nz+1) )
     1792    ALLOCATE( kfuw(0:pg%nz+1) )
     1793    ALLOCATE( kfuo(0:pg%nz+1) )
     1794    ALLOCATE( ijkfc_u(0:pg%nz+1,jpsa:jpna,ipla:ipra) )
     1795    ALLOCATE( ijkfc_v(0:pg%nz+1,jpsa:jpna,ipla:ipra) )
     1796    ALLOCATE( ijkfc_w(0:pg%nz+1,jpsa:jpna,ipla:ipra) )
     1797    ALLOCATE( ijkfc_s(0:pg%nz+1,jpsa:jpna,ipla:ipra) )
     1798
     1799    ijkfc_u = 0
     1800    ijkfc_v = 0
     1801    ijkfc_w = 0
     1802    ijkfc_s = 0
     1803!
     1804!-- i-indices of u for each ii-index value
     1805    istart = nxlg
     1806    DO  ii = ipla, ipra
     1807!
     1808!--    The parent and child grid lines do always match in x, hence we use only the local
     1809!--    k,j-child-grid plane for the anterpolation. However, iflu still has to be stored separately
     1810!--    as these index bounds are passed as arguments to the interpolation and anterpolation
     1811!--    subroutines. Note that this comparison test is round-off-error sensitive and therefore
     1812!--    tolerance buffering is needed here.
     1813       i = istart
     1814       DO WHILE ( pg%coord_x(ii) - coord_x(i) > tolex  .AND. i < nxrg )
     1815          i = i + 1
     1816       ENDDO
     1817       iflu(ii) = MIN( MAX( i, nxlg ), nxrg )
     1818       ifuu(ii) = iflu(ii)
     1819       istart   = iflu(ii)
     1820!
     1821!--    Print out the index bounds for checking and debugging purposes
     1822       WRITE( 9, "('pmci_define_index_mapping, ii, iflu, ifuu: ', 3(i4,2x))" ) ii, iflu(ii),       &
     1823                                                                               ifuu(ii)
     1824       FLUSH( 9 )
     1825    ENDDO
     1826    WRITE( 9, * )
     1827!
     1828!-- i-indices of others for each ii-index value. Note that these comparison tests are not
     1829!-- round-off-error sensitive and therefore tolerance buffering is not needed here.
     1830    istart = nxlg
     1831    DO  ii = ipla, ipra
     1832       i = istart
     1833       DO WHILE ( ( coord_x(i) + 0.5_wp * dx < pg%coord_x(ii) )  .AND.  ( i < nxrg ) )
     1834          i  = i + 1
     1835       ENDDO
     1836       iflo(ii) = MIN( MAX( i, nxlg ), nxrg )
     1837       ir = i
     1838       DO WHILE ( ( coord_x(ir) + 0.5_wp * dx < pg%coord_x(ii) + pg%dx )  .AND.  ( i < nxrg+1 ) )
     1839          i  = i + 1
     1840          ir = MIN( i, nxrg )
     1841       ENDDO
     1842       ifuo(ii) = MIN( MAX( i-1, iflo(ii) ), nxrg )
     1843       istart = iflo(ii)
     1844!
     1845!--    Print out the index bounds for checking and debugging purposes
     1846       WRITE( 9, "('pmci_define_index_mapping, ii, iflo, ifuo: ', 3(i4,2x))" ) ii, iflo(ii),       &
     1847                                                                               ifuo(ii)
     1848       FLUSH( 9 )
     1849    ENDDO
     1850    WRITE( 9, * )
     1851!
     1852!-- j-indices of v for each jj-index value
     1853    jstart = nysg
     1854    DO  jj = jpsa, jpna
     1855!
     1856!--    The parent and child grid lines do always match in y, hence we use only the local
      1857!--    k,i-child-grid plane for the anterpolation. However, jflv still has to be stored separately
     1858!--    as these index bounds are passed as arguments to the interpolation and anterpolation
     1859!--    subroutines. Note that this comparison test is round-off-error sensitive and therefore
     1860!--    tolerance buffering is needed here.
     1861       j = jstart
     1862       DO WHILE ( pg%coord_y(jj) - coord_y(j) > toley  .AND.  j < nyng )
     1863          j = j + 1
     1864       ENDDO
     1865       jflv(jj) = MIN( MAX( j, nysg ), nyng )
     1866       jfuv(jj) = jflv(jj)
     1867       jstart   = jflv(jj)
     1868!
     1869!--    Print out the index bounds for checking and debugging purposes
     1870       WRITE( 9, "('pmci_define_index_mapping, jj, jflv, jfuv: ', 3(i4,2x))" ) jj, jflv(jj),       &
     1871                                                                               jfuv(jj)
     1872       FLUSH(9)
     1873    ENDDO
     1874    WRITE( 9, * )
     1875!
     1876!-- j-indices of others for each jj-index value
     1877!-- Note that these comparison tests are not round-off-error sensitive and therefore tolerance
     1878!-- buffering is not needed here.
     1879    jstart = nysg
     1880    DO  jj = jpsa, jpna
     1881       j = jstart
     1882       DO WHILE ( ( coord_y(j) + 0.5_wp * dy < pg%coord_y(jj) ) .AND. ( j < nyng ) )
     1883          j  = j + 1
     1884       ENDDO
     1885       jflo(jj) = MIN( MAX( j, nysg ), nyng )
     1886       jr = j
     1887       DO WHILE ( ( coord_y(jr) + 0.5_wp * dy < pg%coord_y(jj) + pg%dy ) .AND. ( j < nyng+1 ) )
     1888          j  = j + 1
     1889          jr = MIN( j, nyng )
     1890       ENDDO
     1891       jfuo(jj) = MIN( MAX( j-1, jflo(jj) ), nyng )
     1892       jstart = jflo(jj)
     1893!
     1894!--    Print out the index bounds for checking and debugging purposes
     1895       WRITE( 9, "('pmci_define_index_mapping, jj, jflo, jfuo: ', 3(i4,2x))" ) jj, jflo(jj),       &
     1896                                                                               jfuo(jj)
     1897       FLUSH( 9 )
     1898    ENDDO
     1899    WRITE( 9, * )
     1900!
     1901!-- k-indices of w for each kk-index value
     1902!-- Note that anterpolation index limits are needed also for the top boundary ghost cell level
     1903!-- because they are used also in the interpolation.
     1904    kstart  = 0
     1905    kflw(0) = 0
     1906    kfuw(0) = 0
     1907    DO  kk = 1, pg%nz+1
     1908!
     1909!--    The parent and child grid lines do always match in z, hence we use only the local
     1910!--    j,i-child-grid plane for the anterpolation. However, kctw still has to be stored separately
     1911!--    as these index bounds are passed as arguments to the interpolation and anterpolation
     1912!--    subroutines. Note that this comparison test is round-off-error sensitive and therefore
     1913!--    tolerance buffering is needed here.
     1914       k = kstart
     1915       DO WHILE ( ( pg%zw(kk) - zw(k) > tolez )  .AND.  ( k < nzt+1 ) )
     1916          k = k + 1
     1917       ENDDO
     1918       kflw(kk) = MIN( MAX( k, 1 ), nzt + 1 )
     1919       kfuw(kk) = kflw(kk)
     1920       kstart   = kflw(kk)
     1921!
     1922!--    Print out the index bounds for checking and debugging purposes
     1923       WRITE( 9, "('pmci_define_index_mapping, kk, kflw, kfuw: ', 4(i4,2x), 2(e12.5,2x))" )        &
     1924              kk, kflw(kk), kfuw(kk), nzt,  pg%zu(kk), pg%zw(kk)
     1925       FLUSH( 9 )
     1926    ENDDO
     1927    WRITE( 9, * )
     1928!
     1929!-- k-indices of others for each kk-index value
     1930    kstart  = 0
     1931    kflo(0) = 0
     1932    kfuo(0) = 0
     1933!
     1934!-- Note that anterpolation index limits are needed also for the top boundary ghost cell level
     1935!-- because they are used also in the interpolation. Note that these comparison tests are not
     1936!-- round-off-error sensitive and therefore tolerance buffering is not needed here.
     1937    DO  kk = 1, pg%nz+1
     1938       k = kstart
     1939       DO WHILE ( ( zu(k) < pg%zw(kk-1) )  .AND.  ( k <= nzt ) )
     1940          k = k + 1
     1941       ENDDO
     1942       kflo(kk) = MIN( MAX( k, 1 ), nzt + 1 )
     1943       DO WHILE ( ( zu(k) < pg%zw(kk) )  .AND.  ( k <= nzt+1 ) )
     1944          k = k + 1
      1945          IF ( k > nzt + 1 )  EXIT  ! This EXIT prevents the index of zu(k) from exceeding its upper bound.
     1946       ENDDO
     1947       kfuo(kk) = MIN( MAX( k-1, kflo(kk) ), nzt + 1 )
     1948       kstart = kflo(kk)
     1949    ENDDO
     1950!
     1951!-- Print out the index bounds for checking and debugging purposes
     1952    DO  kk = 1, pg%nz+1
     1953       WRITE( 9, "('pmci_define_index_mapping, kk, kflo, kfuo: ', 4(i4,2x), 2(e12.5,2x))" )        &
     1954              kk, kflo(kk), kfuo(kk), nzt,  pg%zu(kk), pg%zw(kk)
     1955       FLUSH( 9 )
     1956    ENDDO
     1957    WRITE( 9, * )
     1958!
     1959!-- Precomputation of number of child-grid nodes inside parent-grid cells. Note that ii, jj, and kk
     1960!-- are parent-grid indices. This information is needed in the anterpolation. The indices for
     1961!-- wall_flags_total_0 (kw,jw,iw) must be limited to the range [-1,...,nx/ny/nzt+1] in order to
     1962!-- avoid zero values on the outer ghost nodes.
     1963    DO  ii = ipla, ipra
     1964       DO  jj = jpsa, jpna
     1965          DO  kk = 0, pg%nz+1
     1966!
     1967!--          u-component
     1968             DO  i = iflu(ii), ifuu(ii)
     1969                iw = MAX( MIN( i, nx+1 ), -1 )
     1970                DO  j = jflo(jj), jfuo(jj)
     1971                   jw = MAX( MIN( j, ny+1 ), -1 )
     1972                   DO  k = kflo(kk), kfuo(kk)
     1973                      kw = MIN( k, nzt+1 )
     1974                      ijkfc_u(kk,jj,ii) = ijkfc_u(kk,jj,ii)                                        &
     1975                                          + MERGE( 1, 0, BTEST( wall_flags_total_0(kw,jw,iw), 1 ) )
     1976                   ENDDO
     1977                ENDDO
     1978             ENDDO
     1979!
     1980!--          v-component
     1981             DO  i = iflo(ii), ifuo(ii)
     1982                iw = MAX( MIN( i, nx+1 ), -1 )
     1983                DO  j = jflv(jj), jfuv(jj)
     1984                   jw = MAX( MIN( j, ny+1 ), -1 )
     1985                   DO  k = kflo(kk), kfuo(kk)
     1986                      kw = MIN( k, nzt+1 )
     1987                      ijkfc_v(kk,jj,ii) = ijkfc_v(kk,jj,ii)                                        &
     1988                                          + MERGE( 1, 0, BTEST( wall_flags_total_0(kw,jw,iw), 2 ) )
     1989                   ENDDO
     1990                ENDDO
     1991             ENDDO
     1992!
     1993!--          Scalars
     1994             DO  i = iflo(ii), ifuo(ii)
     1995                iw = MAX( MIN( i, nx+1 ), -1 )
     1996                DO  j = jflo(jj), jfuo(jj)
     1997                   jw = MAX( MIN( j, ny+1 ), -1 )
     1998                   DO  k = kflo(kk), kfuo(kk)
     1999                      kw = MIN( k, nzt+1 )
     2000                      ijkfc_s(kk,jj,ii) = ijkfc_s(kk,jj,ii)                                        &
     2001                                          + MERGE( 1, 0, BTEST( wall_flags_total_0(kw,jw,iw), 0 ) )
     2002                   ENDDO
     2003                ENDDO
     2004             ENDDO
     2005!
     2006!--          w-component
     2007             DO  i = iflo(ii), ifuo(ii)
     2008                iw = MAX( MIN( i, nx+1 ), -1 )
     2009                DO  j = jflo(jj), jfuo(jj)
     2010                   jw = MAX( MIN( j, ny+1 ), -1 )
     2011                   DO  k = kflw(kk), kfuw(kk)
     2012                      kw = MIN( k, nzt+1 )
     2013                      ijkfc_w(kk,jj,ii) = ijkfc_w(kk,jj,ii)                                        &
     2014                                          + MERGE( 1, 0, BTEST( wall_flags_total_0(kw,jw,iw), 3 ) )
     2015                   ENDDO
     2016                ENDDO
     2017             ENDDO
     2018
     2019          ENDDO  ! kk
     2020       ENDDO  ! jj
     2021    ENDDO  ! ii
     2022
     2023 END SUBROUTINE pmci_define_index_mapping
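
The ijkfc_* arrays filled above count, per parent-grid cell, the child-grid nodes that are not
wall points (see the BTEST on wall_flags_total_0); together with the ifl/ifu, jfl/jfu and kfl/kfu
bounds they are needed in the anterpolation, presumably as the summation limits and the divisor of
a top-hat average. A small self-contained sketch of such an average for a single parent-grid cell,
with assumed grid-spacing ratios and values (nothing taken from the file):

 PROGRAM anterpolation_average_sketch
    IMPLICIT NONE
    INTEGER, PARAMETER ::  wp = KIND( 1.0D0 )
    INTEGER, PARAMETER ::  igsr = 2, jgsr = 2, kgsr = 2   !< assumed grid-spacing ratios
    INTEGER  ::  ijkfc                                    !< number of contributing child-grid nodes
    REAL(wp) ::  child_s(kgsr,jgsr,igsr)                  !< child-grid values inside one parent-grid cell
    REAL(wp) ::  parent_s                                 !< anterpolated parent-grid value

    child_s = 1.0_wp
    child_s(1,1,1) = 3.0_wp
!
!-- Here all nodes contribute; in the module the precomputed ijkfc_s(kk,jj,ii) is used instead,
!-- which excludes wall/topography points.
    ijkfc    = SIZE( child_s )
    parent_s = SUM( child_s ) / REAL( ijkfc, wp )
    PRINT*, 'anterpolated value: ', parent_s              ! ( 7 * 1.0 + 3.0 ) / 8 = 1.25
 END PROGRAM anterpolation_average_sketch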
     2024
     2025
     2026
     2027!--------------------------------------------------------------------------------------------------!
     2028! Description:
     2029! ------------
     2030!> @Todo: Missing subroutine description.
     2031!--------------------------------------------------------------------------------------------------!
     2032 SUBROUTINE pmci_check_child_domain_size
     2033!
      2034!-- Check if the child domain is so small, in terms of the number of parent-grid cells covered,
      2035!-- that the anterpolation buffers would fill the whole domain, making anterpolation impossible.
      2036!-- Also check that anterpolation_buffer_width is not so large that it prevents anterpolation.
     2037    IMPLICIT NONE
     2038
     2039!
     2040!-- First x-direction
     2041    IF ( iplg + 3 + anterpolation_buffer_width > iprg - 3 - anterpolation_buffer_width )  THEN
     2042       IF ( iprg - iplg + 1 < 7 )  THEN
     2043!
     2044!--       Error
     2045          WRITE( message_string, * ) 'child domain too narrow for anterpolation in x-direction'
     2046          CALL message( 'pmci_check_child_domain_size', 'PA0652', 3, 2, 0, 6, 0 )
     2047       ELSE IF ( iprg - iplg + 1 < 11 )  THEN
     2048!
     2049!--       Warning
     2050          WRITE( message_string, * ) 'anterpolation_buffer_width value too high, reset to 0'
     2051          CALL message( 'pmci_check_child_domain_size', 'PA0653', 0, 1, 0, 6, 0 )
     2052          anterpolation_buffer_width = 0
    14602053       ELSE
    1461           yexs  = 0.0_wp
    1462           jauxs = 1 
    1463        ENDIF
    1464        yps  = coord_y(nys) - yexs
    1465        DO  jp = 0, pg%ny
    1466           IF ( pg%coord_y(jp) + 0.5_wp * pg%dy >= yps - toley )  THEN             
    1467              jps = MAX( 0, jp )
    1468              EXIT
    1469           ENDIF
    1470        ENDDO
    1471 !
    1472 !--    North boundary.
    1473 !--    Extension by two parent-grid cells behind the boundary, see the comment block above. 
    1474        IF  ( bc_dirichlet_n )  THEN
    1475           yexn  = 2.0_wp * pg%dy
    1476           jauxn = 0
     2054!
     2055!--       Informative message
     2056          WRITE( message_string, * ) 'anterpolation_buffer_width value too high, reset to ' //     &
     2057                                     'default value 2'
     2058          CALL message( 'pmci_check_child_domain_size', 'PA0654', 0, 0, 0, 6, 0 )
     2059          anterpolation_buffer_width = 2
     2060       ENDIF
     2061    ENDIF
     2062!
     2063!-- Then y-direction
     2064    IF ( jpsg + 3 + anterpolation_buffer_width > jpng - 3 - anterpolation_buffer_width )  THEN
     2065       IF ( jpng - jpsg + 1 < 7 )  THEN
     2066!
     2067!--       Error
     2068          WRITE( message_string, * ) 'child domain too narrow for anterpolation in y-direction'
     2069          CALL message( 'pmci_check_child_domain_size', 'PA0652', 3, 2, 0, 6, 0 )
     2070       ELSE IF ( jpng - jpsg + 1 < 11 )  THEN
     2071!
     2072!--       Warning
     2073          WRITE( message_string, * ) 'anterpolation_buffer_width value too high, reset to 0'
     2074          CALL message( 'pmci_check_child_domain_size', 'PA0653', 0, 1, 0, 6, 0 )
     2075          anterpolation_buffer_width = 0
    14772076       ELSE
    1478           yexn  = 0.0_wp
    1479           jauxn = 1
    1480        ENDIF
    1481        ypn  = coord_y(nyn+1) + yexn
    1482        DO  jp = pg%ny, 0 , -1
    1483           IF ( pg%coord_y(jp) + 0.5_wp * pg%dy <= ypn + toley )  THEN
    1484              jpn = MIN( pg%ny, MAX( jps, jp ) )
    1485              EXIT
    1486           ENDIF
    1487        ENDDO
    1488 !
    1489 !--    Make sure that the indexing is contiguous (no gaps, no overlaps).
    1490 !--    This is a safety measure mainly for cases with high grid-spacing
    1491 !--    ratio and narrow child subdomains.
    1492        IF ( pdims(1) > 1 )  THEN
    1493           IF ( nxl == 0 )  THEN
    1494              CALL MPI_SEND( ipr, 1, MPI_INTEGER, pright, 717, comm2d, ierr )
    1495           ELSE IF ( nxr == nx )  THEN
    1496              CALL MPI_RECV( ijaux, 1, MPI_INTEGER, pleft, 717, comm2d, status, ierr )
    1497              ipl = ijaux + 1
    1498           ELSE
    1499              CALL MPI_SEND( ipr, 1, MPI_INTEGER, pright, 717, comm2d, ierr )
    1500              CALL MPI_RECV( ijaux, 1, MPI_INTEGER, pleft, 717, comm2d, status, ierr )
    1501              ipl = ijaux + 1
    1502           ENDIF
    1503        ENDIF
    1504 
    1505        IF ( pdims(2) > 1 )  THEN
    1506           IF ( nys == 0 )  THEN
    1507              CALL MPI_SEND( jpn, 1, MPI_INTEGER, pnorth, 719, comm2d, ierr )
    1508           ELSE IF ( nyn == ny )  THEN
    1509              CALL MPI_RECV( ijaux, 1, MPI_INTEGER, psouth, 719, comm2d, status, ierr )
    1510              jps = ijaux + 1
    1511           ELSE
    1512              CALL MPI_SEND( jpn, 1, MPI_INTEGER, pnorth, 719, comm2d, ierr )
    1513              CALL MPI_RECV( ijaux, 1, MPI_INTEGER, psouth, 719, comm2d, status, ierr )
    1514              jps = ijaux + 1
    1515           ENDIF
    1516        ENDIF
    1517          
    1518        WRITE(9,"('pmci_map_child_grid_to_parent_grid. Parent-grid array bounds: ',4(i4,2x))")             &
    1519             ipl, ipr, jps, jpn
    1520        FLUSH(9)
    1521 
    1522        parent_bound(1) = ipl
    1523        parent_bound(2) = ipr
    1524        parent_bound(3) = jps
    1525        parent_bound(4) = jpn
    1526        parent_bound(5) = myid
    1527 !
    1528 !--    The following auxiliary index bounds are used for allocating index mapping and
    1529 !--    some other auxiliary arrays.
    1530        ipla = ipl - iauxl
    1531        ipra = ipr + iauxr
    1532        jpsa = jps - jauxs
    1533        jpna = jpn + jauxn
    1534 !
    1535 !--    The index-bounds parent_bound of all subdomains of the current child domain
    1536 !--    must be sent to the parent in order for the parent to create the index list.
    1537 !--    For this reason, the parent_bound arrays are packed together in single
    1538 !--    array parent_bound_all using MPI_GATHER.       
    1539 !--    Note that MPI_Gather receives data from all processes in the rank order
    1540 !--    This fact is exploited in creating the index list in pmci_create_index_list.
    1541        CALL MPI_GATHER( parent_bound, 5, MPI_INTEGER, parent_bound_all, 5,                          &
    1542                         MPI_INTEGER, 0, comm2d, ierr )
    1543 
    1544        IF ( myid == 0 )  THEN
    1545           size_of_array(1) = SIZE( parent_bound_all, 1 )
    1546           size_of_array(2) = SIZE( parent_bound_all, 2 )
    1547           CALL pmc_send_to_parent( size_of_array, 2, 0, 40, ierr )
    1548           CALL pmc_send_to_parent( parent_bound_all, SIZE( parent_bound_all ), 0, 41, ierr )
    1549 !
    1550 !--       Determine the global parent-grid index bounds       
    1551           parent_bound_global(1) = MINVAL( parent_bound_all(1,:) )
    1552           parent_bound_global(2) = MAXVAL( parent_bound_all(2,:) )
    1553           parent_bound_global(3) = MINVAL( parent_bound_all(3,:) )
    1554           parent_bound_global(4) = MAXVAL( parent_bound_all(4,:) )
    1555        ENDIF
    1556 !
    1557 !--    Broadcast the global parent-grid index bounds to all current child processes
    1558        CALL MPI_BCAST( parent_bound_global, 4, MPI_INTEGER, 0, comm2d, ierr )
    1559        iplg = parent_bound_global(1)
    1560        iprg = parent_bound_global(2)
    1561        jpsg = parent_bound_global(3)
    1562        jpng = parent_bound_global(4)
    1563        WRITE( 9, "('pmci_map_child_grid_to_parent_grid. Global parent-grid index bounds iplg, iprg, jpsg, jpng: ',4(i4,2x))" ) &
    1564             iplg, iprg, jpsg, jpng
    1565        FLUSH( 9 )
    1566        
    1567     END SUBROUTINE pmci_map_child_grid_to_parent_grid
    1568 
    1569      
    1570      
    1571     SUBROUTINE pmci_define_index_mapping
    1572 !
    1573 !--    Precomputation of the mapping of the child- and parent-grid indices.
    1574 
    1575        IMPLICIT NONE
    1576 
    1577        INTEGER(iwp) ::  i         !< Child-grid index in the x-direction
    1578        INTEGER(iwp) ::  ii        !< Parent-grid index in the x-direction
    1579        INTEGER(iwp) ::  istart    !<
    1580        INTEGER(iwp) ::  ir        !<
    1581        INTEGER(iwp) ::  iw        !< Child-grid index limited to -1 <= iw <= nx+1 for wall_flags_total_0
    1582        INTEGER(iwp) ::  j         !< Child-grid index in the y-direction
    1583        INTEGER(iwp) ::  jj        !< Parent-grid index in the y-direction
    1584        INTEGER(iwp) ::  jstart    !<
    1585        INTEGER(iwp) ::  jr        !<
    1586        INTEGER(iwp) ::  jw        !< Child-grid index limited to -1 <= jw <= ny+1 for wall_flags_total_0
    1587        INTEGER(iwp) ::  k         !< Child-grid index in the z-direction
    1588        INTEGER(iwp) ::  kk        !< Parent-grid index in the z-direction
    1589        INTEGER(iwp) ::  kstart    !<
    1590        INTEGER(iwp) ::  kw        !< Child-grid index limited to kw <= nzt+1 for wall_flags_total_0
    1591 
    1592        REAL(wp)     ::  tolex     !< Tolerance for grid-line matching in x-direction   
    1593        REAL(wp)     ::  toley     !< Tolerance for grid-line matching in y-direction   
    1594        REAL(wp)     ::  tolez     !< Tolerance for grid-line matching in z-direction   
    1595 
    1596 !
    1597 !--    Grid-line tolerances.
     2077!
     2078!--       Informative message
     2079          WRITE( message_string, * ) 'anterpolation_buffer_width value too high, reset to ' //     &
     2080                                     'default value 2'
     2081          CALL message( 'pmci_check_child_domain_size', 'PA0654', 0, 0, 0, 6, 0 )
     2082          anterpolation_buffer_width = 2
     2083       ENDIF
     2084    ENDIF
     2085!
     2086!-- Finally z-direction
     2087    IF ( kctw - 1 - anterpolation_buffer_width < 1 )  THEN
     2088       IF ( kctw - 1 < 1 )  THEN
     2089!
     2090!--       Error
     2091          WRITE( message_string, * ) 'child domain too shallow for anterpolation in z-direction'
     2092          CALL message( 'pmci_check_child_domain_size', 'PA0652', 3, 2, 0, 6, 0 )
     2093       ELSE IF ( kctw - 3 < 1 )  THEN
     2094!
     2095!--       Warning
     2096          WRITE( message_string, * ) 'anterpolation_buffer_width value too high, reset to 0'
     2097          CALL message( 'pmci_check_child_domain_size', 'PA0653', 0, 1, 0, 6, 0 )
     2098          anterpolation_buffer_width = 0
     2099       ELSE
     2100!
     2101!--       Informative message
     2102          WRITE( message_string, * ) 'anterpolation_buffer_width value too high, reset to ' //     &
     2103                                     'default value 2'
     2104          CALL message( 'pmci_check_child_domain_size', 'PA0654', 0, 0, 0, 6, 0 )
     2105          anterpolation_buffer_width = 2
     2106       ENDIF
     2107    ENDIF
     2108
     2109 END SUBROUTINE pmci_check_child_domain_size
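
In the horizontal directions the check above reduces to a single inequality: with buffer width b,
the condition iplg + 3 + b > iprg - 3 - b is equivalent to having fewer than 2*b + 7 parent-grid
cells across the child domain, which is why the error fires below 7 cells (b = 0) and the warning
below 11 cells (default b = 2). A stand-alone sketch of that arithmetic, with assumed numbers that
are not taken from the file:

 PROGRAM child_width_check_sketch
    IMPLICIT NONE
    INTEGER ::  buffer_width   = 2    !< assumed anterpolation_buffer_width (the default value)
    INTEGER ::  n_parent_cells = 10   !< assumed child-domain width in parent-grid cells, iprg - iplg + 1

    IF ( n_parent_cells < 7 )  THEN
       PRINT*, 'too narrow for anterpolation even with a zero buffer width'
    ELSEIF ( n_parent_cells < 2 * buffer_width + 7 )  THEN
       PRINT*, 'buffer width too large for this child domain and would have to be reduced'
    ELSE
       PRINT*, 'anterpolation possible with buffer width ', buffer_width
    ENDIF
 END PROGRAM child_width_check_sketch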
     2110
     2111
     2112!--------------------------------------------------------------------------------------------------!
     2113! Description:
     2114! ------------
     2115!> @Todo: Missing subroutine description.
     2116!--------------------------------------------------------------------------------------------------!
     2117 SUBROUTINE pmci_allocate_workarrays
     2118!
     2119!-- Allocate parent-grid work-arrays for interpolation
     2120    IMPLICIT NONE
     2121
     2122!
     2123!-- Determine and store the PE-subdomain dependent index bounds
     2124    IF ( bc_dirichlet_l )  THEN
     2125       iplw = ipl + 1
     2126    ELSE
     2127       iplw = ipl - 1
     2128    ENDIF
     2129
     2130    IF ( bc_dirichlet_r )  THEN
     2131       iprw = ipr - 1
     2132    ELSE
     2133       iprw = ipr + 1
     2134    ENDIF
     2135
     2136    IF ( bc_dirichlet_s )  THEN
     2137       jpsw = jps + 1
     2138    ELSE
     2139       jpsw = jps - 1
     2140    ENDIF
     2141
     2142    IF ( bc_dirichlet_n )  THEN
     2143       jpnw = jpn - 1
     2144    ELSE
     2145       jpnw = jpn + 1
     2146    ENDIF
     2147!
     2148!-- Left and right boundaries.
     2149    ALLOCATE( workarr_lr(0:pg%nz+1,jpsw:jpnw,0:2) )
     2150!
     2151!-- South and north boundaries.
     2152    ALLOCATE( workarr_sn(0:pg%nz+1,0:2,iplw:iprw) )
     2153!
     2154!-- Top boundary.
     2155    ALLOCATE( workarr_t(0:2,jpsw:jpnw,iplw:iprw) )
     2156
     2157 END SUBROUTINE pmci_allocate_workarrays
     2158
     2159
     2160!--------------------------------------------------------------------------------------------------!
     2161! Description:
     2162! ------------
     2163!> @Todo: Missing subroutine description.
     2164!--------------------------------------------------------------------------------------------------!
     2165 SUBROUTINE pmci_create_workarray_exchange_datatypes
     2166!
     2167!-- Define specific MPI types for workarr-exchange.
     2168    IMPLICIT NONE
     2169
     2170!
     2171!-- For the left and right boundaries
     2172    CALL MPI_TYPE_VECTOR( 3, pg%nz+2, (jpnw-jpsw+1)*(pg%nz+2), MPI_REAL, workarr_lr_exchange_type, &
     2173                          ierr )
     2174    CALL MPI_TYPE_COMMIT( workarr_lr_exchange_type, ierr )
     2175!
     2176!-- For the south and north boundaries
     2177    CALL MPI_TYPE_VECTOR( 1, 3*(pg%nz+2), 3*(pg%nz+2), MPI_REAL, workarr_sn_exchange_type, ierr )
     2178    CALL MPI_TYPE_COMMIT( workarr_sn_exchange_type, ierr )
     2179!
     2180!-- For the top-boundary x-slices
     2181    CALL MPI_TYPE_VECTOR( iprw-iplw+1, 3, 3*(jpnw-jpsw+1), MPI_REAL, workarr_t_exchange_type_x,    &
     2182                          ierr )
     2183    CALL MPI_TYPE_COMMIT( workarr_t_exchange_type_x, ierr )
     2184!
     2185!-- For the top-boundary y-slices
     2186    CALL MPI_TYPE_VECTOR( 1, 3*(jpnw-jpsw+1), 3*(jpnw-jpsw+1), MPI_REAL,                           &
     2187                          workarr_t_exchange_type_y, ierr )
     2188    CALL MPI_TYPE_COMMIT( workarr_t_exchange_type_y, ierr )
     2189
     2190 END SUBROUTINE pmci_create_workarray_exchange_datatypes
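
The MPI_TYPE_VECTOR arguments above follow the usual ( count, blocklength, stride ) pattern. For
the left/right type, count = 3 covers the three boundary-normal planes of workarr_lr, blocklength
= pg%nz+2 is one contiguous k-column, and stride = (jpnw-jpsw+1)*(pg%nz+2) elements is one full
k,j-plane, so the type addresses all k-levels of the three planes for a single j-index. The short
sketch below merely enumerates the element offsets such a vector type covers; the sizes are
assumed and no MPI calls are made.

 PROGRAM vector_type_layout_sketch
    IMPLICIT NONE
    INTEGER, PARAMETER ::  nzp2 = 4                  !< assumed pg%nz+2
    INTEGER, PARAMETER ::  nyw  = 3                  !< assumed jpnw-jpsw+1
    INTEGER, PARAMETER ::  nblocks = 3               !< count argument (three boundary-normal planes)
    INTEGER, PARAMETER ::  blklen  = nzp2            !< blocklength argument (one k-column)
    INTEGER, PARAMETER ::  strd    = nyw * nzp2      !< stride argument (one k,j-plane)
    INTEGER ::  ib, ie

    DO  ib = 0, nblocks - 1
       DO  ie = 0, blklen - 1
          PRINT*, 'element offset addressed by the derived type: ', ib * strd + ie
       ENDDO
    ENDDO
 END PROGRAM vector_type_layout_sketch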
     2191
     2192
     2193!--------------------------------------------------------------------------------------------------!
     2194! Description:
     2195! ------------
     2196!> @Todo: Missing subroutine description.
     2197!--------------------------------------------------------------------------------------------------!
     2198 SUBROUTINE pmci_check_grid_matching
     2199!
     2200!-- Check that the grid lines of child and parent do match.
     2201!-- Also check that the child subdomain width is not smaller than the parent grid spacing in the
     2202!-- respective direction.
     2203    IMPLICIT NONE
     2204
      2205    INTEGER(iwp) ::  non_int_gsr_x = 0                    !< Flag for non-integer grid-spacing ratio in x-direction
      2206    INTEGER(iwp) ::  non_int_gsr_y = 0                    !< Flag for non-integer grid-spacing ratio in y-direction
      2207    INTEGER(iwp) ::  non_int_gsr_z = 0                    !< Flag for non-integer grid-spacing ratio in z-direction
     2208    INTEGER(iwp) ::  non_matching_height = 0              !< Flag for non-matching child-domain height
     2209    INTEGER(iwp) ::  non_matching_lower_left_corner = 0   !< Flag for non-matching lower left corner
     2210    INTEGER(iwp) ::  non_matching_upper_right_corner = 0  !< Flag for non-matching upper right corner
     2211    INTEGER(iwp) ::  too_narrow_pesd_x = 0                !< Flag for too narrow pe-subdomain in x-direction
     2212    INTEGER(iwp) ::  too_narrow_pesd_y = 0                !< Flag for too narrow pe-subdomain in y-direction
     2213
     2214    REAL(wp) ::  child_ngp_x_l                            !< Number of gridpoints in child subdomain in x-direction
     2215                                                          !< converted to REAL(wp)
     2216    REAL(wp) ::  child_ngp_y_l                            !< Number of gridpoints in child subdomain in y-direction
     2217                                                          !< converted to REAL(wp)
     2218    REAL(wp) ::  gridline_mismatch_x                      !< Mismatch between the parent and child gridlines in the x-direction
     2219    REAL(wp) ::  gridline_mismatch_y                      !< Mismatch between the parent and child gridlines in the y-direction
      2220    REAL(wp) ::  gsr_mismatch_x                           !< Deviation of the grid-spacing ratio from the nearest integer value
      2221                                                          !< in the x-direction
      2222    REAL(wp) ::  gsr_mismatch_y                           !< Deviation of the grid-spacing ratio from the nearest integer value
      2223                                                          !< in the y-direction
     2224    REAL(wp) ::  tolex                                    !< Tolerance for grid-line matching in x-direction
     2225    REAL(wp) ::  toley                                    !< Tolerance for grid-line matching in y-direction
     2226    REAL(wp) ::  tolez                                    !< Tolerance for grid-line matching in z-direction
     2227    REAL(wp) ::  upper_right_coord_x                      !< X-coordinate of the upper right corner of the child domain
     2228    REAL(wp) ::  upper_right_coord_y                      !< Y-coordinate of the upper right corner of the child domain
     2229
     2230
     2231    IF ( myid == 0 )  THEN
     2232
    15982233       tolex = tolefac * dx
    15992234       toley = tolefac * dy
    16002235       tolez = tolefac * dz(1)
    16012236!
    1602 !--    Allocate child-grid work arrays for interpolation.
    1603        igsr = NINT( pg%dx / dx, iwp )
    1604        jgsr = NINT( pg%dy / dy, iwp )
    1605        kgsr = NINT( pg%dzw(1) / dzw(1), iwp )
    1606        WRITE(9,"('igsr, jgsr, kgsr: ',3(i3,2x))") igsr, jgsr, kgsr
    1607        FLUSH(9)
    1608 !       
    1609 !--    Determine index bounds for the parent-grid work arrays for
    1610 !--    interpolation and allocate them.
    1611        CALL pmci_allocate_workarrays
    1612 !       
    1613 !--    Define the MPI-datatypes for parent-grid work array
    1614 !--    exchange between the PE-subdomains.
    1615        CALL pmci_create_workarray_exchange_datatypes
    1616 !
    1617 !--    First determine kcto and kctw which refer to the uppermost
    1618 !--    parent-grid levels below the child top-boundary level.
    1619 !--    Note that these comparison tests are not round-off-error
    1620 !--    sensitive and therefore tolerance buffering is not needed here.
    1621        kk = 0
    1622        DO WHILE ( pg%zu(kk) <= zu(nzt) )
    1623           kk = kk + 1
    1624        ENDDO
    1625        kcto = kk - 1
    1626 
    1627        kk = 0
    1628        DO WHILE ( pg%zw(kk) <= zw(nzt-1) )
    1629           kk = kk + 1
    1630        ENDDO
    1631        kctw = kk - 1
    1632 
    1633        WRITE( 9, "('kcto, kctw = ', 2(i3,2x))" ) kcto, kctw
    1634        FLUSH( 9 )
    1635 !       
    1636 !--    In case of two-way coupling, check that the child domain is sufficiently
    1637 !--    large in terms of the number of parent-grid cells covered. Otherwise
    1638 !--    anterpolation is not possible.
    1639        IF ( nesting_mode == 'two-way')  THEN
    1640           CALL pmci_check_child_domain_size
    1641        ENDIF
    1642        
    1643        ALLOCATE( iflu(ipla:ipra) )
    1644        ALLOCATE( iflo(ipla:ipra) )
    1645        ALLOCATE( ifuu(ipla:ipra) )
    1646        ALLOCATE( ifuo(ipla:ipra) )
    1647        ALLOCATE( jflv(jpsa:jpna) )
    1648        ALLOCATE( jflo(jpsa:jpna) )
    1649        ALLOCATE( jfuv(jpsa:jpna) )
    1650        ALLOCATE( jfuo(jpsa:jpna) )       
    1651        ALLOCATE( kflw(0:pg%nz+1) )
    1652        ALLOCATE( kflo(0:pg%nz+1) )
    1653        ALLOCATE( kfuw(0:pg%nz+1) )
    1654        ALLOCATE( kfuo(0:pg%nz+1) )
    1655        ALLOCATE( ijkfc_u(0:pg%nz+1,jpsa:jpna,ipla:ipra) )
    1656        ALLOCATE( ijkfc_v(0:pg%nz+1,jpsa:jpna,ipla:ipra) )
    1657        ALLOCATE( ijkfc_w(0:pg%nz+1,jpsa:jpna,ipla:ipra) )
    1658        ALLOCATE( ijkfc_s(0:pg%nz+1,jpsa:jpna,ipla:ipra) )
    1659 
    1660        ijkfc_u = 0
    1661        ijkfc_v = 0
    1662        ijkfc_w = 0
    1663        ijkfc_s = 0
    1664 !
    1665 !--    i-indices of u for each ii-index value
    1666        istart = nxlg
    1667        DO  ii = ipla, ipra
    1668 !
    1669 !--       The parent and child grid lines do always match in x, hence we
    1670 !--       use only the local k,j-child-grid plane for the anterpolation.
    1671 !--       However, icru still has to be stored separately as these index bounds
    1672 !--       are passed as arguments to the interpolation and anterpolation
    1673 !--       subroutines.
    1674 !--       Note that this comparison test is round-off-error sensitive
    1675 !--       and therefore tolerance buffering is needed here.
    1676           i = istart
    1677           DO WHILE ( pg%coord_x(ii) - coord_x(i) > tolex  .AND. i < nxrg )
    1678              i = i + 1
    1679           ENDDO
    1680           iflu(ii) = MIN( MAX( i, nxlg ), nxrg )
    1681           ifuu(ii) = iflu(ii)
    1682           istart   = iflu(ii)
    1683 !
    1684 !--       Print out the index bounds for checking and debugging purposes
    1685           WRITE( 9, "('pmci_define_index_mapping, ii, iflu, ifuu: ', 3(i4,2x))" )                   &
    1686                ii, iflu(ii), ifuu(ii)
    1687           FLUSH( 9 )
    1688        ENDDO
    1689        WRITE( 9, * )
    1690 !
    1691 !--    i-indices of others for each ii-index value.
    1692 !--    Note that these comparison tests are not round-off-error
    1693 !--    sensitive and therefore tolerance buffering is not needed here.
    1694        istart = nxlg
    1695        DO  ii = ipla, ipra
    1696           i = istart
    1697           DO WHILE ( ( coord_x(i) + 0.5_wp * dx < pg%coord_x(ii) )  .AND.  ( i < nxrg ) )
    1698              i  = i + 1
    1699           ENDDO
    1700           iflo(ii) = MIN( MAX( i, nxlg ), nxrg )
    1701           ir = i
    1702           DO WHILE ( ( coord_x(ir) + 0.5_wp * dx < pg%coord_x(ii) + pg%dx )  .AND.  ( i < nxrg+1 ) )
    1703              i  = i + 1
    1704              ir = MIN( i, nxrg )
    1705           ENDDO
    1706           ifuo(ii) = MIN( MAX( i-1, iflo(ii) ), nxrg )
    1707           istart = iflo(ii)
    1708 !
    1709 !--       Print out the index bounds for checking and debugging purposes
    1710           WRITE( 9, "('pmci_define_index_mapping, ii, iflo, ifuo: ', 3(i4,2x))" )                   &
    1711                ii, iflo(ii), ifuo(ii)
    1712           FLUSH( 9 )
    1713        ENDDO
    1714        WRITE( 9, * )
    1715 !
    1716 !--    j-indices of v for each jj-index value
    1717        jstart = nysg
    1718        DO  jj = jpsa, jpna
    1719 !
    1720 !--       The parent and child grid lines do always match in y, hence we
    1721 !--       use only the local k,i-child-grid plane for the anterpolation.
    1722 !--       However, jcnv still has to be stored separately as these index bounds
    1723 !--       are passed as arguments to the interpolation and anterpolation
    1724 !--       subroutines.
    1725 !--       Note that this comparison test is round-off-error sensitive
    1726 !--       and therefore tolerance buffering is needed here.
    1727           j = jstart
    1728           DO WHILE ( pg%coord_y(jj) - coord_y(j) > toley  .AND. j < nyng )
    1729              j = j + 1
    1730           ENDDO
    1731           jflv(jj) = MIN( MAX( j, nysg ), nyng )
    1732           jfuv(jj) = jflv(jj)
    1733           jstart   = jflv(jj)
    1734 !
    1735 !--       Print out the index bounds for checking and debugging purposes
    1736           WRITE( 9, "('pmci_define_index_mapping, jj, jflv, jfuv: ', 3(i4,2x))" )                   &
    1737                jj, jflv(jj), jfuv(jj)
    1738           FLUSH(9)
    1739        ENDDO
    1740        WRITE( 9, * )
    1741 !
    1742 !--    j-indices of others for each jj-index value
    1743 !--    Note that these comparison tests are not round-off-error
    1744 !--    sensitive and therefore tolerance buffering is not needed here.
    1745        jstart = nysg
    1746        DO  jj = jpsa, jpna
    1747           j = jstart
    1748           DO WHILE ( ( coord_y(j) + 0.5_wp * dy < pg%coord_y(jj) ) .AND. ( j < nyng ) )
    1749              j  = j + 1
    1750           ENDDO
    1751           jflo(jj) = MIN( MAX( j, nysg ), nyng )
    1752           jr = j
    1753           DO WHILE ( ( coord_y(jr) + 0.5_wp * dy < pg%coord_y(jj) + pg%dy ) .AND. ( j < nyng+1 ) )
    1754              j  = j + 1
    1755              jr = MI