Timestamp: Mar 12, 2020 4:51:59 PM
Author: raasch
Message:

extensions to allow usage of alternative communicators in exchange_horiz

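As an illustration only (not part of the changeset): a minimal sketch of how the extended interface can be called after this change. The array name ar, the ghost-layer count nbgp and the configuration index 1 are placeholders, not taken from the diff below.

    ! default: exchange ghost points over the main model communicator (comm2d)
    CALL exchange_horiz( ar, nbgp )

    ! new optional argument: select a predefined alternative communicator
    ! configuration by its index (placeholder value 1)
    CALL exchange_horiz( ar, nbgp, alternative_communicator = 1 )
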
File: 1 edited

Legend:

  ' '  Unmodified
  '+'  Added
  '-'  Removed
  • palm/trunk/SOURCE/exchange_horiz_mod.f90

    --- r4457
    +++ r4461
     ! -----------------
     ! $Id$
    +! optional communicator added to exchange_horiz
    +!
    +! 4457 2020-03-11 14:20:43Z raasch
     ! routine has been modularized, file exchange_horiz_2d has been merged
     !
     
     ! Description:
     ! ------------
    -!> Exchange of lateral boundary values (parallel computers) and cyclic
    -!> lateral boundary conditions, respectively.
    +!> Exchange of ghost point layers for subdomains (in parallel mode) and setting
    +!> of cyclic lateral boundary conditions for the total domain.
     !------------------------------------------------------------------------------!
      MODULE exchange_horiz_mod
     
     
     
    - SUBROUTINE exchange_horiz( ar, nbgp_local)
    +!------------------------------------------------------------------------------!
    +! Description:
    +! ------------
    +!> Exchange of ghost point layers for subdomains (in parallel mode) and setting
    +!> of cyclic lateral boundary conditions for the total domain.
    +!> This routine is for REAL 3d-arrays.
    +!------------------------------------------------------------------------------!
    + SUBROUTINE exchange_horiz( ar, nbgp_local, alternative_communicator)
     
         USE control_parameters,                                                    &
     
     #endif
     
    -    INTEGER(iwp) ::  nbgp_local  !<
    +    INTEGER(iwp), OPTIONAL ::  alternative_communicator  !< alternative MPI communicator to be used
    +    INTEGER(iwp) ::  communicator  !< communicator that is used as argument in MPI calls
    +    INTEGER(iwp) ::  left_pe       !< id of left pe that is used as argument in MPI calls
    +    INTEGER(iwp) ::  nbgp_local    !< number of ghost point layers
    +    INTEGER(iwp) ::  north_pe      !< id of north pe that is used as argument in MPI calls
    +    INTEGER(iwp) ::  right_pe      !< id of right pe that is used as argument in MPI calls
    +    INTEGER(iwp) ::  south_pe      !< id of south pe that is used as argument in MPI calls
     
         REAL(wp), DIMENSION(nzb:nzt+1,nys-nbgp_local:nyn+nbgp_local,               &
    -                        nxl-nbgp_local:nxr+nbgp_local) ::  ar  !<
    +                        nxl-nbgp_local:nxr+nbgp_local) ::  ar !< 3d-array for which exchange is done
     
     
     
     #endif
     
    +!
    +!-- Set the communicator to be used
    +    IF ( PRESENT( alternative_communicator ) )  THEN
    +!
    +!--    Alternative communicator is to be used
    +       communicator = communicator_configurations(alternative_communicator)%mpi_communicator
    +       left_pe  = communicator_configurations(alternative_communicator)%pleft
    +       right_pe = communicator_configurations(alternative_communicator)%pright
    +       south_pe = communicator_configurations(alternative_communicator)%psouth
    +       north_pe = communicator_configurations(alternative_communicator)%pnorth
    +
    +    ELSE
    +!
    +!--    Main communicator is to be used
    +       communicator = comm2d
    +       left_pe  = pleft
    +       right_pe = pright
    +       south_pe = psouth
    +       north_pe = pnorth
    +
    +    ENDIF
    +
     #if defined( __parallel )
     
     
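The added branch above takes the MPI settings from communicator_configurations. The declaration of that array is not part of this changeset; as a hedged sketch, an element type consistent with the components referenced here (mpi_communicator, pleft, pright, psouth, pnorth) might look as follows. The type name and the allocatable array form are assumptions for illustration only.

    ! hypothetical sketch of one communicator configuration entry,
    ! inferred from the components used in the diff above
    TYPE communicator_configurations_type
       INTEGER ::  mpi_communicator  !< MPI communicator handle of this configuration
       INTEGER ::  pleft             !< rank of the left neighbour in this communicator
       INTEGER ::  pright            !< rank of the right neighbour in this communicator
       INTEGER ::  psouth            !< rank of the south neighbour in this communicator
       INTEGER ::  pnorth            !< rank of the north neighbour in this communicator
    END TYPE communicator_configurations_type

    TYPE(communicator_configurations_type), DIMENSION(:), ALLOCATABLE ::  communicator_configurations
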
     !
     !--       Send left boundary, receive right one (synchronous)
    -          CALL MPI_SENDRECV(                                                   &
    -              ar(nzb,nys-nbgp_local,nxl),   1, type_yz(grid_level), pleft,  0, &
    -              ar(nzb,nys-nbgp_local,nxr+1), 1, type_yz(grid_level), pright, 0, &
    -              comm2d, status, ierr )
    +          CALL MPI_SENDRECV( ar(nzb,nys-nbgp_local,nxl),   1, type_yz(grid_level), left_pe,  0,    &
    +                             ar(nzb,nys-nbgp_local,nxr+1), 1, type_yz(grid_level), right_pe, 0,    &
    +                             communicator, status, ierr )
     !
     !--       Send right boundary, receive left one (synchronous)
    -          CALL MPI_SENDRECV( ar(nzb,nys-nbgp_local,nxr+1-nbgp_local), 1,       &
    -                             type_yz(grid_level), pright, 1,                   &
    -                             ar(nzb,nys-nbgp_local,nxl-nbgp_local), 1,         &
    -                             type_yz(grid_level), pleft,  1,                   &
    -                             comm2d, status, ierr )
    +          CALL MPI_SENDRECV( ar(nzb,nys-nbgp_local,nxr+1-nbgp_local), 1,                           &
    +                             type_yz(grid_level), right_pe, 1,                                     &
    +                             ar(nzb,nys-nbgp_local,nxl-nbgp_local), 1,                             &
    +                             type_yz(grid_level), left_pe,  1,                                     &
    +                             communicator, status, ierr )
     
            ELSE
     
     !
     !--          Send left boundary, receive right one (asynchronous)
    -             CALL MPI_ISEND( ar(nzb,nys-nbgp_local,nxl),   1, type_yz(grid_level), &
    -                             pleft, req_count, comm2d, req(req_count+1), ierr )
    -             CALL MPI_IRECV( ar(nzb,nys-nbgp_local,nxr+1), 1, type_yz(grid_level), &
    -                             pright, req_count, comm2d, req(req_count+2), ierr )
    +             CALL MPI_ISEND( ar(nzb,nys-nbgp_local,nxl),   1, type_yz(grid_level), left_pe,        &
    +                             req_count, communicator, req(req_count+1), ierr )
    +             CALL MPI_IRECV( ar(nzb,nys-nbgp_local,nxr+1), 1, type_yz(grid_level), right_pe,       &
    +                             req_count, communicator, req(req_count+2), ierr )
     !
     !--          Send right boundary, receive left one (asynchronous)
    -             CALL MPI_ISEND( ar(nzb,nys-nbgp_local,nxr+1-nbgp_local), 1,       &
    -                             type_yz(grid_level), pright, req_count+1, comm2d, &
    -                             req(req_count+3), ierr )
    -             CALL MPI_IRECV( ar(nzb,nys-nbgp_local,nxl-nbgp_local),   1,       &
    -                             type_yz(grid_level), pleft,  req_count+1, comm2d, &
    -                             req(req_count+4), ierr )
    +             CALL MPI_ISEND( ar(nzb,nys-nbgp_local,nxr+1-nbgp_local), 1, type_yz(grid_level),      &
    +                             right_pe, req_count+1, communicator, req(req_count+3), ierr )
    +             CALL MPI_IRECV( ar(nzb,nys-nbgp_local,nxl-nbgp_local),   1, type_yz(grid_level),      &
    +                             left_pe,  req_count+1, communicator, req(req_count+4), ierr )
     
                  CALL MPI_WAITALL( 4, req, wait_stat, ierr )
     
     !
     !--       Send front boundary, receive rear one (synchronous)
    -          CALL MPI_SENDRECV(                                                   &
    -              ar(nzb,nys,nxl-nbgp_local),   1, type_xz(grid_level), psouth, 0, &
    -              ar(nzb,nyn+1,nxl-nbgp_local), 1, type_xz(grid_level), pnorth, 0, &
    -              comm2d, status, ierr )
    +          CALL MPI_SENDRECV( ar(nzb,nys,nxl-nbgp_local),   1, type_xz(grid_level), south_pe, 0,    &
    +                             ar(nzb,nyn+1,nxl-nbgp_local), 1, type_xz(grid_level), north_pe, 0,    &
    +                             communicator, status, ierr )
     !
     !--       Send rear boundary, receive front one (synchronous)
    -          CALL MPI_SENDRECV( ar(nzb,nyn-nbgp_local+1,nxl-nbgp_local), 1,       &
    -                             type_xz(grid_level), pnorth, 1,                   &
    -                             ar(nzb,nys-nbgp_local,nxl-nbgp_local),   1,       &
    -                             type_xz(grid_level), psouth, 1,                   &
    -                             comm2d, status, ierr )
    +          CALL MPI_SENDRECV( ar(nzb,nyn-nbgp_local+1,nxl-nbgp_local), 1,                           &
    +                             type_xz(grid_level), north_pe, 1,                                     &
    +                             ar(nzb,nys-nbgp_local,nxl-nbgp_local),   1,                           &
    +                             type_xz(grid_level), south_pe, 1,                                     &
    +                             communicator, status, ierr )
     
            ELSE
     
     !
     !--          Send front boundary, receive rear one (asynchronous)
    -             CALL MPI_ISEND( ar(nzb,nys,nxl-nbgp_local),   1, type_xz(grid_level), &
    -                             psouth, req_count, comm2d, req(req_count+1), ierr )
    -             CALL MPI_IRECV( ar(nzb,nyn+1,nxl-nbgp_local), 1, type_xz(grid_level), &
    -                             pnorth, req_count, comm2d, req(req_count+2), ierr )
    +             CALL MPI_ISEND( ar(nzb,nys,nxl-nbgp_local),   1, type_xz(grid_level), south_pe,       &
    +                             req_count, communicator, req(req_count+1), ierr )
    +             CALL MPI_IRECV( ar(nzb,nyn+1,nxl-nbgp_local), 1, type_xz(grid_level), north_pe,       &
    +                             req_count, communicator, req(req_count+2), ierr )
     !
     !--          Send rear boundary, receive front one (asynchronous)
    -             CALL MPI_ISEND( ar(nzb,nyn-nbgp_local+1,nxl-nbgp_local), 1,       &
    -                             type_xz(grid_level), pnorth, req_count+1, comm2d, &
    -                             req(req_count+3), ierr )
    -             CALL MPI_IRECV( ar(nzb,nys-nbgp_local,nxl-nbgp_local),   1,       &
    -                             type_xz(grid_level), psouth, req_count+1, comm2d, &
    -                             req(req_count+4), ierr )
    +             CALL MPI_ISEND( ar(nzb,nyn-nbgp_local+1,nxl-nbgp_local), 1, type_xz(grid_level),      &
    +                             north_pe, req_count+1, communicator, req(req_count+3), ierr )
    +             CALL MPI_IRECV( ar(nzb,nys-nbgp_local,nxl-nbgp_local),   1, type_xz(grid_level),      &
    +                             south_pe, req_count+1, communicator, req(req_count+4), ierr )
     
                  CALL MPI_WAITALL( 4, req, wait_stat, ierr )