 SUBROUTINE exchange_horiz_2d( ar )

!------------------------------------------------------------------------------!
! Actual revisions:
! -----------------
!
!
! Former revisions:
! -----------------
! $Log: exchange_horiz_2d.f90,v $
! Revision 1.9  2006/05/12 19:15:52  letzel
! MPI_REAL replaced by MPI_INTEGER in exchange_horiz_2d_int
!
! Revision 1.8  2006/02/23 12:18:32  raasch
! Additional subroutine exchange_horiz_2d_int for 2D integer arrays,
! extensions for non-cyclic boundary conditions along x or y for non-parallel
! case, anz_y renamed ngp_y
!
! Revision 1.7  2003/03/16 09:30:43  raasch
! Two underscores (_) are placed in front of all define-strings
!
! Revision 1.6  2002/06/11 12:59:35  raasch
! Cyclic boundary conditions are used instead of sendrecv in case of
! pdims(..)=1. Array "feld" is renamed to "ar".
!
! Revision 1.5  2001/03/30 07:23:16  raasch (Siegfried Raasch)
! Translation of remaining German identifiers (variables, subroutines, etc.)
!
! Revision 1.4  2001/01/22 06:43:50  raasch
! Module test_variables removed
!
! Revision 1.3  2000/12/20 12:09:27  letzel
! All comments translated into English.
!
! Revision 1.2  1998/07/06 12:13:53  raasch
! + USE test_variables
!
! Revision 1.1  1998/01/23 09:58:21  raasch
! Initial revision
!
!
! Description:
! ------------
! Exchange of lateral (ghost) boundaries (parallel computers) and cyclic
! boundary conditions, respectively, for 2D-arrays.
!------------------------------------------------------------------------------!

    USE control_parameters
    USE cpulog
    USE indices
    USE interfaces
    USE pegrid

    IMPLICIT NONE

    REAL ::  ar(nys-1:nyn+1,nxl-1:nxr+1)


    CALL cpu_log( log_point_s(13), 'exchange_horiz_2d', 'start' )

#if defined( __parallel )

!
!-- Exchange of lateral boundary values for parallel computers
    IF ( pdims(1) == 1 )  THEN

!
!--    One-dimensional decomposition along y, boundary values can be exchanged
!--    within the PE memory
       ar(nys:nyn,nxl-1) = ar(nys:nyn,nxr)
       ar(nys:nyn,nxr+1) = ar(nys:nyn,nxl)

    ELSE
!
!--    Send left boundary, receive right one
       CALL MPI_SENDRECV( ar(nys,nxl),   ngp_y, MPI_REAL, pleft,  0, &
                          ar(nys,nxr+1), ngp_y, MPI_REAL, pright, 0, &
                          comm2d, status, ierr )
!
!--    Send right boundary, receive left one
       CALL MPI_SENDRECV( ar(nys,nxr),   ngp_y, MPI_REAL, pright, 1, &
                          ar(nys,nxl-1), ngp_y, MPI_REAL, pleft,  1, &
                          comm2d, status, ierr )
    ENDIF

    IF ( pdims(2) == 1 )  THEN
!
!--    One-dimensional decomposition along x, boundary values can be exchanged
!--    within the PE memory
       ar(nys-1,:) = ar(nyn,:)
       ar(nyn+1,:) = ar(nys,:)

    ELSE
!
!--    Send front boundary, receive rear one
       CALL MPI_SENDRECV( ar(nys,nxl-1),   1, type_x, psouth, 0, &
                          ar(nyn+1,nxl-1), 1, type_x, pnorth, 0, &
                          comm2d, status, ierr )
!
!--    Send rear boundary, receive front one
       CALL MPI_SENDRECV( ar(nyn,nxl-1),   1, type_x, pnorth, 1, &
                          ar(nys-1,nxl-1), 1, type_x, psouth, 1, &
                          comm2d, status, ierr )
    ENDIF

#else

!
!-- Lateral boundary conditions in the non-parallel case
    IF ( bc_lr == 'cyclic' )  THEN
       ar(nys:nyn,nxl-1) = ar(nys:nyn,nxr)
       ar(nys:nyn,nxr+1) = ar(nys:nyn,nxl)
    ENDIF

    IF ( bc_ns == 'cyclic' )  THEN
       ar(nys-1,:) = ar(nyn,:)
       ar(nyn+1,:) = ar(nys,:)
    ENDIF

#endif

    CALL cpu_log( log_point_s(13), 'exchange_horiz_2d', 'stop' )

 END SUBROUTINE exchange_horiz_2d



 SUBROUTINE exchange_horiz_2d_int( ar )

!------------------------------------------------------------------------------!
! Description:
! ------------
! Exchange of lateral (ghost) boundaries (parallel computers) and cyclic
! boundary conditions, respectively, for 2D integer arrays.
!------------------------------------------------------------------------------!

    USE control_parameters
    USE cpulog
    USE indices
    USE interfaces
    USE pegrid

    IMPLICIT NONE

    INTEGER ::  ar(nys-1:nyn+1,nxl-1:nxr+1)


    CALL cpu_log( log_point_s(13), 'exchange_horiz_2d', 'start' )

#if defined( __parallel )

!
!-- Exchange of lateral boundary values for parallel computers
    IF ( pdims(1) == 1 )  THEN

!
!--    One-dimensional decomposition along y, boundary values can be exchanged
!--    within the PE memory
       ar(nys:nyn,nxl-1) = ar(nys:nyn,nxr)
       ar(nys:nyn,nxr+1) = ar(nys:nyn,nxl)

    ELSE
!
!--    Send left boundary, receive right one
       CALL MPI_SENDRECV( ar(nys,nxl),   ngp_y, MPI_INTEGER, pleft,  0, &
                          ar(nys,nxr+1), ngp_y, MPI_INTEGER, pright, 0, &
                          comm2d, status, ierr )
!
!--    Send right boundary, receive left one
       CALL MPI_SENDRECV( ar(nys,nxr),   ngp_y, MPI_INTEGER, pright, 1, &
                          ar(nys,nxl-1), ngp_y, MPI_INTEGER, pleft,  1, &
                          comm2d, status, ierr )
    ENDIF

    IF ( pdims(2) == 1 )  THEN
!
!--    One-dimensional decomposition along x, boundary values can be exchanged
!--    within the PE memory
       ar(nys-1,:) = ar(nyn,:)
       ar(nyn+1,:) = ar(nys,:)

    ELSE
!
!--    Send front boundary, receive rear one
       CALL MPI_SENDRECV( ar(nys,nxl-1),   1, type_x_int, psouth, 0, &
                          ar(nyn+1,nxl-1), 1, type_x_int, pnorth, 0, &
                          comm2d, status, ierr )
!
!--    Send rear boundary, receive front one
       CALL MPI_SENDRECV( ar(nyn,nxl-1),   1, type_x_int, pnorth, 1, &
                          ar(nys-1,nxl-1), 1, type_x_int, psouth, 1, &
                          comm2d, status, ierr )
    ENDIF

#else

!
!-- Lateral boundary conditions in the non-parallel case
    IF ( bc_lr == 'cyclic' )  THEN
       ar(nys:nyn,nxl-1) = ar(nys:nyn,nxr)
       ar(nys:nyn,nxr+1) = ar(nys:nyn,nxl)
    ENDIF

    IF ( bc_ns == 'cyclic' )  THEN
       ar(nys-1,:) = ar(nyn,:)
       ar(nyn+1,:) = ar(nys,:)
    ENDIF

#endif

    CALL cpu_log( log_point_s(13), 'exchange_horiz_2d', 'stop' )

 END SUBROUTINE exchange_horiz_2d_int
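
!
!-- Usage sketch (illustrative only): both routines assume that the caller's
!-- array carries one lateral ghost point on each side, i.e. it is dimensioned
!-- (nys-1:nyn+1,nxl-1:nxr+1) as declared above. The field name p2d in the
!-- commented example below is purely hypothetical; any conforming 2-D array
!-- can be passed.
!--
!--    REAL ::  p2d(nys-1:nyn+1,nxl-1:nxr+1)
!--
!--    p2d(nys:nyn,nxl:nxr) = ...          ! update interior values only
!--    CALL exchange_horiz_2d( p2d )       ! refresh lateral ghost points
!--                                        ! (cyclic copy or MPI exchange)
!--
!-- For 2-D INTEGER fields, the analogous call is exchange_horiz_2d_int.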