 MODULE PMC_handle_communicator

!--------------------------------------------------------------------------------!
! This file is part of PALM.
!
! PALM is free software: you can redistribute it and/or modify it under the terms
! of the GNU General Public License as published by the Free Software Foundation,
! either version 3 of the License, or (at your option) any later version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE. See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2015 Leibniz Universitaet Hannover
!--------------------------------------------------------------------------------!
!
! Current revisions:
! ------------------
!
!
! Former revisions:
! -----------------
! $Id: pmc_handle_communicator.f90 1765 2016-02-28 13:30:40Z raasch $
!
! 1764 2016-02-28 12:45:19Z raasch
! pmc_layout type: comm_cpl and comm_parent removed, character "name" moved to
! the beginning of the variable list,
! domain layout is read with new NAMELIST nestpar from standard file PARIN,
! MPI-datatype REAL8 replaced by REAL, kind=8 replaced by wp,
! variable domain_layouts instead of m_couplers introduced for this NAMELIST,
! general format changed to PALM style
!
! 1762 2016-02-25 12:31:13Z hellstea
! Initial revision by K. Ketelsen
!
! Description:
! ------------
! Handle MPI communicator in PALM model coupler
!------------------------------------------------------------------------------!

#if defined( __parallel )
    USE kinds

#if defined( __lc )
    USE MPI
#else
    INCLUDE "mpif.h"
#endif

    USE pmc_general,                                                            &
        ONLY: pmc_status_ok, pmc_status_error, pmc_max_modell

    IMPLICIT NONE

    TYPE pmc_layout

       CHARACTER(LEN=32) ::  name

       INTEGER ::  id
       INTEGER ::  parent_id
       INTEGER ::  npe_x
       INTEGER ::  npe_y

       REAL(wp) ::  lower_left_x
       REAL(wp) ::  lower_left_y

    END TYPE pmc_layout

    PUBLIC  pmc_status_ok, pmc_status_error

    INTEGER, PARAMETER, PUBLIC ::  pmc_error_npes        = 1  ! illegal number of PEs
    INTEGER, PARAMETER, PUBLIC ::  pmc_namelist_error    = 2  ! error(s) in nestpar namelist
    INTEGER, PARAMETER, PUBLIC ::  pmc_no_namelist_found = 3  ! no couple layout file found

!
!-- Coupler setup
    INTEGER ::  m_my_CPL_id   ! coupler id of this model
    INTEGER ::  m_Parent_id   ! coupler id of parent of this model
    INTEGER ::  m_NrOfCpl     ! number of couplers in layout file

    TYPE(PMC_layout), DIMENSION(PMC_MAX_MODELL) ::  m_couplers  ! information of all couplers
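!
!-- Illustrative example (hypothetical values, not part of the original code):
!-- with the pmc_layout components given above, a root domain plus one nest
!-- would be described in the nestpar NAMELIST of file PARIN roughly as
!--
!--    &nestpar  domain_layouts = 'coarse',  1, -1,  2,  2,    0.0,    0.0,
!--                               'fine',    2,  1,  2,  2,  400.0,  400.0,
!--              nesting_mode = 'two-way'  /
!--
!-- i.e. each entry supplies name, id, parent_id, npe_x, npe_y, lower_left_x
!-- and lower_left_y of one model; entries left at the default id = -1
!-- terminate the list (see read_coupling_layout below).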
!
!-- MPI settings
    INTEGER, PUBLIC ::  m_model_comm      ! communicator of this model
    INTEGER, PUBLIC ::  m_to_server_comm  ! communicator to the server

    INTEGER, DIMENSION(PMC_MAX_MODELL) ::  m_to_client_comm  ! communicator to the client(s)

    INTEGER, PUBLIC ::  m_world_rank
    INTEGER         ::  m_world_npes
    INTEGER, PUBLIC ::  m_model_rank
    INTEGER, PUBLIC ::  m_model_npes
    INTEGER         ::  m_server_remote_size  ! number of server PEs

    PUBLIC m_to_client_comm

!
!-- Indicates that this PE is server for client NR
    INTEGER, DIMENSION(:), POINTER, PUBLIC ::  PMC_Server_for_Client

    INTERFACE pmc_is_rootmodel
       MODULE PROCEDURE pmc_is_rootmodel
    END INTERFACE pmc_is_rootmodel

    INTERFACE PMC_get_local_model_info
       MODULE PROCEDURE PMC_get_local_model_info
    END INTERFACE PMC_get_local_model_info

    PUBLIC pmc_get_local_model_info, pmc_init_model, pmc_is_rootmodel

 CONTAINS

 SUBROUTINE pmc_init_model( comm, nesting_mode, pmc_status )

    USE control_parameters,                                                     &
        ONLY:  message_string

    USE pegrid,                                                                 &
        ONLY:  myid

    IMPLICIT NONE

    CHARACTER(LEN=7), INTENT(OUT) ::  nesting_mode

    INTEGER, INTENT(OUT) ::  comm
    INTEGER, INTENT(OUT) ::  pmc_status

    INTEGER ::  i, ierr, istat
    INTEGER ::  m_my_cpl_rank
    INTEGER ::  tag, clientcount

    INTEGER, DIMENSION(pmc_max_modell+1) ::  start_pe
    INTEGER, DIMENSION(pmc_max_modell)   ::  activeserver  ! I am active server for this client ID

    pmc_status   = pmc_status_ok
    comm         = -1
    m_my_cpl_id  = -1
    clientcount  =  0
    activeserver = -1
    start_pe(:)  =  0

    CALL MPI_COMM_RANK( MPI_COMM_WORLD, m_world_rank, istat )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, m_world_npes, istat )
!
!-- Only PE 0 of root model reads
    IF ( m_world_rank == 0 )  THEN

       CALL read_coupling_layout( nesting_mode, pmc_status )

       IF ( pmc_status /= pmc_no_namelist_found  .AND.                          &
            pmc_status /= pmc_namelist_error )                                  &
       THEN
!
!--       Calculate start PE of every model
          start_pe(1) = 0
          DO  i = 2, m_nrofcpl+1
             start_pe(i) = start_pe(i-1) +                                      &
                           m_couplers(i-1)%npe_x * m_couplers(i-1)%npe_y
          ENDDO

!
!--       The number of cores provided with the run must be the same as the
!--       total sum of cores required by all nest domains
!--       TO_DO: can we use > instead of /= ?
          IF ( start_pe(m_nrofcpl+1) /= m_world_npes )  THEN
!--          TO_DO: this IF statement is redundant
             IF ( m_world_rank == 0 )  THEN
                WRITE ( message_string, '(A,I6,A,I6,A)' )                       &
                                'nesting-setup requires more MPI procs (',      &
                                start_pe(m_nrofcpl+1), ') than provided (',     &
                                m_world_npes,')'
                CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
             ENDIF
          ENDIF

       ENDIF

    ENDIF
!
!-- Broadcast the read status. This synchronises all other PEs with PE 0 of
!-- the root model. Without synchronisation, they would not behave in the
!-- correct way (e.g. they would not return in case of a missing NAMELIST)
    CALL MPI_BCAST( pmc_status, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )

    IF ( pmc_status == pmc_no_namelist_found )  THEN
!
!--    Not a nested run; return the MPI_WORLD communicator
       comm = MPI_COMM_WORLD
       RETURN

    ELSEIF ( pmc_status == pmc_namelist_error )  THEN
!
!--    Only the root model gives the error message. Others are aborted by the
!--    message-routine with MPI_ABORT. Must be done this way since myid and
!--    comm2d have not yet been assigned at this point.
       IF ( m_world_rank == 0 )  THEN
          message_string = 'errors in \$nestpar'
          CALL message( 'pmc_init_model', 'PA0223', 3, 2, 0, 6, 0 )
       ENDIF

    ENDIF

    CALL MPI_BCAST( m_nrofcpl, 1,          MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
    CALL MPI_BCAST( start_pe, m_nrofcpl+1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
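!
!-- Worked example (hypothetical numbers): for a root model with
!-- npe_x = 2, npe_y = 2 and one nest with npe_x = 2, npe_y = 2, the
!-- calculation above gives start_pe = (/ 0, 4, 8 /). The root model then
!-- occupies world ranks 0-3, the nest occupies world ranks 4-7, and the run
!-- has to be started with exactly 8 MPI processes.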
!
!-- Broadcast coupling layout
    DO  i = 1, m_nrofcpl
       CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%id,           1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%Parent_id,    1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%npe_x,        1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%npe_y,        1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL,    0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL,    0, MPI_COMM_WORLD, istat )
    ENDDO

!
!-- Assign global MPI processes to individual models by setting the couple id
    DO  i = 1, m_nrofcpl
       IF ( m_world_rank >= start_pe(i)  .AND.  m_world_rank < start_pe(i+1) )  &
       THEN
          m_my_cpl_id = i
          EXIT
       ENDIF
    ENDDO
    m_my_cpl_rank = m_world_rank - start_pe(i)

!
!-- MPI_COMM_WORLD is the communicator for ALL models (MPI-1 approach).
!-- The communicators for the individual models are created by MPI_COMM_SPLIT.
!-- The color of the model is represented by the coupler id
    CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm,      &
                         istat )
    IF ( istat /= MPI_SUCCESS )  THEN
!
!--    TO_DO: replace by message-call
!--    TO_DO: Can this really happen, or is this just for the debugging phase?
       IF ( m_world_rank == 0 )  WRITE (0,*) 'PMC: Error in MPI_Comm_split '
       CALL MPI_ABORT( MPI_COMM_WORLD, ierr, istat )
    ENDIF

!
!-- Get size and rank of the model running on this PE
    CALL MPI_COMM_RANK( comm, m_model_rank, istat )
    CALL MPI_COMM_SIZE( comm, m_model_npes, istat )

!
!-- Broadcast (from PE 0) the parent id and id of every model
    DO  i = 1, m_nrofcpl
       CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0,              &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%id,        1, MPI_INTEGER, 0,              &
                       MPI_COMM_WORLD, istat )
    ENDDO

!
!-- TO_DO: describe what is happening here, and why
    m_model_comm = comm

!
!-- Create intercommunicator between server and clients.
!-- MPI_INTERCOMM_CREATE creates an intercommunicator between 2 groups of
!-- different colors.
!-- The grouping was done above with MPI_COMM_SPLIT
    DO  i = 2, m_nrofcpl

       IF ( m_couplers(i)%parent_id == m_my_cpl_id )  THEN
!
!--       Collect server PEs
!--       TO_DO: explain in more detail what is done here
          tag = 500 + i
          CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD, start_pe(i),      &
                                     tag, m_to_client_comm(i), istat )
          clientcount = clientcount + 1
          activeserver(i) = 1

       ELSEIF ( i == m_my_cpl_id )  THEN
!
!--       Collect client PEs
!--       TO_DO: explain in more detail what is happening here
          tag = 500 + i
          CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD,                   &
                                     start_pe(m_couplers(i)%parent_id),         &
                                     tag, m_to_server_comm, istat )
       ENDIF

       IF ( istat /= MPI_SUCCESS )  THEN
!
!--       TO_DO: replace by message-call
!--       TO_DO: can this really happen, or is this just for debugging?
          IF ( m_world_rank == 0 )  WRITE (0,*) 'PMC: Error in Coupler Setup '
          CALL MPI_ABORT( MPI_COMM_WORLD, ierr, istat )
       ENDIF

    ENDDO

!
!-- If I am server, count the number of clients that I have.
!-- Although this loop is symmetric on all processes, the "activeserver" flag
!-- is true (==1) on the respective individual PE only.
    ALLOCATE( pmc_server_for_client(clientcount+1) )

    clientcount = 0
    DO  i = 2, m_nrofcpl
       IF ( activeserver(i) == 1 )  THEN
          clientcount = clientcount + 1
          pmc_server_for_client(clientcount) = i
       ENDIF
    ENDDO
!-- TO_DO: explain why this is done
    pmc_server_for_client(clientcount+1) = -1

!
!-- Get the size of the server model
!-- TO_DO: what does "size" mean here? Number of PEs?
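!
!-- Note: MPI_COMM_REMOTE_SIZE returns the size of the remote group of an
!-- intercommunicator, i.e. m_server_remote_size will hold the number of PEs
!-- of the parent (server) model.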
    IF ( m_my_cpl_id > 1 )  THEN
       CALL MPI_COMM_REMOTE_SIZE( m_to_server_comm, m_server_remote_size,       &
                                  istat )
    ELSE
!
!--    The root model does not have a server
       m_server_remote_size = -1
    ENDIF
!
!-- Set myid to non-zero value except for the root domain. This is a setting
!-- for the message routine which is called at the end of pmci_init. That
!-- routine outputs messages for myid = 0, only. However, myid has not been
!-- assigned so far, so that all PEs of the root model would output a
!-- message. To avoid this, set myid to some other value except for PE0 of the
!-- root domain.
    IF ( m_world_rank /= 0 )  myid = 1

 END SUBROUTINE PMC_init_model


!
!-- Make module private variables available to palm
!-- TO_DO: why can't they be available from the beginning, i.e. why do they
!-- first have to be declared as different private variables?
 SUBROUTINE pmc_get_local_model_info( my_cpl_id, my_cpl_parent_id, cpl_name,    &
                                      npe_x, npe_y, lower_left_x,               &
                                      lower_left_y )

    USE kinds

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(OUT), OPTIONAL ::  cpl_name
    INTEGER, INTENT(OUT), OPTIONAL          ::  my_cpl_id
    INTEGER, INTENT(OUT), OPTIONAL          ::  my_cpl_parent_id
    INTEGER, INTENT(OUT), OPTIONAL          ::  npe_x
    INTEGER, INTENT(OUT), OPTIONAL          ::  npe_y
    REAL(wp), INTENT(OUT), OPTIONAL         ::  lower_left_x
    REAL(wp), INTENT(OUT), OPTIONAL         ::  lower_left_y

!-- TO_DO: is the PRESENT clause really required here?
    IF ( PRESENT( my_cpl_id )        )  my_cpl_id        = m_my_cpl_id
    IF ( PRESENT( my_cpl_parent_id ) )  my_cpl_parent_id = m_couplers(my_cpl_id)%parent_id
    IF ( PRESENT( cpl_name )         )  cpl_name         = m_couplers(my_cpl_id)%name
    IF ( PRESENT( npe_x )            )  npe_x            = m_couplers(my_cpl_id)%npe_x
    IF ( PRESENT( npe_y )            )  npe_y            = m_couplers(my_cpl_id)%npe_y
    IF ( PRESENT( lower_left_x )     )  lower_left_x     = m_couplers(my_cpl_id)%lower_left_x
    IF ( PRESENT( lower_left_y )     )  lower_left_y     = m_couplers(my_cpl_id)%lower_left_y

 END SUBROUTINE pmc_get_local_model_info


 LOGICAL FUNCTION pmc_is_rootmodel( )

    IMPLICIT NONE

    pmc_is_rootmodel = ( m_my_cpl_id == 1 )

 END FUNCTION pmc_is_rootmodel


!-- TO_DO: what does this comment mean?
! Private SUBROUTINEs

 SUBROUTINE read_coupling_layout( nesting_mode, pmc_status )

    IMPLICIT NONE

    CHARACTER(LEN=7) ::  nesting_mode

    INTEGER, INTENT(INOUT) ::  pmc_status
    INTEGER                ::  i, istat, iunit

    TYPE(pmc_layout), DIMENSION(pmc_max_modell) ::  domain_layouts

    NAMELIST /nestpar/  domain_layouts, nesting_mode

!
!-- Initialize some coupling variables
    domain_layouts(1:pmc_max_modell)%id = -1
    m_nrofcpl  =   0
    iunit      = 345

    pmc_status = pmc_status_ok

!
!-- Open the NAMELIST-file and read the nesting layout
    CALL check_open( 11 )
    READ ( 11, nestpar, IOSTAT=istat )

    IF ( istat < 0 )  THEN
!
!--    No nestpar-NAMELIST found
       pmc_status = pmc_no_namelist_found
!
!--    Set filepointer to the beginning of the file. Otherwise PE0 will later
!--    be unable to read the inipar-NAMELIST
       REWIND ( 11 )
       RETURN

    ELSEIF ( istat > 0 )  THEN
!
!--    Errors in reading nestpar-NAMELIST
       pmc_status = pmc_namelist_error
       RETURN

    ENDIF

!
!-- Output location message
    CALL location_message( 'initialize communicators for nesting', .FALSE. )
!
!-- Assign the layout to the internally used variable
    m_couplers = domain_layouts

!
!-- Get the number of nested models given in the nestpar-NAMELIST
    DO  i = 1, pmc_max_modell

       IF ( m_couplers(i)%id /= -1  .AND.  i <= pmc_max_modell )  THEN
          WRITE ( 0, '(A,A,1X,4I7,1X,2F10.2)' )  'Set up Model ',               &
                              TRIM( m_couplers(i)%name ), m_couplers(i)%id,     &
                              m_couplers(i)%Parent_id, m_couplers(i)%npe_x,     &
                              m_couplers(i)%npe_y, m_couplers(i)%lower_left_x,  &
                              m_couplers(i)%lower_left_y
       ELSE
!
!--       When id=-1 is found for the first time, the list of domains is
!--       finished (at the latest after pmc_max_modell entries)
          m_nrofcpl = i - 1
          EXIT

       ENDIF

    ENDDO

 END SUBROUTINE read_coupling_layout

#endif
 END MODULE pmc_handle_communicator