source: palm/trunk/SOURCE/pmc_handle_communicator.f90 @ 1787

Last change on this file since 1787 was 1787, checked in by raasch, 8 years ago

last commit documented

  • Property svn:keywords set to Id
File size: 15.2 KB
RevLine 
[1764]1 MODULE PMC_handle_communicator
[1762]2
3!--------------------------------------------------------------------------------!
4! This file is part of PALM.
5!
6! PALM is free software: you can redistribute it and/or modify it under the terms
7! of the GNU General Public License as published by the Free Software Foundation,
8! either version 3 of the License, or (at your option) any later version.
9!
10! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
11! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
12! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
13!
14! You should have received a copy of the GNU General Public License along with
15! PALM. If not, see <http://www.gnu.org/licenses/>.
16!
17! Copyright 1997-2015 Leibniz Universitaet Hannover
18!--------------------------------------------------------------------------------!
19!
20! Current revisions:
21! ------------------
[1765]22!
[1787]23!
[1765]24! Former revisions:
25! -----------------
26! $Id: pmc_handle_communicator.f90 1787 2016-03-08 06:57:00Z raasch $
27!
[1787]28! 1786 2016-03-08 05:49:27Z raasch
29! Bugfix: nesting_mode is broadcast now
30!
[1780]31! 1779 2016-03-03 08:01:28Z raasch
32! only the total number of PEs is given in the nestpar-NAMELIST,
33! additional comments included
34!
[1765]35! 1764 2016-02-28 12:45:19Z raasch
[1764]36! pmc_layout type: comm_cpl and comm_parent removed, character "name" moved at
37! the beginning of the variable list,
38! domain layout is read with new NAMELIST nestpar from standard file PARIN,
39! MPI-datatype REAL8 replaced by REAL, kind=8 replaced by wp,
40! variable domain_layouts instead of m_couplers introduced for this NAMELIST,
41! general format changed to PALM style
[1762]42!
[1763]43! 1762 2016-02-25 12:31:13Z hellstea
44! Initial revision by K. Ketelsen
45!
[1762]46! Description:
47! ------------
[1764]48! Handle MPI communicator in PALM model coupler
[1762]49!------------------------------------------------------------------------------!
50
[1764]51#if defined( __parallel )
52    USE kinds
[1762]53
[1764]54#if defined( __lc )
55    USE MPI
56#else
57    INCLUDE "mpif.h"
58#endif
[1762]59
[1764]60   USE pmc_general,                                                            &
61       ONLY: pmc_status_ok, pmc_status_error, pmc_max_modell
[1762]62
[1764]63   IMPLICIT NONE
[1762]64
[1764]65   TYPE pmc_layout
[1762]66
[1764]67      CHARACTER(len=32) ::  name
[1762]68
[1764]69      INTEGER  ::  id
70      INTEGER  ::  parent_id
[1779]71      INTEGER  ::  npe_total
[1762]72
[1764]73      REAL(wp) ::  lower_left_x
74      REAL(wp) ::  lower_left_y
[1762]75
[1764]76   END TYPE pmc_layout
77
78   PUBLIC  pmc_status_ok, pmc_status_error
79
80   INTEGER, PARAMETER, PUBLIC ::  pmc_error_npes          = 1  ! illegal number of PEs
81   INTEGER, PARAMETER, PUBLIC ::  pmc_namelist_error      = 2  ! error(s) in nestpar namelist
82   INTEGER, PARAMETER, PUBLIC ::  pmc_no_namelist_found   = 3  ! No couple layout file found
83
[1762]84   ! Coupler Setup
85
86   INTEGER                                    :: m_my_CPL_id  !Coupler id of this model
87   INTEGER                                    :: m_Parent_id  !Coupler id of parent of this model
88   INTEGER                                    :: m_NrOfCpl    !Number of Coupler in layout file
[1764]89   TYPE(PMC_layout),DIMENSION(PMC_MAX_MODELL) :: m_couplers   !Information of all coupler
[1762]90
91   ! MPI settings
92
93   INTEGER,PUBLIC                    :: m_model_comm          !Communicator of this model
94   INTEGER,PUBLIC                    :: m_to_server_comm      !Communicator to the server
95   INTEGER,DIMENSION(PMC_MAX_MODELL) :: m_to_client_comm      !Communicator to the client(s)
96   INTEGER,PUBLIC                    :: m_world_rank
97   INTEGER                           :: m_world_npes
98   INTEGER,PUBLIC                    :: m_model_rank
99   INTEGER,PUBLIC                    :: m_model_npes
100   INTEGER                           :: m_server_remote_size  !Number of Server PE's
101
102   PUBLIC m_to_client_comm
103
104   !Indicates this PE is server for Cleint NR
105
106   INTEGER,DIMENSION(:),POINTER,PUBLIC :: PMC_Server_for_Client
107
[1764]108   INTERFACE pmc_is_rootmodel
109      MODULE PROCEDURE pmc_is_rootmodel
110   END INTERFACE pmc_is_rootmodel
[1762]111
112   INTERFACE PMC_get_local_model_info
113      MODULE PROCEDURE PMC_get_local_model_info
114   END INTERFACE PMC_get_local_model_info
115
[1764]116   PUBLIC pmc_get_local_model_info, pmc_init_model, pmc_is_rootmodel
[1762]117
[1764]118 CONTAINS
[1762]119
!
!--Read the nesting layout, split MPI_COMM_WORLD into one communicator per
!--model, and create the server/client intercommunicators
   SUBROUTINE pmc_init_model( comm, nesting_mode, pmc_status )

      USE control_parameters,                                                  &
          ONLY:  message_string

      USE pegrid,                                                              &
          ONLY:  myid

      IMPLICIT NONE

      CHARACTER(LEN=7), INTENT(OUT) ::  nesting_mode  !< nesting mode read from the nestpar-NAMELIST

      INTEGER, INTENT(OUT)                ::  comm        !< communicator of the model this PE belongs to
      INTEGER, INTENT(OUT)                ::  pmc_status  !< ok / namelist error / no namelist found

      INTEGER                             ::  i, ierr, istat
      INTEGER,DIMENSION(pmc_max_modell+1) ::  start_pe       !< first global rank of every model
      INTEGER                             ::  m_my_cpl_rank  !< rank of this PE within its own model
      INTEGER                             ::  tag, clientcount
      INTEGER,DIMENSION(pmc_max_modell)   ::  activeserver   !< 1 where this PE is active server for client i

      pmc_status   = pmc_status_ok
      comm         = -1
      m_my_cpl_id  = -1
      clientcount  =  0
      activeserver = -1
      start_pe(:)  =  0

      CALL  MPI_COMM_RANK( MPI_COMM_WORLD, m_world_rank, istat )
      CALL  MPI_COMM_SIZE( MPI_COMM_WORLD, m_world_npes, istat )
!
!--   Only PE 0 of root model reads
      IF ( m_world_rank == 0 )  THEN

         CALL read_coupling_layout( nesting_mode, pmc_status )

         IF ( pmc_status /= pmc_no_namelist_found  .AND.                       &
              pmc_status /= pmc_namelist_error )                               &
         THEN
!
!--         Calculate start PE of every model
            start_pe(1) = 0
            DO  i = 2, m_nrofcpl+1
               start_pe(i) = start_pe(i-1) + m_couplers(i-1)%npe_total
            ENDDO

!
!--         The number of cores provided with the run must be the same as the
!--         total sum of cores required by all nest domains. The message is
!--         worded neutrally because the check also triggers when too many
!--         cores are provided.
            IF ( start_pe(m_nrofcpl+1) /= m_world_npes )  THEN
               WRITE ( message_string, '(A,I6,A,I6,A)' )                       &
                               'nesting-setup requires ',                      &
                               start_pe(m_nrofcpl+1), ' MPI procs, but ',      &
                               m_world_npes, ' are provided'
               CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
            ENDIF

         ENDIF

      ENDIF
!
!--   Broadcast the read status. This synchronises all other PEs with PE 0 of
!--   the root model. Without synchronisation, they would not behave in the
!--   correct way (e.g. they would not return in case of a missing NAMELIST)
      CALL MPI_BCAST( pmc_status, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )

      IF ( pmc_status == pmc_no_namelist_found )  THEN
!
!--      Not a nested run; return the MPI_WORLD communicator
         comm = MPI_COMM_WORLD
         RETURN

      ELSEIF ( pmc_status == pmc_namelist_error )  THEN
!
!--      Only the root model gives the error message. Others are aborted by the
!--      message-routine with MPI_ABORT. Must be done this way since myid and
!--      comm2d have not yet been assigned at this point.
         IF ( m_world_rank == 0 )  THEN
            message_string = 'errors in \$nestpar'
            CALL message( 'pmc_init_model', 'PA0223', 3, 2, 0, 6, 0 )
         ENDIF

      ENDIF

      CALL MPI_BCAST( m_nrofcpl, 1,          MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
      CALL MPI_BCAST( start_pe, m_nrofcpl+1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
!
!--   Broadcast the nesting mode once for all PEs (formerly this broadcast was
!--   redundantly repeated inside the coupler loop below)
      CALL MPI_BCAST( nesting_mode, LEN( nesting_mode ), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )

!
!--   Broadcast coupling layout
      DO  i = 1, m_nrofcpl
         CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%id,           1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%Parent_id,    1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%npe_total,    1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL,    0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL,    0, MPI_COMM_WORLD, istat )
      ENDDO

!
!--   Assign global MPI processes to individual models by setting the couple id
      DO  i = 1, m_nrofcpl
         IF ( m_world_rank >= start_pe(i)  .AND.  m_world_rank < start_pe(i+1) ) &
         THEN
            m_my_cpl_id = i
            EXIT
         ENDIF
      ENDDO
!
!--   The loop index i keeps the value at which the EXIT was taken, i.e. the
!--   coupler id of this PE (every rank lies in exactly one [start_pe) range
!--   because the sum of npe_total was checked against m_world_npes above)
      m_my_cpl_rank = m_world_rank - start_pe(i)

!
!--   MPI_COMM_WORLD is the communicator for ALL models (MPI-1 approach).
!--   The communicators for the individual models are created by MPI_COMM_SPLIT.
!--   The color of the model is represented by the coupler id
      CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm,   &
                           istat )
!
!--   Get size and rank of the model running on this PE
      CALL  MPI_COMM_RANK( comm, m_model_rank, istat )
      CALL  MPI_COMM_SIZE( comm, m_model_npes, istat )

!
!--   Broadcast (from PE 0) the parent id and id of every model
      DO  i = 1, m_nrofcpl
         CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0,           &
                         MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%id,        1, MPI_INTEGER, 0,           &
                         MPI_COMM_WORLD, istat )
      ENDDO

!
!--   Save the current model communicator for PMC internal use
      m_model_comm = comm

!
!--   Create intercommunicator between server and clients.
!--   MPI_INTERCOMM_CREATE creates an intercommunicator between 2 groups of
!--   different colors.
!--   The grouping was done above with MPI_COMM_SPLIT
      DO  i = 2, m_nrofcpl

         IF ( m_couplers(i)%parent_id == m_my_cpl_id )  THEN
!
!--         Collect server PEs.
!--         Every model except the root model has a parent model which acts as
!--         server model. Create an intercommunicator to connect current PE to
!--         all client PEs
            tag = 500 + i
            CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD, start_pe(i),   &
                                       tag, m_to_client_comm(i), istat)
            clientcount = clientcount + 1
            activeserver(i) = 1

         ELSEIF ( i == m_my_cpl_id)  THEN
!
!--         Collect client PEs.
!--         Every model except the root model has a parent model which acts as
!--         server model. Create an intercommunicator to connect current PE to
!--         all server PEs
            tag = 500 + i
            CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD,                &
                                       start_pe(m_couplers(i)%parent_id),      &
                                       tag, m_to_server_comm, istat )
         ENDIF

      ENDDO

!
!--   If I am server, count the number of clients that I have
!--   Although this loop is symmetric on all processes, the "activeserver" flag
!--   is true (==1) on the respective individual PE only.
      ALLOCATE( pmc_server_for_client(clientcount+1) )

      clientcount = 0
      DO  i = 2, m_nrofcpl
         IF ( activeserver(i) == 1 )  THEN
            clientcount = clientcount + 1
            pmc_server_for_client(clientcount) = i
         ENDIF
      ENDDO
!
!--   Get the size of the server model
      IF ( m_my_cpl_id > 1 )  THEN
         CALL MPI_COMM_REMOTE_SIZE( m_to_server_comm, m_server_remote_size,    &
                                    istat)
      ELSE
!
!--      The root model does not have a server
         m_server_remote_size = -1
      ENDIF
!
!--   Set myid to non-zero value except for the root domain. This is a setting
!--   for the message routine which is called at the end of pmci_init. That
!--   routine outputs messages for myid = 0, only. However, myid has not been
!--   assigned so far, so that all PEs of the root model would output a
!--   message. To avoid this, set myid to some other value except for PE0 of the
!--   root domain.
      IF ( m_world_rank /= 0 )  myid = 1

   END SUBROUTINE PMC_init_model
320
321
!
!-- Make module private variables available to palm
   SUBROUTINE pmc_get_local_model_info( my_cpl_id, my_cpl_parent_id, cpl_name, &
                                        npe_total, lower_left_x, lower_left_y )

      USE kinds

      IMPLICIT NONE

!
!--   All arguments are OPTIONAL; the caller requests only the items it needs
      CHARACTER(LEN=*), INTENT(OUT), OPTIONAL ::  cpl_name          !< name of this model
      INTEGER, INTENT(OUT), OPTIONAL          ::  my_cpl_id         !< coupler id of this model
      INTEGER, INTENT(OUT), OPTIONAL          ::  my_cpl_parent_id  !< coupler id of the parent model
      INTEGER, INTENT(OUT), OPTIONAL          ::  npe_total         !< number of PEs used by this model
      REAL(wp), INTENT(OUT), OPTIONAL         ::  lower_left_x      !< x-coordinate of the lower left domain corner
      REAL(wp), INTENT(OUT), OPTIONAL         ::  lower_left_y      !< y-coordinate of the lower left domain corner

!
!--   Index m_couplers with the module variable m_my_cpl_id, NOT with the
!--   optional dummy my_cpl_id: referencing an absent OPTIONAL argument is
!--   illegal, and my_cpl_id may be absent while the other items are requested
      IF ( PRESENT( my_cpl_id )           )  my_cpl_id        = m_my_cpl_id
      IF ( PRESENT( my_cpl_parent_id )    )  my_cpl_parent_id = m_couplers(m_my_cpl_id)%parent_id
      IF ( PRESENT( cpl_name )            )  cpl_name         = m_couplers(m_my_cpl_id)%name
      IF ( PRESENT( npe_total )           )  npe_total        = m_couplers(m_my_cpl_id)%npe_total
      IF ( PRESENT( lower_left_x )        )  lower_left_x     = m_couplers(m_my_cpl_id)%lower_left_x
      IF ( PRESENT( lower_left_y )        )  lower_left_y     = m_couplers(m_my_cpl_id)%lower_left_y

   END SUBROUTINE pmc_get_local_model_info
[1762]346
[1764]347
348
!
!--Return .TRUE. on PEs belonging to the root model (coupler id 1)
   LOGICAL FUNCTION pmc_is_rootmodel( )

      IMPLICIT NONE

      pmc_is_rootmodel = ( m_my_cpl_id == 1 )

   END FUNCTION pmc_is_rootmodel
356
357
358
!
!-- Read the nestpar-NAMELIST from the standard input file (unit 11) and store
!-- the domain layout in the module variables m_couplers and m_nrofcpl
 SUBROUTINE read_coupling_layout( nesting_mode, pmc_status )

    IMPLICIT NONE

    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_mode  !< nesting mode; set from the NAMELIST if given there

    INTEGER, INTENT(INOUT) ::  pmc_status  !< ok / namelist error / no namelist found
    INTEGER                ::  i, istat, iunit

    TYPE(pmc_layout), DIMENSION(pmc_max_modell) ::  domain_layouts  !< NAMELIST input buffer


    NAMELIST /nestpar/  domain_layouts, nesting_mode

!
!-- Initialize some coupling variables. id = -1 marks unused entries of the
!-- layout list
    domain_layouts(1:pmc_max_modell)%id = -1
    m_nrofcpl =   0
    iunit     = 345

    pmc_status = pmc_status_ok

!
!-- Open the NAMELIST-file and read the nesting layout
    CALL check_open( 11 )
    READ ( 11, nestpar, IOSTAT=istat )

    IF ( istat < 0 )  THEN
!
!--    No nestpar-NAMELIST found
       pmc_status = pmc_no_namelist_found
!
!--    Set filepointer to the beginning of the file. Otherwise PE0 will later
!--    be unable to read the inipar-NAMELIST
       REWIND ( 11 )
       RETURN

    ELSEIF ( istat > 0 )  THEN
!
!--    Errors in reading nestpar-NAMELIST
       pmc_status = pmc_namelist_error
       RETURN

    ENDIF

!
!-- Output location message
    CALL location_message( 'initialize communicators for nesting', .FALSE. )
!
!-- Assign the layout to the internally used variable
    m_couplers = domain_layouts

!
!-- Get the number of nested models given in the nestpar-NAMELIST.
!-- Preset m_nrofcpl for the case that ALL pmc_max_modell entries are used,
!-- in which case the loop below never reaches the id=-1 branch (formerly
!-- m_nrofcpl erroneously remained 0 in that case)
    m_nrofcpl = pmc_max_modell
    DO  i = 1, pmc_max_modell

       IF ( m_couplers(i)%id == -1 )  THEN
!
!--       When id=-1 is found for the first time, the list of domains is
!--       finished (at the latest after pmc_max_modell entries)
          m_nrofcpl = i - 1
          EXIT
       ENDIF

       WRITE ( 0, '(A,A,1X,3I7,1X,2F10.2)' )  'Set up Model  ',                &
                          TRIM( m_couplers(i)%name ), m_couplers(i)%id,        &
                          m_couplers(i)%Parent_id, m_couplers(i)%npe_total,    &
                          m_couplers(i)%lower_left_x,                          &
                          m_couplers(i)%lower_left_y

    ENDDO

 END SUBROUTINE read_coupling_layout
432
433#endif
434 END MODULE pmc_handle_communicator
Note: See TracBrowser for help on using the repository browser.