source: palm/trunk/SOURCE/pmc_handle_communicator_mod.f90 @ 1904

 MODULE pmc_handle_communicator

!--------------------------------------------------------------------------------!
! This file is part of PALM.
!
! PALM is free software: you can redistribute it and/or modify it under the terms
! of the GNU General Public License as published by the Free Software Foundation,
! either version 3 of the License, or (at your option) any later version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2016 Leibniz Universitaet Hannover
!--------------------------------------------------------------------------------!
!
! Current revisions:
! ------------------
!
!
! Former revisions:
! -----------------
! $Id: pmc_handle_communicator_mod.f90 1901 2016-05-04 15:39:38Z suehring $
!
! 1900 2016-05-04 15:27:53Z raasch
! re-formatting to match PALM style
!
! 1882 2016-04-20 15:24:46Z hellstea
! MPI_BCAST-calls to broadcast nesting_mode and nesting_datatransfer_mode
! are moved out of the DO i = 1, m_ncpl loop.
!
! 1850 2016-04-08 13:29:27Z maronga
! Module renamed
!
! 1808 2016-04-05 19:44:00Z raasch
! MPI module used by default on all machines
!
! 1797 2016-03-21 16:50:28Z raasch
! introduction of different datatransfer modes,
! export of comm_world_nesting
!
! 1791 2016-03-11 10:41:25Z raasch
! m_nrofcpl renamed m_ncpl,
! pmc_get_local_model_info renamed pmc_get_model_info, some keywords also
! renamed and some added,
! debug write-statements commented out
!
! 1786 2016-03-08 05:49:27Z raasch
! Bugfix: nesting_mode is broadcast now
!
! 1779 2016-03-03 08:01:28Z raasch
! only the total number of PEs is given in the nestpar-NAMELIST,
! additional comments included
!
! 1764 2016-02-28 12:45:19Z raasch
! pmc_layout type: comm_cpl and comm_parent removed, character "name" moved to
! the beginning of the variable list,
! domain layout is read with new NAMELIST nestpar from standard file PARIN,
! MPI-datatype REAL8 replaced by REAL, kind=8 replaced by wp,
! variable domain_layouts instead of m_couplers introduced for this NAMELIST,
! general format changed to PALM style
!
! 1762 2016-02-25 12:31:13Z hellstea
! Initial revision by K. Ketelsen
!
! Description:
! ------------
! Handle MPI communicator in PALM model coupler
!------------------------------------------------------------------------------!

#if defined( __parallel )
    USE kinds

#if defined( __mpifh )
    INCLUDE "mpif.h"
#else
    USE MPI
#endif

    USE pmc_general,                                                           &
        ONLY: pmc_status_ok, pmc_status_error, pmc_max_models

    IMPLICIT NONE

    TYPE pmc_layout

       CHARACTER(LEN=32) ::  name          !< name of the model

       INTEGER  ::  id            !< coupler id of the model
       INTEGER  ::  parent_id     !< coupler id of the parent model
       INTEGER  ::  npe_total     !< total number of PEs of the model

       REAL(wp) ::  lower_left_x  !< x-coordinate of the lower left domain corner
       REAL(wp) ::  lower_left_y  !< y-coordinate of the lower left domain corner

    END TYPE pmc_layout

    PUBLIC  pmc_status_ok, pmc_status_error

    INTEGER, PARAMETER, PUBLIC ::  pmc_error_npes        = 1  !< illegal number of PEs
    INTEGER, PARAMETER, PUBLIC ::  pmc_namelist_error    = 2  !< error(s) in nestpar namelist
    INTEGER, PARAMETER, PUBLIC ::  pmc_no_namelist_found = 3  !< no couple layout namelist found

    INTEGER ::  m_world_comm  !< global nesting communicator
    INTEGER ::  m_my_cpl_id   !< coupler id of this model
    INTEGER ::  m_parent_id   !< coupler id of parent of this model
    INTEGER ::  m_ncpl        !< number of couplers given in nestpar namelist

    TYPE(pmc_layout), DIMENSION(pmc_max_models) ::  m_couplers  !< information of all couplers

    INTEGER, PUBLIC ::  m_model_comm          !< communicator of this model
    INTEGER, PUBLIC ::  m_to_server_comm      !< communicator to the server
    INTEGER, PUBLIC ::  m_world_rank          !< rank of this PE in MPI_COMM_WORLD
    INTEGER         ::  m_world_npes          !< total number of PEs in MPI_COMM_WORLD
    INTEGER, PUBLIC ::  m_model_rank          !< rank of this PE within its model
    INTEGER, PUBLIC ::  m_model_npes          !< number of PEs of this model
    INTEGER         ::  m_server_remote_size  !< number of server PEs

    INTEGER, DIMENSION(pmc_max_models), PUBLIC ::  m_to_client_comm       !< communicator to the client(s)
    INTEGER, DIMENSION(:), POINTER, PUBLIC     ::  pmc_server_for_client  !< ids of the clients served by this model


    INTERFACE pmc_is_rootmodel
       MODULE PROCEDURE pmc_is_rootmodel
    END INTERFACE pmc_is_rootmodel

    INTERFACE pmc_get_model_info
       MODULE PROCEDURE pmc_get_model_info
    END INTERFACE pmc_get_model_info

    PUBLIC pmc_get_model_info, pmc_init_model, pmc_is_rootmodel

 CONTAINS

 SUBROUTINE pmc_init_model( comm, nesting_datatransfer_mode, nesting_mode,     &
                            pmc_status )

    USE control_parameters,                                                    &
        ONLY:  message_string

    USE pegrid,                                                                &
        ONLY:  myid

    IMPLICIT NONE

    CHARACTER(LEN=7), INTENT(OUT) ::  nesting_mode               !< steering mode of the nesting
    CHARACTER(LEN=7), INTENT(OUT) ::  nesting_datatransfer_mode  !< steering mode of the data transfer

    INTEGER, INTENT(OUT) ::  comm        !< model communicator returned to the caller
    INTEGER, INTENT(OUT) ::  pmc_status  !< status (error code) of the initialization

    INTEGER ::  clientcount    !< number of clients of this model
    INTEGER ::  i              !< loop index
    INTEGER ::  ierr           !< MPI error code
    INTEGER ::  istat          !< status
    INTEGER ::  m_my_cpl_rank  !< rank of this PE within its model
    INTEGER ::  tag            !< MPI tag

    INTEGER, DIMENSION(pmc_max_models)   ::  activeserver  !< flag (==1): this PE is an active server for the respective client id
    INTEGER, DIMENSION(pmc_max_models+1) ::  start_pe      !< first global PE of each model

    pmc_status   = pmc_status_ok
    comm         = -1
    m_world_comm = MPI_COMM_WORLD
    m_my_cpl_id  = -1
    clientcount  =  0
    activeserver = -1
    start_pe(:)  =  0

    CALL MPI_COMM_RANK( MPI_COMM_WORLD, m_world_rank, istat )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, m_world_npes, istat )
!
!-- Only PE 0 of the root model reads the coupling layout
    IF ( m_world_rank == 0 )  THEN

       CALL read_coupling_layout( nesting_datatransfer_mode, nesting_mode,     &
                                  pmc_status )

       IF ( pmc_status /= pmc_no_namelist_found  .AND.                         &
            pmc_status /= pmc_namelist_error )                                 &
       THEN
!
!--       Calculate the start PE of every model
          start_pe(1) = 0
          DO  i = 2, m_ncpl+1
             start_pe(i) = start_pe(i-1) + m_couplers(i-1)%npe_total
          ENDDO
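!
!--       Example (model sizes assumed for illustration): for three models
!--       with npe_total = 8, 4, 4, this yields start_pe = (/ 0, 8, 12, 16 /),
!--       i.e. model 2 occupies the global ranks 8 to 11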

!
!--       The number of cores provided with the run must be the same as the
!--       total sum of cores required by all nest domains
          IF ( start_pe(m_ncpl+1) /= m_world_npes )  THEN
             WRITE ( message_string, '(2A,I6,A,I6,A)' )                        &
                             'nesting-setup requires a different number of ',  &
                             'MPI procs (', start_pe(m_ncpl+1),                &
                             ') than provided (', m_world_npes, ')'
             CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
          ENDIF

       ENDIF

    ENDIF
!
!-- Broadcast the read status. This synchronises all other PEs with PE 0 of
!-- the root model. Without synchronisation, they would not behave in the
!-- correct way (e.g. they would not return in case of a missing NAMELIST)
    CALL MPI_BCAST( pmc_status, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )

    IF ( pmc_status == pmc_no_namelist_found )  THEN
!
!--    Not a nested run; return the MPI_COMM_WORLD communicator
       comm = MPI_COMM_WORLD
       RETURN

    ELSEIF ( pmc_status == pmc_namelist_error )  THEN
!
!--    Only the root model gives the error message. Others are aborted by the
!--    message-routine with MPI_ABORT. Must be done this way since myid and
!--    comm2d have not yet been assigned at this point.
       IF ( m_world_rank == 0 )  THEN
          message_string = 'errors in \$nestpar'
          CALL message( 'pmc_init_model', 'PA0223', 3, 2, 0, 6, 0 )
       ENDIF

    ENDIF

    CALL MPI_BCAST( m_ncpl,          1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
    CALL MPI_BCAST( start_pe, m_ncpl+1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )

!
!-- Broadcast coupling layout
    DO  i = 1, m_ncpl
       CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ),          &
                       MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%id,           1, MPI_INTEGER, 0,          &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%parent_id,    1, MPI_INTEGER, 0,          &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%npe_total,    1, MPI_INTEGER, 0,          &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL,    0,          &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL,    0,          &
                       MPI_COMM_WORLD, istat )
    ENDDO
    CALL MPI_BCAST( nesting_mode, LEN( nesting_mode ), MPI_CHARACTER, 0,       &
                    MPI_COMM_WORLD, istat )
    CALL MPI_BCAST( nesting_datatransfer_mode, LEN( nesting_datatransfer_mode ),&
                    MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )

!
!-- Assign global MPI processes to individual models by setting the couple id
    DO  i = 1, m_ncpl
       IF ( m_world_rank >= start_pe(i)  .AND.  m_world_rank < start_pe(i+1) ) &
       THEN
          m_my_cpl_id = i
          EXIT
       ENDIF
    ENDDO
    m_my_cpl_rank = m_world_rank - start_pe(i)

!
!-- MPI_COMM_WORLD is the communicator for ALL models (MPI-1 approach).
!-- The communicators for the individual models are created by MPI_COMM_SPLIT.
!-- The color of the model is represented by the coupler id
    CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm,     &
                         istat )
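!
!-- Example (PE counts assumed for illustration): with two models of 8 and
!-- 4 PEs, global ranks 0-7 pass color 1 and ranks 8-11 pass color 2, so
!-- MPI_COMM_SPLIT yields two disjoint model communicators of sizes 8 and 4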
!
!-- Get size and rank of the model running on this PE
    CALL MPI_COMM_RANK( comm, m_model_rank, istat )
    CALL MPI_COMM_SIZE( comm, m_model_npes, istat )

!
!-- Broadcast (from PE 0) the parent id and id of every model
    DO  i = 1, m_ncpl
       CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0,             &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%id,        1, MPI_INTEGER, 0,             &
                       MPI_COMM_WORLD, istat )
    ENDDO

!
!-- Save the current model communicator for pmc internal use
    m_model_comm = comm

!
!-- Create the intercommunicators between server and clients.
!-- MPI_INTERCOMM_CREATE creates an intercommunicator between 2 groups of
!-- different colors.
!-- The grouping was done above with MPI_COMM_SPLIT
    DO  i = 2, m_ncpl

       IF ( m_couplers(i)%parent_id == m_my_cpl_id )  THEN
!
!--       Collect server PEs.
!--       Every model except the root model has a parent model which acts as
!--       its server model. Create an intercommunicator to connect the current
!--       PE to all client PEs
          tag = 500 + i
          CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD, start_pe(i),     &
                                     tag, m_to_client_comm(i), istat )
          clientcount = clientcount + 1
          activeserver(i) = 1

       ELSEIF ( i == m_my_cpl_id )  THEN
!
!--       Collect client PEs.
!--       Every model except the root model has a parent model which acts as
!--       its server model. Create an intercommunicator to connect the current
!--       PE to all server PEs
          tag = 500 + i
          CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD,                  &
                                     start_pe(m_couplers(i)%parent_id),        &
                                     tag, m_to_server_comm, istat )
       ENDIF

    ENDDO
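!
!-- Example (values assumed for illustration): for client model i = 2 with
!-- start_pe(2) = 8 and the root model (start_pe(1) = 0) as its parent, the
!-- server PEs pass remote leader 8 and the client PEs remote leader 0; the
!-- matching tag 502 pairs the two calls into one intercommunicator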

!
!-- If I am a server, count the number of clients that I have.
!-- Although this loop is symmetric on all processes, the "activeserver" flag
!-- is true (==1) on the respective individual PE only
    ALLOCATE( pmc_server_for_client(clientcount+1) )

    clientcount = 0
    DO  i = 2, m_ncpl
       IF ( activeserver(i) == 1 )  THEN
          clientcount = clientcount + 1
          pmc_server_for_client(clientcount) = i
       ENDIF
    ENDDO
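!
!-- Example (ids assumed for illustration): if models 2 and 3 name this model
!-- as their parent, clientcount = 2 and pmc_server_for_client(1:2) = (/ 2, 3 /)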
!
!-- Get the size of the server model
    IF ( m_my_cpl_id > 1 )  THEN
       CALL MPI_COMM_REMOTE_SIZE( m_to_server_comm, m_server_remote_size,      &
                                  istat )
    ELSE
!
!--    The root model does not have a server
       m_server_remote_size = -1
    ENDIF
!
!-- Set myid to a non-zero value except for the root domain. This is a setting
!-- for the message routine which is called at the end of pmci_init. That
!-- routine outputs messages for myid = 0 only. However, myid has not been
!-- assigned so far, so that all PEs of the root model would output a
!-- message. To avoid this, set myid to some other value except for PE0 of the
!-- root domain.
    IF ( m_world_rank /= 0 )  myid = 1

 END SUBROUTINE pmc_init_model



 SUBROUTINE pmc_get_model_info( comm_world_nesting, cpl_id, cpl_name,          &
                                cpl_parent_id, lower_left_x, lower_left_y,     &
                                ncpl, npe_total, request_for_cpl_id )
!
!-- Provide module private variables of the pmc for PALM
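!
!-- Usage sketch (values assumed for illustration):
!--    CALL pmc_get_model_info( ncpl = n, npe_total = npes,                    &
!--                             request_for_cpl_id = 2 )
!-- returns the total number of models and the PE count of model 2; without
!-- request_for_cpl_id, information about the calling model is returned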

    USE kinds

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(OUT), OPTIONAL ::  cpl_name  !< name of the model

    INTEGER, INTENT(IN), OPTIONAL ::  request_for_cpl_id  !< id of the model for which information is requested

    INTEGER, INTENT(OUT), OPTIONAL ::  comm_world_nesting  !< global nesting communicator
    INTEGER, INTENT(OUT), OPTIONAL ::  cpl_id              !< coupler id of the model
    INTEGER, INTENT(OUT), OPTIONAL ::  cpl_parent_id       !< coupler id of the parent model
    INTEGER, INTENT(OUT), OPTIONAL ::  ncpl                !< number of couplers
    INTEGER, INTENT(OUT), OPTIONAL ::  npe_total           !< total number of PEs of the model

    INTEGER ::  requested_cpl_id  !< id of the model for which information is returned

    REAL(wp), INTENT(OUT), OPTIONAL ::  lower_left_x  !< x-coordinate of the lower left domain corner
    REAL(wp), INTENT(OUT), OPTIONAL ::  lower_left_y  !< y-coordinate of the lower left domain corner

!
!-- Set the requested coupler id
    IF ( PRESENT( request_for_cpl_id ) )  THEN
       requested_cpl_id = request_for_cpl_id
!
!--    Check for allowed range of values
       IF ( requested_cpl_id < 1  .OR.  requested_cpl_id > m_ncpl )  RETURN
    ELSE
       requested_cpl_id = m_my_cpl_id
    ENDIF

!
!-- Return the requested information
    IF ( PRESENT( comm_world_nesting )  )  THEN
       comm_world_nesting = m_world_comm
    ENDIF
    IF ( PRESENT( cpl_id )        )  THEN
       cpl_id = requested_cpl_id
    ENDIF
    IF ( PRESENT( cpl_parent_id ) )  THEN
       cpl_parent_id = m_couplers(requested_cpl_id)%parent_id
    ENDIF
    IF ( PRESENT( cpl_name )      )  THEN
       cpl_name = m_couplers(requested_cpl_id)%name
    ENDIF
    IF ( PRESENT( ncpl )          )  THEN
       ncpl = m_ncpl
    ENDIF
    IF ( PRESENT( npe_total )     )  THEN
       npe_total = m_couplers(requested_cpl_id)%npe_total
    ENDIF
    IF ( PRESENT( lower_left_x )  )  THEN
       lower_left_x = m_couplers(requested_cpl_id)%lower_left_x
    ENDIF
    IF ( PRESENT( lower_left_y )  )  THEN
       lower_left_y = m_couplers(requested_cpl_id)%lower_left_y
    ENDIF

 END SUBROUTINE pmc_get_model_info



 LOGICAL FUNCTION pmc_is_rootmodel( )

    IMPLICIT NONE

    pmc_is_rootmodel = ( m_my_cpl_id == 1 )

 END FUNCTION pmc_is_rootmodel



 SUBROUTINE read_coupling_layout( nesting_datatransfer_mode, nesting_mode,     &
                                  pmc_status )

    IMPLICIT NONE

    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_mode               !< steering mode of the nesting
    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_datatransfer_mode  !< steering mode of the data transfer

    INTEGER, INTENT(INOUT) ::  pmc_status  !< status (error code)
    INTEGER                ::  i, istat    !< loop index, read status

    TYPE(pmc_layout), DIMENSION(pmc_max_models) ::  domain_layouts  !< layouts of all models as given in the namelist

!
!-- TO_DO: include anterp_relax_length_? into nestpar and communicate them.
    NAMELIST /nestpar/  domain_layouts, nesting_datatransfer_mode, nesting_mode
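!
!-- Example of a nestpar namelist in PARIN (two models; all values assumed
!-- for illustration; the list of domains ends with the first id = -1):
!--    &nestpar  domain_layouts = 'coarse',  1, -1,  8,    0.0,    0.0,
!--                               'fine',    2,  1,  4,  512.0,  512.0,
!--              nesting_mode = 'two-way',
!--              nesting_datatransfer_mode = 'mixed'  /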

!
!-- Initialize some coupling variables
    domain_layouts(1:pmc_max_models)%id = -1
    m_ncpl = 0

    pmc_status = pmc_status_ok

!
!-- Open the NAMELIST-file and read the nesting layout
    CALL check_open( 11 )
    READ ( 11, nestpar, IOSTAT=istat )

    IF ( istat < 0 )  THEN
!
!--    No nestpar-NAMELIST found
       pmc_status = pmc_no_namelist_found
!
!--    Set the file pointer to the beginning of the file. Otherwise PE0 will
!--    later be unable to read the inipar-NAMELIST
       REWIND ( 11 )
       RETURN

    ELSEIF ( istat > 0 )  THEN
!
!--    Errors in reading nestpar-NAMELIST
       pmc_status = pmc_namelist_error
       RETURN

    ENDIF

!
!-- Output location message
    CALL location_message( 'initialize communicators for nesting', .FALSE. )
!
!-- Assign the layout to the internally used variable
    m_couplers = domain_layouts

!
!-- Get the number of nested models given in the nestpar-NAMELIST
    DO  i = 1, pmc_max_models
!
!--    When id = -1 is found for the first time, the list of domains is
!--    finished
       IF ( m_couplers(i)%id == -1  .OR.  i == pmc_max_models )  THEN
          IF ( m_couplers(i)%id == -1 )  THEN
             m_ncpl = i - 1
             EXIT
          ELSE
             m_ncpl = pmc_max_models
          ENDIF
       ENDIF

    ENDDO

 END SUBROUTINE read_coupling_layout

#endif
 END MODULE pmc_handle_communicator