source: palm/trunk/SOURCE/pmc_handle_communicator_mod.f90 @ 1882

Last change on this file since 1882 was 1882, checked in by hellstea, 8 years ago

Precomputation of ijfc for anterpolation added in pmc_interface_mod.f90

 MODULE PMC_handle_communicator

!--------------------------------------------------------------------------------!
! This file is part of PALM.
!
! PALM is free software: you can redistribute it and/or modify it under the terms
! of the GNU General Public License as published by the Free Software Foundation,
! either version 3 of the License, or (at your option) any later version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2016 Leibniz Universitaet Hannover
!--------------------------------------------------------------------------------!
!
! Current revisions:
! ------------------
! MPI_BCAST-calls to broadcast nesting_mode and nesting_datatransfer_mode
! are moved out from the DO i = 1, m_ncpl loop.
!
! Former revisions:
! -----------------
! $Id: pmc_handle_communicator_mod.f90 1882 2016-04-20 15:24:46Z hellstea $
!
! 1850 2016-04-08 13:29:27Z maronga
! Module renamed
!
!
! 1808 2016-04-05 19:44:00Z raasch
! MPI module used by default on all machines
!
! 1797 2016-03-21 16:50:28Z raasch
! introduction of different datatransfer modes,
! export of comm_world_nesting
!
! 1791 2016-03-11 10:41:25Z raasch
! m_nrofcpl renamed m_ncpl,
! pmc_get_local_model_info renamed pmc_get_model_info, some keywords also
! renamed and some added,
! debug write-statements commented out
!
! 1786 2016-03-08 05:49:27Z raasch
! Bugfix: nesting_mode is broadcast now
!
! 1779 2016-03-03 08:01:28Z raasch
! only the total number of PEs is given in the nestpar-NAMELIST,
! additional comments included
!
! 1764 2016-02-28 12:45:19Z raasch
! pmc_layout type: comm_cpl and comm_parent removed, character "name" moved at
! the beginning of the variable list,
! domain layout is read with new NAMELIST nestpar from standard file PARIN,
! MPI-datatype REAL8 replaced by REAL, kind=8 replaced by wp,
! variable domain_layouts instead of m_couplers introduced for this NAMELIST,
! general format changed to PALM style
!
! 1762 2016-02-25 12:31:13Z hellstea
! Initial revision by K. Ketelsen
!
! Description:
! ------------
! Handle MPI communicator in PALM model coupler
!------------------------------------------------------------------------------!

#if defined( __parallel )
    USE kinds

#if defined( __mpifh )
    INCLUDE "mpif.h"
#else
    USE MPI
#endif

   USE pmc_general,                                                            &
       ONLY: pmc_status_ok, pmc_status_error, pmc_max_modell

   IMPLICIT NONE

   TYPE pmc_layout

      CHARACTER(len=32) ::  name

      INTEGER  ::  id
      INTEGER  ::  parent_id
      INTEGER  ::  npe_total

      REAL(wp) ::  lower_left_x
      REAL(wp) ::  lower_left_y

   END TYPE pmc_layout

   PUBLIC  pmc_status_ok, pmc_status_error

   INTEGER, PARAMETER, PUBLIC ::  pmc_error_npes          = 1  ! illegal number of PEs
   INTEGER, PARAMETER, PUBLIC ::  pmc_namelist_error      = 2  ! error(s) in nestpar namelist
   INTEGER, PARAMETER, PUBLIC ::  pmc_no_namelist_found   = 3  ! No couple layout file found

   ! Coupler Setup

   INTEGER                                    :: m_world_comm !global nesting communicator
   INTEGER                                    :: m_my_CPL_id  !Coupler id of this model
   INTEGER                                    :: m_Parent_id  !Coupler id of parent of this model
   INTEGER                                    :: m_ncpl       !Number of Couplers in layout file

   TYPE(PMC_layout),DIMENSION(PMC_MAX_MODELL) :: m_couplers   !Information of all couplers

   ! MPI settings

   INTEGER,PUBLIC                    :: m_model_comm          !Communicator of this model
   INTEGER,PUBLIC                    :: m_to_server_comm      !Communicator to the server
   INTEGER,DIMENSION(PMC_MAX_MODELL) :: m_to_client_comm      !Communicator to the client(s)
   INTEGER,PUBLIC                    :: m_world_rank
   INTEGER                           :: m_world_npes
   INTEGER,PUBLIC                    :: m_model_rank
   INTEGER,PUBLIC                    :: m_model_npes
   INTEGER                           :: m_server_remote_size  !Number of server PEs

   PUBLIC m_to_client_comm

   !Indicates that this PE is server for client NR

   INTEGER,DIMENSION(:),POINTER,PUBLIC :: PMC_Server_for_Client

   INTERFACE pmc_is_rootmodel
      MODULE PROCEDURE pmc_is_rootmodel
   END INTERFACE pmc_is_rootmodel

   INTERFACE pmc_get_model_info
      MODULE PROCEDURE pmc_get_model_info
   END INTERFACE pmc_get_model_info

   PUBLIC pmc_get_model_info, pmc_init_model, pmc_is_rootmodel

 CONTAINS

   SUBROUTINE pmc_init_model( comm, nesting_datatransfer_mode, nesting_mode,   &
                              pmc_status )

      USE control_parameters,                                                  &
          ONLY:  message_string

      USE pegrid,                                                              &
          ONLY:  myid

      IMPLICIT NONE

      CHARACTER(LEN=7), INTENT(OUT) ::  nesting_mode
      CHARACTER(LEN=7), INTENT(OUT) ::  nesting_datatransfer_mode

      INTEGER, INTENT(OUT)                ::  comm
      INTEGER, INTENT(OUT)                ::  pmc_status

      INTEGER                             ::  i, ierr, istat
      INTEGER,DIMENSION(pmc_max_modell+1) ::  start_pe
      INTEGER                             ::  m_my_cpl_rank
      INTEGER                             ::  tag, clientcount
      INTEGER,DIMENSION(pmc_max_modell)   ::  activeserver  ! I am active server for this client ID

      pmc_status   = pmc_status_ok
      comm         = -1
      m_world_comm = MPI_COMM_WORLD
      m_my_cpl_id  = -1
      clientcount  =  0
      activeserver = -1
      start_pe(:)  =  0

      CALL  MPI_COMM_RANK( MPI_COMM_WORLD, m_world_rank, istat )
      CALL  MPI_COMM_SIZE( MPI_COMM_WORLD, m_world_npes, istat )
!
!--   Only PE 0 of the root model reads the coupling layout
      IF ( m_world_rank == 0 )  THEN

         CALL read_coupling_layout( nesting_datatransfer_mode, nesting_mode,   &
                                    pmc_status )

         IF ( pmc_status /= pmc_no_namelist_found  .AND.                       &
              pmc_status /= pmc_namelist_error )                               &
         THEN
!
!--         Calculate the start PE of every model
            start_pe(1) = 0
            DO  i = 2, m_ncpl+1
               start_pe(i) = start_pe(i-1) + m_couplers(i-1)%npe_total
            ENDDO
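!
!--         For example (purely illustrative values): with three models whose
!--         npe_total are 16, 4 and 4, start_pe becomes (/ 0, 16, 20, 24 /),
!--         i.e. model i occupies the world ranks start_pe(i) to
!--         start_pe(i+1)-1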

!
!--         The number of cores provided with the run must be the same as the
!--         total sum of cores required by all nest domains
            IF ( start_pe(m_ncpl+1) /= m_world_npes )  THEN
               WRITE ( message_string, '(A,I6,A,I6,A)' )                       &
                               'nesting-setup requires more MPI procs (',      &
                               start_pe(m_ncpl+1), ') than provided (',        &
                               m_world_npes,')'
               CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
            ENDIF

         ENDIF

      ENDIF
!
!--   Broadcast the read status. This synchronises all other PEs with PE 0 of
!--   the root model. Without synchronisation, they would not behave in the
!--   correct way (e.g. they would not return in case of a missing NAMELIST).
      CALL MPI_BCAST( pmc_status, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )

      IF ( pmc_status == pmc_no_namelist_found )  THEN
!
!--      Not a nested run; return the MPI_COMM_WORLD communicator
         comm = MPI_COMM_WORLD
         RETURN

      ELSEIF ( pmc_status == pmc_namelist_error )  THEN
!
!--      Only the root model gives the error message. Others are aborted by the
!--      message-routine with MPI_ABORT. Must be done this way since myid and
!--      comm2d have not yet been assigned at this point.
         IF ( m_world_rank == 0 )  THEN
            message_string = 'errors in \$nestpar'
            CALL message( 'pmc_init_model', 'PA0223', 3, 2, 0, 6, 0 )
         ENDIF

      ENDIF

      CALL MPI_BCAST( m_ncpl,          1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
      CALL MPI_BCAST( start_pe, m_ncpl+1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat)

!
!--   Broadcast the coupling layout
      DO  i = 1, m_ncpl
         CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%id,           1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%Parent_id,    1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%npe_total,    1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL,    0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL,    0, MPI_COMM_WORLD, istat )
      ENDDO
      CALL MPI_BCAST( nesting_mode, LEN( nesting_mode ), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
      CALL MPI_BCAST( nesting_datatransfer_mode, LEN(nesting_datatransfer_mode), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )

!
!--   Assign global MPI processes to the individual models by setting the
!--   coupler id
      DO  i = 1, m_ncpl
         IF ( m_world_rank >= start_pe(i)  .AND.  m_world_rank < start_pe(i+1) ) &
         THEN
            m_my_cpl_id = i
            EXIT
         ENDIF
      ENDDO
      m_my_cpl_rank = m_world_rank - start_pe(i)

!
!--   MPI_COMM_WORLD is the communicator for ALL models (MPI-1 approach).
!--   The communicators for the individual models are created by
!--   MPI_COMM_SPLIT. The color of the model is represented by the coupler id.
      CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm,   &
                           istat )
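!
!--   For example (hypothetical layout): with models of 16, 4 and 4 PEs, world
!--   ranks 0-15 get color 1 (root model), 16-19 color 2 and 20-23 color 3, so
!--   each model obtains its own communicator with local ranks 0 to
!--   npe_total-1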
!
!--   Get size and rank of the model running on this PE
      CALL  MPI_COMM_RANK( comm, m_model_rank, istat )
      CALL  MPI_COMM_SIZE( comm, m_model_npes, istat )

!
!--   Broadcast (from PE 0) the parent id and id of every model
      DO  i = 1, m_ncpl
         CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0,           &
                         MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%id,        1, MPI_INTEGER, 0,           &
                         MPI_COMM_WORLD, istat )
      ENDDO

!
!--   Save the current model communicator for PMC internal use
      m_model_comm = comm

!
!--   Create intercommunicators between the server and the clients.
!--   MPI_INTERCOMM_CREATE creates an intercommunicator between 2 groups of
!--   different colors. The grouping was done above with MPI_COMM_SPLIT.
      DO  i = 2, m_ncpl

         IF ( m_couplers(i)%parent_id == m_my_cpl_id )  THEN
!
!--         Collect server PEs.
!--         Every model except the root model has a parent model which acts as
!--         its server model. Create an intercommunicator to connect the
!--         current PE to all client PEs.
            tag = 500 + i
            CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD, start_pe(i),   &
                                       tag, m_to_client_comm(i), istat)
            clientcount = clientcount + 1
            activeserver(i) = 1

         ELSEIF ( i == m_my_cpl_id)  THEN
!
!--         Collect client PEs.
!--         Every model except the root model has a parent model which acts as
!--         its server model. Create an intercommunicator to connect the
!--         current PE to all server PEs.
            tag = 500 + i
            CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD,                &
                                       start_pe(m_couplers(i)%parent_id),      &
                                       tag, m_to_server_comm, istat )
         ENDIF

      ENDDO
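!
!--   Note that the server and the client side of each pair use the same tag
!--   (500 + client id), so the two MPI_INTERCOMM_CREATE calls above refer to
!--   the same connection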

!
!--   If I am a server, count the number of clients that I have.
!--   Although this loop is symmetric on all processes, the "activeserver" flag
!--   is true (==1) on the respective individual PE only.
      ALLOCATE( pmc_server_for_client(clientcount+1) )

      clientcount = 0
      DO  i = 2, m_ncpl
         IF ( activeserver(i) == 1 )  THEN
            clientcount = clientcount + 1
            pmc_server_for_client(clientcount) = i
         ENDIF
      ENDDO
!
!--   Get the size of the server model
      IF ( m_my_cpl_id > 1 )  THEN
         CALL MPI_COMM_REMOTE_SIZE( m_to_server_comm, m_server_remote_size,    &
                                    istat)
      ELSE
!
!--      The root model does not have a server
         m_server_remote_size = -1
      ENDIF
!
!--   Set myid to a non-zero value except for the root domain. This is a
!--   setting for the message routine which is called at the end of pmci_init.
!--   That routine outputs messages for myid = 0 only. However, myid has not
!--   been assigned so far, so that all PEs of the root model would output a
!--   message. To avoid this, set myid to some other value except for PE0 of
!--   the root domain.
      IF ( m_world_rank /= 0 )  myid = 1

   END SUBROUTINE PMC_init_model


!
!-- Provide module private variables of the pmc for PALM
   SUBROUTINE pmc_get_model_info( comm_world_nesting, cpl_id, cpl_name,        &
                                  cpl_parent_id, lower_left_x, lower_left_y,   &
                                  ncpl, npe_total, request_for_cpl_id )

      USE kinds

      IMPLICIT NONE

      CHARACTER(LEN=*), INTENT(OUT), OPTIONAL ::  cpl_name

      INTEGER, INTENT(IN), OPTIONAL ::  request_for_cpl_id

      INTEGER, INTENT(OUT), OPTIONAL ::  comm_world_nesting
      INTEGER, INTENT(OUT), OPTIONAL ::  cpl_id
      INTEGER, INTENT(OUT), OPTIONAL ::  cpl_parent_id
      INTEGER, INTENT(OUT), OPTIONAL ::  ncpl
      INTEGER, INTENT(OUT), OPTIONAL ::  npe_total

      INTEGER ::  requested_cpl_id

      REAL(wp), INTENT(OUT), OPTIONAL ::  lower_left_x
      REAL(wp), INTENT(OUT), OPTIONAL ::  lower_left_y

!
!--   Set the requested coupler id
      IF ( PRESENT( request_for_cpl_id ) )  THEN
         requested_cpl_id = request_for_cpl_id
!
!--      Check for allowed range of values
         IF ( requested_cpl_id < 1 .OR. requested_cpl_id > m_ncpl )  RETURN
      ELSE
         requested_cpl_id = m_my_cpl_id
      ENDIF

!
!--   Return the requested information
      IF ( PRESENT( comm_world_nesting )  )  THEN
         comm_world_nesting = m_world_comm
      ENDIF
      IF ( PRESENT( cpl_id )        )  THEN
         cpl_id = requested_cpl_id
      ENDIF
      IF ( PRESENT( cpl_parent_id ) )  THEN
         cpl_parent_id = m_couplers(requested_cpl_id)%parent_id
      ENDIF
      IF ( PRESENT( cpl_name )      )  THEN
         cpl_name = m_couplers(requested_cpl_id)%name
      ENDIF
      IF ( PRESENT( ncpl )          )  THEN
         ncpl = m_ncpl
      ENDIF
      IF ( PRESENT( npe_total )     )  THEN
         npe_total = m_couplers(requested_cpl_id)%npe_total
      ENDIF
      IF ( PRESENT( lower_left_x )  )  THEN
         lower_left_x = m_couplers(requested_cpl_id)%lower_left_x
      ENDIF
      IF ( PRESENT( lower_left_y )  )  THEN
         lower_left_y = m_couplers(requested_cpl_id)%lower_left_y
      ENDIF

   END SUBROUTINE pmc_get_model_info
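!
!-- Illustrative usage only (the variable names below are hypothetical); since
!-- all arguments are optional, callers request just the items they need:
!--    CALL pmc_get_model_info( ncpl = number_of_models, cpl_id = my_id,       &
!--                             cpl_parent_id = my_parent_id )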



   LOGICAL FUNCTION pmc_is_rootmodel( )

      IMPLICIT NONE

      pmc_is_rootmodel = ( m_my_cpl_id == 1 )

   END FUNCTION pmc_is_rootmodel



 SUBROUTINE read_coupling_layout( nesting_datatransfer_mode, nesting_mode,     &
                                  pmc_status )

    IMPLICIT NONE

    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_mode
    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_datatransfer_mode

    INTEGER, INTENT(INOUT) ::  pmc_status
    INTEGER                ::  i, istat

    TYPE(pmc_layout), DIMENSION(pmc_max_modell) ::  domain_layouts

!-- TO_DO: include anterp_relax_length_? into nestpar and communicate them.
    NAMELIST /nestpar/  domain_layouts, nesting_datatransfer_mode, nesting_mode
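!
!-- Illustrative only (domain names and all values below are hypothetical): a
!-- nestpar group in PARIN can give, for every domain, the pmc_layout
!-- components in order (name, id, parent_id, npe_total, lower_left_x,
!-- lower_left_y), e.g.
!--    &nestpar  domain_layouts = 'coarse', 1, -1, 16,    0.0,    0.0,
!--                               'fine',   2,  1,  8, 1000.0, 1000.0,
!--              nesting_mode = 'two-way',
!--              nesting_datatransfer_mode = 'mixed'  /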

!
!-- Initialize some coupling variables
    domain_layouts(1:pmc_max_modell)%id = -1
    m_ncpl =   0

    pmc_status = pmc_status_ok

!
!-- Open the NAMELIST-file and read the nesting layout
    CALL check_open( 11 )
    READ ( 11, nestpar, IOSTAT=istat )

    IF ( istat < 0 )  THEN
!
!--    No nestpar-NAMELIST found
       pmc_status = pmc_no_namelist_found
!
!--    Set the file pointer to the beginning of the file. Otherwise PE0 will
!--    later be unable to read the inipar-NAMELIST.
       REWIND ( 11 )
       RETURN

    ELSEIF ( istat > 0 )  THEN
!
!--    Errors in reading nestpar-NAMELIST
       pmc_status = pmc_namelist_error
       RETURN

    ENDIF

!
!-- Output location message
    CALL location_message( 'initialize communicators for nesting', .FALSE. )
!
!-- Assign the layout to the internally used variable
    m_couplers = domain_layouts

!
!-- Get the number of nested models given in the nestpar-NAMELIST
    DO  i = 1, pmc_max_modell
!
!--    When id=-1 is found for the first time, the list of domains is finished
       IF ( m_couplers(i)%id == -1  .OR.  i == pmc_max_modell )  THEN
          IF ( m_couplers(i)%id == -1 )  THEN
             m_ncpl = i - 1
             EXIT
          ELSE
             m_ncpl = pmc_max_modell
          ENDIF
       ENDIF

    ENDDO

 END SUBROUTINE read_coupling_layout

#endif
 END MODULE pmc_handle_communicator