source: palm/trunk/SOURCE/pmc_handle_communicator.f90 @ 1780

Last change on this file since 1780 was 1780, checked in by raasch, 6 years ago

last commit documented

  • Property svn:keywords set to Id
File size: 15.0 KB
Line 
1 MODULE PMC_handle_communicator
2
3!--------------------------------------------------------------------------------!
4! This file is part of PALM.
5!
6! PALM is free software: you can redistribute it and/or modify it under the terms
7! of the GNU General Public License as published by the Free Software Foundation,
8! either version 3 of the License, or (at your option) any later version.
9!
10! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
11! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
12! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
13!
14! You should have received a copy of the GNU General Public License along with
15! PALM. If not, see <http://www.gnu.org/licenses/>.
16!
17! Copyright 1997-2015 Leibniz Universitaet Hannover
18!--------------------------------------------------------------------------------!
19!
20! Current revisions:
21! ------------------
22!
23!
24! Former revisions:
25! -----------------
26! $Id: pmc_handle_communicator.f90 1780 2016-03-03 08:39:40Z raasch $
27!
28! 1779 2016-03-03 08:01:28Z raasch
29! only the total number of PEs is given in the nestpar-NAMELIST,
30! additional comments included
31!
32! 1764 2016-02-28 12:45:19Z raasch
33! pmc_layout type: comm_cpl and comm_parent removed, character "name" moved at
34! the beginning of the variable list,
35! domain layout is read with new NAMELIST nestpar from standard file PARIN,
36! MPI-datatype REAL8 replaced by REAL, kind=8 replaced by wp,
37! variable domain_layouts instead of m_couplers introduced for this NAMELIST,
38! general format changed to PALM style
39!
40! 1762 2016-02-25 12:31:13Z hellstea
41! Initial revision by K. Ketelsen
42!
43! Description:
44! ------------
45! Handle MPI communicator in PALM model coupler
46!------------------------------------------------------------------------------!
47
#if defined( __parallel )
    USE kinds

#if defined( __lc )
    USE MPI
#else
    INCLUDE "mpif.h"
#endif

   USE pmc_general,                                                            &
       ONLY: pmc_status_ok, pmc_status_error, pmc_max_modell

   IMPLICIT NONE

!
!-- Layout of one coupler (model/nest domain) as given in the nestpar-NAMELIST
   TYPE pmc_layout

      CHARACTER(len=32) ::  name          ! name of the model

      INTEGER  ::  id                     ! coupler id of the model
      INTEGER  ::  parent_id              ! coupler id of the model's parent
      INTEGER  ::  npe_total              ! total number of PEs required by the model

      REAL(wp) ::  lower_left_x           ! x-coordinate of the domain's lower left corner
      REAL(wp) ::  lower_left_y           ! y-coordinate of the domain's lower left corner

   END TYPE pmc_layout

   PUBLIC  pmc_status_ok, pmc_status_error

!
!-- Status codes returned by pmc_init_model / read_coupling_layout
   INTEGER, PARAMETER, PUBLIC ::  pmc_error_npes          = 1  ! illegal number of PEs
   INTEGER, PARAMETER, PUBLIC ::  pmc_namelist_error      = 2  ! error(s) in nestpar namelist
   INTEGER, PARAMETER, PUBLIC ::  pmc_no_namelist_found   = 3  ! no nestpar namelist found (non-nested run)

   ! Coupler Setup

   INTEGER                                    :: m_my_CPL_id  ! coupler id of the model running on this PE
   INTEGER                                    :: m_Parent_id  ! coupler id of the parent of this model
   INTEGER                                    :: m_NrOfCpl    ! number of couplers given in the layout file
   TYPE(PMC_layout),DIMENSION(PMC_MAX_MODELL) :: m_couplers   ! layout information of all couplers

   ! MPI settings

   INTEGER,PUBLIC                    :: m_model_comm          ! intracommunicator of this model
   INTEGER,PUBLIC                    :: m_to_server_comm      ! intercommunicator to the server model
   INTEGER,DIMENSION(PMC_MAX_MODELL) :: m_to_client_comm      ! intercommunicator(s) to the client model(s)
   INTEGER,PUBLIC                    :: m_world_rank          ! rank of this PE in MPI_COMM_WORLD
   INTEGER                           :: m_world_npes          ! number of PEs in MPI_COMM_WORLD
   INTEGER,PUBLIC                    :: m_model_rank          ! rank of this PE within its model
   INTEGER,PUBLIC                    :: m_model_npes          ! number of PEs of this model
   INTEGER                           :: m_server_remote_size  ! number of PEs of the server model

   PUBLIC m_to_client_comm

!
!-- List of client coupler ids for which this PE acts as server
!-- (filled in pmc_init_model; allocated to the number of clients found)

   INTEGER,DIMENSION(:),POINTER,PUBLIC :: PMC_Server_for_Client

   INTERFACE pmc_is_rootmodel
      MODULE PROCEDURE pmc_is_rootmodel
   END INTERFACE pmc_is_rootmodel

   INTERFACE PMC_get_local_model_info
      MODULE PROCEDURE PMC_get_local_model_info
   END INTERFACE PMC_get_local_model_info

   PUBLIC pmc_get_local_model_info, pmc_init_model, pmc_is_rootmodel

 CONTAINS
116
   SUBROUTINE pmc_init_model( comm, nesting_mode, pmc_status )

!
!--   Set up the communicators for a nested run: PE 0 of the root model reads
!--   the nesting layout, the layout is broadcast to all PEs, MPI_COMM_WORLD
!--   is split into one intracommunicator per model, and server/client
!--   intercommunicators are created between parent and child models.
!--
!--   comm         (out): intracommunicator of the model this PE belongs to;
!--                       MPI_COMM_WORLD for a non-nested run
!--   nesting_mode (out): nesting mode string as read from the nestpar-NAMELIST
!--   pmc_status   (out): pmc_status_ok, pmc_no_namelist_found, or
!--                       pmc_namelist_error

      USE control_parameters,                                                  &
          ONLY:  message_string

      USE pegrid,                                                              &
          ONLY:  myid

      IMPLICIT NONE

      CHARACTER(LEN=7), INTENT(OUT) ::  nesting_mode

      INTEGER, INTENT(OUT)                ::  comm
      INTEGER, INTENT(OUT)                ::  pmc_status

      INTEGER                             ::  i, ierr, istat        ! ierr is currently unused
      INTEGER,DIMENSION(pmc_max_modell+1) ::  start_pe              ! first world rank of each model
      INTEGER                             ::  m_my_cpl_rank         ! rank of this PE within its model
      INTEGER                             ::  tag, clientcount
      INTEGER,DIMENSION(pmc_max_modell)   ::  activeserver  ! I am active server for this client ID

      pmc_status   = pmc_status_ok
      comm         = -1
      m_my_cpl_id  = -1
      clientcount  =  0
      activeserver = -1
      start_pe(:)  =  0

      CALL  MPI_COMM_RANK( MPI_COMM_WORLD, m_world_rank, istat )
      CALL  MPI_COMM_SIZE( MPI_COMM_WORLD, m_world_npes, istat )
!
!--   Only PE 0 of root model reads
      IF ( m_world_rank == 0 )  THEN

         CALL read_coupling_layout( nesting_mode, pmc_status )

         IF ( pmc_status /= pmc_no_namelist_found  .AND.                       &
              pmc_status /= pmc_namelist_error )                               &
         THEN
!
!--         Calculate start PE of every model. Models occupy consecutive,
!--         disjoint rank ranges in MPI_COMM_WORLD, in layout-file order.
            start_pe(1) = 0
            DO  i = 2, m_nrofcpl+1
               start_pe(i) = start_pe(i-1) + m_couplers(i-1)%npe_total
            ENDDO

!
!--         The number of cores provided with the run must be the same as the
!--         total sum of cores required by all nest domains
!--         NOTE(review): the message text says "requires more MPI procs than
!--         provided", but the /= check also triggers when MORE procs are
!--         provided than required — wording may mislead in that case.
            IF ( start_pe(m_nrofcpl+1) /= m_world_npes )  THEN
               WRITE ( message_string, '(A,I6,A,I6,A)' )                       &
                               'nesting-setup requires more MPI procs (',      &
                               start_pe(m_nrofcpl+1), ') than provided (',     &
                               m_world_npes,')'
               CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
            ENDIF

         ENDIF

      ENDIF
!
!--   Broadcast the read status. This synchronises all other PEs with PE 0 of
!--   the root model. Without synchronisation, they would not behave in the
!--   correct way (e.g. they would not return in case of a missing NAMELIST)
      CALL MPI_BCAST( pmc_status, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )

      IF ( pmc_status == pmc_no_namelist_found )  THEN
!
!--      Not a nested run; return the MPI_WORLD communicator
         comm = MPI_COMM_WORLD
         RETURN

      ELSEIF ( pmc_status == pmc_namelist_error )  THEN
!
!--      Only the root model gives the error message. Others are aborted by the
!--      message-routine with MPI_ABORT. Must be done this way since myid and
!--      comm2d have not yet been assigned at this point.
         IF ( m_world_rank == 0 )  THEN
            message_string = 'errors in \$nestpar'
            CALL message( 'pmc_init_model', 'PA0223', 3, 2, 0, 6, 0 )
         ENDIF

      ENDIF

      CALL MPI_BCAST( m_nrofcpl, 1,          MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
      CALL MPI_BCAST( start_pe, m_nrofcpl+1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat)

!
!--   Broadcast coupling layout (component-wise, since no MPI datatype has
!--   been defined for TYPE(pmc_layout))
      DO  i = 1, m_nrofcpl
         CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%id,           1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%Parent_id,    1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%npe_total,    1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL,    0, MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL,    0, MPI_COMM_WORLD, istat )
      ENDDO

!
!--   Assign global MPI processes to individual models by setting the couple id
      DO  i = 1, m_nrofcpl
         IF ( m_world_rank >= start_pe(i)  .AND.  m_world_rank < start_pe(i+1) ) &
         THEN
            m_my_cpl_id = i
            EXIT
         ENDIF
      ENDDO
!
!--   NOTE(review): the next statement uses loop index i after the EXIT; this
!--   is correct only because every world rank falls into exactly one of the
!--   [start_pe(i), start_pe(i+1)) ranges (guaranteed by the PE-count check
!--   above). If no range matched, i = m_nrofcpl+1 and the rank would be wrong.
      m_my_cpl_rank = m_world_rank - start_pe(i)

!
!--   MPI_COMM_WORLD is the communicator for ALL models (MPI-1 approach).
!--   The communicators for the individual models are created by
!--   MPI_COMM_SPLIT. The color of the model is represented by the coupler id
      CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm,   &
                           istat )
!
!--   Get size and rank of the model running on this PE
      CALL  MPI_COMM_RANK( comm, m_model_rank, istat )
      CALL  MPI_COMM_SIZE( comm, m_model_npes, istat )

!
!--   Broadcast (from PE 0) the parent id and id of every model
      DO  i = 1, m_nrofcpl
         CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0,           &
                         MPI_COMM_WORLD, istat )
         CALL MPI_BCAST( m_couplers(i)%id,        1, MPI_INTEGER, 0,           &
                         MPI_COMM_WORLD, istat )
      ENDDO

!
!--   Save the current model communicator for PMC internal use
      m_model_comm = comm

!
!--   Create intercommunicator between server and clients.
!--   MPI_INTERCOMM_CREATE creates an intercommunicator between 2 groups of
!--   different colors.
!--   The grouping was done above with MPI_COMM_SPLIT
      DO  i = 2, m_nrofcpl

         IF ( m_couplers(i)%parent_id == m_my_cpl_id )  THEN
!
!--         Collect server PEs.
!--         Every model except the root model has a parent model which acts as
!--         server model. Create an intercommunicator to connect current PE to
!--         all client PEs
            tag = 500 + i
            CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD, start_pe(i),   &
                                       tag, m_to_client_comm(i), istat)
            clientcount = clientcount + 1
            activeserver(i) = 1

         ELSEIF ( i == m_my_cpl_id)  THEN
!
!--         Collect client PEs.
!--         Every model except the root model has a parent model which acts as
!--         server model. Create an intercommunicator to connect current PE to
!--         all server PEs
            tag = 500 + i
            CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD,                &
                                       start_pe(m_couplers(i)%parent_id),      &
                                       tag, m_to_server_comm, istat )
         ENDIF

      ENDDO

!
!--   If I am server, count the number of clients that I have.
!--   Although this loop is symmetric on all processes, the "activeserver" flag
!--   is true (==1) on the respective individual PE only.
!--   NOTE(review): allocation uses clientcount+1 — the extra element keeps the
!--   ALLOCATE legal when clientcount == 0; presumably intentional, confirm.
      ALLOCATE( pmc_server_for_client(clientcount+1) )

      clientcount = 0
      DO  i = 2, m_nrofcpl
         IF ( activeserver(i) == 1 )  THEN
            clientcount = clientcount + 1
            pmc_server_for_client(clientcount) = i
         ENDIF
      ENDDO
!
!--   Get the size of the server model
      IF ( m_my_cpl_id > 1 )  THEN
         CALL MPI_COMM_REMOTE_SIZE( m_to_server_comm, m_server_remote_size,    &
                                    istat)
      ELSE
!
!--      The root model does not have a server
         m_server_remote_size = -1             !
      ENDIF
!
!--   Set myid to non-zero value except for the root domain. This is a setting
!--   for the message routine which is called at the end of pmci_init. That
!--   routine outputs messages for myid = 0, only. However, myid has not been
!--   assigned so far, so that all PEs of the root model would output a
!--   message. To avoid this, set myid to some other value except for PE0 of the
!--   root domain.
      IF ( m_world_rank /= 0 )  myid = 1

   END SUBROUTINE PMC_init_model
316
317
318!
319!-- Make module private variables available to palm
320   SUBROUTINE pmc_get_local_model_info( my_cpl_id, my_cpl_parent_id, cpl_name, &
321                                        npe_total, lower_left_x, lower_left_y )
322
323      USE kinds
324
325      IMPLICIT NONE
326
327      CHARACTER(LEN=*), INTENT(OUT), OPTIONAL ::  cpl_name
328      INTEGER, INTENT(OUT), OPTIONAL          ::  my_cpl_id
329      INTEGER, INTENT(OUT), OPTIONAL          ::  my_cpl_parent_id
330      INTEGER, INTENT(OUT), OPTIONAL          ::  npe_total
331      REAL(wp), INTENT(OUT), OPTIONAL         ::  lower_left_x
332      REAL(wp), INTENT(OUT), OPTIONAL         ::  lower_left_y
333
334      IF ( PRESENT( my_cpl_id )           )  my_cpl_id        = m_my_cpl_id
335      IF ( PRESENT( my_cpl_parent_id )    )  my_cpl_parent_id = m_couplers(my_cpl_id)%parent_id
336      IF ( PRESENT( cpl_name )            )  cpl_name         = m_couplers(my_cpl_id)%name
337      IF ( PRESENT( npe_total )           )  npe_total        = m_couplers(my_cpl_id)%npe_total
338      IF ( PRESENT( lower_left_x )        )  lower_left_x     = m_couplers(my_cpl_id)%lower_left_x
339      IF ( PRESENT( lower_left_y )        )  lower_left_y     = m_couplers(my_cpl_id)%lower_left_y
340
341   END SUBROUTINE pmc_get_local_model_info
342
343
344
345   LOGICAL function pmc_is_rootmodel( )
346
347      IMPLICIT NONE
348
349      pmc_is_rootmodel = ( m_my_cpl_id == 1 )
350
351   END FUNCTION pmc_is_rootmodel
352
353
354
355 SUBROUTINE read_coupling_layout( nesting_mode, pmc_status )
356
357    IMPLICIT NONE
358
359    CHARACTER(LEN=7) ::  nesting_mode
360
361    INTEGER, INTENT(INOUT) ::  pmc_status
362    INTEGER                ::  i, istat, iunit
363
364    TYPE(pmc_layout), DIMENSION(pmc_max_modell) ::  domain_layouts
365
366
367    NAMELIST /nestpar/  domain_layouts, nesting_mode
368
369!
370!-- Initialize some coupling variables
371    domain_layouts(1:pmc_max_modell)%id = -1
372    m_nrofcpl =   0
373    iunit     = 345
374
375    pmc_status = pmc_status_ok
376
377!
378!-- Open the NAMELIST-file and read the nesting layout
379    CALL check_open( 11 )
380    READ ( 11, nestpar, IOSTAT=istat )
381
382    IF ( istat < 0 )  THEN
383!
384!--    No nestpar-NAMELIST found
385       pmc_status = pmc_no_namelist_found
386!
387!--    Set filepointer to the beginning of the file. Otherwise PE0 will later
388!--    be unable to read the inipar-NAMELIST
389       REWIND ( 11 )
390       RETURN
391
392    ELSEIF ( istat > 0 )  THEN
393!
394!--    Errors in reading nestpar-NAMELIST
395       pmc_status = pmc_namelist_error
396       RETURN
397
398    ENDIF
399
400!
401!-- Output location message
402    CALL location_message( 'initialize communicators for nesting', .FALSE. )
403!
404!-- Assign the layout to the internally used variable
405    m_couplers = domain_layouts
406
407!
408!-- Get the number of nested models given in the nestpar-NAMELIST
409    DO  i = 1, pmc_max_modell
410
411       IF ( m_couplers(i)%id /= -1  .AND.  i <= pmc_max_modell )  THEN
412          WRITE ( 0, '(A,A,1X,3I7,1X,2F10.2)' )  'Set up Model  ',             &
413                             TRIM( m_couplers(i)%name ), m_couplers(i)%id,     &
414                             m_couplers(i)%Parent_id, m_couplers(i)%npe_total, &
415                             m_couplers(i)%lower_left_x,                       &
416                             m_couplers(i)%lower_left_y
417       ELSE
418!
419!--       When id=-1 is found for the first time, the list of domains is
420!--       finished (or latest after pmc_max_modell entries
421          m_nrofcpl = i - 1
422          EXIT
423       ENDIF
424
425    ENDDO
426
427 END SUBROUTINE read_coupling_layout
428
429#endif
430 END MODULE pmc_handle_communicator
Note: See TracBrowser for help on using the repository browser.