source: palm/trunk/SOURCE/pmc_handle_communicator_mod.f90 @ 2608

Last change on this file since 2608 was 2599, checked in by hellstea, 6 years ago

i/o grouping update for nested runs

1  MODULE PMC_handle_communicator
2
3!------------------------------------------------------------------------------!
4! This file is part of PALM.
5!
6! PALM is free software: you can redistribute it and/or modify it under the
7! terms of the GNU General Public License as published by the Free Software
8! Foundation, either version 3 of the License, or (at your option) any later
9! version.
10!
11! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
12! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
13! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
14!
15! You should have received a copy of the GNU General Public License along with
16! PALM. If not, see <http://www.gnu.org/licenses/>.
17!
18! Copyright 1997-2017 Leibniz Universitaet Hannover
19!------------------------------------------------------------------------------!
20!
21! Current revisions:
22! ------------------
23!
24!
25! Former revisions:
26! -----------------
27! $Id: pmc_handle_communicator_mod.f90 2599 2017-11-01 13:18:45Z schwenkel $
28! Separate peer communicator peer_comm introduced for MPI_INTERCOMM_CREATE.
29! Some cleanup and commenting improvements.
30!
31! 2516 2017-10-04 11:03:04Z suehring
32! Remove tabs
33!
34! 2514 2017-10-04 09:52:37Z suehring
35! Bugfix, set filepointer to the beginning of the file after namelist read,
36! in order to assure that further namelists are also found.
37!
38! 2279 2017-06-12 15:23:44Z suehring
39! Error message text changed
40!
41! 2101 2017-01-05 16:42:31Z suehring
42!
43! 2013 2016-09-21 13:07:56Z suehring
44! Bugfix in format descriptor
45!
46! 2000 2016-08-20 18:09:15Z knoop
47! Forced header and separation lines into 80 columns
48!
49! 1938 2016-06-13 15:26:05Z hellstea
50! Minor clean-up.
51!
52! 1901 2016-05-04 15:39:38Z raasch
53! Initial version of purely vertical nesting introduced.
54! Code clean up. The words server/client changed to parent/child.
55!
56! 1900 2016-05-04 15:27:53Z raasch
57! re-formatting to match PALM style
58!
59! 1882 2016-04-20 15:24:46Z hellstea
60! MPI_BCAST-calls to broadcast nesting_mode and nesting_datatransfer_mode
61! are moved out from the DO i = 1, m_ncpl loop.
62!
63! 1850 2016-04-08 13:29:27Z maronga
64! Module renamed
65!
66! 1808 2016-04-05 19:44:00Z raasch
67! MPI module used by default on all machines
68!
69! 1797 2016-03-21 16:50:28Z raasch
70! introduction of different datatransfer modes,
71! export of comm_world_nesting
72!
73! 1791 2016-03-11 10:41:25Z raasch
74! m_nrofcpl renamed m_ncpl,
75! pmc_get_local_model_info renamed pmc_get_model_info, some keywords also
76! renamed and some added,
77! debug write-statements commented out
78!
79! 1786 2016-03-08 05:49:27Z raasch
80! Bugfix: nesting_mode is broadcast now
81!
82! 1779 2016-03-03 08:01:28Z raasch
83! only the total number of PEs is given in the nestpar-NAMELIST,
84! additional comments included
85!
86! 1764 2016-02-28 12:45:19Z raasch
87! pmc_layout type: comm_cpl and comm_parent removed, character "name" moved at
88! the beginning of the variable list,
89! domain layout is read with new NAMELIST nestpar from standard file PARIN,
90! MPI-datatype REAL8 replaced by REAL, kind=8 replaced by wp,
91! variable domain_layouts instead of m_couplers introduced for this NAMELIST,
92! general format changed to PALM style
93!
94! 1762 2016-02-25 12:31:13Z hellstea
95! Initial revision by K. Ketelsen
96!
97! Description:
98! ------------
99! Handle MPI communicators in the PALM model coupler
100!-------------------------------------------------------------------------------!
101
102#if defined( __parallel )
103    USE kinds
104
105#if defined( __mpifh )
106    INCLUDE "mpif.h"
107#else
108    USE MPI
109#endif
110
111    USE pmc_general,                                                            &
112        ONLY: pmc_status_ok, pmc_status_error, pmc_max_models
113    USE control_parameters,                                                     &
114        ONLY: message_string
115
116    IMPLICIT NONE
117
118    TYPE pmc_layout
119
120       CHARACTER(LEN=32) ::  name
121
122       INTEGER  ::  id            !<
123       INTEGER  ::  parent_id     !<
124       INTEGER  ::  npe_total     !<
125
126       REAL(wp) ::  lower_left_x  !<
127       REAL(wp) ::  lower_left_y  !<
128
129    END TYPE pmc_layout
130
131    PUBLIC  pmc_status_ok, pmc_status_error
132
133    INTEGER, PARAMETER, PUBLIC ::  pmc_error_npes        = 1  !< illegal number of processes
134    INTEGER, PARAMETER, PUBLIC ::  pmc_namelist_error    = 2  !< error(s) in nestpar namelist
135    INTEGER, PARAMETER, PUBLIC ::  pmc_no_namelist_found = 3  !< no couple layout namelist found
136
137    INTEGER ::  m_world_comm  !< global nesting communicator
138    INTEGER ::  m_my_cpl_id   !< coupler id of this model
139    INTEGER ::  m_parent_id   !< coupler id of parent of this model
140    INTEGER ::  m_ncpl        !< number of couplers given in nestpar namelist
141
142    TYPE(pmc_layout), DIMENSION(pmc_max_models) ::  m_couplers  !< information of all couplers
143
144    INTEGER, PUBLIC ::  m_model_comm          !< communicator of this model
145    INTEGER, PUBLIC ::  m_to_parent_comm      !< communicator to the parent
146    INTEGER, PUBLIC ::  m_world_rank          !<
147    INTEGER         ::  m_world_npes          !<
148    INTEGER, PUBLIC ::  m_model_rank          !<
149    INTEGER, PUBLIC ::  m_model_npes          !<
150    INTEGER         ::  m_parent_remote_size  !< number of processes in the parent model
151    INTEGER         ::  peer_comm             !< peer communicator for the intercommunicators
152
153    INTEGER, DIMENSION(pmc_max_models), PUBLIC ::  m_to_child_comm    !< communicator to the child(ren)
154    INTEGER, DIMENSION(:), POINTER, PUBLIC ::  pmc_parent_for_child   !<
155
156
157    INTERFACE pmc_is_rootmodel
158       MODULE PROCEDURE pmc_is_rootmodel
159    END INTERFACE pmc_is_rootmodel
160
161    INTERFACE pmc_get_model_info
162       MODULE PROCEDURE pmc_get_model_info
163    END INTERFACE pmc_get_model_info
164
165    PUBLIC pmc_get_model_info, pmc_init_model, pmc_is_rootmodel
166
167 CONTAINS
168
169 SUBROUTINE pmc_init_model( comm, nesting_datatransfer_mode, nesting_mode,      &
170                              pmc_status )
171
172    USE control_parameters,                                                     &
173        ONLY:  message_string
174
175    USE pegrid,                                                                 &
176        ONLY:  myid
177
178    IMPLICIT NONE
179
180    CHARACTER(LEN=8), INTENT(OUT) ::  nesting_mode               !<
181    CHARACTER(LEN=7), INTENT(OUT) ::  nesting_datatransfer_mode  !<
182
183    INTEGER, INTENT(OUT) ::  comm        !<
184    INTEGER, INTENT(OUT) ::  pmc_status  !<
185
186    INTEGER ::  childcount     !<
187    INTEGER ::  i              !<
188    INTEGER ::  ierr           !<
189    INTEGER ::  istat          !<
190    INTEGER ::  m_my_cpl_rank  !<
191    INTEGER ::  tag            !<
192
193    INTEGER, DIMENSION(pmc_max_models)   ::  activeparent  ! I am active parent for this child ID
194    INTEGER, DIMENSION(pmc_max_models+1) ::  start_pe
195
196    pmc_status   = pmc_status_ok
197    comm         = -1
198    m_world_comm = MPI_COMM_WORLD
199    m_my_cpl_id  = -1
200    childcount   =  0
201    activeparent = -1
202    start_pe(:)  =  0
203
204    CALL MPI_COMM_RANK( MPI_COMM_WORLD, m_world_rank, istat )
205    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, m_world_npes, istat )
206!
207!-- Only process 0 of the root model reads the nesting layout
208    IF ( m_world_rank == 0 )  THEN
209
210       CALL read_coupling_layout( nesting_datatransfer_mode, nesting_mode,      &
211                                  pmc_status )
212
213       IF ( pmc_status /= pmc_no_namelist_found  .AND.                          &
214            pmc_status /= pmc_namelist_error )                                  &
215       THEN
216!
217!--       Determine the first process id of each model
218          start_pe(1) = 0
219          DO  i = 2, m_ncpl+1
220             start_pe(i) = start_pe(i-1) + m_couplers(i-1)%npe_total
221          ENDDO
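!
!--       As an example, if three domains request npe_total = 16, 4 and 4
!--       processes, the first elements of start_pe become 0, 16, 20, 24,
!--       i.e. world ranks 0-15 belong to domain 1, ranks 16-19 to domain 2,
!--       ranks 20-23 to domain 3, and the run requires 24 processes in total.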
222
223!
224!--       The numbers of processes requested by the domains must sum up to
225!--       the total number of processes of the run
226          IF ( start_pe(m_ncpl+1) /= m_world_npes )  THEN
227             WRITE ( message_string, '(2A,I6,2A,I6,A)' )                        &
228                             'nesting-setup requires different number of ',     &
229                             'MPI procs (', start_pe(m_ncpl+1), ') than ',      &
230                             'provided (', m_world_npes,')'
231             CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
232          ENDIF
233
234       ENDIF
235
236    ENDIF
237!
238!-- Broadcast the read status. This synchronises all other processes with
239!-- process 0 of the root model. Without synchronisation, they would not
240!-- behave in the correct way (e.g. they would not return in case of a
241!-- missing NAMELIST).
242    CALL MPI_BCAST( pmc_status, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
243
244    IF ( pmc_status == pmc_no_namelist_found )  THEN
245!
246!--    Not a nested run; return the MPI_COMM_WORLD communicator
247       comm = MPI_COMM_WORLD
248       RETURN
249
250    ELSEIF ( pmc_status == pmc_namelist_error )  THEN
251!
252!--    Only the root model gives the error message. Others are aborted by the
253!--    message-routine with MPI_ABORT. Must be done this way since myid and
254!--    comm2d have not yet been assigned at this point.
255       IF ( m_world_rank == 0 )  THEN
256          message_string = 'errors in \$nestpar'
257          CALL message( 'pmc_init_model', 'PA0223', 3, 2, 0, 6, 0 )
258       ENDIF
259
260    ENDIF
261
262    CALL MPI_BCAST( m_ncpl,          1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
263    CALL MPI_BCAST( start_pe, m_ncpl+1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
264!
265!-- Broadcast coupling layout
266    DO  i = 1, m_ncpl
267       CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ),           &
268                       MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
269       CALL MPI_BCAST( m_couplers(i)%id,           1, MPI_INTEGER, 0,           &
270                       MPI_COMM_WORLD, istat )
271       CALL MPI_BCAST( m_couplers(i)%Parent_id,    1, MPI_INTEGER, 0,           &
272                       MPI_COMM_WORLD, istat )
273       CALL MPI_BCAST( m_couplers(i)%npe_total,    1, MPI_INTEGER, 0,           &
274                       MPI_COMM_WORLD, istat )
275       CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL,    0,           &
276                       MPI_COMM_WORLD, istat )
277       CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL,    0,           &
278                       MPI_COMM_WORLD, istat )
279    ENDDO
280    CALL MPI_BCAST( nesting_mode, LEN( nesting_mode ), MPI_CHARACTER, 0,        &
281                    MPI_COMM_WORLD, istat )
282    CALL MPI_BCAST( nesting_datatransfer_mode, LEN(nesting_datatransfer_mode),  &
283                    MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
284!
285!-- Assign global MPI processes to individual models by setting the couple id
286    DO  i = 1, m_ncpl
287       IF ( m_world_rank >= start_pe(i)  .AND.  m_world_rank < start_pe(i+1) )  &
288       THEN
289          m_my_cpl_id = i
290          EXIT
291       ENDIF
292    ENDDO
293    m_my_cpl_rank = m_world_rank - start_pe(i)
294!
295!-- MPI_COMM_WORLD is the communicator for ALL models (MPI-1 approach).
296!-- The communicators for the individual models are created by MPI_COMM_SPLIT.
297!-- The color of the model is represented by the coupler id
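!-- (for example, with two models of 16 and 8 processes, world ranks 0-15
!-- receive colour 1 and ranks 16-23 colour 2, so the split yields two
!-- disjoint model communicators of sizes 16 and 8)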
298    CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm,      &
299                         istat )
300!
301!-- Get size and rank of the model running on this process
302    CALL  MPI_COMM_RANK( comm, m_model_rank, istat )
303    CALL  MPI_COMM_SIZE( comm, m_model_npes, istat )
304!
305!-- Broadcast (from process 0) the parent id and id of every model
306    DO  i = 1, m_ncpl
307       CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0,              &
308                       MPI_COMM_WORLD, istat )
309       CALL MPI_BCAST( m_couplers(i)%id,        1, MPI_INTEGER, 0,              &
310                       MPI_COMM_WORLD, istat )
311    ENDDO
312!
313!-- Save the current model communicator for pmc internal use
314    m_model_comm = comm
315
316!
317!-- Create intercommunicator between the parent and children.
318!-- MPI_INTERCOMM_CREATE creates an intercommunicator between 2 groups of
319!-- different colors.
320!-- The grouping was done above with MPI_COMM_SPLIT.
321!-- A duplicate of MPI_COMM_WORLD is created and used as peer communicator
322!-- (peer_comm) for MPI_INTERCOMM_CREATE.
323    CALL MPI_COMM_DUP( MPI_COMM_WORLD, peer_comm, ierr ) 
324    DO  i = 2, m_ncpl
325       IF ( m_couplers(i)%parent_id == m_my_cpl_id )  THEN
326!
327!--       Identify all child models of the current model and create
328!--       intercommunicators connecting the current model with its child
329!--       models.
330          tag = 500 + i
331          CALL MPI_INTERCOMM_CREATE( comm, 0, peer_comm, start_pe(i),           &
332                                     tag, m_to_child_comm(i), istat)
333          childcount = childcount + 1
334          activeparent(i) = 1
335       ELSEIF ( i == m_my_cpl_id)  THEN
336!
337!--       Create an intercommunicator connecting the current model with its
338!--       parent model.
339          tag = 500 + i
340          CALL MPI_INTERCOMM_CREATE( comm, 0, peer_comm,                        &
341                                     start_pe(m_couplers(i)%parent_id),         &
342                                     tag, m_to_parent_comm, istat )
343       ENDIF
344    ENDDO
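!
!-- For example, in a setup where domains 2 and 3 are both children of the
!-- root domain 1, the root passes through this loop twice and creates
!-- m_to_child_comm(2) and m_to_child_comm(3) with tags 502 and 503, while
!-- each child creates its single m_to_parent_comm with the matching tag.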
345!
346!-- If I am a parent, count the number of children I have.
347!-- Although this loop is symmetric on all processes, the "activeparent" flag
348!-- is true (==1) on the respective individual process only.
349    ALLOCATE( pmc_parent_for_child(childcount+1) )
350
351    childcount = 0
352    DO  i = 2, m_ncpl
353       IF ( activeparent(i) == 1 )  THEN
354          childcount = childcount + 1
355          pmc_parent_for_child(childcount) = i
356       ENDIF
357    ENDDO
358!
359!-- Get the size of the parent model
360    IF ( m_my_cpl_id > 1 )  THEN
361       CALL MPI_COMM_REMOTE_SIZE( m_to_parent_comm, m_parent_remote_size,       &
362                                  istat )
363    ELSE
364!
365!--    The root model does not have a parent
366       m_parent_remote_size = -1
367    ENDIF
368!
369!-- Set myid to a non-zero value for all processes except process 0 of the
370!-- root domain. This is required by the message routine, which is called at
371!-- the end of pmci_init and outputs messages for myid = 0 only. However,
372!-- myid has not been assigned at this point, so all processes of the root
373!-- model would output the message. To avoid this, set myid to a non-zero
374!-- value on every process except process 0 of the root domain.
375    IF ( m_world_rank /= 0 )  myid = 1
376
377 END SUBROUTINE PMC_init_model
378
379
380
381 SUBROUTINE pmc_get_model_info( comm_world_nesting, cpl_id, cpl_name,           &
382                                cpl_parent_id, lower_left_x, lower_left_y,      &
383                                ncpl, npe_total, request_for_cpl_id )
384!
385!-- Provide module private variables of the pmc for PALM
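!-- (all arguments are optional keyword arguments, so a caller may request
!--  only selected values, e.g.  CALL pmc_get_model_info( ncpl = n, cpl_id = id ),
!--  where n and id are integer variables on the caller's side)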
386
387    USE kinds
388
389    IMPLICIT NONE
390
391    CHARACTER(LEN=*), INTENT(OUT), OPTIONAL ::  cpl_name   !<
392
393    INTEGER, INTENT(IN), OPTIONAL ::  request_for_cpl_id   !<
394
395    INTEGER, INTENT(OUT), OPTIONAL ::  comm_world_nesting  !<
396    INTEGER, INTENT(OUT), OPTIONAL ::  cpl_id              !<
397    INTEGER, INTENT(OUT), OPTIONAL ::  cpl_parent_id       !<
398    INTEGER, INTENT(OUT), OPTIONAL ::  ncpl                !<
399    INTEGER, INTENT(OUT), OPTIONAL ::  npe_total           !<
400
401    INTEGER ::  requested_cpl_id                           !<
402
403    REAL(wp), INTENT(OUT), OPTIONAL ::  lower_left_x       !<
404    REAL(wp), INTENT(OUT), OPTIONAL ::  lower_left_y       !<
405
406!
407!-- Set the requested coupler id
408    IF ( PRESENT( request_for_cpl_id ) )  THEN
409       requested_cpl_id = request_for_cpl_id
410!
411!--    Check for allowed range of values
412       IF ( requested_cpl_id < 1  .OR.  requested_cpl_id > m_ncpl )  RETURN
413    ELSE
414       requested_cpl_id = m_my_cpl_id
415    ENDIF
416!
417!-- Return the requested information
418    IF ( PRESENT( comm_world_nesting )  )  THEN
419       comm_world_nesting = m_world_comm
420    ENDIF
421    IF ( PRESENT( cpl_id )        )  THEN
422       cpl_id = requested_cpl_id
423    ENDIF
424    IF ( PRESENT( cpl_parent_id ) )  THEN
425       cpl_parent_id = m_couplers(requested_cpl_id)%parent_id
426    ENDIF
427    IF ( PRESENT( cpl_name )      )  THEN
428       cpl_name = m_couplers(requested_cpl_id)%name
429    ENDIF
430    IF ( PRESENT( ncpl )          )  THEN
431       ncpl = m_ncpl
432    ENDIF
433    IF ( PRESENT( npe_total )     )  THEN
434       npe_total = m_couplers(requested_cpl_id)%npe_total
435    ENDIF
436    IF ( PRESENT( lower_left_x )  )  THEN
437       lower_left_x = m_couplers(requested_cpl_id)%lower_left_x
438    ENDIF
439    IF ( PRESENT( lower_left_y )  )  THEN
440       lower_left_y = m_couplers(requested_cpl_id)%lower_left_y
441    ENDIF
442
443 END SUBROUTINE pmc_get_model_info
444
445
446
447 LOGICAL FUNCTION pmc_is_rootmodel( )
448
449    IMPLICIT NONE
450
451    pmc_is_rootmodel = ( m_my_cpl_id == 1 )
452
453 END FUNCTION pmc_is_rootmodel
454
455
456
457 SUBROUTINE read_coupling_layout( nesting_datatransfer_mode, nesting_mode,      &
458      pmc_status )
459
460    IMPLICIT NONE
461
462    CHARACTER(LEN=8), INTENT(INOUT) ::  nesting_mode
463    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_datatransfer_mode
464
465    INTEGER(iwp), INTENT(INOUT) ::  pmc_status
466    INTEGER(iwp)                ::  bad_llcorner
467    INTEGER(iwp)                ::  i
468    INTEGER(iwp)                ::  istat
469
470    TYPE(pmc_layout), DIMENSION(pmc_max_models) ::  domain_layouts
471
472    NAMELIST /nestpar/  domain_layouts, nesting_datatransfer_mode, nesting_mode
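!
!-- A corresponding namelist in the PARIN file may, for instance, look like
!-- the following (names, process counts, offsets and mode strings are purely
!-- illustrative; each domain_layouts entry lists the pmc_layout components in
!-- the order name, id, parent_id, npe_total, lower_left_x, lower_left_y):
!--
!--    &nestpar  domain_layouts = 'coarse',  1,  -1,  16,    0.0,    0.0,
!--                               'fine',    2,   1,   8,  400.0,  300.0,
!--              nesting_mode = 'two-way',
!--              nesting_datatransfer_mode = 'mixed'  /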
473
474!
475!-- Initialize some coupling variables
476    domain_layouts(1:pmc_max_models)%id = -1
477    m_ncpl =   0
478
479    pmc_status = pmc_status_ok
480!
481!-- Open the NAMELIST-file and read the nesting layout
482    CALL check_open( 11 )
483    READ ( 11, nestpar, IOSTAT=istat )
484!
485!-- Set the file pointer to the beginning of the file. Otherwise process 0
486!-- will later be unable to read the inipar-NAMELIST
487    REWIND ( 11 )
488
489    IF ( istat < 0 )  THEN
490!
491!--    No nestpar-NAMELIST found
492       pmc_status = pmc_no_namelist_found
493       RETURN
494    ELSEIF ( istat > 0 )  THEN
495!
496!--    Errors in reading nestpar-NAMELIST
497       pmc_status = pmc_namelist_error
498       RETURN
499    ENDIF
500!
501!-- Output location message
502    CALL location_message( 'initialize communicators for nesting', .FALSE. )
503!
504!-- Assign the layout to the corresponding internally used variable m_couplers
505    m_couplers = domain_layouts
506!
507!-- Get the number of nested models given in the nestpar-NAMELIST
508    DO  i = 1, pmc_max_models
509!
510!--    When id=-1 is found for the first time, the list of domains is finished
511       IF ( m_couplers(i)%id == -1  .OR.  i == pmc_max_models )  THEN
512          IF ( m_couplers(i)%id == -1 )  THEN
513             m_ncpl = i - 1
514             EXIT
515          ELSE
516             m_ncpl = pmc_max_models
517          ENDIF
518       ENDIF
519    ENDDO
520!
521!-- Make sure that all domains have equal lower left corner in case of vertical
522!-- nesting
523    IF ( nesting_mode == 'vertical' )  THEN
524       bad_llcorner = 0
525       DO  i = 1, m_ncpl
526          IF ( domain_layouts(i)%lower_left_x /= 0.0_wp .OR.                    &
527               domain_layouts(i)%lower_left_y /= 0.0_wp )  THEN
528             bad_llcorner = bad_llcorner + 1
529             domain_layouts(i)%lower_left_x = 0.0_wp
530             domain_layouts(i)%lower_left_y = 0.0_wp
531          ENDIF
532       ENDDO
533       IF ( bad_llcorner /= 0)  THEN
534          WRITE ( message_string, *)  'at least one dimension of lower ',       &
535                                      'left corner of one domain is not 0. ',   &
536                                      'All lower left corners were set to (0, 0)'
537          CALL message( 'read_coupling_layout', 'PA0427', 0, 0, 0, 6, 0 )
538       ENDIF
539    ENDIF
540
541 END SUBROUTINE read_coupling_layout
542
543#endif
544 END MODULE pmc_handle_communicator