source: palm/trunk/SOURCE/pmc_handle_communicator_mod.f90 @ 2836

Last change on this file since 2836 was 2801, checked in by thiele, 7 years ago

Introduce particle transfer in nested models

  • Property svn:keywords set to Id
File size: 19.2 KB
!> @file pmc_handle_communicator_mod.f90
!------------------------------------------------------------------------------!
! This file is part of the PALM model system.
!
! PALM is free software: you can redistribute it and/or modify it under the
! terms of the GNU General Public License as published by the Free Software
! Foundation, either version 3 of the License, or (at your option) any later
! version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2018 Leibniz Universitaet Hannover
!------------------------------------------------------------------------------!
!
! Current revisions:
! ------------------
!
!
! Former revisions:
! -----------------
! $Id: pmc_handle_communicator_mod.f90 2801 2018-02-14 16:01:55Z Giersch $
! Introduce particle transfer in nested models.
!
! 2718 2018-01-02 08:49:38Z maronga
! Corrected "Former revisions" section
!
! 2696 2017-12-14 17:12:51Z kanani
! Change in file header (GPL part)
! Bugfix, give Intent(inout) attributes in routine reading nestpar-namelist (MS)
!
! 2599 2017-11-01 13:18:45Z hellstea
! Separate peer communicator peer_comm introduced for MPI_INTERCOMM_CREATE.
! Some cleanup and commenting improvements.
!
! 2516 2017-10-04 11:03:04Z suehring
! Remove tabs
!
! 2514 2017-10-04 09:52:37Z suehring
! Bugfix, set filepointer to the beginning of the file after namelist read,
! in order to assure that further namelists are also found.
!
! 2279 2017-06-12 15:23:44Z suehring
! Error message text changed
!
! 2101 2017-01-05 16:42:31Z suehring
!
! 2013 2016-09-21 13:07:56Z suehring
! Bugfix in format descriptor
!
! 2000 2016-08-20 18:09:15Z knoop
! Forced header and separation lines into 80 columns
!
! 1938 2016-06-13 15:26:05Z hellstea
! Minor clean-up.
!
! 1901 2016-05-04 15:39:38Z raasch
! Initial version of purely vertical nesting introduced.
! Code clean up. The words server/client changed to parent/child.
!
! 1900 2016-05-04 15:27:53Z raasch
! re-formatting to match PALM style
!
! 1882 2016-04-20 15:24:46Z hellstea
! MPI_BCAST-calls to broadcast nesting_mode and nesting_datatransfer_mode
! are moved out from the DO i = 1, m_ncpl loop.
!
! 1850 2016-04-08 13:29:27Z maronga
! Module renamed
!
! 1808 2016-04-05 19:44:00Z raasch
! MPI module used by default on all machines
!
! 1797 2016-03-21 16:50:28Z raasch
! introduction of different datatransfer modes,
! export of comm_world_nesting
!
! 1791 2016-03-11 10:41:25Z raasch
! m_nrofcpl renamed m_ncpl,
! pmc_get_local_model_info renamed pmc_get_model_info, some keywords also
! renamed and some added,
! debug write-statements commented out
!
! 1786 2016-03-08 05:49:27Z raasch
! Bugfix: nesting_mode is broadcast now
!
! 1779 2016-03-03 08:01:28Z raasch
! only the total number of PEs is given in the nestpar-NAMELIST,
! additional comments included
!
! 1764 2016-02-28 12:45:19Z raasch
! pmc_layout type: comm_cpl and comm_parent removed, character "name" moved at
! the beginning of the variable list,
! domain layout is read with new NAMELIST nestpar from standard file PARIN,
! MPI-datatype REAL8 replaced by REAL, kind=8 replaced by wp,
! variable domain_layouts instead of m_couplers introduced for this NAMELIST,
! general format changed to PALM style
!
! 1762 2016-02-25 12:31:13Z hellstea
! Initial revision by K. Ketelsen
!
! Description:
! ------------
! Handle MPI communicator in PALM model coupler
!-------------------------------------------------------------------------------!
 MODULE PMC_handle_communicator
#if defined( __parallel )
    USE kinds

#if defined( __mpifh )
    INCLUDE "mpif.h"
#else
    USE MPI
#endif

    USE pmc_general,                                                            &
        ONLY: pmc_status_ok, pmc_status_error, pmc_max_models
    USE control_parameters,                                                     &
        ONLY: message_string

    IMPLICIT NONE

    TYPE pmc_layout

       CHARACTER(LEN=32) ::  name

       INTEGER  ::  id            !<
       INTEGER  ::  parent_id     !<
       INTEGER  ::  npe_total     !<

       REAL(wp) ::  lower_left_x  !<
       REAL(wp) ::  lower_left_y  !<

    END TYPE pmc_layout
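!
!-- One pmc_layout entry describes one model (domain) of the nesting setup as
!-- given in the &nestpar namelist: its name, its coupler id, the id of its
!-- parent model, the total number of MPI processes it requests, and the
!-- position of its lower left corner.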

    PUBLIC  pmc_status_ok, pmc_status_error

    INTEGER, PARAMETER, PUBLIC ::  pmc_error_npes        = 1  !< illegal number of processes
    INTEGER, PARAMETER, PUBLIC ::  pmc_namelist_error    = 2  !< error(s) in nestpar namelist
    INTEGER, PARAMETER, PUBLIC ::  pmc_no_namelist_found = 3  !< no couple layout namelist found

    INTEGER ::  m_world_comm  !< global nesting communicator
    INTEGER ::  m_my_cpl_id   !< coupler id of this model
    INTEGER ::  m_parent_id   !< coupler id of parent of this model
    INTEGER ::  m_ncpl        !< number of couplers given in nestpar namelist

    TYPE(pmc_layout), PUBLIC, DIMENSION(pmc_max_models) ::  m_couplers  !< information of all couplers

    INTEGER, PUBLIC ::  m_model_comm          !< communicator of this model
    INTEGER, PUBLIC ::  m_to_parent_comm      !< communicator to the parent
    INTEGER, PUBLIC ::  m_world_rank          !<
    INTEGER         ::  m_world_npes          !<
    INTEGER, PUBLIC ::  m_model_rank          !<
    INTEGER, PUBLIC ::  m_model_npes          !<
    INTEGER         ::  m_parent_remote_size  !< number of processes in the parent model
    INTEGER         ::  peer_comm             !< peer communicator for inter communicators

    INTEGER, DIMENSION(pmc_max_models), PUBLIC ::  m_to_child_comm    !< communicator to the child(ren)
    INTEGER, DIMENSION(:), POINTER, PUBLIC ::  pmc_parent_for_child   !<


    INTERFACE pmc_is_rootmodel
       MODULE PROCEDURE pmc_is_rootmodel
    END INTERFACE pmc_is_rootmodel

    INTERFACE pmc_get_model_info
       MODULE PROCEDURE pmc_get_model_info
    END INTERFACE pmc_get_model_info

    PUBLIC pmc_get_model_info, pmc_init_model, pmc_is_rootmodel

 CONTAINS

 SUBROUTINE pmc_init_model( comm, nesting_datatransfer_mode, nesting_mode,      &
                            pmc_status )

    USE control_parameters,                                                     &
        ONLY:  message_string

    USE pegrid,                                                                 &
        ONLY:  myid

    IMPLICIT NONE

    CHARACTER(LEN=8), INTENT(INOUT) ::  nesting_mode               !<
    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_datatransfer_mode  !<

    INTEGER, INTENT(INOUT) ::  comm        !<
    INTEGER, INTENT(INOUT) ::  pmc_status  !<

    INTEGER ::  childcount     !<
    INTEGER ::  i              !<
    INTEGER ::  ierr           !<
    INTEGER ::  istat          !<
    INTEGER ::  m_my_cpl_rank  !<
    INTEGER ::  tag            !<

    INTEGER, DIMENSION(pmc_max_models)   ::  activeparent  ! I am active parent for this child ID
    INTEGER, DIMENSION(pmc_max_models+1) ::  start_pe

    pmc_status   = pmc_status_ok
    comm         = -1
    m_world_comm = MPI_COMM_WORLD
    m_my_cpl_id  = -1
    childcount   =  0
    activeparent = -1
    start_pe(:)  =  0

    CALL MPI_COMM_RANK( MPI_COMM_WORLD, m_world_rank, istat )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, m_world_npes, istat )
!
!-- Only process 0 of the root model reads the namelist
    IF ( m_world_rank == 0 )  THEN

       CALL read_coupling_layout( nesting_datatransfer_mode, nesting_mode,      &
                                  pmc_status )

       IF ( pmc_status /= pmc_no_namelist_found  .AND.                          &
            pmc_status /= pmc_namelist_error )                                  &
       THEN
!
!--       Calculate the start PE of every model
          start_pe(1) = 0
          DO  i = 2, m_ncpl+1
             start_pe(i) = start_pe(i-1) + m_couplers(i-1)%npe_total
          ENDDO
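!
!--       Illustration (numbers are examples only): for three models requesting
!--       npe_total = 16, 4 and 4 processes, start_pe becomes (/ 0, 16, 20, 24 /),
!--       i.e. model i occupies the world ranks start_pe(i) to start_pe(i+1)-1,
!--       and start_pe(m_ncpl+1) equals the total number of processes that is
!--       checked below.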

!
!--       The sum of the numbers of processes requested by all domains must
!--       equal the total number of processes of the run
          IF ( start_pe(m_ncpl+1) /= m_world_npes )  THEN
             WRITE ( message_string, '(2A,I6,2A,I6,A)' )                        &
                             'nesting-setup requires different number of ',     &
                             'MPI procs (', start_pe(m_ncpl+1), ') than ',      &
                             'provided (', m_world_npes,')'
             CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
          ENDIF

       ENDIF

    ENDIF
!
!-- Broadcast the read status. This synchronises all other processes with
!-- process 0 of the root model. Without synchronisation, they would not
!-- behave in the correct way (e.g. they would not return in case of a
!-- missing NAMELIST).
    CALL MPI_BCAST( pmc_status, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )

    IF ( pmc_status == pmc_no_namelist_found )  THEN
!
!--    Not a nested run; return the MPI_COMM_WORLD communicator
       comm = MPI_COMM_WORLD
       RETURN

    ELSEIF ( pmc_status == pmc_namelist_error )  THEN
!
!--    Only the root model gives the error message. Others are aborted by the
!--    message-routine with MPI_ABORT. Must be done this way since myid and
!--    comm2d have not yet been assigned at this point.
       IF ( m_world_rank == 0 )  THEN
          message_string = 'errors in \$nestpar'
          CALL message( 'pmc_init_model', 'PA0223', 3, 2, 0, 6, 0 )
       ENDIF

    ENDIF

    CALL MPI_BCAST( m_ncpl,          1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
    CALL MPI_BCAST( start_pe, m_ncpl+1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
!
!-- Broadcast coupling layout
    DO  i = 1, m_ncpl
       CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ),           &
                       MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%id,           1, MPI_INTEGER, 0,           &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%parent_id,    1, MPI_INTEGER, 0,           &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%npe_total,    1, MPI_INTEGER, 0,           &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL,    0,           &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL,    0,           &
                       MPI_COMM_WORLD, istat )
    ENDDO
    CALL MPI_BCAST( nesting_mode, LEN( nesting_mode ), MPI_CHARACTER, 0,        &
                    MPI_COMM_WORLD, istat )
    CALL MPI_BCAST( nesting_datatransfer_mode, LEN(nesting_datatransfer_mode),  &
                    MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
!
!-- Assign global MPI processes to individual models by setting the couple id
    DO  i = 1, m_ncpl
       IF ( m_world_rank >= start_pe(i)  .AND.  m_world_rank < start_pe(i+1) )  &
       THEN
          m_my_cpl_id = i
          EXIT
       ENDIF
    ENDDO
    m_my_cpl_rank = m_world_rank - start_pe(i)
!
!-- MPI_COMM_WORLD is the communicator for ALL models (MPI-1 approach).
!-- The communicators for the individual models are created by MPI_COMM_SPLIT.
!-- The color of the model is represented by the coupler id
    CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm,      &
                         istat )
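!
!-- Note on the split: all processes passing the same color (coupler id) end up
!-- in the same new communicator, and the key (m_my_cpl_rank) preserves the
!-- world-rank ordering, so the ranks inside each model communicator start at 0.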
!
!-- Get size and rank of the model running on this process
    CALL  MPI_COMM_RANK( comm, m_model_rank, istat )
    CALL  MPI_COMM_SIZE( comm, m_model_npes, istat )
!
!-- Broadcast (from process 0) the parent id and id of every model
    DO  i = 1, m_ncpl
       CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0,              &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%id,        1, MPI_INTEGER, 0,              &
                       MPI_COMM_WORLD, istat )
    ENDDO
!
!-- Save the current model communicator for pmc internal use
    m_model_comm = comm

!
!-- Create intercommunicators between the parent and its children.
!-- MPI_INTERCOMM_CREATE creates an intercommunicator between 2 groups of
!-- different colors.
!-- The grouping was done above with MPI_COMM_SPLIT.
!-- A duplicate of MPI_COMM_WORLD is created and used as peer communicator
!-- (peer_comm) for MPI_INTERCOMM_CREATE.
    CALL MPI_COMM_DUP( MPI_COMM_WORLD, peer_comm, ierr )
    DO  i = 2, m_ncpl
       IF ( m_couplers(i)%parent_id == m_my_cpl_id )  THEN
!
!--       Identify all child models of the current model and create
!--       inter-communicators that connect the current model with its
!--       child models.
          tag = 500 + i
          CALL MPI_INTERCOMM_CREATE( comm, 0, peer_comm, start_pe(i),           &
                                     tag, m_to_child_comm(i), istat )
          childcount = childcount + 1
          activeparent(i) = 1
       ELSEIF ( i == m_my_cpl_id )  THEN
!
!--       Create an inter-communicator that connects the current model with
!--       its parent model.
          tag = 500 + i
          CALL MPI_INTERCOMM_CREATE( comm, 0, peer_comm,                        &
                                     start_pe(m_couplers(i)%parent_id),         &
                                     tag, m_to_parent_comm, istat )
       ENDIF
    ENDDO
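!
!-- Note: both sides of each parent-child pair call MPI_INTERCOMM_CREATE with
!-- the same tag (500 + child id). The parent passes the child's first world
!-- rank (start_pe(i)) as remote leader, the child passes the parent's first
!-- world rank, so the two calls match and yield one intercommunicator per pair.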
!
!-- If I am a parent, count the number of children I have.
!-- Although this loop is symmetric on all processes, the "activeparent" flag
!-- is true (==1) on the respective individual process only.
    ALLOCATE( pmc_parent_for_child(childcount+1) )

    childcount = 0
    DO  i = 2, m_ncpl
       IF ( activeparent(i) == 1 )  THEN
          childcount = childcount + 1
          pmc_parent_for_child(childcount) = i
       ENDIF
    ENDDO
!
!-- Get the size of the parent model
    IF ( m_my_cpl_id > 1 )  THEN
       CALL MPI_COMM_REMOTE_SIZE( m_to_parent_comm, m_parent_remote_size,       &
                                  istat )
    ELSE
!
!--    The root model does not have a parent
       m_parent_remote_size = -1
    ENDIF
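!
!-- Note: MPI_COMM_REMOTE_SIZE returns the size of the remote group of an
!-- intercommunicator, i.e. here the number of processes of the parent model.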
!
!-- Set myid to a non-zero value on all processes except process 0 of the root
!-- domain. This is a setting for the message routine, which is called at the
!-- end of pmci_init. That routine outputs messages for myid = 0 only. However,
!-- myid has not been assigned so far, so that all processes of the root model
!-- would output a message. To avoid this, set myid to some other value except
!-- for process 0 of the root domain.
    IF ( m_world_rank /= 0 )  myid = 1

 END SUBROUTINE PMC_init_model



 SUBROUTINE pmc_get_model_info( comm_world_nesting, cpl_id, cpl_name,           &
                                cpl_parent_id, lower_left_x, lower_left_y,      &
                                ncpl, npe_total, request_for_cpl_id )
!
!-- Provide module-private variables of the PMC to PALM

    USE kinds

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(OUT), OPTIONAL ::  cpl_name   !<

    INTEGER, INTENT(IN), OPTIONAL ::  request_for_cpl_id   !<

    INTEGER, INTENT(OUT), OPTIONAL ::  comm_world_nesting  !<
    INTEGER, INTENT(OUT), OPTIONAL ::  cpl_id              !<
    INTEGER, INTENT(OUT), OPTIONAL ::  cpl_parent_id       !<
    INTEGER, INTENT(OUT), OPTIONAL ::  ncpl                !<
    INTEGER, INTENT(OUT), OPTIONAL ::  npe_total           !<

    INTEGER ::  requested_cpl_id                           !<

    REAL(wp), INTENT(OUT), OPTIONAL ::  lower_left_x       !<
    REAL(wp), INTENT(OUT), OPTIONAL ::  lower_left_y       !<

!
!-- Set the requested coupler id
    IF ( PRESENT( request_for_cpl_id ) )  THEN
       requested_cpl_id = request_for_cpl_id
!
!--    Check for allowed range of values
       IF ( requested_cpl_id < 1  .OR.  requested_cpl_id > m_ncpl )  RETURN
    ELSE
       requested_cpl_id = m_my_cpl_id
    ENDIF
!
!-- Return the requested information
    IF ( PRESENT( comm_world_nesting )  )  THEN
       comm_world_nesting = m_world_comm
    ENDIF
    IF ( PRESENT( cpl_id )        )  THEN
       cpl_id = requested_cpl_id
    ENDIF
    IF ( PRESENT( cpl_parent_id ) )  THEN
       cpl_parent_id = m_couplers(requested_cpl_id)%parent_id
    ENDIF
    IF ( PRESENT( cpl_name )      )  THEN
       cpl_name = m_couplers(requested_cpl_id)%name
    ENDIF
    IF ( PRESENT( ncpl )          )  THEN
       ncpl = m_ncpl
    ENDIF
    IF ( PRESENT( npe_total )     )  THEN
       npe_total = m_couplers(requested_cpl_id)%npe_total
    ENDIF
    IF ( PRESENT( lower_left_x )  )  THEN
       lower_left_x = m_couplers(requested_cpl_id)%lower_left_x
    ENDIF
    IF ( PRESENT( lower_left_y )  )  THEN
       lower_left_y = m_couplers(requested_cpl_id)%lower_left_y
    ENDIF

 END SUBROUTINE pmc_get_model_info
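!
!-- Illustrative use (the variable names are examples only):
!--    CALL pmc_get_model_info( cpl_id = my_cpl_id, ncpl = number_of_models )
!-- returns the coupler id of the calling model and the total number of models;
!-- any combination of the optional arguments may be requested.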



 LOGICAL FUNCTION pmc_is_rootmodel( )

    IMPLICIT NONE

    pmc_is_rootmodel = ( m_my_cpl_id == 1 )

 END FUNCTION pmc_is_rootmodel



 SUBROUTINE read_coupling_layout( nesting_datatransfer_mode, nesting_mode,      &
      pmc_status )

    IMPLICIT NONE

    CHARACTER(LEN=8), INTENT(INOUT) ::  nesting_mode
    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_datatransfer_mode

    INTEGER(iwp), INTENT(INOUT) ::  pmc_status
    INTEGER(iwp)                ::  bad_llcorner
    INTEGER(iwp)                ::  i
    INTEGER(iwp)                ::  istat

    TYPE(pmc_layout), DIMENSION(pmc_max_models) ::  domain_layouts

    NAMELIST /nestpar/  domain_layouts, nesting_datatransfer_mode, nesting_mode
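!
!-- A minimal sketch of a &nestpar namelist group in the parameter file; all
!-- values are purely illustrative and not taken from this file, and the
!-- component-wise form is only one possible way of filling the list:
!--    &nestpar  domain_layouts(1)%name         = 'coarse',
!--              domain_layouts(1)%id           = 1,
!--              domain_layouts(1)%parent_id    = -1,
!--              domain_layouts(1)%npe_total    = 16,
!--              domain_layouts(1)%lower_left_x = 0.0,
!--              domain_layouts(1)%lower_left_y = 0.0,
!--              domain_layouts(2)%name         = 'fine',
!--              domain_layouts(2)%id           = 2,
!--              domain_layouts(2)%parent_id    = 1,
!--              domain_layouts(2)%npe_total    = 4,
!--              domain_layouts(2)%lower_left_x = 0.0,
!--              domain_layouts(2)%lower_left_y = 0.0,
!--              nesting_mode = 'vertical'  /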

!
!-- Initialize some coupling variables
    domain_layouts(1:pmc_max_models)%id = -1
    m_ncpl =   0

    pmc_status = pmc_status_ok
!
!-- Open the NAMELIST-file and read the nesting layout
    CALL check_open( 11 )
    READ ( 11, nestpar, IOSTAT=istat )
!
!-- Set the file pointer to the beginning of the file. Otherwise process 0
!-- will later be unable to read the inipar-NAMELIST
    REWIND ( 11 )

    IF ( istat < 0 )  THEN
!
!--    No nestpar-NAMELIST found
       pmc_status = pmc_no_namelist_found
       RETURN
    ELSEIF ( istat > 0 )  THEN
!
!--    Errors in reading nestpar-NAMELIST
       pmc_status = pmc_namelist_error
       RETURN
    ENDIF
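!
!-- Note: a negative IOSTAT indicates an end-of-file condition, i.e. no
!-- &nestpar group is present in the parameter file, whereas a positive value
!-- indicates a read error within the namelist.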
!
!-- Output location message
    CALL location_message( 'initialize communicators for nesting', .FALSE. )
!
!-- Assign the layout to the corresponding internally used variable m_couplers
    m_couplers = domain_layouts
!
!-- Get the number of nested models given in the nestpar-NAMELIST
    DO  i = 1, pmc_max_models
!
!--    When id=-1 is found for the first time, the list of domains is finished
       IF ( m_couplers(i)%id == -1  .OR.  i == pmc_max_models )  THEN
          IF ( m_couplers(i)%id == -1 )  THEN
             m_ncpl = i - 1
             EXIT
          ELSE
             m_ncpl = pmc_max_models
          ENDIF
       ENDIF
    ENDDO
!
!-- Make sure that all domains have the same lower-left corner in the case of
!-- vertical nesting
    IF ( nesting_mode == 'vertical' )  THEN
       bad_llcorner = 0
       DO  i = 1, m_ncpl
          IF ( domain_layouts(i)%lower_left_x /= 0.0_wp .OR.                    &
               domain_layouts(i)%lower_left_y /= 0.0_wp )  THEN
             bad_llcorner = bad_llcorner + 1
             domain_layouts(i)%lower_left_x = 0.0_wp
             domain_layouts(i)%lower_left_y = 0.0_wp
          ENDIF
       ENDDO
       IF ( bad_llcorner /= 0 )  THEN
          WRITE ( message_string, *)  'at least one dimension of lower ',       &
                                      'left corner of one domain is not 0. ',   &
                                      'All lower left corners were set to (0, 0)'
          CALL message( 'read_coupling_layout', 'PA0427', 0, 0, 0, 6, 0 )
       ENDIF
    ENDIF

 END SUBROUTINE read_coupling_layout

#endif
 END MODULE pmc_handle_communicator