source: palm/trunk/SOURCE/pmc_handle_communicator_mod.f90 @ 3886

Last change on this file: r3885, checked in by kanani, 2019-04-11: restructure/add location/debug messages

!> @file pmc_handle_communicator_mod.f90
!------------------------------------------------------------------------------!
! This file is part of the PALM model system.
!
! PALM is free software: you can redistribute it and/or modify it under the
! terms of the GNU General Public License as published by the Free Software
! Foundation, either version 3 of the License, or (at your option) any later
! version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2019 Leibniz Universitaet Hannover
!------------------------------------------------------------------------------!
!
! Current revisions:
! ------------------
!
!
! Former revisions:
! -----------------
! $Id: pmc_handle_communicator_mod.f90 3885 2019-04-11 11:29:34Z suehring $
! Changes related to global restructuring of location messages and introduction
! of additional debug messages
!
! 3819 2019-03-27 11:01:36Z hellstea
! Adjustable anterpolation buffer introduced on all nest boundaries, it is controlled
! by the new nesting_parameters parameter anterpolation_buffer_width.
!
! 3655 2019-01-07 16:51:22Z knoop
! nestpar renamed to nesting_parameters
!
! 2841 2018-02-27 15:02:57Z knoop
! Bugfix: wrong placement of include 'mpif.h' corrected
!
! 2801 2018-02-14 16:01:55Z thiele
! Introduce particle transfer in nested models.
!
! 2718 2018-01-02 08:49:38Z maronga
! Corrected "Former revisions" section
!
! 2696 2017-12-14 17:12:51Z kanani
! Change in file header (GPL part)
! Bugfix, give Intent(inout) attributes in routine reading nestpar-namelist (MS)
!
! 2599 2017-11-01 13:18:45Z hellstea
! Separate peer communicator peer_comm introduced for MPI_INTERCOMM_CREATE.
! Some cleanup and commenting improvements.
!
! 2516 2017-10-04 11:03:04Z suehring
! Remove tabs
!
! 2514 2017-10-04 09:52:37Z suehring
! Bugfix, set filepointer to the beginning of the file after namelist read,
! in order to assure that further namelists are also found.
!
! 2279 2017-06-12 15:23:44Z suehring
! Error message text changed
!
! 2101 2017-01-05 16:42:31Z suehring
!
! 2013 2016-09-21 13:07:56Z suehring
! Bugfix in format descriptor
!
! 2000 2016-08-20 18:09:15Z knoop
! Forced header and separation lines into 80 columns
!
! 1938 2016-06-13 15:26:05Z hellstea
! Minor clean-up.
!
! 1901 2016-05-04 15:39:38Z raasch
! Initial version of purely vertical nesting introduced.
! Code clean up. The words server/client changed to parent/child.
!
! 1900 2016-05-04 15:27:53Z raasch
! re-formatting to match PALM style
!
! 1882 2016-04-20 15:24:46Z hellstea
! MPI_BCAST-calls to broadcast nesting_mode and nesting_datatransfer_mode
! are moved out from the DO i = 1, m_ncpl loop.
!
! 1850 2016-04-08 13:29:27Z maronga
! Module renamed
!
! 1808 2016-04-05 19:44:00Z raasch
! MPI module used by default on all machines
!
! 1797 2016-03-21 16:50:28Z raasch
! introduction of different datatransfer modes,
! export of comm_world_nesting
!
! 1791 2016-03-11 10:41:25Z raasch
! m_nrofcpl renamed m_ncpl,
! pmc_get_local_model_info renamed pmc_get_model_info, some keywords also
! renamed and some added,
! debug write-statements commented out
!
! 1786 2016-03-08 05:49:27Z raasch
! Bugfix: nesting_mode is broadcast now
!
! 1779 2016-03-03 08:01:28Z raasch
! only the total number of PEs is given in the nestpar-NAMELIST,
! additional comments included
!
! 1764 2016-02-28 12:45:19Z raasch
! pmc_layout type: comm_cpl and comm_parent removed, character "name" moved at
! the beginning of the variable list,
! domain layout is read with new NAMELIST nestpar from standard file PARIN,
! MPI-datatype REAL8 replaced by REAL, kind=8 replaced by wp,
! variable domain_layouts instead of m_couplers introduced for this NAMELIST,
! general format changed to PALM style
!
! 1762 2016-02-25 12:31:13Z hellstea
! Initial revision by K. Ketelsen
!
! Description:
! ------------
! Handle MPI communicator in PALM model coupler
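! The module reads the nesting layout from the nesting_parameters namelist,
! splits MPI_COMM_WORLD into one communicator per model, and creates
! intercommunicators that connect each parent model with its children.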
!------------------------------------------------------------------------------!
 MODULE PMC_handle_communicator
#if defined( __parallel )
    USE kinds

#if !defined( __mpifh )
    USE MPI
#endif

    USE pmc_general,                                                            &
        ONLY: pmc_status_ok, pmc_status_error, pmc_max_models
    USE control_parameters,                                                     &
        ONLY: message_string

    IMPLICIT NONE

#if defined( __mpifh )
    INCLUDE "mpif.h"
#endif

    TYPE pmc_layout

       CHARACTER(LEN=32) ::  name

       INTEGER  ::  id            !< coupler id of the model
       INTEGER  ::  parent_id     !< coupler id of the parent model
       INTEGER  ::  npe_total     !< total number of processes requested for the model

       REAL(wp) ::  lower_left_x  !< x-coordinate of the lower left corner of the domain
       REAL(wp) ::  lower_left_y  !< y-coordinate of the lower left corner of the domain

    END TYPE pmc_layout

    PUBLIC  pmc_status_ok, pmc_status_error

    INTEGER, PARAMETER, PUBLIC ::  pmc_error_npes        = 1  !< illegal number of processes
    INTEGER, PARAMETER, PUBLIC ::  pmc_namelist_error    = 2  !< error(s) in nesting_parameters namelist
    INTEGER, PARAMETER, PUBLIC ::  pmc_no_namelist_found = 3  !< no couple layout namelist found

    INTEGER ::  m_world_comm  !< global nesting communicator
    INTEGER ::  m_my_cpl_id   !< coupler id of this model
    INTEGER ::  m_parent_id   !< coupler id of parent of this model
    INTEGER ::  m_ncpl        !< number of couplers given in nesting_parameters namelist

    TYPE(pmc_layout), PUBLIC, DIMENSION(pmc_max_models) ::  m_couplers  !< information of all couplers

    INTEGER, PUBLIC ::  m_model_comm          !< communicator of this model
    INTEGER, PUBLIC ::  m_to_parent_comm      !< communicator to the parent
    INTEGER, PUBLIC ::  m_world_rank          !< rank of this process in MPI_COMM_WORLD
    INTEGER         ::  m_world_npes          !< total number of processes in MPI_COMM_WORLD
    INTEGER, PUBLIC ::  m_model_rank          !< rank of this process within its model
    INTEGER, PUBLIC ::  m_model_npes          !< number of processes of this model
    INTEGER         ::  m_parent_remote_size  !< number of processes in the parent model
    INTEGER         ::  peer_comm             !< peer_communicator for inter communicators

    INTEGER, DIMENSION(pmc_max_models), PUBLIC ::  m_to_child_comm    !< communicator to the child(ren)
    INTEGER, DIMENSION(:), POINTER, PUBLIC ::  pmc_parent_for_child   !< list of child ids for which this model is the parent


    INTERFACE pmc_is_rootmodel
       MODULE PROCEDURE pmc_is_rootmodel
    END INTERFACE pmc_is_rootmodel

    INTERFACE pmc_get_model_info
       MODULE PROCEDURE pmc_get_model_info
    END INTERFACE pmc_get_model_info

    PUBLIC pmc_get_model_info, pmc_init_model, pmc_is_rootmodel

 CONTAINS

 SUBROUTINE pmc_init_model( comm, nesting_datatransfer_mode, nesting_mode,      &
                            anterpolation_buffer_width, pmc_status )

    USE control_parameters,                                                     &
        ONLY:  message_string

    USE pegrid,                                                                 &
        ONLY:  myid

    IMPLICIT NONE

    CHARACTER(LEN=8), INTENT(INOUT) ::  nesting_mode               !<
    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_datatransfer_mode  !<

    INTEGER, INTENT(INOUT) ::  anterpolation_buffer_width          !< Boundary buffer width for anterpolation
    INTEGER, INTENT(INOUT) ::  comm        !< communicator of this model, returned to the calling program
    INTEGER, INTENT(INOUT) ::  pmc_status  !< status of the pmc initialization

    INTEGER ::  childcount     !<
    INTEGER ::  i              !<
    INTEGER ::  ierr           !<
    INTEGER ::  istat          !<
    INTEGER ::  m_my_cpl_rank  !<
    INTEGER ::  tag            !<

    INTEGER, DIMENSION(pmc_max_models)   ::  activeparent  ! I am active parent for this child ID
    INTEGER, DIMENSION(pmc_max_models+1) ::  start_pe

    pmc_status   = pmc_status_ok
    comm         = -1
    m_world_comm = MPI_COMM_WORLD
    m_my_cpl_id  = -1
    childcount   =  0
    activeparent = -1
    start_pe(:)  =  0

    CALL MPI_COMM_RANK( MPI_COMM_WORLD, m_world_rank, istat )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, m_world_npes, istat )
!
!-- Only process 0 of root model reads
    IF ( m_world_rank == 0 )  THEN

       CALL read_coupling_layout( nesting_datatransfer_mode, nesting_mode,      &
                                  anterpolation_buffer_width, pmc_status )

       IF ( pmc_status /= pmc_no_namelist_found  .AND.                          &
            pmc_status /= pmc_namelist_error )                                  &
       THEN
!
!--       Calculate start PE of every model
          start_pe(1) = 0
          DO  i = 2, m_ncpl+1
             start_pe(i) = start_pe(i-1) + m_couplers(i-1)%npe_total
          ENDDO
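!
!--       For example (figures are illustrative only): for a root model with
!--       npe_total = 16 and one child with npe_total = 8 this gives
!--       start_pe = (/ 0, 16, 24 /), i.e. world ranks 0-15 belong to the
!--       root model and ranks 16-23 to the child.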

!
!--       The sum of numbers of processes requested by all the domains
!--       must be equal to the total number of processes of the run
          IF ( start_pe(m_ncpl+1) /= m_world_npes )  THEN
             WRITE ( message_string, '(2A,I6,2A,I6,A)' )                        &
                             'nesting-setup requires different number of ',     &
                             'MPI procs (', start_pe(m_ncpl+1), ') than ',      &
                             'provided (', m_world_npes,')'
             CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
          ENDIF

       ENDIF

    ENDIF
!
!-- Broadcast the read status. This synchronises all other processes with
!-- process 0 of the root model. Without synchronisation, they would not
!-- behave in the correct way (e.g. they would not return in case of a
!-- missing NAMELIST).
    CALL MPI_BCAST( pmc_status, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )

    IF ( pmc_status == pmc_no_namelist_found )  THEN
!
!--    Not a nested run; return the MPI_WORLD communicator
       comm = MPI_COMM_WORLD
       RETURN

    ELSEIF ( pmc_status == pmc_namelist_error )  THEN
!
!--    Only the root model gives the error message. Others are aborted by the
!--    message-routine with MPI_ABORT. Must be done this way since myid and
!--    comm2d have not yet been assigned at this point.
       IF ( m_world_rank == 0 )  THEN
          message_string = 'errors in \$nesting_parameters'
          CALL message( 'pmc_init_model', 'PA0223', 3, 2, 0, 6, 0 )
       ENDIF

    ENDIF

    CALL MPI_BCAST( m_ncpl,          1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
    CALL MPI_BCAST( start_pe, m_ncpl+1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
!
!-- Broadcast coupling layout
    DO  i = 1, m_ncpl
       CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ),           &
                       MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%id,           1, MPI_INTEGER, 0,           &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%Parent_id,    1, MPI_INTEGER, 0,           &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%npe_total,    1, MPI_INTEGER, 0,           &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL,    0,           &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL,    0,           &
                       MPI_COMM_WORLD, istat )
    ENDDO
    CALL MPI_BCAST( nesting_mode, LEN( nesting_mode ), MPI_CHARACTER, 0,        &
                    MPI_COMM_WORLD, istat )
    CALL MPI_BCAST( nesting_datatransfer_mode, LEN(nesting_datatransfer_mode),  &
                    MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
!
!-- Assign global MPI processes to individual models by setting the couple id
    DO  i = 1, m_ncpl
       IF ( m_world_rank >= start_pe(i)  .AND.  m_world_rank < start_pe(i+1) )  &
       THEN
          m_my_cpl_id = i
          EXIT
       ENDIF
    ENDDO
    m_my_cpl_rank = m_world_rank - start_pe(i)
!
!-- MPI_COMM_WORLD is the communicator for ALL models (MPI-1 approach).
!-- The communicators for the individual models are created by MPI_COMM_SPLIT.
!-- The color of the model is given by its coupler id.
    CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm,      &
                         istat )
!
!-- Get size and rank of the model running on this process
    CALL  MPI_COMM_RANK( comm, m_model_rank, istat )
    CALL  MPI_COMM_SIZE( comm, m_model_npes, istat )
!
!-- Broadcast (from process 0) the parent id and id of every model
    DO  i = 1, m_ncpl
       CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0,              &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%id,        1, MPI_INTEGER, 0,              &
                       MPI_COMM_WORLD, istat )
    ENDDO
!
!-- Save the current model communicator for pmc internal use
    m_model_comm = comm

!
!-- Create intercommunicator between the parent and children.
!-- MPI_INTERCOMM_CREATE creates an intercommunicator between 2 groups of
!-- different colors.
!-- The grouping was done above with MPI_COMM_SPLIT.
!-- A duplicate of MPI_COMM_WORLD is created and used as peer communicator
!-- (peer_comm) for MPI_INTERCOMM_CREATE.
    CALL MPI_COMM_DUP( MPI_COMM_WORLD, peer_comm, ierr )
    DO  i = 2, m_ncpl
       IF ( m_couplers(i)%parent_id == m_my_cpl_id )  THEN
!
!--       Identify all child models of the current model and create
!--       inter-communicators connecting the current model with each of
!--       its child models.
          tag = 500 + i
          CALL MPI_INTERCOMM_CREATE( comm, 0, peer_comm, start_pe(i),           &
                                     tag, m_to_child_comm(i), istat)
          childcount = childcount + 1
          activeparent(i) = 1
       ELSEIF ( i == m_my_cpl_id)  THEN
!
!--       Create an inter-communicator connecting the current model with
!--       its parent model.
          tag = 500 + i
          CALL MPI_INTERCOMM_CREATE( comm, 0, peer_comm,                        &
                                     start_pe(m_couplers(i)%parent_id),         &
                                     tag, m_to_parent_comm, istat )
       ENDIF
    ENDDO
!
!-- If I am a parent, count the number of children I have.
!-- Although this loop is symmetric on all processes, the "activeparent" flag
!-- is true (==1) on the respective individual process only.
    ALLOCATE( pmc_parent_for_child(childcount+1) )

    childcount = 0
    DO  i = 2, m_ncpl
       IF ( activeparent(i) == 1 )  THEN
          childcount = childcount + 1
          pmc_parent_for_child(childcount) = i
       ENDIF
    ENDDO
!
!-- Get the size of the parent model
    IF ( m_my_cpl_id > 1 )  THEN
       CALL MPI_COMM_REMOTE_SIZE( m_to_parent_comm, m_parent_remote_size,       &
                                  istat )
    ELSE
!
!--    The root model does not have a parent
       m_parent_remote_size = -1
    ENDIF
!
!-- Set myid to non-zero value except for the root domain. This is a setting
!-- for the message routine which is called at the end of pmci_init. That
!-- routine outputs messages for myid = 0, only. However, myid has not been
!-- assigned so far, so that all processes of the root model would output a
!-- message. To avoid this, set myid to some other value except for process 0
!-- of the root domain.
    IF ( m_world_rank /= 0 )  myid = 1

 END SUBROUTINE PMC_init_model



 SUBROUTINE pmc_get_model_info( comm_world_nesting, cpl_id, cpl_name,           &
                                cpl_parent_id, lower_left_x, lower_left_y,      &
                                ncpl, npe_total, request_for_cpl_id )
!
!-- Provide module private variables of the pmc for PALM
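!--
!-- All arguments are optional; a caller requests only the information it
!-- needs, e.g. (hypothetical call with made-up variable names, shown for
!-- illustration only):
!--    CALL pmc_get_model_info( cpl_id = my_id, cpl_parent_id = parent_id,      &
!--                             npe_total = npes )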

    USE kinds

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(OUT), OPTIONAL ::  cpl_name   !< name of the requested model

    INTEGER, INTENT(IN), OPTIONAL ::  request_for_cpl_id   !< id of the model for which information is requested

    INTEGER, INTENT(OUT), OPTIONAL ::  comm_world_nesting  !< global nesting communicator
    INTEGER, INTENT(OUT), OPTIONAL ::  cpl_id              !< id of the requested model
    INTEGER, INTENT(OUT), OPTIONAL ::  cpl_parent_id       !< parent id of the requested model
    INTEGER, INTENT(OUT), OPTIONAL ::  ncpl                !< total number of coupled models
    INTEGER, INTENT(OUT), OPTIONAL ::  npe_total           !< number of processes of the requested model

    INTEGER ::  requested_cpl_id                           !< id of the model for which information is returned

    REAL(wp), INTENT(OUT), OPTIONAL ::  lower_left_x       !< x-coordinate of the lower left corner of the requested model
    REAL(wp), INTENT(OUT), OPTIONAL ::  lower_left_y       !< y-coordinate of the lower left corner of the requested model

!
!-- Set the requested coupler id
    IF ( PRESENT( request_for_cpl_id ) )  THEN
       requested_cpl_id = request_for_cpl_id
!
!--    Check for allowed range of values
       IF ( requested_cpl_id < 1  .OR.  requested_cpl_id > m_ncpl )  RETURN
    ELSE
       requested_cpl_id = m_my_cpl_id
    ENDIF
!
!-- Return the requested information
    IF ( PRESENT( comm_world_nesting )  )  THEN
       comm_world_nesting = m_world_comm
    ENDIF
    IF ( PRESENT( cpl_id )        )  THEN
       cpl_id = requested_cpl_id
    ENDIF
    IF ( PRESENT( cpl_parent_id ) )  THEN
       cpl_parent_id = m_couplers(requested_cpl_id)%parent_id
    ENDIF
    IF ( PRESENT( cpl_name )      )  THEN
       cpl_name = m_couplers(requested_cpl_id)%name
    ENDIF
    IF ( PRESENT( ncpl )          )  THEN
       ncpl = m_ncpl
    ENDIF
    IF ( PRESENT( npe_total )     )  THEN
       npe_total = m_couplers(requested_cpl_id)%npe_total
    ENDIF
    IF ( PRESENT( lower_left_x )  )  THEN
       lower_left_x = m_couplers(requested_cpl_id)%lower_left_x
    ENDIF
    IF ( PRESENT( lower_left_y )  )  THEN
       lower_left_y = m_couplers(requested_cpl_id)%lower_left_y
    ENDIF

 END SUBROUTINE pmc_get_model_info



 LOGICAL function pmc_is_rootmodel( )

    IMPLICIT NONE

    pmc_is_rootmodel = ( m_my_cpl_id == 1 )

 END FUNCTION pmc_is_rootmodel



 SUBROUTINE read_coupling_layout( nesting_datatransfer_mode, nesting_mode,      &
      anterpolation_buffer_width, pmc_status )

    IMPLICIT NONE

    CHARACTER(LEN=8), INTENT(INOUT) ::  nesting_mode
    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_datatransfer_mode

    INTEGER, INTENT(INOUT)      ::  anterpolation_buffer_width     !< Boundary buffer width for anterpolation
    INTEGER(iwp), INTENT(INOUT) ::  pmc_status
    INTEGER(iwp)                ::  bad_llcorner
    INTEGER(iwp)                ::  i
    INTEGER(iwp)                ::  istat

    TYPE(pmc_layout), DIMENSION(pmc_max_models) ::  domain_layouts

    NAMELIST /nesting_parameters/  domain_layouts, nesting_datatransfer_mode,  &
                                   nesting_mode, anterpolation_buffer_width

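!
!-- A minimal sketch of how this namelist might appear in the PARIN file
!-- (all names and figures below are illustrative only; each domain_layouts
!-- entry lists name, id, parent_id, npe_total, lower_left_x, lower_left_y):
!--
!--    &nesting_parameters
!--       domain_layouts = 'coarse',  1,  -1,  16,    0.0,    0.0,
!--                        'fine',    2,   1,   8,  400.0,  400.0,
!--       nesting_mode              = 'two-way',
!--       nesting_datatransfer_mode = 'mixed',
!--    /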
!
!-- Initialize some coupling variables
    domain_layouts(1:pmc_max_models)%id = -1
    m_ncpl =   0

    pmc_status = pmc_status_ok
!
!-- Open the NAMELIST-file and read the nesting layout
    CALL check_open( 11 )
    READ ( 11, nesting_parameters, IOSTAT=istat )
!
!-- Set filepointer to the beginning of the file. Otherwise process 0 will later
!-- be unable to read the inipar-NAMELIST
    REWIND ( 11 )

    IF ( istat < 0 )  THEN
!
!--    No nesting_parameters-NAMELIST found
       pmc_status = pmc_no_namelist_found
       RETURN
    ELSEIF ( istat > 0 )  THEN
!
!--    Errors in reading nesting_parameters-NAMELIST
       pmc_status = pmc_namelist_error
       RETURN
    ENDIF
!
!-- Output location message
    CALL location_message( 'initialize communicators for nesting', 'start' )
!
!-- Assign the layout to the corresponding internally used variable m_couplers
    m_couplers = domain_layouts
!
!-- Get the number of nested models given in the nesting_parameters-NAMELIST
    DO  i = 1, pmc_max_models
!
!--    When id=-1 is found for the first time, the list of domains is finished
       IF ( m_couplers(i)%id == -1  .OR.  i == pmc_max_models )  THEN
          IF ( m_couplers(i)%id == -1 )  THEN
             m_ncpl = i - 1
             EXIT
          ELSE
             m_ncpl = pmc_max_models
          ENDIF
       ENDIF
    ENDDO
!
!-- Make sure that all domains have equal lower left corner in case of vertical
!-- nesting
    IF ( nesting_mode == 'vertical' )  THEN
       bad_llcorner = 0
       DO  i = 1, m_ncpl
          IF ( domain_layouts(i)%lower_left_x /= 0.0_wp .OR.                    &
               domain_layouts(i)%lower_left_y /= 0.0_wp )  THEN
             bad_llcorner = bad_llcorner + 1
             domain_layouts(i)%lower_left_x = 0.0_wp
             domain_layouts(i)%lower_left_y = 0.0_wp
          ENDIF
       ENDDO
       IF ( bad_llcorner /= 0)  THEN
          WRITE ( message_string, *)  'at least one dimension of lower ',       &
                                      'left corner of one domain is not 0. ',   &
                                      'All lower left corners were set to (0, 0)'
          CALL message( 'read_coupling_layout', 'PA0427', 0, 0, 0, 6, 0 )
       ENDIF
    ENDIF

    CALL location_message( 'initialize communicators for nesting', 'finished' )

 END SUBROUTINE read_coupling_layout

#endif
 END MODULE pmc_handle_communicator