MODULE PMC_handle_communicator

!--------------------------------------------------------------------------------!
! This file is part of PALM.
!
! PALM is free software: you can redistribute it and/or modify it under the terms
! of the GNU General Public License as published by the Free Software Foundation,
! either version 3 of the License, or (at your option) any later version.
!
! PALM is distributed in the hope that it will be useful, but WITHOUT ANY
! WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
! A PARTICULAR PURPOSE. See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with
! PALM. If not, see <http://www.gnu.org/licenses/>.
!
! Copyright 1997-2016 Leibniz Universitaet Hannover
!--------------------------------------------------------------------------------!
!
! Current revisions:
! ------------------
!
!
! Former revisions:
! -----------------
! $Id: pmc_handle_communicator_mod.f90 1883 2016-04-20 15:27:13Z raasch $
!
! 1882 2016-04-20 15:24:46Z hellstea
! MPI_BCAST-calls to broadcast nesting_mode and nesting_datatransfer_mode
! are moved out from the DO i = 1, m_ncpl loop.
!
! 1850 2016-04-08 13:29:27Z maronga
! Module renamed
!
!
! 1808 2016-04-05 19:44:00Z raasch
! MPI module used by default on all machines
!
! 1797 2016-03-21 16:50:28Z raasch
! introduction of different datatransfer modes,
! export of comm_world_nesting
!
! 1791 2016-03-11 10:41:25Z raasch
! m_nrofcpl renamed m_ncpl,
! pmc_get_local_model_info renamed pmc_get_model_info, some keywords also
! renamed and some added,
! debug write-statements commented out
!
! 1786 2016-03-08 05:49:27Z raasch
! Bugfix: nesting_mode is broadcast now
!
! 1779 2016-03-03 08:01:28Z raasch
! only the total number of PEs is given in the nestpar-NAMELIST,
! additional comments included
!
! 1764 2016-02-28 12:45:19Z raasch
! pmc_layout type: comm_cpl and comm_parent removed, character "name" moved at
! the beginning of the variable list,
! domain layout is read with new NAMELIST nestpar from standard file PARIN,
! MPI-datatype REAL8 replaced by REAL, kind=8 replaced by wp,
! variable domain_layouts instead of m_couplers introduced for this NAMELIST,
! general format changed to PALM style
!
! 1762 2016-02-25 12:31:13Z hellstea
! Initial revision by K. Ketelsen
!
! Description:
! ------------
! Handle MPI communicator in PALM model coupler
!------------------------------------------------------------------------------!

#if defined( __parallel )
    USE kinds

#if defined( __mpifh )
    INCLUDE "mpif.h"
#else
    USE MPI
#endif

    USE pmc_general,                                                           &
        ONLY: pmc_status_ok, pmc_status_error, pmc_max_modell

    IMPLICIT NONE

    TYPE pmc_layout

       CHARACTER(len=32) ::  name           ! name of the model

       INTEGER ::  id                       ! coupler id of the model
       INTEGER ::  parent_id                ! coupler id of the parent model
       INTEGER ::  npe_total                ! total number of PEs assigned to the model

       REAL(wp) ::  lower_left_x            ! x-coordinate of the lower-left corner of the domain
       REAL(wp) ::  lower_left_y            ! y-coordinate of the lower-left corner of the domain

    END TYPE pmc_layout

    PUBLIC pmc_status_ok, pmc_status_error

    INTEGER, PARAMETER, PUBLIC ::  pmc_error_npes        = 1  ! illegal number of PEs
    INTEGER, PARAMETER, PUBLIC ::  pmc_namelist_error    = 2  ! error(s) in the nestpar namelist
    INTEGER, PARAMETER, PUBLIC ::  pmc_no_namelist_found = 3  ! no coupling layout namelist found

!
!-- Coupler setup
    INTEGER ::  m_world_comm    ! global nesting communicator
    INTEGER ::  m_my_cpl_id     ! coupler id of this model
    INTEGER ::  m_parent_id     ! coupler id of the parent of this model
    INTEGER ::  m_ncpl          ! number of couplers given in the layout file

    TYPE(pmc_layout), DIMENSION(pmc_max_modell) ::  m_couplers  ! information of all couplers

!
!-- MPI settings
    INTEGER, PUBLIC ::  m_model_comm                          ! communicator of this model
    INTEGER, PUBLIC ::  m_to_server_comm                      ! communicator to the server
    INTEGER, DIMENSION(pmc_max_modell) ::  m_to_client_comm   ! communicator to the client(s)
    INTEGER, PUBLIC ::  m_world_rank
    INTEGER         ::  m_world_npes
    INTEGER, PUBLIC ::  m_model_rank
    INTEGER, PUBLIC ::  m_model_npes
    INTEGER         ::  m_server_remote_size                  ! number of server PEs

    PUBLIC m_to_client_comm

!
!-- Indicates that this PE is server for the client with the respective number
    INTEGER, DIMENSION(:), POINTER, PUBLIC ::  pmc_server_for_client

    INTERFACE pmc_is_rootmodel
       MODULE PROCEDURE pmc_is_rootmodel
    END INTERFACE pmc_is_rootmodel

    INTERFACE pmc_get_model_info
       MODULE PROCEDURE pmc_get_model_info
    END INTERFACE pmc_get_model_info

    PUBLIC pmc_get_model_info, pmc_init_model, pmc_is_rootmodel

 CONTAINS

 SUBROUTINE pmc_init_model( comm, nesting_datatransfer_mode, nesting_mode,    &
                            pmc_status )

    USE control_parameters,                                                   &
        ONLY:  message_string

    USE pegrid,                                                               &
        ONLY:  myid

    IMPLICIT NONE

    CHARACTER(LEN=7), INTENT(OUT) ::  nesting_mode
    CHARACTER(LEN=7), INTENT(OUT) ::  nesting_datatransfer_mode

    INTEGER, INTENT(OUT) ::  comm
    INTEGER, INTENT(OUT) ::  pmc_status

    INTEGER ::  i, ierr, istat
    INTEGER, DIMENSION(pmc_max_modell+1) ::  start_pe
    INTEGER ::  m_my_cpl_rank
    INTEGER ::  tag, clientcount
    INTEGER, DIMENSION(pmc_max_modell) ::  activeserver  ! I am active server for this client ID

    pmc_status   = pmc_status_ok
    comm         = -1
    m_world_comm = MPI_COMM_WORLD
    m_my_cpl_id  = -1
    clientcount  = 0
    activeserver = -1
    start_pe(:)  = 0

    CALL MPI_COMM_RANK( MPI_COMM_WORLD, m_world_rank, istat )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, m_world_npes, istat )
!
!-- Only PE 0 of the root model reads the coupling layout
    IF ( m_world_rank == 0 )  THEN

       CALL read_coupling_layout( nesting_datatransfer_mode, nesting_mode,    &
                                  pmc_status )

       IF ( pmc_status /= pmc_no_namelist_found  .AND.                        &
            pmc_status /= pmc_namelist_error )                                &
       THEN
!
!--       Calculate the start PE of every model
          start_pe(1) = 0
          DO  i = 2, m_ncpl+1
             start_pe(i) = start_pe(i-1) + m_couplers(i-1)%npe_total
          ENDDO
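!
!--       As an illustration (numbers arbitrary): for three models with
!--       npe_total = 32, 16 and 8, start_pe becomes (/ 0, 32, 48, 56 /),
!--       i.e. the models occupy global ranks 0-31, 32-47 and 48-55, and
!--       the run has to provide 56 PEs in total.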

!
!--       The number of cores provided with the run must be the same as the
!--       total sum of cores required by all nest domains
          IF ( start_pe(m_ncpl+1) /= m_world_npes )  THEN
             WRITE ( message_string, '(A,I6,A,I6,A)' )                        &
                             'nesting-setup requires more MPI procs (',       &
                             start_pe(m_ncpl+1), ') than provided (',         &
                             m_world_npes,')'
             CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
          ENDIF

       ENDIF

    ENDIF
!
!-- Broadcast the read status. This synchronises all other PEs with PE 0 of
!-- the root model. Without synchronisation, they would not behave in the
!-- correct way (e.g. they would not return in case of a missing NAMELIST).
    CALL MPI_BCAST( pmc_status, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )

    IF ( pmc_status == pmc_no_namelist_found )  THEN
!
!--    Not a nested run; return the MPI_COMM_WORLD communicator
       comm = MPI_COMM_WORLD
       RETURN

    ELSEIF ( pmc_status == pmc_namelist_error )  THEN
!
!--    Only the root model gives the error message. Others are aborted by the
!--    message-routine with MPI_ABORT. Must be done this way since myid and
!--    comm2d have not yet been assigned at this point.
       IF ( m_world_rank == 0 )  THEN
          message_string = 'errors in \$nestpar'
          CALL message( 'pmc_init_model', 'PA0223', 3, 2, 0, 6, 0 )
       ENDIF

    ENDIF

    CALL MPI_BCAST( m_ncpl, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
    CALL MPI_BCAST( start_pe, m_ncpl+1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )

!
!-- Broadcast the coupling layout
    DO  i = 1, m_ncpl
       CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%id, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%npe_total, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL, 0, MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL, 0, MPI_COMM_WORLD, istat )
    ENDDO
    CALL MPI_BCAST( nesting_mode, LEN( nesting_mode ), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
    CALL MPI_BCAST( nesting_datatransfer_mode, LEN( nesting_datatransfer_mode ), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )

!
!-- Assign the global MPI processes to the individual models by setting the
!-- coupler id
    DO  i = 1, m_ncpl
       IF ( m_world_rank >= start_pe(i)  .AND.  m_world_rank < start_pe(i+1) )  &
       THEN
          m_my_cpl_id = i
          EXIT
       ENDIF
    ENDDO
    m_my_cpl_rank = m_world_rank - start_pe(i)

!
!-- MPI_COMM_WORLD is the communicator for ALL models (MPI-1 approach).
!-- The communicators for the individual models are created by MPI_COMM_SPLIT.
!-- The color of a model is given by its coupler id; the key (m_my_cpl_rank)
!-- preserves the ordering of the PEs within each model.
    CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm,    &
                         istat )
!
!-- Get size and rank of the model running on this PE
    CALL MPI_COMM_RANK( comm, m_model_rank, istat )
    CALL MPI_COMM_SIZE( comm, m_model_npes, istat )

!
!-- Broadcast (from PE 0) the parent id and id of every model
    DO  i = 1, m_ncpl
       CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0,            &
                       MPI_COMM_WORLD, istat )
       CALL MPI_BCAST( m_couplers(i)%id, 1, MPI_INTEGER, 0,                   &
                       MPI_COMM_WORLD, istat )
    ENDDO

!
!-- Save the current model communicator for pmc internal use
    m_model_comm = comm

!
!-- Create intercommunicators between the server and the clients.
!-- MPI_INTERCOMM_CREATE creates an intercommunicator between 2 groups of
!-- different colors.
!-- The grouping was done above with MPI_COMM_SPLIT.
    DO  i = 2, m_ncpl

       IF ( m_couplers(i)%parent_id == m_my_cpl_id )  THEN
!
!--       Collect server PEs.
!--       Every model except the root model has a parent model which acts as
!--       its server model. Create an intercommunicator to connect the
!--       current PE to all client PEs.
          tag = 500 + i
          CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD, start_pe(i),    &
                                     tag, m_to_client_comm(i), istat )
          clientcount = clientcount + 1
          activeserver(i) = 1

       ELSEIF ( i == m_my_cpl_id )  THEN
!
!--       Collect client PEs.
!--       Every model except the root model has a parent model which acts as
!--       its server model. Create an intercommunicator to connect the
!--       current PE to all server PEs.
          tag = 500 + i
          CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD,                 &
                                     start_pe(m_couplers(i)%parent_id),       &
                                     tag, m_to_server_comm, istat )
       ENDIF

    ENDDO

!
!-- If I am a server, count the number of clients that I have.
!-- Although this loop is symmetric on all processes, the "activeserver" flag
!-- is true (==1) on the respective individual PE only.
    ALLOCATE( pmc_server_for_client(clientcount+1) )

    clientcount = 0
    DO  i = 2, m_ncpl
       IF ( activeserver(i) == 1 )  THEN
          clientcount = clientcount + 1
          pmc_server_for_client(clientcount) = i
       ENDIF
    ENDDO
!
!-- Get the size of the server model
    IF ( m_my_cpl_id > 1 )  THEN
       CALL MPI_COMM_REMOTE_SIZE( m_to_server_comm, m_server_remote_size,     &
                                  istat )
    ELSE
!
!--    The root model does not have a server
       m_server_remote_size = -1
    ENDIF
!
!-- Set myid to a non-zero value for all PEs except PE 0 of the root domain.
!-- This is a setting for the message routine which is called at the end of
!-- pmci_init. That routine outputs messages for myid = 0 only. However, myid
!-- has not been assigned so far, so that otherwise all PEs of the root model
!-- would output a message.
    IF ( m_world_rank /= 0 )  myid = 1

 END SUBROUTINE pmc_init_model


!
!-- Provide module private variables of the pmc for PALM
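!-- An illustrative call (all arguments are optional keywords; the receiving
!-- variables belong to the caller and are named here for example only):
!--    CALL pmc_get_model_info( cpl_id = my_cpl_id, cpl_name = my_cpl_name,   &
!--                             npe_total = my_npe_total )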
 SUBROUTINE pmc_get_model_info( comm_world_nesting, cpl_id, cpl_name,         &
                                cpl_parent_id, lower_left_x, lower_left_y,    &
                                ncpl, npe_total, request_for_cpl_id )

    USE kinds

    IMPLICIT NONE

    CHARACTER(LEN=*), INTENT(OUT), OPTIONAL ::  cpl_name

    INTEGER, INTENT(IN), OPTIONAL ::  request_for_cpl_id

    INTEGER, INTENT(OUT), OPTIONAL ::  comm_world_nesting
    INTEGER, INTENT(OUT), OPTIONAL ::  cpl_id
    INTEGER, INTENT(OUT), OPTIONAL ::  cpl_parent_id
    INTEGER, INTENT(OUT), OPTIONAL ::  ncpl
    INTEGER, INTENT(OUT), OPTIONAL ::  npe_total

    INTEGER ::  requested_cpl_id

    REAL(wp), INTENT(OUT), OPTIONAL ::  lower_left_x
    REAL(wp), INTENT(OUT), OPTIONAL ::  lower_left_y

!
!-- Set the requested coupler id
    IF ( PRESENT( request_for_cpl_id ) )  THEN
       requested_cpl_id = request_for_cpl_id
!
!--    Check for allowed range of values
       IF ( requested_cpl_id < 1  .OR.  requested_cpl_id > m_ncpl )  RETURN
    ELSE
       requested_cpl_id = m_my_cpl_id
    ENDIF

!
!-- Return the requested information
    IF ( PRESENT( comm_world_nesting ) )  THEN
       comm_world_nesting = m_world_comm
    ENDIF
    IF ( PRESENT( cpl_id ) )  THEN
       cpl_id = requested_cpl_id
    ENDIF
    IF ( PRESENT( cpl_parent_id ) )  THEN
       cpl_parent_id = m_couplers(requested_cpl_id)%parent_id
    ENDIF
    IF ( PRESENT( cpl_name ) )  THEN
       cpl_name = m_couplers(requested_cpl_id)%name
    ENDIF
    IF ( PRESENT( ncpl ) )  THEN
       ncpl = m_ncpl
    ENDIF
    IF ( PRESENT( npe_total ) )  THEN
       npe_total = m_couplers(requested_cpl_id)%npe_total
    ENDIF
    IF ( PRESENT( lower_left_x ) )  THEN
       lower_left_x = m_couplers(requested_cpl_id)%lower_left_x
    ENDIF
    IF ( PRESENT( lower_left_y ) )  THEN
       lower_left_y = m_couplers(requested_cpl_id)%lower_left_y
    ENDIF

 END SUBROUTINE pmc_get_model_info



 LOGICAL FUNCTION pmc_is_rootmodel( )

    IMPLICIT NONE

    pmc_is_rootmodel = ( m_my_cpl_id == 1 )

 END FUNCTION pmc_is_rootmodel



 SUBROUTINE read_coupling_layout( nesting_datatransfer_mode, nesting_mode,    &
                                  pmc_status )

    IMPLICIT NONE

    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_mode
    CHARACTER(LEN=7), INTENT(INOUT) ::  nesting_datatransfer_mode

    INTEGER, INTENT(INOUT) ::  pmc_status
    INTEGER                ::  i, istat

    TYPE(pmc_layout), DIMENSION(pmc_max_modell) ::  domain_layouts

!-- TO_DO: include anterp_relax_length_? into nestpar and communicate them.
    NAMELIST /nestpar/  domain_layouts, nesting_datatransfer_mode, nesting_mode
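!
!-- For orientation, a nestpar NAMELIST in PARIN could look like the example
!-- below (names and values are purely illustrative; each domain_layouts
!-- entry lists name, id, parent_id, npe_total, lower_left_x, lower_left_y):
!--
!--    &nestpar  domain_layouts = 'coarse',  1,  -1,  32,    0.0,    0.0,
!--                               'fine',    2,   1,  16,  400.0,  300.0,
!--              nesting_mode = 'two-way',
!--              nesting_datatransfer_mode = 'mixed'  /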

!
!-- Initialize some coupling variables
    domain_layouts(1:pmc_max_modell)%id = -1
    m_ncpl = 0

    pmc_status = pmc_status_ok

!
!-- Open the NAMELIST-file and read the nesting layout
    CALL check_open( 11 )
    READ ( 11, nestpar, IOSTAT=istat )

    IF ( istat < 0 )  THEN
!
!--    No nestpar-NAMELIST found
       pmc_status = pmc_no_namelist_found
!
!--    Set the file pointer back to the beginning of the file. Otherwise PE 0
!--    will later be unable to read the inipar-NAMELIST
       REWIND ( 11 )
       RETURN

    ELSEIF ( istat > 0 )  THEN
!
!--    Errors in reading the nestpar-NAMELIST
       pmc_status = pmc_namelist_error
       RETURN

    ENDIF

!
!-- Output location message
    CALL location_message( 'initialize communicators for nesting', .FALSE. )
!
!-- Assign the layout to the internally used variable
    m_couplers = domain_layouts

!
!-- Get the number of nested models given in the nestpar-NAMELIST
    DO  i = 1, pmc_max_modell
!
!--    When id=-1 is found for the first time, the list of domains is finished
       IF ( m_couplers(i)%id == -1  .OR.  i == pmc_max_modell )  THEN
          IF ( m_couplers(i)%id == -1 )  THEN
             m_ncpl = i - 1
             EXIT
          ELSE
             m_ncpl = pmc_max_modell
          ENDIF
       ENDIF

    ENDDO

 END SUBROUTINE read_coupling_layout

#endif
END MODULE pmc_handle_communicator
---|