Changeset 1764 for palm/trunk/SOURCE


Timestamp:
Feb 28, 2016 12:45:19 PM
Author:
raasch
Message:

update of the nested domain system + some bugfixes

Location:
palm/trunk/SOURCE
Files:
18 edited

  • palm/trunk/SOURCE/Makefile

    r1763 r1764  
    2020# Current revisions:
    2121# ------------------
    22 #
     22# update dependency of check_parameters, header, local_stop,
     23# pmc_handle_communicator
    2324#
    2425# Former revisions:
     
    327328check_for_restart.o: modules.o mod_kinds.o
    328329check_open.o: modules.o mod_kinds.o mod_particle_attributes.o
    329 check_parameters.o: modules.o mod_kinds.o subsidence.o land_surface_model.o\
    330         plant_canopy_model.o radiation_model.o
     330check_parameters.o: modules.o mod_kinds.o land_surface_model.o\
     331        plant_canopy_model.o pmc_interface.o radiation_model.o subsidence.o
    331332close_file.o: modules.o mod_kinds.o
    332333compute_vpt.o: modules.o mod_kinds.o
     
    363364global_min_max.o: modules.o mod_kinds.o
    364365header.o: modules.o cpulog.o mod_kinds.o land_surface_model.o\
    365           plant_canopy_model.o radiation_model.o subsidence.o
     366          plant_canopy_model.o pmc_interface.o radiation_model.o subsidence.o
    366367impact_of_latent_heat.o: modules.o mod_kinds.o
    367368inflow_turbulence.o: modules.o cpulog.o mod_kinds.o
     
    385386local_flush.o: mod_kinds.o
    386387local_getenv.o: modules.o mod_kinds.o
    387 local_stop.o: modules.o mod_kinds.o
     388local_stop.o: modules.o mod_kinds.o pmc_interface.o
    388389local_tremain.o: modules.o cpulog.o mod_kinds.o
    389390local_tremain_ini.o: modules.o cpulog.o mod_kinds.o
     
    419420lpm_write_restart_file.o: modules.o mod_kinds.o mod_particle_attributes.o
    420421ls_forcing.o: modules.o cpulog.o mod_kinds.o
    421 message.o: modules.o mod_kinds.o
     422message.o: modules.o mod_kinds.o pmc_interface.o
    422423microphysics.o: modules.o cpulog.o mod_kinds.o
    423424modules.o: modules.f90 mod_kinds.o
     
    430431palm.o: modules.o cpulog.o ls_forcing.o mod_kinds.o nudging.o\
    431432        pmc_interface.o surface_layer_fluxes.o
    432 parin.o: modules.o cpulog.o mod_kinds.o progress_bar.o
     433parin.o: modules.o cpulog.o mod_kinds.o pmc_interface.o progress_bar.o
    433434plant_canopy_model.o: modules.o mod_kinds.o
    434435pmc_interface.o: modules.o mod_kinds.o pmc_client.o pmc_general.o\
    435436        pmc_handle_communicator.o pmc_mpi_wrapper.o pmc_server.o
    436 pmc_client.o: pmc_general.o pmc_handle_communicator.o pmc_mpi_wrapper.o
    437 pmc_handle_communicator.o: pmc_general.o
     437pmc_client.o: mod_kinds.o pmc_general.o pmc_handle_communicator.o\
     438   pmc_mpi_wrapper.o
     439pmc_general.o: mod_kinds.o
     440pmc_handle_communicator.o: modules.o mod_kinds.o pmc_general.o
    438441pmc_mpi_wrapper.o: pmc_handle_communicator.o
    439442pmc_server.o: pmc_general.o pmc_handle_communicator.o pmc_mpi_wrapper.o
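
The new Makefile dependencies above follow directly from module usage: any source file that now USEs pmc_interface can only be compiled after pmc_interface.mod exists, so its object rule has to list pmc_interface.o. A minimal, self-contained sketch of that relationship (stub module and program names are invented for illustration and are not part of the changeset):

MODULE pmc_interface_stub          ! stands in for pmc_interface.o / pmc_interface.mod
   IMPLICIT NONE
   LOGICAL ::  nested_run = .FALSE.
   INTEGER ::  cpl_id     = 1
END MODULE pmc_interface_stub

PROGRAM dependency_example         ! stands in for check_parameters, header, local_stop, ...
   USE pmc_interface_stub, ONLY:  cpl_id, nested_run
   IMPLICIT NONE
   IF ( nested_run )  PRINT*, 'nest id: ', cpl_id
END PROGRAM dependency_example
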
  • palm/trunk/SOURCE/boundary_conds.f90

    r1763 r1764  
    1919! Current revisions:
    2020! -----------------
    21 !
     21! index bug for u_p at left outflow removed
    2222!
    2323! Former revisions:
     
    820820!--       Top boundary at the outflow
    821821          IF ( ibc_uv_t == 0 )  THEN
    822              u_p(nzt+1,:,-1) = u_init(nzt+1)
     822             u_p(nzt+1,:,0) = u_init(nzt+1)
    823823             v_p(nzt+1,:,-1) = v_init(nzt+1)
    824824          ELSE
    825              u_p(nzt+1,:,-1) = u_p(nzt,:,-1)
     825             u_p(nzt+1,:,0)  = u_p(nzt,:,0)
    826826             v_p(nzt+1,:,-1) = v_p(nzt,:,-1)
    827827          ENDIF
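
The index fix above reflects the staggered grid: at the left outflow boundary the first prognostic u-point lies at i = 0, while v keeps its ghost index i = -1. A minimal sketch with made-up array bounds and values (nzt, the second dimension and the data are illustrative only, not PALM's):

PROGRAM left_outflow_index_sketch
   IMPLICIT NONE
   INTEGER, PARAMETER ::  nzt = 4
   REAL, DIMENSION(0:nzt+1,1:3,-1:3) ::  u_p = 0.0, v_p = 0.0
   REAL, DIMENSION(0:nzt+1)          ::  u_init = 1.0, v_init = 1.0

!-- Set the top boundary values at the left outflow: u at i = 0, v at i = -1
   u_p(nzt+1,:,0)  = u_init(nzt+1)
   v_p(nzt+1,:,-1) = v_init(nzt+1)
   PRINT*, u_p(nzt+1,1,0), v_p(nzt+1,1,-1)
END PROGRAM left_outflow_index_sketch
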
  • palm/trunk/SOURCE/check_parameters.f90

    r1763 r1764  
    1919! Current revisions:
    2020! -----------------
    21 !
     21! output of nest id in run description header,
     22! bugfix: check of validity of lateral boundary conditions moved to parin
    2223!
    2324! Former revisions:
     
    312313    USE pegrid
    313314    USE plant_canopy_model_mod
     315    USE pmc_interface,                                                         &
     316        ONLY:  cpl_id, nested_run
    314317    USE profil_parameter
    315318    USE radiation_model_mod
     
    328331    CHARACTER (LEN=8)   ::  date                     !<
    329332    CHARACTER (LEN=10)  ::  time                     !<
     333    CHARACTER (LEN=10)  ::  ensemble_string          !<
     334    CHARACTER (LEN=15)  ::  nest_string              !<
    330335    CHARACTER (LEN=40)  ::  coupling_string          !<
    331336    CHARACTER (LEN=100) ::  action                   !<
     
    583588    ELSEIF ( coupling_mode == 'ocean_to_atmosphere' )  THEN
    584589       coupling_string = ' coupled (ocean)'
    585     ENDIF
    586 
     590    ENDIF
    587591    IF ( ensemble_member_nr /= 0 )  THEN
    588        WRITE ( run_description_header,                                         &
    589                   '(A,2X,A,2X,A,A,A,I2.2,A,2X,A,I2.2,2X,A,A,2X,A,1X,A)' )      &
    590               TRIM( version ), TRIM( revision ), 'run: ',                      &
    591               TRIM( run_identifier ), '.', runnr, TRIM( coupling_string ),     &
    592               'en-no: ', ensemble_member_nr,'host: ', TRIM( host ),            &
    593               run_date, run_time
     592       WRITE( ensemble_string, '(2X,A,I2.2)' )  'en-no: ', ensemble_member_nr
    594593    ELSE
    595        WRITE ( run_description_header,                                         &
    596                   '(A,2X,A,2X,A,A,A,I2.2,A,2X,A,A,2X,A,1X,A)' )                &
    597               TRIM( version ), TRIM( revision ), 'run: ',                      &
    598               TRIM( run_identifier ), '.', runnr, TRIM( coupling_string ),     &
    599               'host: ', TRIM( host ), run_date, run_time
    600     ENDIF
     594       ensemble_string = ''
     595    ENDIF
     596    IF ( nested_run )  THEN
     597       WRITE( nest_string, '(2X,A,I2.2)' )  'nest-id: ', cpl_id
     598    ELSE
     599       nest_string = ''
     600    ENDIF
     601
     602    WRITE ( run_description_header,                                            &
     603            '(A,2X,A,2X,A,A,A,I2.2,A,A,A,2X,A,A,2X,A,1X,A)' )                  &
     604          TRIM( version ), TRIM( revision ), 'run: ',                          &
     605          TRIM( run_identifier ), '.', runnr, TRIM( coupling_string ),         &
     606          TRIM( nest_string ), TRIM( ensemble_string), 'host: ', TRIM( host ), &
     607          run_date, run_time
     608
    601609!
    602610!-- Check the general loop optimization method
     
    17561764!
    17571765!-- Check boundary conditions and set internal variables:
    1758 !-- Lateral boundary conditions
    1759     IF ( bc_lr /= 'cyclic'  .AND.  bc_lr /= 'dirichlet/radiation'  .AND. &
    1760          bc_lr /= 'radiation/dirichlet'  .AND.  bc_lr /= 'nested' )  THEN
    1761        message_string = 'unknown boundary condition: bc_lr = "' // &
    1762                         TRIM( bc_lr ) // '"'
    1763        CALL message( 'check_parameters', 'PA0049', 1, 2, 0, 6, 0 )
    1764     ENDIF
    1765     IF ( bc_ns /= 'cyclic'  .AND.  bc_ns /= 'dirichlet/radiation'  .AND. &
    1766          bc_ns /= 'radiation/dirichlet'  .AND.  bc_ns /= 'nested' )  THEN
    1767        message_string = 'unknown boundary condition: bc_ns = "' // &
    1768                         TRIM( bc_ns ) // '"'
    1769        CALL message( 'check_parameters', 'PA0050', 1, 2, 0, 6, 0 )
    1770     ENDIF
    1771 
    1772 !
    1773 !-- Internal variables used for speed optimization in if clauses
    1774     IF ( bc_lr /= 'cyclic' )               bc_lr_cyc    = .FALSE.
    1775     IF ( bc_lr == 'dirichlet/radiation' )  bc_lr_dirrad = .TRUE.
    1776     IF ( bc_lr == 'radiation/dirichlet' )  bc_lr_raddir = .TRUE.
    1777     IF ( bc_ns /= 'cyclic' )               bc_ns_cyc    = .FALSE.
    1778     IF ( bc_ns == 'dirichlet/radiation' )  bc_ns_dirrad = .TRUE.
    1779     IF ( bc_ns == 'radiation/dirichlet' )  bc_ns_raddir = .TRUE.
    1780 
     1766!-- Attention: the lateral boundary conditions have been already checked in
     1767!-- parin
    17811768!
    17821769!-- Non-cyclic lateral boundaries require the multigrid method and Piascek-
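
The rewritten header construction above assembles the optional fragments (ensemble number, nest id) into strings first and then concatenates them with TRIM, instead of keeping one WRITE statement per combination. A compact stand-alone sketch of that pattern (string lengths, run name and host are made-up values, not taken from the changeset):

PROGRAM run_header_sketch
   IMPLICIT NONE
   CHARACTER(LEN=20)  ::  ensemble_string         ! lengths chosen generously for the sketch
   CHARACTER(LEN=20)  ::  nest_string
   CHARACTER(LEN=100) ::  run_description_header
   INTEGER ::  cpl_id = 2, ensemble_member_nr = 0
   LOGICAL ::  nested_run = .TRUE.

   IF ( ensemble_member_nr /= 0 )  THEN
      WRITE( ensemble_string, '(2X,A,I2.2)' )  'en-no: ', ensemble_member_nr
   ELSE
      ensemble_string = ''
   ENDIF
   IF ( nested_run )  THEN
      WRITE( nest_string, '(2X,A,I2.2)' )  'nest-id: ', cpl_id
   ELSE
      nest_string = ''
   ENDIF
   WRITE( run_description_header, '(A,A,A,A)' )  'PALM  run: example.00',      &
         TRIM( nest_string ), TRIM( ensemble_string ), '  host: lcxy'
   PRINT*, TRIM( run_description_header )
END PROGRAM run_header_sketch
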
  • palm/trunk/SOURCE/header.f90

    r1698 r1764  
    1919! Current revisions:
    2020! -----------------
    21 !
      21! output of nesting information
    2222!
    2323! Former revisions:
     
    205205! Description:
    206206! ------------
    207 !> Writing a header with all important information about the actual run.
     207!> Writing a header with all important information about the current run.
    208208!> This subroutine is called three times, two times at the beginning
    209209!> (writing information on files RUN_CONTROL and HEADER) and one time at the
     
    269269               plant_canopy
    270270
     271    USE pmc_interface,                                                         &
     272        ONLY:  cpl_id, cpl_parent_id, cpl_name, lower_left_coord_x,            &
     273               lower_left_coord_y, nested_run, nesting_mode
     274
    271275    USE radiation_model_mod,                                                   &
    272276        ONLY:  albedo, albedo_type, albedo_type_name, constant_albedo,         &
     
    366370!-- Determine kind of model run
    367371    IF ( TRIM( initializing_actions ) == 'read_restart_data' )  THEN
    368        run_classification = '3D - restart run'
     372       run_classification = 'restart run'
    369373    ELSEIF ( TRIM( initializing_actions ) == 'cyclic_fill' )  THEN
    370        run_classification = '3D - run with cyclic fill of 3D - prerun data'
     374       run_classification = 'run with cyclic fill of 3D - prerun data'
    371375    ELSEIF ( INDEX( initializing_actions, 'set_constant_profiles' ) /= 0 )  THEN
    372        run_classification = '3D - run without 1D - prerun'
     376       run_classification = 'run without 1D - prerun'
    373377    ELSEIF ( INDEX( initializing_actions, 'set_1d-model_profiles' ) /= 0 )  THEN
    374        run_classification = '3D - run with 1D - prerun'
     378       run_classification = 'run with 1D - prerun'
    375379    ELSEIF ( INDEX( initializing_actions, 'by_user' ) /=0 )  THEN
    376        run_classification = '3D - run initialized by user'
     380       run_classification = 'run initialized by user'
    377381    ELSE
    378382       message_string = ' unknown action(s): ' // TRIM( initializing_actions )
    379383       CALL message( 'header', 'PA0191', 0, 0, 0, 6, 0 )
    380384    ENDIF
     385    IF ( nested_run )  run_classification = 'nested ' // run_classification
    381386    IF ( ocean )  THEN
    382387       run_classification = 'ocean - ' // run_classification
     
    444449    IF ( num_acc_per_node /= 0 )  WRITE ( io, 120 )  num_acc_per_node
    445450#endif
     451
     452!
      453!-- Nesting information
     454    IF ( nested_run )  THEN
     455       WRITE ( io, 600 )  cpl_id, TRIM( cpl_name ), cpl_parent_id,             &
     456                          nesting_mode, lower_left_coord_x, lower_left_coord_y
     457    ENDIF
    446458    WRITE ( io, 99 )
    447459
     
    23992411513 FORMAT (' --> Scalar advection via Wicker-Skamarock-Scheme 5th order ' // &
    24002412            '+ monotonic adjustment')
    2401 
      2413600 FORMAT (/' Nesting information:'/                                        &
     2414            ' Nest id / name:                   ',I2.2,' / ',A,' (parent id: ',I2.2,')'/ &
     2415            ' Nesting mode:                     ',A/ &
     2416            ' Lower left corner coordinates:    ','x = ',F8.2,' m, y = ',F8.2,' m'/)
    24022417
    24032418 END SUBROUTINE header
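
The new FORMAT 600 above prints the nesting block of the header. A self-contained sketch of the same kind of output (all values and the exact column layout are invented for illustration):

PROGRAM nesting_header_sketch
   IMPLICIT NONE
   CHARACTER(LEN=32) ::  cpl_name     = 'coarse'
   CHARACTER(LEN=7)  ::  nesting_mode = 'two-way'
   INTEGER ::  cpl_id = 2, cpl_parent_id = 1
   REAL    ::  lower_left_coord_x = 1280.0, lower_left_coord_y = 640.0

   WRITE ( *, 600 )  cpl_id, TRIM( cpl_name ), cpl_parent_id, nesting_mode,    &
                     lower_left_coord_x, lower_left_coord_y

600 FORMAT (/' Nesting information:'/                                          &
            ' Nest id / name:                ',I2.2,' / ',A,                   &
            ' (parent id: ',I2.2,')'/                                          &
            ' Nesting mode:                  ',A/                              &
            ' Lower left corner coordinates: x = ',F8.2,' m, y = ',F8.2,' m'/)
END PROGRAM nesting_header_sketch
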
  • palm/trunk/SOURCE/init_3d_model.f90

    r1763 r1764  
    1919! Current revisions:
    2020! ------------------
    21 !
     21! bugfix: increase size of volume_flow_area_l and volume_flow_initial_l by 1
    2222!
    2323! Former revisions:
     
    302302    INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE ::  ngp_2dh_s_inner_l  !<
    303303
    304     REAL(wp), DIMENSION(1:2) ::  volume_flow_area_l     !<
    305     REAL(wp), DIMENSION(1:2) ::  volume_flow_initial_l  !<
     304    REAL(wp), DIMENSION(1:3) ::  volume_flow_area_l     !<
     305    REAL(wp), DIMENSION(1:3) ::  volume_flow_initial_l  !<
    306306
    307307    REAL(wp), DIMENSION(:), ALLOCATABLE ::  mean_surface_level_height_l    !<
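
The bugfix above enlarges the local buffers from two to three elements. Without reproducing the exact call site, the general hazard such a size mismatch creates can be shown in a short sketch (array names reused for readability, values made up): copying or reducing three elements out of an array declared with only two would read past its end.

PROGRAM volume_flow_bounds_sketch
   IMPLICIT NONE
   REAL, DIMENSION(1:3) ::  volume_flow_area   = 0.0
   REAL, DIMENSION(1:3) ::  volume_flow_area_l = (/ 10.0, 20.0, 30.0 /)

!-- With the old 1:2 declaration of the local array, a copy or MPI reduction
!-- over three elements would access memory beyond its last element
   volume_flow_area(1:3) = volume_flow_area_l(1:3)
   PRINT*, volume_flow_area
END PROGRAM volume_flow_bounds_sketch
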
  • palm/trunk/SOURCE/init_pegrid.f90

    r1763 r1764  
    1919! Current revisions:
    2020! ------------------
    21 !
     21! cpp-statements for nesting removed
    2222!
    2323! Former revisions:
     
    151151    USE pegrid
    152152 
    153 #if defined( PMC_ACTIVE )
    154153    USE pmc_interface,                                                         &
    155         ONLY:  cpl_npex,cpl_npey
    156 #endif
     154        ONLY:  cpl_npex, cpl_npey, nested_run
    157155
    158156    USE transpose_indices,                                                     &
     
    215213    CALL location_message( 'creating virtual PE grids + MPI derived data types', &
    216214                           .FALSE. )
    217 #if defined( PMC_ACTIVE )
    218 !
    219 !-- In case of nested-domain runs, the processor grid is explicitly given
    220 !-- by the user in the nestpar-NAMELIST
    221     pdims(1) = cpl_npex
    222     pdims(2) = cpl_npey
    223 #else
    224 !
    225 !-- Determine the processor topology or check it, if prescribed by the user
    226     IF ( npex == -1  .AND.  npey == -1 )  THEN
    227 
    228 !
    229 !--    Automatic determination of the topology
    230        numproc_sqr = SQRT( REAL( numprocs, KIND=wp ) )
    231        pdims(1)    = MAX( numproc_sqr , 1 )
    232        DO  WHILE ( MOD( numprocs , pdims(1) ) /= 0 )
    233           pdims(1) = pdims(1) - 1
    234        ENDDO
    235        pdims(2) = numprocs / pdims(1)
    236 
    237     ELSEIF ( npex /= -1  .AND.  npey /= -1 )  THEN
    238 
    239 !
    240 !--    Prescribed by user. Number of processors on the prescribed topology
    241 !--    must be equal to the number of PEs available to the job
    242        IF ( ( npex * npey ) /= numprocs )  THEN
    243           WRITE( message_string, * ) 'number of PEs of the prescribed ',      &
     215
     216    IF ( nested_run )  THEN
     217!
     218!--    In case of nested-domain runs, the processor grid is explicitly given
     219!--    by the user in the nestpar-NAMELIST
     220       pdims(1) = cpl_npex
     221       pdims(2) = cpl_npey
     222
     223    ELSE
     224!
     225!--    Determine the processor topology or check it, if prescribed by the user
     226       IF ( npex == -1  .AND.  npey == -1 )  THEN
     227
     228!
     229!--       Automatic determination of the topology
     230          numproc_sqr = SQRT( REAL( numprocs, KIND=wp ) )
     231          pdims(1)    = MAX( numproc_sqr , 1 )
     232          DO  WHILE ( MOD( numprocs , pdims(1) ) /= 0 )
     233             pdims(1) = pdims(1) - 1
     234          ENDDO
     235          pdims(2) = numprocs / pdims(1)
     236
     237       ELSEIF ( npex /= -1  .AND.  npey /= -1 )  THEN
     238
     239!
     240!--       Prescribed by user. Number of processors on the prescribed topology
     241!--       must be equal to the number of PEs available to the job
     242          IF ( ( npex * npey ) /= numprocs )  THEN
     243             WRITE( message_string, * ) 'number of PEs of the prescribed ',   &
    244244                 'topology (', npex*npey,') does not match & the number of ', &
    245245                 'PEs available to the job (', numprocs, ')'
    246           CALL message( 'init_pegrid', 'PA0221', 1, 2, 0, 6, 0 )
    247        ENDIF
    248        pdims(1) = npex
    249        pdims(2) = npey
    250 
    251     ELSE
    252 !
    253 !--    If the processor topology is prescribed by the user, the number of
    254 !--    PEs must be given in both directions
    255        message_string = 'if the processor topology is prescribed by the, ' //  &
    256                    ' user& both values of "npex" and "npey" must be given ' // &
    257                    'in the &NAMELIST-parameter file'
    258        CALL message( 'init_pegrid', 'PA0222', 1, 2, 0, 6, 0 )
    259 
    260     ENDIF
    261 #endif
     246             CALL message( 'init_pegrid', 'PA0221', 1, 2, 0, 6, 0 )
     247          ENDIF
     248          pdims(1) = npex
     249          pdims(2) = npey
     250
     251       ELSE
     252!
     253!--       If the processor topology is prescribed by the user, the number of
     254!--       PEs must be given in both directions
     255          message_string = 'if the processor topology is prescribed by th' //  &
     256                   'e user& both values of "npex" and "npey" must be given' // &
     257                   ' in the &NAMELIST-parameter file'
     258          CALL message( 'init_pegrid', 'PA0222', 1, 2, 0, 6, 0 )
     259
     260       ENDIF
     261
     262    ENDIF
     263
    262264
    263265!
     
    10821084       ELSEIF ( bc_lr == 'radiation/dirichlet' )  THEN
    10831085          outflow_l = .TRUE.
    1084 #if defined( PMC_ACTIVE )
    10851086       ELSEIF ( bc_lr == 'nested' )  THEN
    10861087          nest_bound_l = .TRUE.
    1087 #endif
    10881088       ENDIF
    10891089    ENDIF
     
    10941094       ELSEIF ( bc_lr == 'radiation/dirichlet' )  THEN
    10951095          inflow_r  = .TRUE.
    1096 #if defined( PMC_ACTIVE )
    10971096       ELSEIF ( bc_lr == 'nested' )  THEN
    10981097          nest_bound_r = .TRUE.
    1099 #endif
    11001098       ENDIF
    11011099    ENDIF
     
    11061104       ELSEIF ( bc_ns == 'radiation/dirichlet' )  THEN
    11071105          inflow_s  = .TRUE.
    1108 #if defined( PMC_ACTIVE )
    11091106       ELSEIF ( bc_ns == 'nested' )  THEN
    11101107          nest_bound_s = .TRUE.
    1111 #endif
    11121108       ENDIF
    11131109    ENDIF
     
    11181114       ELSEIF ( bc_ns == 'radiation/dirichlet' )  THEN
    11191115          outflow_n = .TRUE.
    1120 #if defined( PMC_ACTIVE )
    11211116       ELSEIF ( bc_ns == 'nested' )  THEN
    11221117          nest_bound_n = .TRUE.
    1123 #endif
    11241118       ENDIF
    11251119    ENDIF
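
With the cpp guards removed, the automatic determination of the 2-d processor topology above is reached at run time whenever the run is not nested. A stand-alone sketch of that search (numprocs is hard-coded here, whereas PALM takes it from MPI):

PROGRAM topology_sketch
   IMPLICIT NONE
   INTEGER ::  numprocs = 24        ! would come from MPI_COMM_SIZE in PALM
   INTEGER ::  numproc_sqr, pdims(2)

!-- Start from the integer square root and reduce pdims(1) until it divides
!-- the total number of PEs; pdims(2) then follows
   numproc_sqr = INT( SQRT( REAL( numprocs ) ) )
   pdims(1)    = MAX( numproc_sqr, 1 )
   DO WHILE ( MOD( numprocs, pdims(1) ) /= 0 )
      pdims(1) = pdims(1) - 1
   ENDDO
   pdims(2) = numprocs / pdims(1)
   PRINT*, 'pdims = ', pdims        ! 4 x 6 for 24 PEs
END PROGRAM topology_sketch
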
  • palm/trunk/SOURCE/local_stop.f90

    r1683 r1764  
    1919! Current revisions:
    2020! -----------------
    21 !
     21! abort with MPI_COMM_WORLD added, nested runs always abort with MPI_ABORT
    2222!
    2323! Former revisions:
     
    5555 
    5656
    57     USE pegrid
    58    
    5957    USE control_parameters,                                                    &
    6058        ONLY:  abort_mode, coupling_mode, coupling_mode_remote, dt_restart,    &
     
    6260               terminate_run, time_restart
    6361
     62    USE pegrid
     63
     64    USE pmc_interface,                                                         &
     65        ONLY:  nested_run
    6466
    6567#if defined( __parallel ) && ! defined ( __check )
    6668    IF ( coupling_mode == 'uncoupled' )  THEN
    67        IF ( abort_mode == 1 )  THEN
    68           CALL MPI_FINALIZE( ierr )
    69           STOP
    70        ELSEIF ( abort_mode == 2 )  THEN
    71           CALL MPI_ABORT( comm2d, 9999, ierr )
     69       IF ( nested_run )  THEN
     70!
     71!--       Workaround: If any of the nested model crashes, it aborts the whole
     72!--       run with MPI_ABORT, regardless of the reason given by abort_mode
     73          CALL MPI_ABORT( MPI_COMM_WORLD, 9999, ierr )
     74       ELSE
     75          IF ( abort_mode == 1 )  THEN
     76             CALL MPI_FINALIZE( ierr )
     77             STOP
     78          ELSEIF ( abort_mode == 2 )  THEN
     79             CALL MPI_ABORT( comm2d, 9999, ierr )
     80          ELSEIF ( abort_mode == 3 )  THEN
     81             CALL MPI_ABORT( MPI_COMM_WORLD, 9999, ierr )
     82          ENDIF
    7283       ENDIF
    7384    ELSE
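
The workaround above makes a crashing nest bring down the complete coupled job by aborting on MPI_COMM_WORLD instead of the model-local communicator. A minimal runnable sketch of that distinction (the nested_run flag is set by hand here):

PROGRAM abort_sketch
   USE mpi
   IMPLICIT NONE
   INTEGER ::  ierr
   LOGICAL ::  nested_run = .FALSE.   ! set .TRUE. to terminate all coupled models

   CALL MPI_INIT( ierr )
   IF ( nested_run )  THEN
!--   Abort every process of every domain, not only those of the local model
      CALL MPI_ABORT( MPI_COMM_WORLD, 9999, ierr )
   ELSE
      CALL MPI_FINALIZE( ierr )
   ENDIF
END PROGRAM abort_sketch
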
  • palm/trunk/SOURCE/message.f90

    r1683 r1764  
    1919! Current revisions:
    2020! -----------------
    21 !
     21! nest id added to header string, add linefeed to stdout to get messages better
      22! separated from the location messages,
     23! in case of nested runs, location messages are given only by the root domain
    2224!
    2325! Former revisions:
     
    5557!> Meaning of formal parameters:
    5658!> requested_action: 0 - continue, 1 - abort by stop, 2 - abort by mpi_abort
     59!>                   3 - abort by mpi_abort using MPI_COMM_WORLD
    5760!> message_level: 0 - informative, 1 - warning, 2 - error
    5861!> output_on_pe: -1 - all, else - output on specified PE
     
    6366                     message_level, output_on_pe, file_id, flush )
    6467 
    65 
    6668    USE control_parameters,                                                    &
    6769        ONLY:  abort_mode, message_string
     
    7173    USE pegrid
    7274
     75    USE pmc_interface,                                                         &
     76        ONLY:  cpl_id, nested_run
     77
    7378    IMPLICIT NONE
    7479
    7580    CHARACTER(LEN=6)   ::  message_identifier            !<
     81    CHARACTER(LEN=20)  ::  nest_string                   !< nest id information
    7682    CHARACTER(LEN=*)   ::  routine_name                  !<
    7783    CHARACTER(LEN=200) ::  header_string                 !<
     
    94100
    95101!
      102!-- In case of nested runs, create the nest id information
     103    IF ( nested_run )  THEN
     104       WRITE( nest_string, '(1X,A,I2.2)' )  'from nest-id ', cpl_id
     105    ELSE
     106       nest_string = ''
     107    ENDIF
     108!
    96109!-- Create the complete output string, starting with the message level
    97110    IF ( message_level == 0 )  THEN
    98        header_string = '--- informative message ---  ID:'
     111       header_string = '--- informative message' // TRIM(nest_string) //       &
     112                       ' ---  ID:'
    99113    ELSEIF ( message_level == 1 )  THEN
    100        header_string = '+++ warning message ---  ID:'
     114       header_string = '+++ warning message' // TRIM(nest_string) // ' ---  ID:'
    101115    ELSEIF ( message_level == 2 )  THEN
    102        header_string = '+++ error message ---  ID:'
     116       header_string = '+++ error message' // TRIM(nest_string) // ' ---  ID:'
    103117    ELSE
    104        WRITE( header_string,'(A,I2)' )  '+++ unknown message level: ', &
     118       WRITE( header_string,'(A,I2)' )  '+++ unknown message level' //         &
     119                                        TRIM(nest_string) // ': ',             &
    105120                                        message_level
    106121    ENDIF
     
    118133       information_string_2 = 'http://palm.muk.uni-hannover.de/trac/wiki/doc' // &
    119134                              '/app/errmsg#' // message_identifier
    120     END IF
     135    ENDIF
    121136   
    122137
     
    147162!
    148163!--       Output on stdout
    149           WRITE( *, '(A/)' )  TRIM( header_string )
     164          WRITE( *, '(//A/)' )  TRIM( header_string )
    150165!
    151166!--       Cut message string into pieces and output one piece per line.
     
    219234
    220235    USE, INTRINSIC ::  ISO_FORTRAN_ENV,                                        &
    221         ONLY :  OUTPUT_UNIT
     236        ONLY:  OUTPUT_UNIT
    222237
    223238    USE pegrid,                                                                &
    224         ONLY :  myid
     239        ONLY:  myid
     240
     241    USE pmc_interface,                                                         &
     242        ONLY:  cpl_id
    225243
    226244    IMPLICIT NONE
     
    229247    LOGICAL          ::  advance  !< switch for advancing/noadvancing I/O
    230248
     249!
     250!-- Output for nested runs only on the root domain
     251    IF ( cpl_id /= 1 )  RETURN
    231252
    232253    IF ( myid == 0 )  THEN
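
The nest-id tagging above is plain string assembly: an internal WRITE builds the fragment and TRIM splices it into the header line. A tiny stand-alone sketch (cpl_id and the string lengths are chosen arbitrarily):

PROGRAM message_nest_sketch
   IMPLICIT NONE
   CHARACTER(LEN=20)  ::  nest_string
   CHARACTER(LEN=200) ::  header_string
   INTEGER ::  cpl_id = 2
   LOGICAL ::  nested_run = .TRUE.

   IF ( nested_run )  THEN
      WRITE( nest_string, '(1X,A,I2.2)' )  'from nest-id ', cpl_id
   ELSE
      nest_string = ''
   ENDIF
   header_string = '+++ error message' // TRIM( nest_string ) // ' ---  ID:'
   PRINT*, TRIM( header_string )
END PROGRAM message_nest_sketch
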
  • palm/trunk/SOURCE/modules.f90

    r1763 r1764  
    1919! Current revisions:
    2020! ------------------
    21 !
     21! some reformatting
    2222!
    2323! Former revisions:
     
    713713                lunudge = .FALSE., lvnudge = .FALSE., lwnudge = .FALSE., &
    714714                masking_method = .FALSE., mg_switch_to_pe0 = .FALSE., &
    715                 monotonic_adjustment = .FALSE., &
    716                 nest_bound_l = .FALSE., nest_bound_n = .FALSE., &
    717                 nest_bound_r = .FALSE., nest_bound_s = .FALSE., &
    718                 nest_domain = .FALSE., &
    719                 neutral = .FALSE., nudging = .FALSE., &
     715                monotonic_adjustment = .FALSE.
     716    LOGICAL ::  nest_bound_l = .FALSE. !< nested boundary on left side
     717    LOGICAL ::  nest_bound_n = .FALSE. !< nested boundary on north side
     718    LOGICAL ::  nest_bound_r = .FALSE. !< nested boundary on right side
     719    LOGICAL ::  nest_bound_s = .FALSE. !< nested boundary on south side
     720    LOGICAL ::  nest_domain  = .FALSE. !< domain is nested into a parent domain
     721    LOGICAL ::  neutral = .FALSE., nudging = .FALSE., &
    720722                ocean = .FALSE., on_device = .FALSE., &
    721723                outflow_l = .FALSE., outflow_n = .FALSE., outflow_r = .FALSE., &
  • palm/trunk/SOURCE/palm.f90

    r1763 r1764  
    1919! Current revisions:
    2020! -----------------
    21 !
     21! cpp-statements for nesting removed, communicator settings cleaned up
    2222!
    2323! Former revisions:
     
    148148    USE pegrid
    149149
    150 #if defined( PMC_ACTIVE )
    151150    USE pmc_interface,                                                         &
    152         ONLY:  cpl_id, pmci_init, pmci_modelconfiguration
    153 #endif
     151        ONLY:  cpl_id, nested_run, pmci_init, pmci_modelconfiguration
    154152
    155153    USE statistics,                                                            &
     
    185183    CALL MPI_INIT( ierr )
    186184
    187 #if defined( PMC_ACTIVE )
    188185!
    189186!-- Initialize the coupling for nested-domain runs
     187!-- comm_palm is the communicator which includes all PEs (MPI processes)
     188!-- available for this (nested) model. If it is not a nested run, comm_palm
     189!-- is returned as MPI_COMM_WORLD
    190190    CALL pmci_init( comm_palm )
    191191    comm2d = comm_palm
    192 
    193     IF ( cpl_id >= 2 )  THEN
    194        nest_domain = .TRUE.
    195        WRITE( coupling_char, '(A1,I1.1)') '_', cpl_id
    196     ENDIF
    197 
    198     CALL MPI_COMM_SIZE( comm_palm, numprocs, ierr )
    199     CALL MPI_COMM_RANK( comm_palm, myid, ierr )
    200 #else
    201     CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
    202     CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
    203     comm_palm = MPI_COMM_WORLD
    204     comm2d    = MPI_COMM_WORLD
    205 !
    206 !-- Initialize PE topology in case of coupled atmosphere-ocean runs (comm_palm
    207 !-- will be splitted in init_coupling)
    208     CALL init_coupling
    209 #endif
     192!
     193!-- Get the (preliminary) number of MPI processes and the local PE-id (in case
     194!-- of a further communicator splitting in init_coupling, these numbers will
     195!-- be changed in init_pegrid).
     196    IF ( nested_run )  THEN
      197!--    TO_DO: move the following two settings somewhere to the pmc_interface
     198       IF ( cpl_id >= 2 )  THEN
     199          nest_domain = .TRUE.
     200          WRITE( coupling_char, '(A1,I1.1)') '_', cpl_id
     201       ENDIF
     202
     203       CALL MPI_COMM_SIZE( comm_palm, numprocs, ierr )
     204       CALL MPI_COMM_RANK( comm_palm, myid, ierr )
     205
     206    ELSE
     207
     208       CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
     209       CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
     210!
     211!--    Initialize PE topology in case of coupled atmosphere-ocean runs (comm_palm
     212!--    will be splitted in init_coupling)
     213       CALL init_coupling
     214    ENDIF
    210215#endif
    211216
     
    316321    CALL init_3d_model
    317322
    318 #if defined( PMC_ACTIVE )
    319323!
    320324!-- Coupling protocol setup for nested-domain runs
    321     CALL pmci_modelconfiguration
    322 #endif
     325    IF ( nested_run )  THEN
     326       CALL pmci_modelconfiguration
     327    ENDIF
    323328
    324329!
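
The initialization above now branches at run time: pmci_init returns comm_palm (the nest-local communicator, or MPI_COMM_WORLD for non-nested runs), and size/rank are queried on whichever communicator applies. A runnable sketch with pmci_init replaced by a stub assignment (the stubbed values are assumptions for illustration only):

PROGRAM palm_init_sketch
   USE mpi
   IMPLICIT NONE
   CHARACTER(LEN=3) ::  coupling_char = ''
   INTEGER ::  comm_palm, comm2d, numprocs, myid, ierr, cpl_id
   LOGICAL ::  nested_run

   CALL MPI_INIT( ierr )
!-- Stand-in for CALL pmci_init( comm_palm ): pretend no nesting is configured
   comm_palm  = MPI_COMM_WORLD
   nested_run = .FALSE.
   cpl_id     = 1
   comm2d     = comm_palm

   IF ( nested_run )  THEN
      IF ( cpl_id >= 2 )  WRITE( coupling_char, '(A1,I1.1)' )  '_', cpl_id
      CALL MPI_COMM_SIZE( comm_palm, numprocs, ierr )
      CALL MPI_COMM_RANK( comm_palm, myid, ierr )
   ELSE
      CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr )
      CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )
   ENDIF
   IF ( myid == 0 )  PRINT*, 'PEs:', numprocs, '  file suffix: "',             &
                             TRIM( coupling_char ), '"'
   CALL MPI_FINALIZE( ierr )
END PROGRAM palm_init_sketch
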
  • palm/trunk/SOURCE/parin.f90

    r1763 r1764  
    1919! Current revisions:
    2020! -----------------
    21 !
     21! cpp-statements for nesting removed, explicit settings of boundary conditions
     22! in nest domains,
     23! bugfix: npex/npey message moved from inipar to d3par
      24! bugfix: check of lateral boundary conditions moved from check_parameters to here,
      25! because they are already used in init_pegrid and init_grid
    2226!
    2327! Former revisions:
     
    193197        ONLY:  nx, ny, nz
    194198
     199    USE kinds
     200
    195201    USE model_1d,                                                              &
    196202        ONLY:  damp_level_1d, dt_pr_1d, dt_run_control_1d, end_time_1d
    197203
    198204    USE pegrid
     205
     206    USE pmc_interface,                                                         &
     207        ONLY:  nested_run
    199208
    200209    USE profil_parameter,                                                      &
     
    345354          READ ( 11, inipar, ERR=10, END=11 )
    346355
    347 #if defined ( PMC_ACTIVE )
    348 !
    349 !--       In nested domains, npex or npey must not be given in $inipar
    350 !--       because here the PE-grids are always defined in the nestpar-NAMELIST
    351           IF ( ( npex /= -1 ) .OR. ( npey /= -1 ) )  THEN
    352              message_string = 'npex or npey must not be given in $inipar ' // &
    353                               'in nested domains'
    354              CALL message( 'parin', 'PAXXXX', 1, 2, 0, 6, 0 )
    355           ENDIF
    356 #else
    357 !
    358 !--       Make sure that no nesting boundary conditions are defined if
    359 !--       PMC_ACTIVE is not defined. Otherwise initiate abort.
    360           IF ( ( bc_lr == 'nested' ) .OR. ( bc_ns == 'nested' ) .OR. ( bc_uv_t == 'nested' ) .OR. &
    361                ( bc_pt_t ==  'nested' ) .OR. ( bc_q_t == 'nested') .OR. ( bc_sa_t == 'nested') .OR. &
    362                ( bc_p_t == 'nested' ) ) THEN
    363              message_string = 'Nested boundary conditions are not allowed ' // &
    364                               'since the cpp flag PMC_ACTIVE is not set'
    365              CALL message( 'parin', 'PAXXXX', 1, 2, 0, 6, 0 )
    366           ENDIF
    367 #endif
    368 
    369356#if defined ( __check )
    370357!
     
    406393
    407394!
     395!--       In case of nested runs, explicitly set nesting boundary conditions
     396!--       except for the root domain. This will overwrite the user settings.
     397          IF ( nest_domain )  THEN
     398             bc_lr   = 'nested'
     399             bc_ns   = 'nested'
     400             bc_uv_t = 'nested'
     401             bc_pt_t = 'nested'
     402             bc_p_t  = 'neumann'
     403          ENDIF
     404!
     405!--       Check validity of lateral boundary conditions. This has to be done
     406!--       here because they are already used in init_pegrid and init_grid and
      407!--       therefore cannot be checked in check_parameters
     408          IF ( bc_lr /= 'cyclic'  .AND.  bc_lr /= 'dirichlet/radiation'  .AND. &
     409               bc_lr /= 'radiation/dirichlet'  .AND.  bc_lr /= 'nested' )  THEN
     410             message_string = 'unknown boundary condition: bc_lr = "' // &
     411                              TRIM( bc_lr ) // '"'
     412             CALL message( 'check_parameters', 'PA0049', 1, 2, 0, 6, 0 )
     413          ENDIF
     414          IF ( bc_ns /= 'cyclic'  .AND.  bc_ns /= 'dirichlet/radiation'  .AND. &
     415               bc_ns /= 'radiation/dirichlet'  .AND.  bc_ns /= 'nested' )  THEN
     416             message_string = 'unknown boundary condition: bc_ns = "' // &
     417                              TRIM( bc_ns ) // '"'
     418             CALL message( 'check_parameters', 'PA0050', 1, 2, 0, 6, 0 )
     419          ENDIF
     420
     421!
     422!--       Set internal variables used for speed optimization in if clauses
     423          IF ( bc_lr /= 'cyclic' )               bc_lr_cyc    = .FALSE.
     424          IF ( bc_lr == 'dirichlet/radiation' )  bc_lr_dirrad = .TRUE.
     425          IF ( bc_lr == 'radiation/dirichlet' )  bc_lr_raddir = .TRUE.
     426          IF ( bc_ns /= 'cyclic' )               bc_ns_cyc    = .FALSE.
     427          IF ( bc_ns == 'dirichlet/radiation' )  bc_ns_dirrad = .TRUE.
     428          IF ( bc_ns == 'radiation/dirichlet' )  bc_ns_raddir = .TRUE.
     429
     430!
    408431!--       Definition of names of areas used for computing statistics. They must
    409432!--       be defined at this place, because they are allowed to be redefined by
     
    416439!--       values are used for the parameters.
    417440          READ ( 11, d3par, END=20 )
     441
     442          IF ( nested_run )  THEN
     443!
     444!--          In nested domains, npex or npey can not be given in the d3par-
     445!--          NAMELIST because here the PE-grids are always defined in the
     446!--          nestpar-NAMELIST. Settings will be ignored.
     447             IF ( ( npex /= -1 ) .OR. ( npey /= -1 ) )  THEN
      448                message_string = 'npex or npey can not be given in $d3par ' // &
     449                                 'for nested runs & they will be ignored'
     450                CALL message( 'parin', 'PA0352', 0, 1, 0, 6, 0 )
     451             ENDIF
     452          ENDIF
    418453
    419454!
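
Moving the lateral boundary-condition check into parin means the bc_lr/bc_ns strings are validated right after the NAMELIST is read, before init_pegrid and init_grid use them. A small stand-alone sketch of that validation and of the derived speed-optimization flags (namelist reading is replaced by hard-coded test values):

PROGRAM bc_check_sketch
   IMPLICIT NONE
   CHARACTER(LEN=20) ::  bc_lr = 'nested', bc_ns = 'cyclic'
   LOGICAL ::  bc_lr_cyc = .TRUE., bc_ns_cyc = .TRUE.

   IF ( bc_lr /= 'cyclic'  .AND.  bc_lr /= 'dirichlet/radiation'  .AND.        &
        bc_lr /= 'radiation/dirichlet'  .AND.  bc_lr /= 'nested' )  THEN
      PRINT*, 'unknown boundary condition: bc_lr = "' // TRIM( bc_lr ) // '"'
      STOP 1
   ENDIF
   IF ( bc_lr /= 'cyclic' )  bc_lr_cyc = .FALSE.
   IF ( bc_ns /= 'cyclic' )  bc_ns_cyc = .FALSE.
   PRINT*, 'bc_lr_cyc = ', bc_lr_cyc, '   bc_ns_cyc = ', bc_ns_cyc
END PROGRAM bc_check_sketch
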
  • palm/trunk/SOURCE/pmc_client.f90

    r1763 r1764  
    2020! Current revisions:
    2121! ------------------
    22 !
     22! cpp-statement added (nesting can only be used in parallel mode),
     23! all kinds given in PALM style
    2324!
    2425! Former revisions:
     
    3536!------------------------------------------------------------------------------!
    3637
     38#if defined( __parallel )
    3739
    3840    use, intrinsic :: iso_c_binding
    3941
    40     USE  mpi
    41     USE  kinds,         ONLY: wp, iwp
     42#if defined( __lc )
     43    USE MPI
     44#else
     45    INCLUDE "mpif.h"
     46#endif
     47    USE  kinds
    4248    USE  PMC_general,   ONLY: ClientDef, DA_NameDef, DA_Namelen, PMC_STATUS_OK, PMC_DA_NAME_ERR, PeDef, ArrayDef, &
    4349                                         DA_Desclen, DA_Namelen, PMC_G_SetName, PMC_G_GetName
     
    5157!   data local to this MODULE
    5258    Type(ClientDef)                       :: me
    53     INTEGER, PARAMETER                    :: dp = wp
     59!-- TO_DO: what is the meaning of this? Could variables declared in this module
     60!--        also have single precision?
     61!    INTEGER, PARAMETER                    :: dp = wp
    5462
    5563    INTEGER, save                         :: myIndex = 0                !Counter and unique number for Data Arrays
     
    310318    SUBROUTINE PMC_C_Set_DataArray_2d (array)
    311319       IMPLICIT none
     320!--    TO_DO: is double precision absolutely required here?
    312321       REAL(kind=dp),INTENT(IN),DIMENSION(:,:)    :: array
    313322       !-- local variables
     
    344353    SUBROUTINE PMC_C_Set_DataArray_3d (array)
    345354       IMPLICIT none
     355!--    TO_DO: is double precision absolutely required here?
    346356       REAL(kind=dp),INTENT(IN),DIMENSION(:,:,:)  :: array
    347357       !-- local variables
     
    377387
    378388   SUBROUTINE PMC_C_setInd_and_AllocMem
     389
    379390      IMPLICIT none
    380391
    381392      INTEGER                                 :: i, ierr
    382393      INTEGER                                 :: arlen, myIndex, tag
    383       INTEGER(kind=8)                         :: bufsize                   ! Size of MPI data Window
     394      INTEGER(idp)                            :: bufsize                   ! Size of MPI data Window
    384395      TYPE(PeDef),POINTER                     :: aPE
    385396      TYPE(ArrayDef),POINTER                  :: ar
     
    574585    END SUBROUTINE PMC_C_PutBuffer
    575586
    576 
    577 ! Private SUBROUTINEs
    578 
     587#endif
    579588END MODULE pmc_client
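
The guards added above compile the coupler modules only for parallel builds and take the MPI interface either from the MPI module or from mpif.h. A minimal module showing the same guard pattern (module and routine names are invented; requires preprocessing, e.g. gfortran -cpp, plus -D__parallel and an MPI installation):

MODULE pmc_guard_sketch
#if defined( __parallel )
#if defined( __lc )
   USE MPI
   IMPLICIT NONE
#else
   IMPLICIT NONE
   INCLUDE "mpif.h"
#endif

 CONTAINS

   SUBROUTINE sketch_world_size( npes )
      INTEGER, INTENT(OUT) ::  npes
      INTEGER              ::  ierr
      CALL MPI_COMM_SIZE( MPI_COMM_WORLD, npes, ierr )
   END SUBROUTINE sketch_world_size
#endif
END MODULE pmc_guard_sketch
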
  • palm/trunk/SOURCE/pmc_general.f90

    r1763 r1764  
    2020! Current revisions:
    2121! ------------------
    22 !
     22! cpp-statement added (nesting can only be used in parallel mode),
     23! all kinds given in PALM style
    2324!
    2425! Former revisions:
     
    3536!------------------------------------------------------------------------------!
    3637
    37 
     38#if defined( __parallel )
    3839   use, intrinsic :: iso_c_binding
    39    USE            :: MPI
     40
     41   USE kinds
     42
     43#if defined( __lc )
     44    USE MPI
     45#else
     46    INCLUDE "mpif.h"
     47#endif
    4048
    4149   IMPLICIT none
     
    6573      INTEGER                       :: dim_order                   ! Order of Dimensions: 2 = 2D array, 33 = 3D array
    6674      TYPE (c_ptr)                  :: data                        ! Pointer of data in server space
    67       INTEGER(kind=8)               :: BufIndex                    ! index in Send Buffer
     75      INTEGER(idp)                  :: BufIndex                    ! index in Send Buffer
    6876      INTEGER                       :: BufSize                     ! size in Send Buffer
    6977      TYPE (c_ptr)                  :: SendBuf                     ! Pointer of Data in Send buffer
     
    7684
    7785   TYPE, PUBLIC :: PeDef
    78       INTEGER(KIND=8)                     :: NrEle                 ! Number of Elemets
      86      INTEGER(idp)                        :: NrEle                 ! Number of Elements
    7987      TYPE (xy_ind), POINTER,DIMENSION(:) :: locInd                ! xy index local array for remote PE
    8088      TYPE( ArrayDef), POINTER            :: Arrays                ! Pointer to Data Array List (Type ArrayDef)
     
    8391
    8492   TYPE, PUBLIC :: ClientDef
    85       INTEGER(KIND=8)               :: TotalBufferSize
     93      INTEGER(idp)                  :: TotalBufferSize
    8694      INTEGER                       :: model_comm                  ! Communicator of this model
    8795      INTEGER                       :: inter_comm                  ! Inter communicator model and client
     
    258266    END FUNCTION DA_List_next
    259267
     268#endif
    260269end MODULE pmc_general
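
Replacing INTEGER(kind=8) by a named kind keeps buffer indices and sizes 64-bit without relying on a vendor-specific kind value. A short sketch of the idea (idp comes from PALM's kinds module in the changeset; here it is derived locally with SELECTED_INT_KIND so the example is self-contained):

PROGRAM kind_sketch
   IMPLICIT NONE
   INTEGER, PARAMETER ::  idp = SELECTED_INT_KIND( 18 )   ! stands in for kinds: idp
   INTEGER(idp) ::  bufsize     ! e.g. size of an MPI data window, may exceed 2**31

   bufsize = 4000000000_idp
   PRINT*, 'bufsize = ', bufsize
END PROGRAM kind_sketch
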
  • palm/trunk/SOURCE/pmc_handle_communicator.f90

    r1763 r1764  
    1 MODULE PMC_handle_communicator
    2 
     1 MODULE PMC_handle_communicator
    32
    43!--------------------------------------------------------------------------------!
     
    2120! Current revisions:
    2221! ------------------
    23 !
      22! pmc_layout type: comm_cpl and comm_parent removed, character "name" moved to
     23! the beginning of the variable list,
     24! domain layout is read with new NAMELIST nestpar from standard file PARIN,
     25! MPI-datatype REAL8 replaced by REAL, kind=8 replaced by wp,
     26! variable domain_layouts instead of m_couplers introduced for this NAMELIST,
     27! general format changed to PALM style
    2428!
    2529! Former revisions:
     
    3034! Initial revision by K. Ketelsen
    3135!
    32 ! Intoduction of the pure FORTRAN Palm Model Coupler     (PMC)  12.11.2015  K. Ketelsen
    33 !
    3436! Description:
    3537! ------------
    36 !
    37 ! Handle MPI Communicator in Palm Model Coupler
     38! Handle MPI communicator in PALM model coupler
    3839!------------------------------------------------------------------------------!
    3940
    40    USE      mpi
    41    USE      pmc_general,                        &
    42       ONLY: PMC_STATUS_OK, PMC_STATUS_ERROR, PMC_MAX_MODELL
    43 
    44    IMPLICIT none
    45 
    46    ! Define Types
    47 
    48    type PMC_layout
    49       INTEGER                         :: comm_parent
    50       INTEGER                         :: comm_cpl
    51       INTEGER                         :: Id
    52       INTEGER                         :: Parent_id
    53 
    54       INTEGER                         :: npe_x
    55       INTEGER                         :: npe_y
    56 
    57       REAL(kind=8)                    :: lower_left_x
    58       REAL(kind=8)                    :: lower_left_y
    59 
    60       CHARACTER(len=32)               :: name
    61    END type PMC_layout
    62 
    63    ! return status
    64    PUBLIC                               PMC_STATUS_OK, PMC_STATUS_ERROR
    65    INTEGER,parameter,PUBLIC          :: PMC_ERROR_NPES = 1                       ! illegal Number of PEs
    66    INTEGER,parameter,PUBLIC          :: PMC_ERROR_MPI  = 2                       ! MPI Error
    67    INTEGER,parameter,PUBLIC          :: PMC_ERRO_NOF   = 3                       ! No couple layout file found
     41#if defined( __parallel )
     42    USE kinds
     43
     44#if defined( __lc )
     45    USE MPI
     46#else
     47    INCLUDE "mpif.h"
     48#endif
     49
     50   USE pmc_general,                                                            &
     51       ONLY: pmc_status_ok, pmc_status_error, pmc_max_modell
     52
     53   IMPLICIT NONE
     54
     55   TYPE pmc_layout
     56
     57      CHARACTER(len=32) ::  name
     58
     59      INTEGER  ::  id
     60      INTEGER  ::  parent_id
     61      INTEGER  ::  npe_x
     62      INTEGER  ::  npe_y
     63
     64      REAL(wp) ::  lower_left_x
     65      REAL(wp) ::  lower_left_y
     66
     67   END TYPE pmc_layout
     68
     69   PUBLIC  pmc_status_ok, pmc_status_error
     70
     71   INTEGER, PARAMETER, PUBLIC ::  pmc_error_npes          = 1  ! illegal number of PEs
     72   INTEGER, PARAMETER, PUBLIC ::  pmc_namelist_error      = 2  ! error(s) in nestpar namelist
     73   INTEGER, PARAMETER, PUBLIC ::  pmc_no_namelist_found   = 3  ! No couple layout file found
    6874
    6975   ! Coupler Setup
     
    7278   INTEGER                                    :: m_Parent_id  !Coupler id of parent of this model
    7379   INTEGER                                    :: m_NrOfCpl    !Number of Coupler in layout file
    74    type(PMC_layout),DIMENSION(PMC_MAX_MODELL) :: m_couplers   !Information of all coupler
     80   TYPE(PMC_layout),DIMENSION(PMC_MAX_MODELL) :: m_couplers   !Information of all coupler
    7581
    7682   ! MPI settings
     
    9197   INTEGER,DIMENSION(:),POINTER,PUBLIC :: PMC_Server_for_Client
    9298
    93    !INTERFACE Section
    94 
    95    INTERFACE PMC_is_RootModel
    96       MODULE PROCEDURE PMC_is_RootModel
    97    END INTERFACE PMC_is_RootModel
     99   INTERFACE pmc_is_rootmodel
     100      MODULE PROCEDURE pmc_is_rootmodel
     101   END INTERFACE pmc_is_rootmodel
    98102
    99103   INTERFACE PMC_get_local_model_info
     
    101105   END INTERFACE PMC_get_local_model_info
    102106
    103    PUBLIC PMC_init_model,PMC_get_local_model_info, PMC_is_RootModel
    104 CONTAINS
    105 
    106    SUBROUTINE PMC_init_model (comm, PMC_status)
    107       IMPLICIT     none
    108       INTEGER,INTENT(OUT)                 :: comm
    109       INTEGER,INTENT(OUT)                 :: PMC_status
    110 
    111       !-- local declarations
    112       INTEGER                             :: i,istat, ierr
    113       INTEGER,DIMENSION(PMC_MAX_MODELL+1) :: start_PE
    114       INTEGER                             :: m_my_CPL_rank
    115       INTEGER                             :: tag, ClientCount
    116       INTEGER,DIMENSION(PMC_MAX_MODELL)   :: activeServer        !I am active server for this client ID
    117 
    118       PMC_status   = PMC_STATUS_OK
     107   PUBLIC pmc_get_local_model_info, pmc_init_model, pmc_is_rootmodel
     108
     109 CONTAINS
     110
     111   SUBROUTINE pmc_init_model( comm, nesting_mode, pmc_status )
     112
     113      USE control_parameters,                                                  &
     114          ONLY:  message_string
     115
     116      USE pegrid,                                                              &
     117          ONLY:  myid
     118
     119      IMPLICIT NONE
     120
     121      CHARACTER(LEN=7), INTENT(OUT) ::  nesting_mode
     122
     123      INTEGER, INTENT(OUT)                ::  comm
     124      INTEGER, INTENT(OUT)                ::  pmc_status
     125
     126      INTEGER                             ::  i, ierr, istat
     127      INTEGER,DIMENSION(pmc_max_modell+1) ::  start_pe
     128      INTEGER                             ::  m_my_cpl_rank
     129      INTEGER                             ::  tag, clientcount
     130      INTEGER,DIMENSION(pmc_max_modell)   ::  activeserver  ! I am active server for this client ID
     131
     132      pmc_status   = pmc_status_ok
    119133      comm         = -1
    120       m_my_CPL_id  = -1
    121       ClientCount  = 0
    122       activeServer = -1
    123       start_PE(:)  = 0
    124 
    125       CALL  MPI_Comm_rank (MPI_COMM_WORLD, m_world_rank, istat)
    126       CALL  MPI_Comm_size (MPI_COMM_WORLD, m_world_npes, istat)
    127 
    128       if(m_world_rank == 0) then ! only PE 0 of root model reads
    129 
    130          CALL read_coupling_layout (PMC_status)
    131 
    132          IF (PMC_status /= PMC_ERRO_NOF  ) THEN
    133             ! Compute Start PE of every model
    134             start_PE(1) = 0
    135             do i=2,m_NrOfCpl+1
    136                start_pe(i) = start_PE(i-1) + m_couplers(i-1)%npe_x*m_couplers(i-1)%npe_y
    137             END do
    138             if(start_pe(m_NrOfCpl+1) /= m_world_npes)   then
    139                if(m_world_rank == 0) then
    140                   write(0,*) 'PMC ERROR: Coupler Setup Not equal Nr. MPI procs ',start_pe(m_NrOfCpl+1),m_world_npes
    141                END if
    142                CALL MPI_Abort (MPI_COMM_WORLD, ierr, istat)
    143             END if
    144          END IF
    145       END if
    146 
    147       CALL MPI_Bcast (PMC_status, 1,          MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
    148       IF (PMC_status == PMC_ERRO_NOF  ) THEN
    149          if(m_world_rank == 0)  write(0,*) 'PMC ERROR: file PMC_couple_layout not found'
    150          CALL MPI_Abort (MPI_COMM_WORLD, ierr, istat)
    151       END IF
    152 
    153       CALL MPI_Bcast (m_NrOfCpl, 1,          MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
    154       CALL MPI_Bcast (start_PE, m_NrOfCpl+1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
    155 
    156       !   Broadcast coupling layout
    157 
    158       do i=1,m_NrOfCpl
    159          CALL MPI_Bcast (m_couplers(i)%name, len(m_couplers(i)%name ), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat)
    160          CALL MPI_Bcast (m_couplers(i)%id,           1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
    161          CALL MPI_Bcast (m_couplers(i)%Parent_id,    1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
    162          CALL MPI_Bcast (m_couplers(i)%npe_x,        1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
    163          CALL MPI_Bcast (m_couplers(i)%npe_y,        1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
    164          CALL MPI_Bcast (m_couplers(i)%lower_left_x, 1, MPI_REAL8,   0, MPI_COMM_WORLD, istat)
    165          CALL MPI_Bcast (m_couplers(i)%lower_left_y, 1, MPI_REAL8,   0, MPI_COMM_WORLD, istat)
    166       END do
    167 
    168       ! Assign global MPI processes to individual models by setting the couple id
    169 
    170       do i=1,m_NrOfCpl
    171          if(m_world_rank >= start_PE(i) .and. m_world_rank < start_PE(i+1) ) then
    172             m_my_CPL_id = i
     134      m_my_cpl_id  = -1
     135      clientcount  =  0
     136      activeserver = -1
     137      start_pe(:)  =  0
     138
     139      CALL  MPI_COMM_RANK( MPI_COMM_WORLD, m_world_rank, istat )
     140      CALL  MPI_COMM_SIZE( MPI_COMM_WORLD, m_world_npes, istat )
     141!
     142!--   Only PE 0 of root model reads
     143      IF ( m_world_rank == 0 )  THEN
     144
     145         CALL read_coupling_layout( nesting_mode, pmc_status )
     146
     147         IF ( pmc_status /= pmc_no_namelist_found  .AND.                       &
     148              pmc_status /= pmc_namelist_error )                               &
     149         THEN
     150!
     151!--         Calculate start PE of every model
     152            start_pe(1) = 0
     153            DO  i = 2, m_nrofcpl+1
     154               start_pe(i) = start_pe(i-1) +                                   &
     155                             m_couplers(i-1)%npe_x * m_couplers(i-1)%npe_y
     156            ENDDO
     157
     158!
     159!--         The number of cores provided with the run must be the same as the
     160!--         total sum of cores required by all nest domains
     161!--         TO_DO: can we use > instead of /= ?
     162            IF ( start_pe(m_nrofcpl+1) /= m_world_npes )  THEN
     163!--            TO_DO: this IF statement is redundant
     164               IF ( m_world_rank == 0 )  THEN
     165                  WRITE ( message_string, '(A,I6,A,I6,A)' )                    &
     166                                  'nesting-setup requires more MPI procs (',   &
     167                                  start_pe(m_nrofcpl+1), ') than provided (',  &
     168                                  m_world_npes,')'
     169                  CALL message( 'pmc_init_model', 'PA0229', 3, 2, 0, 6, 0 )
     170               ENDIF
     171            ENDIF
     172
     173         ENDIF
     174
     175      ENDIF
     176!
     177!--   Broadcast the read status. This synchronises all other PEs with PE 0 of
     178!--   the root model. Without synchronisation, they would not behave in the
     179!--   correct way (e.g. they would not return in case of a missing NAMELIST)
     180      CALL MPI_BCAST( pmc_status, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
     181
     182      IF ( pmc_status == pmc_no_namelist_found )  THEN
     183!
     184!--      Not a nested run; return the MPI_WORLD communicator
     185         comm = MPI_COMM_WORLD
     186         RETURN
     187
     188      ELSEIF ( pmc_status == pmc_namelist_error )  THEN
     189!
     190!--      Only the root model gives the error message. Others are aborted by the
     191!--      message-routine with MPI_ABORT. Must be done this way since myid and
     192!--      comm2d have not yet been assigned at this point.
     193         IF ( m_world_rank == 0 )  THEN
      194            message_string = 'errors in $nestpar'
     195            CALL message( 'pmc_init_model', 'PA0223', 3, 2, 0, 6, 0 )
     196         ENDIF
     197
     198      ENDIF
     199
     200      CALL MPI_BCAST( m_nrofcpl, 1,          MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
     201      CALL MPI_BCAST( start_pe, m_nrofcpl+1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat)
     202
     203!
     204!--   Broadcast coupling layout
     205      DO  i = 1, m_nrofcpl
     206         CALL MPI_BCAST( m_couplers(i)%name, LEN( m_couplers(i)%name ), MPI_CHARACTER, 0, MPI_COMM_WORLD, istat )
     207         CALL MPI_BCAST( m_couplers(i)%id,           1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
     208         CALL MPI_BCAST( m_couplers(i)%Parent_id,    1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
     209         CALL MPI_BCAST( m_couplers(i)%npe_x,        1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
     210         CALL MPI_BCAST( m_couplers(i)%npe_y,        1, MPI_INTEGER, 0, MPI_COMM_WORLD, istat )
     211         CALL MPI_BCAST( m_couplers(i)%lower_left_x, 1, MPI_REAL,    0, MPI_COMM_WORLD, istat )
     212         CALL MPI_BCAST( m_couplers(i)%lower_left_y, 1, MPI_REAL,    0, MPI_COMM_WORLD, istat )
     213      ENDDO
     214
     215!
     216!--   Assign global MPI processes to individual models by setting the couple id
     217      DO  i = 1, m_nrofcpl
     218         IF ( m_world_rank >= start_pe(i)  .AND.  m_world_rank < start_pe(i+1) ) &
     219         THEN
     220            m_my_cpl_id = i
    173221            EXIT
    174          END if
    175       END do
    176       m_my_CPL_rank = m_world_rank-start_PE(i)
    177 
    178       !   MPI_COMM_WORLD is the communicator for ALL models (MPI-1 approach)
    179       !   The communictors for the individual models a created by MPI_Comm_split
    180       !   The color of the model is represented by the Coupler id
    181 
    182       CALL MPI_Comm_split (MPI_COMM_WORLD, m_my_CPL_id, m_my_CPL_rank, comm, istat)
    183       if(istat /= MPI_SUCCESS) then
    184          if(m_world_rank == 0) write(0,*) 'PMC: Error in MPI_Comm_split '
    185          CALL MPI_Abort (MPI_COMM_WORLD, ierr, istat)
    186       END if
    187 
    188       !   Get size and rank of the model running on THIS PE
    189 
    190       CALL  MPI_Comm_rank (comm, m_model_rank, istat)
    191       CALL  MPI_Comm_size (comm, m_model_npes, istat)
    192 
    193       !   Pe 0  brodcasts the Parent ID and Id of every model
    194 
    195       do i=1,m_NrOfCpl
    196          CALL MPI_Bcast (m_couplers(i)%Parent_Id,  1, MPI_INTEGER, 0,           MPI_COMM_WORLD, istat)
    197          CALL MPI_Bcast (m_couplers(i)%Id,         1, MPI_INTEGER, 0,           MPI_COMM_WORLD, istat)
    198       END do
    199 
     222         ENDIF
     223      ENDDO
     224      m_my_cpl_rank = m_world_rank - start_pe(i)
     225
     226!
     227!--   MPI_COMM_WORLD is the communicator for ALL models (MPI-1 approach).
      228!--   The communicators for the individual models are created by MPI_COMM_SPLIT.
     229!--   The color of the model is represented by the coupler id
     230      CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, m_my_cpl_id, m_my_cpl_rank, comm,   &
     231                           istat )
     232      IF ( istat /= MPI_SUCCESS )  THEN
     233!
     234!--      TO_DO: replace by message-call
     235!--      TO_DO: Can this really happen, or is this just for the debugging phase?
     236         IF ( m_world_rank == 0 )  WRITE (0,*) 'PMC: Error in MPI_Comm_split '
     237         CALL MPI_ABORT( MPI_COMM_WORLD, ierr, istat )
     238      ENDIF
     239
     240!
     241!--   Get size and rank of the model running on this PE
     242      CALL  MPI_COMM_RANK( comm, m_model_rank, istat )
     243      CALL  MPI_COMM_SIZE( comm, m_model_npes, istat )
     244
     245!
     246!--   Broadcast (from PE 0) the parent id and id of every model
     247      DO  i = 1, m_nrofcpl
     248         CALL MPI_BCAST( m_couplers(i)%parent_id, 1, MPI_INTEGER, 0,           &
     249                         MPI_COMM_WORLD, istat )
     250         CALL MPI_BCAST( m_couplers(i)%id,        1, MPI_INTEGER, 0,           &
     251                         MPI_COMM_WORLD, istat )
     252      ENDDO
     253
     254!
     255!--   TO_DO: describe what is happening here, and why
    200256      m_model_comm = comm
    201257
    202       !   create Intercommunicator to server and clients
    203       !   MPI_Intercomm_create creates an intercommunicator between 2 groups of different colors
    204       !   The grouping with done prior with MPI_Comm_split
    205 
    206       do i=2,m_NrOfCpl
    207          if(m_couplers(i)%Parent_Id == m_my_CPL_id)   then                         !collect server PEs
    208             tag = 500+i
    209 !kk            write(0,'(a,6i4)') 'server Part ',m_world_rank,m_world_npes,m_model_rank,m_model_npes,tag,start_pe(i)
    210             CALL MPI_Intercomm_create (comm, 0, MPI_COMM_WORLD, start_pe(i), &
    211                tag, m_to_client_comm(i), istat)
    212 
    213             clientCount = clientCount+1
    214             activeServer(i) = 1
    215          else if (i == m_my_CPL_id)   then                                         !collect client PEs
    216             tag = 500+i
    217             CALL MPI_Intercomm_create (comm, 0, MPI_COMM_WORLD, start_pe(m_couplers(i)%Parent_Id), &
    218                tag, m_to_server_comm, istat)
    219 !kk            write(0,'(a,7i4)') 'client Part',m_world_rank,m_world_npes,m_model_rank,m_model_npes,tag, start_pe(m_couplers(i)%Parent_Id)
    220          END if
    221          if(istat /= MPI_SUCCESS) then
    222             if(m_world_rank == 0) write(0,*) 'PMC: Error in Coupler Setup '
    223             CALL MPI_Abort (MPI_COMM_WORLD, ierr, istat)
    224          END if
    225       END do
    226 
    227 !     If I am server, count nr. of clients
    228 !     Although this loop is symetric on all processes, the active server flag is valid only on the individual PE.
    229 
    230       ALLOCATE(PMC_Server_for_Client(ClientCount+1))
    231       ClientCount = 0
    232       do i=2,m_NrOfCpl
    233          if(activeServer(i) == 1)  then
    234             ClientCount = clientCount+1
    235             PMC_Server_for_Client(ClientCount) = i
    236          END if
    237       END do
    238       PMC_Server_for_Client(ClientCount+1) = -1
    239 
    240       !   Get size of the server model
    241 
    242       if(m_my_CPL_id > 1)  then
    243          CALL MPI_Comm_remote_size (m_to_server_comm, m_server_remote_size, istat)
    244       else
    245          m_server_remote_size = -1             ! root model does not have a server
    246       END if
    247 
    248 !      write(0,'(a,a,1x,9i7)') 'New Communicator ',trim(m_couplers(m_my_CPL_id)%name),m_world_npes,m_model_npes,m_world_rank, &
    249 !                                           m_model_rank,m_my_CPL_id,m_my_CPL_rank,m_server_remote_size,ClientCount
    250 
    251       return
     258!
     259!--   Create intercommunicator between server and clients.
     260!--   MPI_INTERCOMM_CREATE creates an intercommunicator between 2 groups of
     261!--   different colors.
     262!--   The grouping was done above with MPI_COMM_SPLIT
     263      DO  i = 2, m_nrofcpl
     264
     265         IF ( m_couplers(i)%parent_id == m_my_cpl_id )  THEN
     266!
     267!--         Collect server PEs
      268!--         TO_DO: explain in more detail what is done here
     269            tag = 500 + i
     270            CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD, start_pe(i),   &
     271                                       tag, m_to_client_comm(i), istat)
     272            clientcount = clientcount + 1
     273            activeserver(i) = 1
     274
     275         ELSEIF ( i == m_my_cpl_id)  THEN
     276!
     277!--         Collect client PEs
     278!--         TO_DO: explain in more detail, what is happening here
     279            tag = 500 + i
     280            CALL MPI_INTERCOMM_CREATE( comm, 0, MPI_COMM_WORLD,                &
     281                                       start_pe(m_couplers(i)%parent_id),      &
     282                                       tag, m_to_server_comm, istat )
     283         ENDIF
     284
     285         IF ( istat /= MPI_SUCCESS )  THEN
     286!
     287!--         TO_DO: replace by message-call
     288!--         TO_DO: can this really happen, or is this just for debugging?
     289            IF ( m_world_rank == 0 )  WRITE (0,*) 'PMC: Error in Coupler Setup '
     290            CALL MPI_ABORT( MPI_COMM_WORLD, ierr, istat )
     291         ENDIF
     292
     293      ENDDO
     294
     295!
     296!--   If I am server, count the number of clients that I have
     297!--   Although this loop is symmetric on all processes, the "activeserver" flag
     298!--   is true (==1) on the respective individual PE only.
     299      ALLOCATE( pmc_server_for_client(clientcount+1) )
     300
     301      clientcount = 0
     302      DO  i = 2, m_nrofcpl
     303         IF ( activeserver(i) == 1 )  THEN
     304            clientcount = clientcount + 1
     305            pmc_server_for_client(clientcount) = i
     306         ENDIF
     307      ENDDO
     308!--   TO_DO: explain why this is done
     309      pmc_server_for_client(clientcount+1) = -1
     310
     311!
     312!--   Get the size of the server model
     313!--   TO_DO: what does "size" mean here? Number of PEs?
     314      IF ( m_my_cpl_id > 1 )  THEN
     315         CALL MPI_COMM_REMOTE_SIZE( m_to_server_comm, m_server_remote_size,    &
     316                                    istat)
     317      ELSE
     318!
     319!--      The root model does not have a server
     320         m_server_remote_size = -1             !
     321      ENDIF
     322!
      323!--   Set myid to a non-zero value except for the root domain. This is a
      324!--   setting for the message routine which is called at the end of pmci_init.
      325!--   That routine outputs messages for myid = 0 only. However, myid has not
      326!--   been assigned so far, so that all PEs of the root model would output a
      327!--   message. To avoid this, set myid to some other value except for PE0 of the
      328!--   root domain.
     329      IF ( m_world_rank /= 0 )  myid = 1
     330
    252331   END SUBROUTINE PMC_init_model
    253332
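The communicator setup in PMC_init_model follows the usual MPI pattern: MPI_COMM_SPLIT partitions MPI_COMM_WORLD into one intra-communicator per model, with the coupler id acting as the color, and MPI_INTERCOMM_CREATE then connects each client group to its server group via the group leaders' ranks in MPI_COMM_WORLD. The following stand-alone sketch is an illustration only (not from the PALM source); the program and variable names and the half/half PE split are hypothetical, and at least two MPI processes are assumed:

 PROGRAM pmc_split_sketch

    USE MPI

    IMPLICIT NONE

    INTEGER ::  comm_inter   !: intercommunicator between the two groups
    INTEGER ::  comm_model   !: intra-communicator of the local model
    INTEGER ::  istat, my_id, world_rank, world_size

    CALL MPI_INIT( istat )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, world_rank, istat )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, world_size, istat )
!
!-- Hypothetical layout: the first half of the PEs forms model 1 (server),
!-- the second half forms model 2 (client)
    IF ( world_rank < world_size / 2 )  THEN
       my_id = 1
    ELSE
       my_id = 2
    ENDIF
!
!-- Color = model id, key = world rank: every model gets its own communicator
    CALL MPI_COMM_SPLIT( MPI_COMM_WORLD, my_id, world_rank, comm_model, istat )
!
!-- Connect the two groups. The local leader is rank 0 of comm_model, the
!-- remote leader is addressed by its rank in the peer communicator
!-- MPI_COMM_WORLD (0 for model 1, world_size/2 for model 2), and both sides
!-- have to use the same tag
    IF ( my_id == 1 )  THEN
       CALL MPI_INTERCOMM_CREATE( comm_model, 0, MPI_COMM_WORLD,               &
                                  world_size / 2, 500, comm_inter, istat )
    ELSE
       CALL MPI_INTERCOMM_CREATE( comm_model, 0, MPI_COMM_WORLD, 0, 500,       &
                                  comm_inter, istat )
    ENDIF

    CALL MPI_FINALIZE( istat )

 END PROGRAM pmc_split_sketch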
    254 !  Make module private variables available to palm
    255 
    256    SUBROUTINE PMC_get_local_model_info (my_CPL_id, CPL_name,  npe_x, npe_y, lower_left_x, lower_left_y)
    257       IMPLICIT     none
    258       INTEGER,INTENT(OUT),optional             :: my_CPL_id
    259       CHARACTER(len=*),INTENT(OUT),optional    :: CPL_name
    260       INTEGER,INTENT(OUT),optional             :: npe_x
    261       INTEGER,INTENT(OUT),optional             :: npe_y
    262       REAL(kind=8),INTENT(OUT),optional        :: lower_left_x
    263       REAL(kind=8),INTENT(OUT),optional        :: lower_left_y
    264 
    265       if(present(my_CPL_id))    my_CPL_id    = m_my_CPL_id
    266       if(present(CPL_name))     CPL_name     = m_couplers(my_CPL_id)%name
    267       if(present(npe_x))        npe_x        = m_couplers(my_CPL_id)%npe_x
    268       if(present(npe_y))        npe_y        = m_couplers(my_CPL_id)%npe_y
    269       if(present(lower_left_x)) lower_left_x = m_couplers(my_CPL_id)%lower_left_x
    270       if(present(lower_left_y)) lower_left_y = m_couplers(my_CPL_id)%lower_left_y
    271 
    272       return
    273    END  SUBROUTINE PMC_get_local_model_info
    274 
    275    LOGICAL function PMC_is_RootModel ()
    276       IMPLICIT     none
    277 
    278       PMC_is_RootModel = (m_my_CPL_id == 1)
    279 
    280       return
    281    END  function PMC_is_RootModel
    282 
     333
     334!
     335!-- Make module private variables available to palm
     336!-- TO_DO: why can't they be available from the beginning, i.e. why do they
     337!--        first have to be declared as different private variables?
     338   SUBROUTINE pmc_get_local_model_info( my_cpl_id, my_cpl_parent_id, cpl_name, &
     339                                        npe_x, npe_y, lower_left_x,            &
     340                                        lower_left_y )
     341
     342      USE kinds
     343
     344      IMPLICIT NONE
     345
     346      CHARACTER(LEN=*), INTENT(OUT), OPTIONAL ::  cpl_name
     347      INTEGER, INTENT(OUT), OPTIONAL          ::  my_cpl_id
     348      INTEGER, INTENT(OUT), OPTIONAL          ::  my_cpl_parent_id
     349      INTEGER, INTENT(OUT), OPTIONAL          ::  npe_x
     350      INTEGER, INTENT(OUT), OPTIONAL          ::  npe_y
     351      REAL(wp), INTENT(OUT), OPTIONAL         ::  lower_left_x
     352      REAL(wp), INTENT(OUT), OPTIONAL         ::  lower_left_y
     353
     354!--   TO_DO: is the PRESENT clause really required here?
     355      IF ( PRESENT( my_cpl_id )           )  my_cpl_id        = m_my_cpl_id
     356      IF ( PRESENT( my_cpl_parent_id )    )  my_cpl_parent_id = m_couplers(my_cpl_id)%parent_id
     357      IF ( PRESENT( cpl_name )            )  cpl_name         = m_couplers(my_cpl_id)%name
     358      IF ( PRESENT( npe_x )               )  npe_x            = m_couplers(my_cpl_id)%npe_x
     359      IF ( PRESENT( npe_y )               )  npe_y            = m_couplers(my_cpl_id)%npe_y
     360      IF ( PRESENT( lower_left_x )        )  lower_left_x     = m_couplers(my_cpl_id)%lower_left_x
     361      IF ( PRESENT( lower_left_y )        )  lower_left_y     = m_couplers(my_cpl_id)%lower_left_y
     362
     363   END SUBROUTINE pmc_get_local_model_info
     364
     365
     366
     367   LOGICAL function pmc_is_rootmodel( )
     368
     369      IMPLICIT NONE
     370
     371      pmc_is_rootmodel = ( m_my_cpl_id == 1 )
     372
     373   END FUNCTION pmc_is_rootmodel
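A brief usage sketch for the two public routines above, as an illustration only (not from the PALM source): it assumes a __parallel build in which pmc_handle_communicator has been compiled and pmc_init_model has already filled m_couplers, and the local variable names are hypothetical.

 PROGRAM pmc_query_sketch

    USE pmc_handle_communicator,                                               &
        ONLY:  pmc_get_local_model_info, pmc_is_rootmodel

    IMPLICIT NONE

    INTEGER ::  id, npx, npy, parent_id

!
!-- All arguments are OPTIONAL; only the requested values are returned
    CALL pmc_get_local_model_info( my_cpl_id = id,                             &
                                   my_cpl_parent_id = parent_id,               &
                                   npe_x = npx, npe_y = npy )

    IF ( pmc_is_rootmodel() )  THEN
       PRINT*, 'root domain, id = ', id
    ELSE
       PRINT*, 'nested domain ', id, ' coupled to parent ', parent_id
    ENDIF

 END PROGRAM pmc_query_sketch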
     374
     375
     376
     377
     378!-- TO_DO: what does this comment mean?
    283379! Private SUBROUTINEs
    284 
    285   SUBROUTINE read_coupling_layout (PMC_status)
    286     IMPLICIT     none
    287     INTEGER,INTENT(INOUT)           :: PMC_status
    288     INTEGER                         :: i,iunit,istat
    289     CHARACTER(LEN=*), PARAMETER     :: fname = 'PMC_couple_layout'
    290     LOGICAL                         :: lex
    291 
    292     m_NrOfCpl = 0
     380 SUBROUTINE read_coupling_layout( nesting_mode, pmc_status )
     381
     382    IMPLICIT NONE
     383
     384    CHARACTER(LEN=7) ::  nesting_mode
     385
     386    INTEGER, INTENT(INOUT) ::  pmc_status
     387    INTEGER                ::  i, istat, iunit
     388
     389    TYPE(pmc_layout), DIMENSION(pmc_max_modell) ::  domain_layouts
     390
     391
     392    NAMELIST /nestpar/  domain_layouts, nesting_mode
     393
     394!
     395!-- Initialize some coupling variables
     396    domain_layouts(1:pmc_max_modell)%id = -1
     397    m_nrofcpl =   0
    293398    iunit     = 345
    294399
    295     PMC_STATUS = PMC_STATUS_OK
    296     INQUIRE(file=TRIM(fname), exist=lex)
    297     IF (.NOT. lex) THEN
    298        PMC_status = PMC_ERRO_NOF
     400    pmc_status = pmc_status_ok
     401
     402!
     403!-- Open the NAMELIST-file and read the nesting layout
     404    CALL check_open( 11 )
     405    READ ( 11, nestpar, IOSTAT=istat )
     406
     407    IF ( istat < 0 )  THEN
     408!
     409!--    No nestpar-NAMELIST found
     410       pmc_status = pmc_no_namelist_found
     411!
      412!--    Set the file pointer to the beginning of the file. Otherwise PE0 will
      413!--    later be unable to read the inipar-NAMELIST
     414       REWIND ( 11 )
    299415       RETURN
    300     END IF
    301 
    302     open(iunit,file=TRIM(fname),status='OLD')
    303     do i=1,PMC_MAX_MODELL
    304       read(iunit,*,iostat=istat) m_couplers(i)%name                     &
    305            , m_couplers(i)%id,m_couplers(i)%Parent_id                   &
    306            , m_couplers(i)%npe_x,m_couplers(i)%npe_y                    &
    307            , m_couplers(i)%lower_left_x, m_couplers(i)%lower_left_y
    308       if(istat /= 0)  EXIT
    309 
    310       write(0,'(a,a,1x,4i7,1x,2F10.2)') 'Set up Model  ',trim(m_couplers(i)%name),m_couplers(i)%id,m_couplers(i)%Parent_id, &
    311                                                      m_couplers(i)%npe_x,m_couplers(i)%npe_y,                               &
    312                                                      m_couplers(i)%lower_left_x,m_couplers(i)%lower_left_y
    313 
    314       m_NrOfCpl = i
    315     END do
    316     close(iunit)
    317 
    318     return
    319   END SUBROUTINE read_coupling_layout
    320 
    321 END MODULE PMC_handle_communicator
     416
     417    ELSEIF ( istat > 0 )  THEN
     418!
     419!--    Errors in reading nestpar-NAMELIST
     420       pmc_status = pmc_namelist_error
     421       RETURN
     422
     423    ENDIF
     424
     425!
     426!-- Output location message
     427    CALL location_message( 'initialize communicators for nesting', .FALSE. )
     428!
     429!-- Assign the layout to the internally used variable
     430    m_couplers = domain_layouts
     431
     432!
     433!-- Get the number of nested models given in the nestpar-NAMELIST
     434    DO  i = 1, pmc_max_modell
     435
     436       IF ( m_couplers(i)%id /= -1  .AND.  i <= pmc_max_modell )  THEN
     437          WRITE ( 0, '(A,A,1X,4I7,1X,2F10.2)' )  'Set up Model  ',             &
     438                              TRIM( m_couplers(i)%name ), m_couplers(i)%id,    &
     439                              m_couplers(i)%Parent_id, m_couplers(i)%npe_x,    &
     440                              m_couplers(i)%npe_y, m_couplers(i)%lower_left_x, &
     441                              m_couplers(i)%lower_left_y
     442       ELSE
     443!
     444!--       When id=-1 is found for the first time, the list of domains is
      445!--       finished (at the latest after pmc_max_modell entries)
     446          m_nrofcpl = i - 1
     447          EXIT
     448       ENDIF
     449
     450    ENDDO
     451
     452 END SUBROUTINE read_coupling_layout
     453
     454#endif
     455 END MODULE pmc_handle_communicator
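read_coupling_layout expects the &nestpar namelist on unit 11, i.e. in the same parameter file that holds the inipar namelist (hence the REWIND above). A hypothetical two-domain layout could look like the following sketch; the component names are taken from the assignments above, while the concrete values and the parent_id = -1 convention for the root domain are assumptions only, not part of the changeset:

 &nestpar  nesting_mode = 'two-way',
           domain_layouts(1)%name = 'coarse',  domain_layouts(1)%id = 1,
           domain_layouts(1)%parent_id = -1,
           domain_layouts(1)%npe_x = 2,   domain_layouts(1)%npe_y = 2,
           domain_layouts(1)%lower_left_x = 0.0,
           domain_layouts(1)%lower_left_y = 0.0,
           domain_layouts(2)%name = 'fine',    domain_layouts(2)%id = 2,
           domain_layouts(2)%parent_id = 1,
           domain_layouts(2)%npe_x = 2,   domain_layouts(2)%npe_y = 2,
           domain_layouts(2)%lower_left_x = 500.0,
           domain_layouts(2)%lower_left_y = 500.0,
 /

Entries are read until the first domain with id = -1 (the initialized default) is encountered, or until pmc_max_modell entries have been given.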
  • palm/trunk/SOURCE/pmc_interface.f90

    r1763 r1764  
    2020! Current revisions:
    2121! ------------------
    22 !
     22! +cpl_parent_id,
     23! cpp-statements for nesting replaced by __parallel statements,
     24! errors output with message-subroutine,
     25! index bugfixes in pmci_interp_tril_all,
     26! some adjustments to PALM style
    2327!
    2428! Former revisions:
     
    3539!------------------------------------------------------------------------------!
    3640
    37 
    38     USE mpi
    39 
    40 !
    41 !-- PALM modules
     41    USE arrays_3d,                                                             &
     42        ONLY:  dzu, dzw, e, e_p, pt, pt_p, q, q_p, te_m, tu_m, tv_m, tw_m, u,  &
     43               u_p, v, v_p, w, w_p, zu, zw, z0
     44
     45    USE control_parameters,                                                    &
     46        ONLY:  dt_3d, dz, humidity, message_string, nest_bound_l,              &
     47               nest_bound_r, nest_bound_s, nest_bound_n, passive_scalar,       &
     48               simulated_time, topography, volume_flow
     49
     50    USE cpulog,                                                                &
     51        ONLY:  cpu_log, log_point_s
     52
     53    USE grid_variables,                                                        &
     54        ONLY:  dx, dy
     55
     56    USE indices,                                                               &
     57        ONLY:  nbgp, nx, nxl, nxlg, nxlu, nxr, nxrg, ny, nyn, nyng, nys, nysg, &
     58               nysv, nz, nzb, nzb_s_inner, nzb_u_inner, nzb_u_outer,           &
     59               nzb_v_inner, nzb_v_outer, nzb_w_inner, nzb_w_outer, nzt
     60
    4261    USE kinds
    43     USE pegrid,                                                                           &
    44         ONLY:  myid, numprocs, comm2d, comm1dx, comm1dy, myidx, myidy, collective_wait
    45     USE arrays_3d,                                                                        &
    46         ONLY:  u, v, w, e, pt, q, u_p, v_p, w_p, e_p, pt_p, q_p, z0, dzu, dzw, zu, zw,    &
    47                tu_m, tv_m, tw_m, te_m
    48     USE indices,                                                                          &
    49         ONLY:  nx, ny, nz, nxl, nxr, nys, nyn, nzb, nzt, nxlu, nysv, nxlg, nxrg,          &
    50                nysg, nyng, nbgp, nzb_u_inner, nzb_v_inner, nzb_w_inner,                   &
    51                nzb_s_inner, nzb_u_outer, nzb_v_outer, nzb_w_outer
    52     USE control_parameters,                                                               &
    53         ONLY:  dz, dt_3d, simulated_time, message_string, volume_flow,                    &
    54                nest_bound_l, nest_bound_r, nest_bound_s, nest_bound_n,                    &
    55                topography, humidity, passive_scalar
    56     USE grid_variables,                                                                   &
    57         ONLY:  dx, dy
    58     USE cpulog,                                                                           &
    59         ONLY:  cpu_log, log_point_s
    60 
    61 !
    62 !-- PMC modules
    63     USE pmc_general,                                                                      &
    64         ONLY:  pmc_status_ok, pmc_max_modell, da_namelen
    65     USE pmc_handle_communicator,                                                          &
    66         ONLY:  pmc_init_model, pmc_is_rootmodel, pmc_get_local_model_info,                &
    67                pmc_server_for_client
    68     USE pmc_mpi_Wrapper,                                                                  &
    69         ONLY:  pmc_recv_from_client, pmc_send_to_server, pmc_recv_from_server,            &
    70                pmc_send_to_client, pmc_bcast
    71     USE pmc_server,                                                                       &
    72         ONLY:  pmc_serverinit, pmc_s_getnextarray,                                        &
    73                pmc_s_set_dataarray, pmc_s_setind_and_allocmem,                            &
    74                pmc_s_set_2d_index_list, pmc_s_fillbuffer,pmc_s_getdata_from_buffer
    75     USE pmc_client,                                                                       &
    76         ONLY:  pmc_clientinit, pmc_set_dataarray_name, pmc_c_get_2d_index_list,           &
    77                pmc_c_getnextarray, pmc_c_set_dataarray, pmc_c_setind_and_allocmem,        &
    78                pmc_c_putbuffer, pmc_c_getbuffer
     62
     63#if defined( __parallel )
     64#if defined( __lc )
     65    USE MPI
     66#else
     67    INCLUDE "mpif.h"
     68#endif
     69
     70    USE pegrid,                                                                &
     71        ONLY:  collective_wait, comm1dx, comm1dy, comm2d, myid, myidx, myidy,  &
     72               numprocs
     73
     74    USE pmc_client,                                                            &
     75        ONLY:  pmc_clientinit, pmc_c_getnextarray, pmc_c_get_2d_index_list,    &
     76               pmc_c_getbuffer, pmc_c_putbuffer, pmc_c_setind_and_allocmem,    &
     77               pmc_c_set_dataarray, pmc_set_dataarray_name
     78
     79    USE pmc_general,                                                           &
     80        ONLY:  da_namelen, pmc_max_modell, pmc_status_ok
     81
     82    USE pmc_handle_communicator,                                               &
     83        ONLY:  pmc_get_local_model_info, pmc_init_model, pmc_is_rootmodel,     &
     84               pmc_no_namelist_found, pmc_server_for_client
     85
     86    USE pmc_mpi_wrapper,                                                       &
     87        ONLY:  pmc_bcast, pmc_recv_from_client, pmc_recv_from_server,          &
     88               pmc_send_to_client, pmc_send_to_server
     89
     90    USE pmc_server,                                                            &
     91        ONLY:  pmc_serverinit, pmc_s_fillbuffer, pmc_s_getdata_from_buffer,    &
     92               pmc_s_getnextarray, pmc_s_setind_and_allocmem,                  &
     93               pmc_s_set_dataarray, pmc_s_set_2d_index_list
     94
     95#endif
    7996
    8097    IMPLICIT NONE
    8198
     99!-- TO_DO: a lot of lines (including comments) in this file exceed the 80 char
     100!--        limit. Try to reduce as much as possible
     101
     102!-- TO_DO: shouldn't we use public as default here? Only a minority of the
     103!-- variables is private.
    82104    PRIVATE    !:  Note that the default publicity is here set to private.
    83105
    84106!
    85107!-- Constants
    86     INTEGER(iwp), PARAMETER, PUBLIC        ::  client_to_server = 2   !:
    87     INTEGER(iwp), PARAMETER, PUBLIC        ::  server_to_client = 1   !:
     108    INTEGER(iwp), PARAMETER, PUBLIC ::  client_to_server = 2   !:
     109    INTEGER(iwp), PARAMETER, PUBLIC ::  server_to_client = 1   !:
    88110
    89111!
    90112!-- Coupler setup
    91     INTEGER(iwp), PUBLIC, SAVE             ::  cpl_id                 !:
    92     CHARACTER(LEN=32), PUBLIC, SAVE        ::  cpl_name               !:
    93     INTEGER(iwp), PUBLIC, SAVE             ::  cpl_npex               !:
    94     INTEGER(iwp), PUBLIC, SAVE             ::  cpl_npey               !:
     113    INTEGER(iwp), PUBLIC, SAVE      ::  cpl_id  = 1            !:
     114    CHARACTER(LEN=32), PUBLIC, SAVE ::  cpl_name               !:
     115    INTEGER(iwp), PUBLIC, SAVE      ::  cpl_npex               !:
     116    INTEGER(iwp), PUBLIC, SAVE      ::  cpl_npey               !:
     117    INTEGER(iwp), PUBLIC, SAVE      ::  cpl_parent_id          !:
    95118
    96119!
    97120!-- Control parameters, will be made input parameters later
    98     CHARACTER(LEN=7), PUBLIC, SAVE         ::  nesting_mode = 'two-way'          !:
    99     REAL(wp), PUBLIC, SAVE                 ::  anterp_relax_length_l = -1.0_wp   !:
    100     REAL(wp), PUBLIC, SAVE                 ::  anterp_relax_length_r = -1.0_wp   !:
    101     REAL(wp), PUBLIC, SAVE                 ::  anterp_relax_length_s = -1.0_wp   !:
    102     REAL(wp), PUBLIC, SAVE                 ::  anterp_relax_length_n = -1.0_wp   !:
    103     REAL(wp), PUBLIC, SAVE                 ::  anterp_relax_length_t = -1.0_wp   !:
     121    CHARACTER(LEN=7), PUBLIC, SAVE ::  nesting_mode = 'two-way'  !: steering parameter for one- or two-way nesting
     122
     123    LOGICAL, PUBLIC, SAVE ::  nested_run = .FALSE.  !: general switch if nested run or not
     124
     125    REAL(wp), PUBLIC, SAVE ::  anterp_relax_length_l = -1.0_wp   !:
     126    REAL(wp), PUBLIC, SAVE ::  anterp_relax_length_r = -1.0_wp   !:
     127    REAL(wp), PUBLIC, SAVE ::  anterp_relax_length_s = -1.0_wp   !:
     128    REAL(wp), PUBLIC, SAVE ::  anterp_relax_length_n = -1.0_wp   !:
     129    REAL(wp), PUBLIC, SAVE ::  anterp_relax_length_t = -1.0_wp   !:
    104130
    105131!
     
    120146    REAL(wp), SAVE, DIMENSION(:,:,:), ALLOCATABLE, TARGET, PUBLIC ::  qc   !:
    121147
    122     INTEGER(iwp), DIMENSION(5)                          ::  coarse_bound   !: Moved here form map_fine_to_coarse.
     148    INTEGER(iwp), DIMENSION(5)                          ::  coarse_bound   !:
    123149    REAL(wp), PUBLIC, SAVE                              ::  xexl           !:
    124150    REAL(wp), PUBLIC, SAVE                              ::  xexr           !:
     
    229255       INTEGER(iwp)                        ::  nx
    230256       INTEGER(iwp)                        ::  ny
    231        INTEGER (iwp)                       ::  nz
     257       INTEGER(iwp)                        ::  nz
    232258       REAL(wp)                            ::  dx
    233259       REAL(wp)                            ::  dy
     
    247273    TYPE(coarsegrid_def), SAVE             ::  cg   !:
    248274
    249 !
    250 !-- Interface section.
     275
     276    INTERFACE pmci_client_datatrans
     277       MODULE PROCEDURE pmci_client_datatrans
     278    END INTERFACE
     279
     280    INTERFACE pmci_client_initialize
     281       MODULE PROCEDURE pmci_client_initialize
     282    END INTERFACE
     283
     284    INTERFACE pmci_client_synchronize
     285       MODULE PROCEDURE pmci_client_synchronize
     286    END INTERFACE
     287
     288    INTERFACE pmci_ensure_nest_mass_conservation
     289       MODULE PROCEDURE pmci_ensure_nest_mass_conservation
     290    END INTERFACE
     291
    251292    INTERFACE pmci_init
    252293       MODULE PROCEDURE pmci_init
    253294    END INTERFACE
    254    
     295
    255296    INTERFACE pmci_modelconfiguration
    256297       MODULE PROCEDURE pmci_modelconfiguration
    257298    END INTERFACE
    258    
     299
     300    INTERFACE pmci_server_initialize
     301       MODULE PROCEDURE pmci_server_initialize
     302    END INTERFACE
     303
    259304    INTERFACE pmci_server_synchronize
    260305       MODULE PROCEDURE pmci_server_synchronize
    261306    END INTERFACE
    262    
    263     INTERFACE pmci_client_synchronize
    264        MODULE PROCEDURE pmci_client_synchronize
    265     END INTERFACE
    266    
    267     INTERFACE pmci_server_datatrans
    268        MODULE PROCEDURE pmci_server_datatrans
    269     END INTERFACE
    270    
    271     INTERFACE pmci_client_datatrans
    272        MODULE PROCEDURE pmci_client_datatrans
    273     END INTERFACE
    274    
     307
    275308    INTERFACE pmci_update_new
    276309       MODULE PROCEDURE pmci_update_new
    277310    END INTERFACE
    278311
    279     INTERFACE pmci_ensure_nest_mass_conservation
    280        MODULE PROCEDURE pmci_ensure_nest_mass_conservation
    281     END INTERFACE
    282    
    283     INTERFACE pmci_server_initialize
    284        MODULE PROCEDURE pmci_server_initialize
    285     END INTERFACE
    286    
    287     INTERFACE pmci_client_initialize
    288        MODULE PROCEDURE pmci_client_initialize
    289     END INTERFACE
    290    
     312    PUBLIC pmci_client_datatrans
     313    PUBLIC pmci_client_initialize
     314    PUBLIC pmci_client_synchronize
     315    PUBLIC pmci_ensure_nest_mass_conservation
    291316    PUBLIC pmci_init
    292317    PUBLIC pmci_modelconfiguration
     318    PUBLIC pmci_server_datatrans
     319    PUBLIC pmci_server_initialize
    293320    PUBLIC pmci_server_synchronize
    294     PUBLIC pmci_client_synchronize
    295     PUBLIC pmci_server_datatrans
    296     PUBLIC pmci_client_datatrans
    297321    PUBLIC pmci_update_new
    298     PUBLIC pmci_ensure_nest_mass_conservation
    299     PUBLIC pmci_server_initialize
    300     PUBLIC pmci_client_initialize
    301322
    302323
     
    305326
    306327 SUBROUTINE pmci_init( world_comm )
     328
    307329    IMPLICIT NONE
    308330
    309     INTEGER, INTENT(OUT)  ::  world_comm   !:
    310 
    311     INTEGER(iwp)          ::  ierr         !:
    312     INTEGER(iwp)          ::  istat        !:
    313     INTEGER(iwp)          ::  PMC_status   !:
    314 
    315 
    316 #if defined PMC_ACTIVE
    317     CALL pmc_init_model( world_comm, pmc_status )
    318     IF ( pmc_status /= pmc_status_ok )  THEN
    319        CALL MPI_ABORT( MPI_COMM_WORLD, istat, ierr )
     331    INTEGER, INTENT(OUT) ::  world_comm   !:
     332
     333#if defined( __parallel )
     334
     335    INTEGER(iwp)         ::  ierr         !:
     336    INTEGER(iwp)         ::  istat        !:
     337    INTEGER(iwp)         ::  pmc_status   !:
     338
     339
     340    CALL pmc_init_model( world_comm, nesting_mode, pmc_status )
     341
     342    IF ( pmc_status == pmc_no_namelist_found )  THEN
     343!
     344!--    This is not a nested run
     345!
      346!--    TO_DO: presumably this is not required any more?
     347       world_comm = MPI_COMM_WORLD
     348       cpl_id     = 1
     349       cpl_name   = ""
     350       cpl_npex   = 2
     351       cpl_npey   = 2
     352       lower_left_coord_x = 0.0_wp
     353       lower_left_coord_y = 0.0_wp
     354       RETURN
     355    ELSE
     356!
      357!--    Set the general steering switch which tells PALM that it is a nested run
     358       nested_run = .TRUE.
    320359    ENDIF
    321     CALL pmc_get_local_model_info( my_cpl_id = cpl_id, cpl_name = cpl_name,  npe_x=cpl_npex, npe_y = cpl_npey,  &
    322                                    lower_left_x = lower_left_coord_x, lower_left_y = lower_left_coord_y )
     360
     361    CALL pmc_get_local_model_info( my_cpl_id = cpl_id,                         &
     362                                   my_cpl_parent_id = cpl_parent_id,           &
     363                                   cpl_name = cpl_name,                        &
     364                                   npe_x = cpl_npex, npe_y = cpl_npey,         &
     365                                   lower_left_x = lower_left_coord_x,          &
     366                                   lower_left_y = lower_left_coord_y )
     367!
     368!-- Message that communicators for nesting are initialized.
     369!-- Attention: myid has been set at the end of pmc_init_model in order to
     370!-- guarantee that only PE0 of the root domain does the output.
     371    CALL location_message( 'finished', .TRUE. )
     372!
     373!-- Reset myid to its default value
     374    myid = 0
    323375#else
    324     world_comm = MPI_COMM_WORLD
     376!
     377!-- Nesting cannot be used in serial mode. cpl_id is set to root domain (1)
     378!-- because no location messages would be generated otherwise.
     379!-- world_comm is given a dummy value to avoid compiler warnings (INTENT(OUT)
     380!-- should get an explicit value)
    325381    cpl_id     = 1
    326     cpl_name   = ""
    327     cpl_npex   = 2
    328     cpl_npey   = 2
    329     lower_left_coord_x = 0.0_wp
    330     lower_left_coord_y = 0.0_wp
     382    nested_run = .FALSE.
     383    world_comm = 1
    331384#endif
    332385
     
    336389
    337390 SUBROUTINE pmci_modelconfiguration
     391
    338392    IMPLICIT NONE
    339393
     394    CALL location_message( 'setup the nested model configuration', .FALSE. )
    340395    CALL pmci_setup_coordinates   !:  Compute absolute coordinates valid for all models
    341     CALL pmci_setup_client        !:  Initialize PMC Client (Must be called before pmc_palm_SetUp_Server)
     396    CALL pmci_setup_client        !:  Initialize PMC Client (Must be called before pmc_setup_server)
    342397    CALL pmci_setup_server        !:  Initialize PMC Server
     398    CALL location_message( 'finished', .TRUE. )
    343399
    344400 END SUBROUTINE pmci_modelconfiguration
     
    347403
    348404 SUBROUTINE pmci_setup_server
     405
     406#if defined( __parallel )
    349407    IMPLICIT NONE
    350408
     
    371429   
    372430
    373 #if defined PMC_ACTIVE
    374     CALL pmc_serverinit                                !  Initialize PMC Server
    375 
    376 !
    377 !-- Get coordinates from all Clients.
     431!
      432!--   Initialize the PMC server
     433    CALL pmc_serverinit
     434
     435!
     436!-- Get coordinates from all clients
    378437    DO  m = 1, SIZE( pmc_server_for_client ) - 1
    379438       client_id = pmc_server_for_client(m)
     
    391450
    392451!
    393 !--       Find the highest client level in the coarse grid for the reduced z transfer
     452!--       Find the highest client level in the coarse grid for the reduced z
     453!--       transfer
    394454          DO  k = 1, nz                 
    395455             IF ( zw(k) > fval(1) )  THEN
     
    404464          ALLOCATE( cl_coord_y(-nbgp:ny_cl+nbgp) )
    405465         
    406           CALL pmc_recv_from_client( client_id, cl_coord_x, SIZE( cl_coord_x ), 0, 11, ierr )
    407           CALL pmc_recv_from_client( client_id, cl_coord_y, SIZE( cl_coord_y ), 0, 12, ierr )
     466          CALL pmc_recv_from_client( client_id, cl_coord_x, SIZE( cl_coord_x ),&
     467                                     0, 11, ierr )
     468          CALL pmc_recv_from_client( client_id, cl_coord_y, SIZE( cl_coord_y ),&
     469                                     0, 12, ierr )
    408470          WRITE ( 0, * )  'receive from pmc Client ', client_id, nx_cl, ny_cl
    409471         
    410472          define_coarse_grid_real(1) = lower_left_coord_x
    411473          define_coarse_grid_real(2) = lower_left_coord_y
    412           define_coarse_grid_real(3) = 0                                      !  KK currently not used.
     474!--       TO_DO: remove this?
     475          define_coarse_grid_real(3) = 0             !  KK currently not used.
    413476          define_coarse_grid_real(4) = 0
    414477          define_coarse_grid_real(5) = dx
    415478          define_coarse_grid_real(6) = dy
    416           define_coarse_grid_real(7) = lower_left_coord_x + ( nx + 1 ) * dx   !  AH: corrected 6.2.2015
    417           define_coarse_grid_real(8) = lower_left_coord_y + ( ny + 1 ) * dy   !  AH: corrected 6.2.2015
    418           define_coarse_grid_real(9) = dz                                     !  AH: added 24.2.2015
     479          define_coarse_grid_real(7) = lower_left_coord_x + ( nx + 1 ) * dx
     480          define_coarse_grid_real(8) = lower_left_coord_y + ( ny + 1 ) * dy
     481          define_coarse_grid_real(9) = dz
    419482
    420483          define_coarse_grid_int(1)  = nx
     
    437500!
    438501!--       Send coarse grid information to client
    439           CALL pmc_send_to_client( client_id, Define_coarse_grid_real, 9, 0, 21, ierr )
    440           CALL pmc_send_to_client( client_id, Define_coarse_grid_int,  3, 0, 22, ierr )
    441 
    442 !
    443 !--       Send local grid to client.
     502          CALL pmc_send_to_client( client_id, Define_coarse_grid_real, 9, 0,   &
     503                                   21, ierr )
     504          CALL pmc_send_to_client( client_id, Define_coarse_grid_int,  3, 0,   &
     505                                   22, ierr )
     506
     507!
     508!--       Send local grid to client
    444509          CALL pmc_send_to_client( client_id, coord_x, nx+1+2*nbgp, 0, 24, ierr )
    445510          CALL pmc_send_to_client( client_id, coord_y, ny+1+2*nbgp, 0, 25, ierr )
    446511
    447512!
    448 !--       Also send the dzu-, dzw-, zu- and zw-arrays here.   
     513!--       Also send the dzu-, dzw-, zu- and zw-arrays here
    449514          CALL pmc_send_to_client( client_id, dzu, nz_cl + 1, 0, 26, ierr )
    450515          CALL pmc_send_to_client( client_id, dzw, nz_cl + 1, 0, 27, ierr )
     
    454519       ENDIF
    455520
    456        CALL MPI_Bcast( nomatch, 1, MPI_INTEGER, 0, comm2d, ierr )
     521       CALL MPI_BCAST( nomatch, 1, MPI_INTEGER, 0, comm2d, ierr )
    457522       IF ( nomatch /= 0 ) THEN
    458           WRITE ( message_string, * )  'Error: nested client domain does not fit ',    &
    459                                        'into its server domain'
     523          WRITE ( message_string, * )  'Error: nested client domain does ',    &
     524                                       'not fit into its server domain'
    460525          CALL message( 'pmc_palm_setup_server', 'PA0XYZ', 1, 2, 0, 6, 0 )
    461526       ENDIF
    462527     
    463        CALL MPI_Bcast( nz_cl, 1, MPI_INTEGER, 0, comm2d, ierr )
     528       CALL MPI_BCAST( nz_cl, 1, MPI_INTEGER, 0, comm2d, ierr )
    464529       
    465530       CALL pmci_create_index_list
     
    468533!--    Include couple arrays into server content
    469534       DO  WHILE ( pmc_s_getnextarray( client_id, myname ) )
    470           CALL pmci_set_array_pointer( myName, client_id = client_id, nz_cl = nz_cl )
     535          CALL pmci_set_array_pointer( myname, client_id = client_id,          &
     536                                       nz_cl = nz_cl )
    471537       ENDDO
    472538       CALL pmc_s_setind_and_allocmem( client_id )
    473539    ENDDO
    474540
    475 #endif
    476 
    477 
    478541 CONTAINS
    479542
    480543
    481544   SUBROUTINE pmci_create_index_list
     545
    482546       IMPLICIT NONE
     547
    483548       INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE  ::  coarse_bound_all   !:
    484549       INTEGER(iwp)                               ::  i                  !:
     
    504569          CALL pmc_recv_from_client( client_id, size_of_array, 2, 0, 40, ierr )
    505570          ALLOCATE( coarse_bound_all(size_of_array(1),size_of_array(2)) )
    506           CALL pmc_recv_from_client( client_id, coarse_bound_all, SIZE( coarse_bound_all ), 0, 41, ierr )
     571          CALL pmc_recv_from_client( client_id, coarse_bound_all,              &
     572                                     SIZE( coarse_bound_all ), 0, 41, ierr )
    507573
    508574!
     
    519585          ALLOCATE( index_list(6,ic) )
    520586
    521           CALL MPI_Comm_size( comm1dx, npx, ierr )
    522           CALL MPI_Comm_size( comm1dy, npy, ierr )
     587          CALL MPI_COMM_SIZE( comm1dx, npx, ierr )
     588          CALL MPI_COMM_SIZE( comm1dy, npy, ierr )
    523589
    524590          nrx = nxr - nxl + 1   !  +1 in index because FORTRAN indexing starts with 1, palm with 0
     
    532598                   scoord(1) = px
    533599                   scoord(2) = py
    534                    CALL MPI_Cart_rank( comm2d, scoord, server_pe, ierr )
     600                   CALL MPI_CART_RANK( comm2d, scoord, server_pe, ierr )
    535601                 
    536602                   ic = ic + 1
     
    546612          CALL pmc_s_set_2d_index_list( client_id, index_list(:,1:ic) )
    547613       ELSE
    548           ALLOCATE( index_list(6,1) )                                   !  Dummy allocate
     614          ALLOCATE( index_list(6,1) )    !  Dummy allocate
    549615          CALL pmc_s_set_2d_index_list( client_id, index_list )
    550616       ENDIF
     
    554620     END SUBROUTINE pmci_create_index_list
    555621
    556 
     622#endif
    557623 END SUBROUTINE pmci_setup_server
    558624
     
    560626
    561627 SUBROUTINE pmci_setup_client
     628
     629#if defined( __parallel )
    562630    IMPLICIT NONE
     631
     632    CHARACTER(LEN=DA_Namelen)  ::  myname     !:
     633
    563634    INTEGER(iwp)               ::  i          !:
    564635    INTEGER(iwp)               ::  ierr       !:
     
    579650    REAL(wp), DIMENSION(4)     ::  ztt        !:
    580651                                             
    581     CHARACTER(LEN=DA_Namelen)  ::  myname     !:
    582 
    583 
    584 #if defined PMC_ACTIVE
    585     IF ( .not. pmc_is_rootmodel() )  THEN     !  Root Model does not have Server and is not a client
     652
     653!-- TO_DO: describe what is happening in this if-clause
      654!-- The root model does not have a server and is not a client
     655    IF ( .NOT. pmc_is_rootmodel() )  THEN
    586656       CALL pmc_clientinit
    587657       
     
    596666
    597667!
    598 !--    Update this list appropritely and also in create_client_arrays and in pmci_set_array_pointer.
    599 !--    If a variable is removed, it only has tobe removed from here.
    600        CALL pmc_set_dataarray_name( lastentry = .true. )
    601 
    602 !
    603 !--    Send grid to Server
      668!--    Update this list appropriately and also in create_client_arrays and in
     669!--    pmci_set_array_pointer.
     670!--    If a variable is removed, it only has to be removed from here.
     671       CALL pmc_set_dataarray_name( lastentry = .TRUE. )
     672
     673!
     674!--    Send grid to server
    604675       val(1)  = nx
    605676       val(2)  = ny
     
    621692
    622693!
    623 !--       Receive also the dz-,zu- and zw-arrays here.         
      694!--       Also receive the dz-, zu- and zw-arrays here.
      695!--       TO_DO: what is the meaning of the above comment + remove write
      696!--              statements and give this information in the header
    624697          WRITE(0,*) 'Coarse grid from Server '
    625698          WRITE(0,*) 'startx_tot    = ',define_coarse_grid_real(1)
     
    635708       ENDIF
    636709
    637        CALL MPI_Bcast( define_coarse_grid_real, 9, MPI_REAL, 0, comm2d, ierr )
    638        CALL MPI_Bcast( define_coarse_grid_int, 3, MPI_INTEGER, 0, comm2d, ierr )
     710       CALL MPI_BCAST( define_coarse_grid_real, 9, MPI_REAL, 0, comm2d, ierr )
     711       CALL MPI_BCAST( define_coarse_grid_int, 3, MPI_INTEGER, 0, comm2d, ierr )
    639712
    640713       cg%dx = define_coarse_grid_real(5)
     
     658731!--    Get coarse grid coordinates and values of the z-direction from server
    659732       IF ( myid == 0) THEN
    660           CALL pmc_recv_from_server( cg%coord_x, cg%nx + 1 + 2 * nbgp, 0, 24, ierr )
    661           CALL pmc_recv_from_server( cg%coord_y, cg%ny + 1 + 2 * nbgp, 0, 25, ierr )         
     733          CALL pmc_recv_from_server( cg%coord_x, cg%nx + 1 + 2 * nbgp, 0, 24,  &
     734                                     ierr )
     735          CALL pmc_recv_from_server( cg%coord_y, cg%ny + 1 + 2 * nbgp, 0, 25,  &
     736                                     ierr )
    662737          CALL pmc_recv_from_server( cg%dzu, cg%nz + 1, 0, 26, ierr )
    663738          CALL pmc_recv_from_server( cg%dzw, cg%nz + 1, 0, 27, ierr )
     
    668743!
    669744!--    and broadcast this information
    670        CALL MPI_Bcast( cg%coord_x, cg%nx + 1 + 2 * nbgp, MPI_REAL, 0, comm2d, ierr )
    671        CALL MPI_Bcast( cg%coord_y, cg%ny + 1 + 2 * nbgp, MPI_REAL, 0, comm2d, ierr )
    672        CALL MPI_Bcast( cg%dzu, cg%nz + 1, MPI_REAL, 0, comm2d, ierr )
    673        CALL MPI_Bcast( cg%dzw, cg%nz + 1, MPI_REAL, 0, comm2d, ierr )
    674        CALL MPI_Bcast( cg%zu, cg%nz + 2,  MPI_REAL, 0, comm2d, ierr )
    675        CALL MPI_Bcast( cg%zw, cg%nz + 2,  MPI_REAL, 0, comm2d, ierr )
     745       CALL MPI_BCAST( cg%coord_x, cg%nx + 1 + 2 * nbgp, MPI_REAL, 0, comm2d,  &
     746                       ierr )
     747       CALL MPI_BCAST( cg%coord_y, cg%ny + 1 + 2 * nbgp, MPI_REAL, 0, comm2d,  &
     748                       ierr )
     749       CALL MPI_BCAST( cg%dzu, cg%nz + 1, MPI_REAL, 0, comm2d, ierr )
     750       CALL MPI_BCAST( cg%dzw, cg%nz + 1, MPI_REAL, 0, comm2d, ierr )
     751       CALL MPI_BCAST( cg%zu, cg%nz + 2,  MPI_REAL, 0, comm2d, ierr )
     752       CALL MPI_BCAST( cg%zw, cg%nz + 2,  MPI_REAL, 0, comm2d, ierr )
    676753       
    677754       CALL pmci_map_fine_to_coarse_grid
    678 
    679755       CALL pmc_c_get_2d_index_list
    680756
     
    682758!--    Include couple arrays into client content.
    683759       DO  WHILE ( pmc_c_getnextarray( myname ) )
    684           CALL pmci_create_client_arrays ( myName, icl, icr, jcs, jcn,  cg%nz )   !  Klaus, why the c-arrays are still up to cg%nz??
      760!--       TO_DO: Klaus, why are the c-arrays still up to cg%nz?
     761          CALL pmci_create_client_arrays ( myname, icl, icr, jcs, jcn, cg%nz )
    685762       ENDDO
    686763       CALL pmc_c_setind_and_allocmem
    687764
    688765!
    689 !--    Precompute interpolation coefficients and client-array indices.
     766!--    Precompute interpolation coefficients and client-array indices
    690767       CALL pmci_init_interp_tril
    691768
     
    695772
    696773!
    697 !--    Define the SGS-TKE scaling factor based on the grid-spacing ratio.
     774!--    Define the SGS-TKE scaling factor based on the grid-spacing ratio
    698775       CALL pmci_init_tkefactor
    699776
    700777!
    701778!--    Two-way coupling     
    702        IF ( nesting_mode == 'two-way' ) THEN
     779       IF ( nesting_mode == 'two-way' )  THEN
    703780          CALL pmci_init_anterp_tophat
    704781       ENDIF
     
    712789
    713790!
    714 !--    Why not just simply? test this!
     791!--    TO_DO: Why not just simply? test this!
    715792       !area_t_l = ( nx + 1 ) * (ny + 1 ) * dx * dy           
    716793
    717     ENDIF   !  IF ( .not. PMC_is_RootModel )
    718 #endif
    719 
     794    ENDIF
    720795
    721796 CONTAINS
     
    723798
    724799    SUBROUTINE pmci_map_fine_to_coarse_grid
     800
    725801        IMPLICIT NONE
     802
    726803        INTEGER(iwp), DIMENSION(5,numprocs)  ::  coarse_bound_all   !:
    727804        INTEGER(iwp), DIMENSION(2)           ::  size_of_array      !:
     
    730807        REAL(wp)                             ::  coarse_dy   !:
    731808        REAL(wp)                             ::  loffset     !:
     809        REAL(wp)                             ::  noffset     !:
    732810        REAL(wp)                             ::  roffset     !:
    733         REAL(wp)                             ::  noffset     !:
    734811        REAL(wp)                             ::  soffset     !:
    735812
     
    792869!
    793870!--     Note that MPI_Gather receives data from all processes in the rank order.
    794         CALL MPI_Gather( coarse_bound, 5, MPI_INTEGER, coarse_bound_all, 5, &
     871        CALL MPI_GATHER( coarse_bound, 5, MPI_INTEGER, coarse_bound_all, 5, &
    795872                         MPI_INTEGER, 0, comm2d, ierr )
    796873
     
    9681045         DO  i = nxl - 1, nxl
    9691046            DO  j = nys, nyn
    970                nzt_topo_nestbc_l = MAX( nzt_topo_nestbc_l, nzb_u_inner(j,i), nzb_v_inner(j,i), nzb_w_inner(j,i) )
     1047               nzt_topo_nestbc_l = MAX( nzt_topo_nestbc_l, nzb_u_inner(j,i),   &
     1048                                        nzb_v_inner(j,i), nzb_w_inner(j,i) )
    9711049            ENDDO
    9721050         ENDDO
     
    9781056         i = nxr + 1
    9791057         DO  j = nys, nyn
    980             nzt_topo_nestbc_r = MAX( nzt_topo_nestbc_r, nzb_u_inner(j,i), nzb_v_inner(j,i), nzb_w_inner(j,i) )
     1058            nzt_topo_nestbc_r = MAX( nzt_topo_nestbc_r, nzb_u_inner(j,i),      &
     1059                                     nzb_v_inner(j,i), nzb_w_inner(j,i) )
    9811060         ENDDO
    9821061         nzt_topo_nestbc_r = nzt_topo_nestbc_r + 1       
     
    9871066         DO  j = nys - 1, nys
    9881067            DO  i = nxl, nxr
    989                nzt_topo_nestbc_s = MAX( nzt_topo_nestbc_s, nzb_u_inner(j,i), nzb_v_inner(j,i), nzb_w_inner(j,i) )
     1068               nzt_topo_nestbc_s = MAX( nzt_topo_nestbc_s, nzb_u_inner(j,i),   &
     1069                                        nzb_v_inner(j,i), nzb_w_inner(j,i) )
    9901070            ENDDO
    9911071         ENDDO
     
    9971077         j = nyn + 1
    9981078         DO  i = nxl, nxr
    999             nzt_topo_nestbc_n = MAX( nzt_topo_nestbc_n, nzb_u_inner(j,i), nzb_v_inner(j,i), nzb_w_inner(j,i) )
     1079            nzt_topo_nestbc_n = MAX( nzt_topo_nestbc_n, nzb_u_inner(j,i),      &
     1080                                     nzb_v_inner(j,i), nzb_w_inner(j,i) )
    10001081         ENDDO
    10011082         nzt_topo_nestbc_n = nzt_topo_nestbc_n + 1
     
    10031084
    10041085!
    1005 !--  Then determine the maximum number of near-wall nodes per wall point based on the grid-spacing ratios.
    1006       nzt_topo_max = MAX( nzt_topo_nestbc_l, nzt_topo_nestbc_r, nzt_topo_nestbc_s, nzt_topo_nestbc_n )
    1007       ni = CEILING( cg%dx / dx ) / 2   !  Note that the outer division must be integer division.
    1008       nj = CEILING( cg%dy / dy ) / 2   !  Note that the outer division must be integer division.
     1086!--   Then determine the maximum number of near-wall nodes per wall point based
     1087!--   on the grid-spacing ratios.
     1088      nzt_topo_max = MAX( nzt_topo_nestbc_l, nzt_topo_nestbc_r,                &
     1089                          nzt_topo_nestbc_s, nzt_topo_nestbc_n )
     1090
     1091!
     1092!--   Note that the outer division must be integer division.
     1093      ni = CEILING( cg%dx / dx ) / 2
     1094      nj = CEILING( cg%dy / dy ) / 2
    10091095      nk = 1
    10101096      DO  k = 1, nzt_topo_max
     
    10181104
    10191105!
    1020 !--   First horizontal walls.
    1021 !--   Left boundary.
     1106!--   First horizontal walls
     1107!--   Left boundary
    10221108      IF ( nest_bound_l )   THEN
    10231109         ALLOCATE( logc_u_l(nzb:nzt_topo_nestbc_l, nys:nyn, 1:2) )
     
    10331119
    10341120         DO  j = nys, nyn     
    1035 
    1036 !
    1037 !--         Left boundary for u.
     1121!
     1122!--         Left boundary for u
    10381123            i   = 0
    10391124            kb  = nzb_u_inner(j,i)
    10401125            k   = kb + 1
    10411126            wall_index = kb
    1042             CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
     1127            CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, &
     1128                                     wall_index, z0(j,i), kb, direction, ncorr )
    10431129            logc_u_l(k,j,1) = lc
    10441130            logc_ratio_u_l(k,j,1,0:ncorr-1) = lcr(0:ncorr-1)
     
    10461132
    10471133!
    1048 !--         Left boundary for v.
     1134!--         Left boundary for v
    10491135            i   = -1
    10501136            kb  =  nzb_v_inner(j,i)
    10511137            k   =  kb + 1
    10521138            wall_index = kb
    1053             CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
     1139            CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, &
     1140                                     wall_index, z0(j,i), kb, direction, ncorr )
    10541141            logc_v_l(k,j,1) = lc
    10551142            logc_ratio_v_l(k,j,1,0:ncorr-1) = lcr(0:ncorr-1)
    10561143            lcr(0:ncorr-1) = 1.0_wp
    1057          ENDDO
    1058       ENDIF
    1059 
    1060 !
    1061 !--   Right boundary.
     1144
     1145         ENDDO
     1146      ENDIF
     1147
     1148!
     1149!--   Right boundary
    10621150      IF ( nest_bound_r )  THEN
    10631151         ALLOCATE( logc_u_r(nzb:nzt_topo_nestbc_r,nys:nyn,1:2) )
     
    10711159         direction      = 1
    10721160         inc            = 1 
    1073          DO  j = nys, nyn         
    1074 
     1161         DO  j = nys, nyn
    10751162!
    10761163!--         Right boundary for u.
     
    10791166            k   = kb + 1
    10801167            wall_index = kb
    1081             CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
     1168            CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, &
     1169                                     wall_index, z0(j,i), kb, direction, ncorr )
    10821170            logc_u_r(k,j,1) = lc
    10831171            logc_ratio_u_r(k,j,1,0:ncorr-1) = lcr(0:ncorr-1)
     
    10901178            k   = kb + 1
    10911179            wall_index = kb
    1092             CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
     1180            CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, &
     1181                                     wall_index, z0(j,i), kb, direction, ncorr )
    10931182            logc_v_r(k,j,1) = lc
    10941183            logc_ratio_v_r(k,j,1,0:ncorr-1) = lcr(0:ncorr-1)
    10951184            lcr(0:ncorr-1) = 1.0_wp
    1096          ENDDO
    1097       ENDIF
    1098 
    1099 !
    1100 !--   South boundary.
     1185
     1186         ENDDO
     1187      ENDIF
     1188
     1189!
     1190!--   South boundary
    11011191      IF ( nest_bound_s )  THEN
    11021192         ALLOCATE( logc_u_s(nzb:nzt_topo_nestbc_s,nxl:nxr,1:2) )
     
    11111201         inc            = 1
    11121202         DO  i = nxl, nxr
    1113 
    11141203!
    11151204!--         South boundary for u.
     
    11181207            k   =  kb + 1
    11191208            wall_index = kb
    1120             CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
     1209            CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, &
     1210                                     wall_index, z0(j,i), kb, direction, ncorr )
    11211211            logc_u_s(k,i,1) = lc
    11221212            logc_ratio_u_s(k,i,1,0:ncorr-1) = lcr(0:ncorr-1)
     
    11241214
    11251215!
    1126 !--         South boundary for v.
     1216!--         South boundary for v
    11271217            j   = 0
    11281218            kb  = nzb_v_inner(j,i)
    11291219            k   = kb + 1
    11301220            wall_index = kb
    1131             CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
     1221            CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, &
     1222                                     wall_index, z0(j,i), kb, direction, ncorr )
    11321223            logc_v_s(k,i,1) = lc
    11331224            logc_ratio_v_s(k,i,1,0:ncorr-1) = lcr(0:ncorr-1)
     
    11371228
    11381229!
    1139 !--   North boundary.
     1230!--   North boundary
    11401231      IF ( nest_bound_n )  THEN
    11411232         ALLOCATE( logc_u_n(nzb:nzt_topo_nestbc_n,nxl:nxr,1:2) )
     
    11491240         direction      = 1
    11501241         inc            = 1
    1151          DO  i = nxl, nxr       
    1152 
     1242         DO  i = nxl, nxr
    11531243!
    11541244!--         North boundary for u.
     
    11571247            k   = kb + 1
    11581248            wall_index = kb
    1159             CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
     1249            CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, &
     1250                                     wall_index, z0(j,i), kb, direction, ncorr )
    11601251            logc_u_n(k,i,1) = lc
    11611252            logc_ratio_u_n(k,i,1,0:ncorr-1) = lcr(0:ncorr-1)
     
    11681259            k   = kb + 1
    11691260            wall_index = kb
    1170             CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, wall_index, z0(j,i), kb, direction, ncorr )
     1261            CALL pmci_define_loglaw_correction_parameters( lc, lcr, k, j, inc, &
     1262                                     wall_index, z0(j,i), kb, direction, ncorr )
    11711263            logc_v_n(k,i,1) = lc
    11721264            logc_ratio_v_n(k,i,1,0:ncorr-1) = lcr(0:ncorr-1)
    11731265            lcr(0:ncorr-1) = 1.0_wp
     1266
    11741267         ENDDO
    11751268      ENDIF
     
    20112104    END SUBROUTINE pmci_init_tkefactor
    20122105
    2013 
     2106#endif
    20142107 END SUBROUTINE pmci_setup_client
    20152108
     
    20172110
    20182111 SUBROUTINE pmci_setup_coordinates
     2112
     2113#if defined( __parallel )
    20192114    IMPLICIT NONE
     2115
    20202116    INTEGER(iwp) ::  i   !:
    20212117    INTEGER(iwp) ::  j   !:
     
    20332129       coord_y(j) = lower_left_coord_y + j * dy
    20342130    ENDDO
    2035      
     2131
     2132#endif
    20362133 END SUBROUTINE pmci_setup_coordinates
    20372134
     
    20402137 SUBROUTINE pmci_server_synchronize
    20412138
     2139#if defined( __parallel )
    20422140!
    20432141!-- Unify the time steps for each model and synchronize.
     
    20662164!
    20672165!-- Broadcast the unified time step to all server processes.
    2068     CALL MPI_Bcast( dt_3d, 1, MPI_REAL, 0, comm2d, ierr )
     2166    CALL MPI_BCAST( dt_3d, 1, MPI_REAL, 0, comm2d, ierr )
    20692167
    20702168!
     
    20762174       ENDIF
    20772175    ENDDO
    2078    
     2176
     2177#endif
    20792178 END SUBROUTINE pmci_server_synchronize
    20802179
     
    20832182 SUBROUTINE pmci_client_synchronize
    20842183
     2184#if defined( __parallel )
    20852185!
    20862186!-- Unify the time steps for each model and synchronize.
     
    21052205!
    21062206!--    Broadcast the unified time step to all server processes.
    2107        CALL MPI_Bcast( dt_3d, 1, MPI_REAL, 0, comm2d, ierr )
     2207       CALL MPI_BCAST( dt_3d, 1, MPI_REAL, 0, comm2d, ierr )
    21082208    ENDIF
    21092209
     2210#endif
    21102211 END SUBROUTINE pmci_client_synchronize
    21112212               
     
    21132214
    21142215 SUBROUTINE pmci_server_datatrans( direction )
     2216
    21152217    IMPLICIT NONE
     2218
    21162219    INTEGER(iwp),INTENT(IN) ::  direction   !:
     2220
     2221#if defined( __parallel )
    21172222    INTEGER(iwp)            ::  client_id   !:
    21182223    INTEGER(iwp)            ::  i           !:
     
    21382243!
    21392244!-- Broadcast the unified time step to all server processes.
    2140     CALL MPI_Bcast( dt_3d, 1, MPI_REAL, 0, comm2d, ierr )
     2245    CALL MPI_BCAST( dt_3d, 1, MPI_REAL, 0, comm2d, ierr )
    21412246
    21422247    DO  m = 1, SIZE( PMC_Server_for_Client ) - 1
     
    21852290    ENDDO
    21862291
     2292#endif
    21872293 END SUBROUTINE pmci_server_datatrans
    21882294
     
    21902296
    21912297 SUBROUTINE pmci_client_datatrans( direction )
     2298
    21922299    IMPLICIT NONE
     2300
    21932301    INTEGER(iwp), INTENT(IN) ::  direction   !:
     2302
     2303#if defined( __parallel )
    21942304    INTEGER(iwp)             ::  ierr        !:
    21952305    INTEGER(iwp)             ::  icl         !:
     
    22142324!
    22152325!--    Broadcast the unified time step to all server processes.
    2216        CALL MPI_Bcast( dt_3d, 1, MPI_REAL, 0, comm2d, ierr )
     2326       CALL MPI_BCAST( dt_3d, 1, MPI_REAL, 0, comm2d, ierr )
    22172327       CALL cpu_log( log_point_s(70), 'PMC model sync', 'stop' )
    22182328
     
    31793289!--            Spatial under-relaxation.
    31803290               fra  = frax(l) * fray(m) * fraz(n)
     3291!--            TO_DO: why not KIND=wp ?
    31813292               fc(n,m,l) = ( 1.0_wp - fra ) * fc(n,m,l) + fra * cellsum / REAL( nfc, KIND=KIND(cellsum) ) 
    31823293            ENDDO
     
    31863297   END SUBROUTINE pmci_anterp_tophat
    31873298
    3188 
     3299#endif
    31893300 END SUBROUTINE pmci_client_datatrans
    31903301
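
The under-relaxed top-hat anterpolation above blends the existing coarse-grid value with the mean of the contributing fine-grid cells, weighted by fra = frax(l) * fray(m) * fraz(n). A scalar sketch of a single such update with invented numbers (wp is assumed to be double precision here):

 PROGRAM anterp_relax_sketch
!-- Illustration only: one under-relaxed top-hat anterpolation update
    IMPLICIT NONE
    INTEGER, PARAMETER ::  wp = KIND( 1.0D0 )   ! assumed working precision
    REAL(wp) ::  cellsum, fc, fra
    INTEGER  ::  nfc

    fc      = 300.0_wp      ! current coarse-grid value
    cellsum = 1505.0_wp     ! sum over the contributing fine-grid cells
    nfc     = 5             ! number of fine-grid cells in the top-hat stencil
    fra     = 0.25_wp       ! spatial under-relaxation factor frax*fray*fraz

!-- Blend the old value with the fine-grid mean (here: 0.75*300 + 0.25*301)
    fc = ( 1.0_wp - fra ) * fc + fra * cellsum / REAL( nfc, KIND=wp )
    WRITE( *, * )  'anterpolated value = ', fc   ! prints 300.25
 END PROGRAM anterp_relax_sketch
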
     
    31933304 SUBROUTINE pmci_update_new
    31943305
     3306#if defined( __parallel )
    31953307!
    31963308!-- Copy the interpolated/anterpolated boundary values to the _p
     
    32193331
    32203332!
    3221 !-- Find out later if nesting would work without __nopointer.
     3333!-- TO_DO: Find out later if nesting would work without __nopointer.
     3334#endif
    32223335
    32233336 END SUBROUTINE pmci_update_new
     
    32263339
    32273340 SUBROUTINE pmci_set_array_pointer( name, client_id, nz_cl )
     3341
    32283342    IMPLICIT NONE
    32293343
     
    32323346    CHARACTER(LEN=*), INTENT(IN)         ::  name        !:
    32333347   
     3348#if defined( __parallel )
    32343349    REAL(wp), POINTER, DIMENSION(:,:)    ::  p_2d        !:
    32353350    REAL(wp), POINTER, DIMENSION(:,:,:)  ::  p_3d        !:
     
    32383353   
    32393354
    3240 #if defined PMC_ACTIVE
    32413355    NULLIFY( p_3d )
    32423356    NULLIFY( p_2d )
     
    32573371       CALL pmc_s_set_dataarray( client_id, p_2d )
    32583372    ELSE
    3259        IF ( myid == 0 ) WRITE( 0, * )  'PMC set_array_Pointer -> no pointer p_2d or p_3d associated '
    3260        CALL MPI_Abort( MPI_COMM_WORLD, istat, ierr )
     3373!
     3374!--    Give only one message for the root domain
     3375       IF ( myid == 0  .AND.  cpl_id == 1 )  THEN
     3376
     3377          message_string = 'pointer for array "' // TRIM( name ) //            &
     3378                           '" can''t be associated'
     3379          CALL message( 'pmci_set_array_pointer', 'PA0117', 3, 2, 0, 6, 0 )
     3380       ELSE
     3381!
      3382!--       Prevent the other processes from continuing
     3383          CALL MPI_BARRIER( comm2d, ierr )
     3384       ENDIF
    32613385    ENDIF
    32623386
    32633387#endif
    3264 
    32653388 END SUBROUTINE pmci_set_array_pointer
    32663389
     
    32683391
    32693392 SUBROUTINE pmci_create_client_arrays( name, is, ie, js, je, nzc  )
     3393
    32703394    IMPLICIT NONE
     3395
    32713396    INTEGER(iwp), INTENT(IN)              ::  ie      !:
    32723397    INTEGER(iwp), INTENT(IN)              ::  is      !:
     
    32763401    CHARACTER(LEN=*), INTENT(IN)          ::  name    !:
    32773402     
     3403#if defined( __parallel )
    32783404    REAL(wp), POINTER,DIMENSION(:,:)      ::  p_2d    !:
    32793405    REAL(wp), POINTER,DIMENSION(:,:,:)    ::  p_3d    !:
     
    32823408
    32833409
    3284 #if defined PMC_ACTIVE
    32853410    NULLIFY( p_3d )
    32863411    NULLIFY( p_2d )
     
    33173442       CALL pmc_c_set_dataarray( p_2d )
    33183443    ELSE
    3319        IF ( myid == 0 ) WRITE( 0 , * )  'PMC create_client_arrays -> no pointer p_2d or p_3d associated '
    3320        CALL MPI_Abort( MPI_COMM_WORLD, istat, ierr )
     3444!
     3445!--    Give only one message for the first client domain
     3446       IF ( myid == 0  .AND.  cpl_id == 2 )  THEN
     3447
     3448          message_string = 'pointer for array "' // TRIM( name ) //            &
     3449                           '" can''t be associated'
     3450          CALL message( 'pmci_create_client_arrays', 'PA0170', 3, 2, 0, 6, 0 )
     3451       ELSE
     3452!
      3453!--       Prevent the other processes from continuing
     3454          CALL MPI_BARRIER( comm2d, ierr )
     3455       ENDIF
    33213456    ENDIF
     3457
    33223458#endif
    3323 
    33243459 END SUBROUTINE pmci_create_client_arrays
    33253460
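
Both pointer checks above replace the former WRITE/MPI_Abort pair by a single call of PALM's message routine on one process, while the remaining processes are held in a barrier. A reduced sketch of that "report once, others wait" pattern for a single domain (the cpl_id test of the original is omitted, and message is replaced by a plain WRITE plus MPI_ABORT):

 PROGRAM report_once_sketch
!-- Illustration only: one process reports the fatal error, the others wait in
!-- a barrier so that they cannot continue with an unassociated pointer
    USE MPI
    IMPLICIT NONE
    INTEGER ::  ierr, myid
    LOGICAL ::  pointer_ok

    CALL MPI_INIT( ierr )
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, myid, ierr )

    pointer_ok = .FALSE.                    ! pretend that the association failed
    IF ( .NOT. pointer_ok )  THEN
       IF ( myid == 0 )  THEN
!--       In PALM this is CALL message( ..., 3, 2, 0, 6, 0 ), which aborts the run
          WRITE( 0, * )  'pointer for array could not be associated'
          CALL MPI_ABORT( MPI_COMM_WORLD, 1, ierr )
       ELSE
          CALL MPI_BARRIER( MPI_COMM_WORLD, ierr )
       ENDIF
    ENDIF
    CALL MPI_FINALIZE( ierr )
 END PROGRAM report_once_sketch
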
     
    33273462
    33283463 SUBROUTINE pmci_server_initialize
     3464
     3465#if defined( __parallel )
    33293466    IMPLICIT NONE
     3467
    33303468    INTEGER(iwp)   ::  client_id   !:
    33313469    INTEGER(iwp)   ::  m           !:
     
    33383476    ENDDO
    33393477
     3478#endif
    33403479 END SUBROUTINE pmci_server_initialize
    33413480
     
    33443483 SUBROUTINE pmci_client_initialize
    33453484
     3485#if defined( __parallel )
    33463486    IMPLICIT NONE
     3487
    33473488    INTEGER(iwp)   ::  i          !:
    33483489    INTEGER(iwp)   ::  icl        !:
     
    34283569      INTEGER(iwp) ::  i      !:
    34293570      INTEGER(iwp) ::  ib     !:
     3571      INTEGER(iwp) ::  ie     !:
    34303572      INTEGER(iwp) ::  j      !:
    34313573      INTEGER(iwp) ::  jb     !:
     3574      INTEGER(iwp) ::  je     !:
    34323575      INTEGER(iwp) ::  k      !:
    34333576      INTEGER(iwp) ::  k1     !:
     
    34483591     
    34493592      ib = nxl
    3450       jb = nys     
     3593      ie = nxr
     3594      jb = nys   
     3595      je = nyn
    34513596      IF ( nest_bound_l )  THEN
     3597         ib = nxl - 1
    34523598         IF ( var == 'u' )  THEN   !  For u, nxl is a ghost node, but not for the other variables.
    3453             ib = nxl + 1
     3599            ib = nxl
    34543600         ENDIF
    34553601      ENDIF
    34563602      IF ( nest_bound_s )  THEN
     3603         jb = nys - 1
    34573604         IF ( var == 'v' )  THEN   !  For v, nys is a ghost node, but not for the other variables.
    3458             jb = nys + 1
     3605            jb = nys
    34593606         ENDIF
    34603607      ENDIF
     3608      IF ( nest_bound_r )  THEN
     3609         ie = nxr + 1
     3610      ENDIF
     3611      IF ( nest_bound_n )  THEN
     3612         je = nyn + 1
     3613      ENDIF
    34613614
    34623615!
    34633616!--   Trilinear interpolation.
    3464       DO  i = ib, nxr
    3465          DO  j = jb, nyn
    3466             DO  k = kb(j,i), nzt
     3617      DO  i = ib, ie
     3618         DO  j = jb, je
     3619            DO  k = kb(j,i), nzt + 1
    34673620               l = ic(i)
    34683621               m = jc(j)
     
    35173670   END SUBROUTINE pmci_interp_tril_all
    35183671
     3672#endif
    35193673 END SUBROUTINE pmci_client_initialize
    35203674
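
The new loop bounds in the interpolation above include one ghost row/column at each lateral nest boundary, while u at a left boundary and v at a south boundary keep their staggered first prognostic index. A self-contained sketch of that bound logic with invented subdomain limits (the variable names mimic PALM's, but this is not PALM code):

 PROGRAM nest_bounds_sketch
!-- Illustration only: extend the interpolation index range at nest boundaries
    IMPLICIT NONE
    LOGICAL ::  nest_bound_l = .TRUE.,  nest_bound_n = .TRUE.
    LOGICAL ::  nest_bound_r = .FALSE., nest_bound_s = .FALSE.
    INTEGER ::  nxl = 0, nxr = 31, nys = 0, nyn = 31    ! invented subdomain bounds
    INTEGER ::  ib, ie, jb, je
    CHARACTER(LEN=1) ::  var = 'u'

    ib = nxl;  ie = nxr;  jb = nys;  je = nyn
    IF ( nest_bound_l )  THEN
       ib = nxl - 1                    ! include the ghost column at the left edge
       IF ( var == 'u' )  ib = nxl     ! for u, nxl itself is the boundary node
    ENDIF
    IF ( nest_bound_s )  THEN
       jb = nys - 1                    ! include the ghost row at the south edge
       IF ( var == 'v' )  jb = nys     ! for v, nys itself is the boundary node
    ENDIF
    IF ( nest_bound_r )  ie = nxr + 1
    IF ( nest_bound_n )  je = nyn + 1

    WRITE( *, * )  'interpolation bounds: ', ib, ie, jb, je
 END PROGRAM nest_bounds_sketch
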
     
    35233677 SUBROUTINE pmci_ensure_nest_mass_conservation
    35243678
     3679#if defined( __parallel )
    35253680!
    35263681!-- Adjust the volume-flow rate through the top boundary
     
    36353790    ENDDO
    36363791
     3792#endif
    36373793 END SUBROUTINE pmci_ensure_nest_mass_conservation
    36383794
  • palm/trunk/SOURCE/pmc_mpi_wrapper.f90

    r1763 r1764  
    2020! Current revisions:
    2121! ------------------
    22 !
     22! cpp-statement added (nesting can only be used in parallel mode),
     23! kind-parameters adjusted to PALM-kinds
    2324!
    2425! Former revisions:
     
    3536!------------------------------------------------------------------------------!
    3637
     38#if defined( __parallel )
    3739   use, intrinsic :: iso_c_binding
    3840
    39    USE  mpi
    40    USE  kinds,         ONLY: wp
     41#if defined( __lc )
     42    USE MPI
     43#else
     44    INCLUDE "mpif.h"
     45#endif
     46   USE  kinds
    4147   USE  PMC_handle_communicator, ONLY: m_to_server_comm, m_to_client_comm, m_model_comm, m_model_rank
    4248   IMPLICIT none
     
    4450   SAVE
    4551
    46    INTEGER, PARAMETER :: dp = wp
     52!-- TO_DO: what is the meaning of this? Could variables declared in this module
     53!--        also have single precision?
     54!   INTEGER, PARAMETER :: dp = wp
    4755
    4856
     
    149157   SUBROUTINE  PMC_Send_to_Server_real_r1 (buf, n, Server_rank, tag, ierr)
    150158      IMPLICIT     none
      159!--   TO_DO: does buf always have to be of dp-kind, or can wp be used here?
      160!--          This affects all respective declarations in this file.
    151161      REAL(kind=dp), DIMENSION(:), INTENT(IN)   :: buf
    152162      INTEGER, INTENT(IN)                       :: n
     
    485495      IMPLICIT     none
    486496      REAL(kind=wp),DIMENSION(:),POINTER,INTENT(INOUT) :: array
    487       INTEGER(kind=8),INTENT(IN)                       :: idim1
     497      INTEGER(idp),INTENT(IN)                          :: idim1
    488498      Type(c_ptr),INTENT(OUT),optional                 :: base_ptr
    489499
     
    516526    END FUNCTION PMC_TIME
    517527
     528#endif
    518529 END MODULE pmc_mpi_wrapper
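
The wrapper module is now compiled only for parallel runs and chooses between the MPI Fortran module and the mpif.h header via preprocessor switches. A minimal sketch of one possible arrangement of these guards in an otherwise empty module (only the macro names __parallel and __lc are taken from the diff; everything else is illustrative):

 MODULE mpi_guard_sketch
!-- Illustration only: __parallel removes the MPI dependency for serial builds,
!-- __lc selects the MPI module instead of the Fortran 77 style header
#if defined( __parallel )
#if defined( __lc )
    USE MPI
#endif
    IMPLICIT NONE
#if ! defined( __lc )
    INCLUDE "mpif.h"
#endif
#else
    IMPLICIT NONE
#endif
 END MODULE mpi_guard_sketch
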
  • palm/trunk/SOURCE/pmc_server.f90

    r1763 r1764  
    2020! Current revisions:
    2121! ------------------
    22 !
     22! cpp-statement added (nesting can only be used in parallel mode)
    2323!
    2424! Former revisions:
     
    3535!------------------------------------------------------------------------------!
    3636
     37#if defined( __parallel )
    3738   use, intrinsic :: iso_c_binding
    3839
    39    USE  mpi
    40    USE  kinds,                     ONLY: wp, iwp
     40#if defined( __lc )
     41    USE MPI
     42#else
     43    INCLUDE "mpif.h"
     44#endif
     45   USE  kinds
    4146   USE  PMC_general,               ONLY: ClientDef, PMC_MAX_MODELL,PMC_sort, DA_NameDef, DA_Desclen, DA_Namelen,       &
    4247                                         PMC_G_SetName, PMC_G_GetName, PeDef, ArrayDef
     
    6065   PUBLIC PMC_Server_for_Client
    6166
    62    INTEGER, PARAMETER :: dp = wp
     67!-- TO_DO: what is the meaning of this? Could variables declared in this module
     68!--        also have single precision?
     69!   INTEGER, PARAMETER :: dp = wp
    6370
    6471   ! INTERFACE section
     
    225232        IMPLICIT none
    226233        INTEGER,INTENT(IN)                         :: ClientId
      234!--   TO_DO: does array always have to be of dp-kind, or can wp be used here?
      235!--          This affects all respective declarations in this file.
    227236        REAL(kind=dp),INTENT(IN),DIMENSION(:,:)    :: array
    228237        !-- local variables
     
    282291      INTEGER                                 :: arlen, myIndex, tag
    283292      INTEGER                                 :: rCount                    ! count MPI requests
    284       INTEGER(kind=8)                         :: bufsize                   ! Size of MPI data Window
     293      INTEGER(idp)                            :: bufsize                   ! Size of MPI data Window
    285294      TYPE(PeDef),POINTER                     :: aPE
    286295      TYPE(ArrayDef),POINTER                  :: ar
     
    347356         do while (PMC_S_GetNextArray ( ClientId, myName,i))
    348357            ar  => aPE%Arrays
      358!--         TO_DO: reconsider the address calculation?
     349359            ar%SendBuf = c_loc(base_array(ar%BufIndex))                         !kk reconsider the address calculation
    350360            if(ar%BufIndex+ar%BufSize > bufsize) then
     361!--            TO_DO: can this error really happen, and what can be the reason?
    351362               write(0,'(a,i4,4i7,1x,a)') 'Buffer too small ',i,ar%BufIndex,ar%BufSize,ar%BufIndex+ar%BufSize,bufsize,trim(myName)
    352363               CALL MPI_Abort (MPI_COMM_WORLD, istat, ierr)
     
    402413               end do
    403414            else
     415!--            TO_DO: can this error really happen, and what can be the reason?
    404416               write(0,*) "Illegal Order of Dimension ",ar%dim_order
    405417               CALL MPI_Abort (MPI_COMM_WORLD, istat, ierr);
     
    458470               end do
    459471            else
     472!--            TO_DO: can this error really happen, and what can be the reason?
    460473               write(0,*) "Illegal Order of Dimension ",ar%dim_order
    461474               CALL MPI_Abort (MPI_COMM_WORLD, istat, ierr);
     
    624637    END SUBROUTINE Set_PE_index_list
    625638
     639#endif
    626640END MODULE pmc_server
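
The buffer-size variable for the MPI data window now uses the PALM kind parameter idp instead of a literal kind=8. A small stand-alone sketch of why a wide integer kind matters when such a size feeds an MPI one-sided window (the idp definition and the window call below are assumptions for illustration, not copied from pmc_server):

 PROGRAM window_size_sketch
!-- Illustration only: a 64-bit buffer size converted to MPI_ADDRESS_KIND bytes
    USE MPI
    IMPLICIT NONE
    INTEGER, PARAMETER ::  idp = SELECTED_INT_KIND( 18 )   ! assumed 64-bit kind
    INTEGER(idp)                      ::  bufsize
    INTEGER(KIND=MPI_ADDRESS_KIND)    ::  winsize
    INTEGER                           ::  ierr, win
    REAL(KIND=8), ALLOCATABLE, TARGET ::  base_array(:)

    CALL MPI_INIT( ierr )
    bufsize = 1000_idp
    ALLOCATE( base_array(bufsize) )
!-- The window size must be given in bytes and with kind MPI_ADDRESS_KIND
    winsize = INT( bufsize, MPI_ADDRESS_KIND ) * 8_MPI_ADDRESS_KIND
    CALL MPI_WIN_CREATE( base_array, winsize, 8, MPI_INFO_NULL,                 &
                         MPI_COMM_WORLD, win, ierr )
    CALL MPI_WIN_FREE( win, ierr )
    DEALLOCATE( base_array )
    CALL MPI_FINALIZE( ierr )
 END PROGRAM window_size_sketch
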
  • palm/trunk/SOURCE/time_integration.f90

    r1763 r1764  
    1919! Current revisions:
    2020! ------------------
    21 !
     21! PMC_ACTIVE flags removed,
     22! nest synchronization after first call of timestep
    2223!
    2324! Former revisions:
     
    238239    USE pegrid
    239240
    240 #if defined( PMC_ACTIVE )
    241241    USE pmc_interface,                                                         &
    242         ONLY:  client_to_server, nesting_mode,                                 &
     242        ONLY:  client_to_server, nested_run, nesting_mode,                     &
    243243               pmci_ensure_nest_mass_conservation, pmci_client_datatrans,      &
    244244               pmci_client_initialize, pmci_client_synchronize,                &
    245245               pmci_server_datatrans, pmci_server_initialize,                  &
    246246               pmci_server_synchronize, pmci_update_new, server_to_client
    247 #endif
    248247
    249248    USE production_e_mod,                                                      &
     
    282281    IF ( simulated_time == 0.0_wp )  CALL timestep
    283282
     283!
      284!-- Synchronize the timestep in case of a nested run.
      285!-- The server side must be called first.
     286    IF ( nested_run )  THEN
     287       CALL pmci_server_synchronize
     288       CALL pmci_client_synchronize
     289    ENDIF
     290
    284291    CALL run_control
    285 
    286292
    287293!
     
    302308    ENDIF
    303309
    304 #if defined( PMC_ACTIVE )
    305 !
    306 !-- TO_DO: try to give more meaningful comments here
    307 !-- Domain nesting: From server to client commmunication
    308 !-- ( direction=SERVER_TO_CLIENT )
    309 !-- Nest initial conditions
    310 !
    311 !-- Send initial condition data from server to client
    312     CALL pmci_server_initialize
    313 !
    314 !-- Receive and interpolate initial data on client
    315     CALL pmci_client_initialize
    316 !
    317 !-- TO_DO, maybe removed
    318 !-- Obs. Nesting may be unnecessary at this point.
    319 !
    320 !-- Nest boundary conditions
    321     CALL pmci_server_datatrans( server_to_client )
    322     CALL pmci_client_datatrans( server_to_client )
    323 
    324     IF ( nesting_mode == 'two-way' )  THEN
    325        CALL pmci_server_datatrans( client_to_server )
    326        CALL pmci_client_datatrans( client_to_server )
    327 !
    328 !--    Exchange_horiz is needed for all server-domains after the anterpolation
    329        CALL exchange_horiz( u, nbgp )
    330        CALL exchange_horiz( v, nbgp )
    331        CALL exchange_horiz( w, nbgp )
    332        CALL exchange_horiz( pt, nbgp )
    333        IF ( .NOT. constant_diffusion )  CALL exchange_horiz( e, nbgp )
    334        intermediate_timestep_count = 0
    335        CALL pres
     310    IF ( nested_run )  THEN
     311!
     312!--    TO_DO: try to give more meaningful comments here
      313!--    Domain nesting: From server to client communication
     314!--    ( direction=SERVER_TO_CLIENT )
     315!--    Nest initial conditions
     316!
     317!--    Send initial condition data from server to client
     318       CALL pmci_server_initialize
     319!
     320!--    Receive and interpolate initial data on client
     321       CALL pmci_client_initialize
     322!
     323!--    TO_DO, maybe removed
     324!--    Obs. Nesting may be unnecessary at this point.
     325!
     326!--    Nest boundary conditions
     327       CALL pmci_server_datatrans( server_to_client )
     328       CALL pmci_client_datatrans( server_to_client )
     329
     330       IF ( nesting_mode == 'two-way' )  THEN
     331          CALL pmci_server_datatrans( client_to_server )
     332          CALL pmci_client_datatrans( client_to_server )
     333!
     334!--       Exchange_horiz is needed for all server-domains after the anterpolation
     335          CALL exchange_horiz( u, nbgp )
     336          CALL exchange_horiz( v, nbgp )
     337          CALL exchange_horiz( w, nbgp )
     338          CALL exchange_horiz( pt, nbgp )
     339          IF ( .NOT. constant_diffusion )  CALL exchange_horiz( e, nbgp )
     340          intermediate_timestep_count = 0
     341          CALL pres
     342       ENDIF
     343!
     344!--    Correct the w top-BC in nest domains to ensure mass conservation.
     345!--    Copy the interpolated/anterpolated boundary values to the _p
     346!--    arrays, too, to make sure the interpolated/anterpolated boundary
     347!--    values are carried over from one RK inner step to another.
     348!--    These actions must not be done for the root domain.
     349       IF ( nest_domain )  THEN
     350          CALL pmci_ensure_nest_mass_conservation
     351          CALL pmci_update_new
     352       ENDIF
     353
    336354    ENDIF
    337 !
    338 !-- Correct the w top-BC in nest domains to ensure mass conservation.
    339 !-- Copy the interpolated/anterpolated boundary values to the _p
    340 !-- arrays, too, to make sure the interpolated/anterpolated boundary
    341 !-- values are carried over from one RK inner step to another.
    342 !-- These actions must not be done for the root domain.
    343     IF ( nest_domain )  THEN
    344        CALL pmci_ensure_nest_mass_conservation
    345        CALL pmci_update_new
    346     ENDIF
    347 #endif
    348355
    349356#if defined( __dvrp_graphics )
     
    366373          CALL timestep
    367374
    368 #if defined( PMC_ACTIVE )
    369 !
    370 !--       TO_DO: try to give more detailed and meaningful comments here
    371 !--       Server side must be called first
    372           CALL pmci_server_synchronize
    373           CALL pmci_client_synchronize
    374 #endif
     375          IF ( nested_run )  THEN
     376!
     377!--          TO_DO: try to give more detailed and meaningful comments here
     378!--          Server side must be called first
     379             CALL pmci_server_synchronize
     380             CALL pmci_client_synchronize
     381          ENDIF
    375382       ENDIF
    376383
     
    694701          CALL swap_timelevel
    695702
    696 #if defined( PMC_ACTIVE )
    697 !
    698 !--       TO_DO: try to give more meaningful comments here
    699 !--       Domain nesting
    700 !--       Note that the nesting operations are omitted intentionally on the
    701 !--       first two RK-substeps.
    702           CALL cpu_log( log_point(60), 'nesting', 'start' )
    703 !
    704 !--       From server to client commmunication ( direction=SERVER_TO_CLIENT )
    705           CALL pmci_server_datatrans( server_to_client )
    706           CALL pmci_client_datatrans( server_to_client )
    707 
    708           IF ( nesting_mode == 'two-way' )  THEN
    709 !
    710 !--          From client to server commmunication ( direction=CLIENT_TO_SERVER )
    711              CALL pmci_server_datatrans( client_to_server )
    712              CALL pmci_client_datatrans( client_to_server )
    713 !
    714 !--          Exchange_horiz is needed for all server-domains after the
    715 !--          anterpolation
    716              CALL exchange_horiz( u, nbgp )
    717              CALL exchange_horiz( v, nbgp )
    718              CALL exchange_horiz( w, nbgp )
    719              CALL exchange_horiz( pt, nbgp )
    720              IF ( humidity  .OR.  passive_scalar )  THEN
    721                 CALL exchange_horiz( q, nbgp )
    722              ENDIF
    723              IF ( .NOT. constant_diffusion )  CALL exchange_horiz( e, nbgp )
    724           ENDIF
    725 !
    726 !--       Correct the w top-BC in nest domains to ensure mass conservation.
    727 !--       This action must never be done for the root domain.
    728           IF ( nest_domain )  THEN
    729              CALL pmci_ensure_nest_mass_conservation
    730 !
    731 !--          pmc_update_new is not necessary if nesting is made at each substep.
    732              CALL pmci_update_new
    733           ENDIF
    734 
    735           CALL cpu_log( log_point(60), 'nesting', 'stop' )
    736 #endif
     703          IF ( nested_run )  THEN
     704!
     705!--          TO_DO: try to give more meaningful comments here
     706!--          Domain nesting
     707!--          Note that the nesting operations are omitted intentionally on the
     708!--          first two RK-substeps.
     709             CALL cpu_log( log_point(60), 'nesting', 'start' )
     710!
      711!--          From server to client communication ( direction=SERVER_TO_CLIENT )
     712             CALL pmci_server_datatrans( server_to_client )
     713             CALL pmci_client_datatrans( server_to_client )
     714
     715             IF ( nesting_mode == 'two-way' )  THEN
     716!
      717!--             From client to server communication ( direction=CLIENT_TO_SERVER )
     718                CALL pmci_server_datatrans( client_to_server )
     719                CALL pmci_client_datatrans( client_to_server )
     720!
     721!--             Exchange_horiz is needed for all server-domains after the
     722!--             anterpolation
     723                CALL exchange_horiz( u, nbgp )
     724                CALL exchange_horiz( v, nbgp )
     725                CALL exchange_horiz( w, nbgp )
     726                CALL exchange_horiz( pt, nbgp )
     727                IF ( humidity  .OR.  passive_scalar )  THEN
     728                   CALL exchange_horiz( q, nbgp )
     729                ENDIF
     730                IF ( .NOT. constant_diffusion )  CALL exchange_horiz( e, nbgp )
     731             ENDIF
     732!
     733!--          Correct the w top-BC in nest domains to ensure mass conservation.
     734!--          This action must never be done for the root domain.
     735             IF ( nest_domain )  THEN
     736                CALL pmci_ensure_nest_mass_conservation
     737!
     738!--             pmc_update_new is not necessary if nesting is made at each
     739!--             substep
     740                CALL pmci_update_new
     741             ENDIF
     742
     743             CALL cpu_log( log_point(60), 'nesting', 'stop' )
     744
     745          ENDIF
    737746
    738747!
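
Throughout time_integration.f90 the compile-time PMC_ACTIVE blocks are replaced by runtime tests of the logical nested_run provided by pmc_interface, so that one executable serves nested and non-nested runs alike. A schematic sketch of that switch (the flag value and the two subroutines are invented stand-ins, not PALM routines):

 PROGRAM nesting_switch_sketch
!-- Illustration only: runtime flag instead of a compile-time cpp switch
    IMPLICIT NONE
    LOGICAL ::  nested_run

    nested_run = .TRUE.        ! in PALM this is set during the coupler setup

    IF ( nested_run )  THEN
!--    The server side must be called first, then the client side
       CALL server_side_sync
       CALL client_side_sync
    ENDIF

 CONTAINS

    SUBROUTINE server_side_sync
       WRITE( *, * )  'synchronize time step on the server side'
    END SUBROUTINE server_side_sync

    SUBROUTINE client_side_sync
       WRITE( *, * )  'synchronize time step on the client side'
    END SUBROUTINE client_side_sync

 END PROGRAM nesting_switch_sketch
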
     
    11481157!
    11491158!--    Output elapsed simulated time in form of a progress bar on stdout
    1150 !--    TO_DO: should be done by root domain later
    1151 #if ! defined( PMC_ACTIVE )
    11521159       IF ( myid == 0 )  CALL output_progress_bar
    1153 #endif
    11541160
    11551161       CALL cpu_log( log_point_s(10), 'timesteps', 'stop' )
     
    11581164    ENDDO   ! time loop
    11591165
    1160 !-- TO_DO: should be done by root domain later
    1161 #if ! defined( PMC_ACTIVE )
    11621166    IF ( myid == 0 )  CALL finish_progress_bar
    1163 #endif
    11641167
    11651168#if defined( __dvrp_graphics )