Changeset 4444 for palm/trunk


Timestamp: Mar 5, 2020 3:59:50 PM
Author:    raasch
Message:   bugfix: cpp-directives for serial mode added
Location:  palm/trunk/SOURCE
Files:     26 edited
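
The common pattern across all 26 files is to split USE statements and declarations so that symbols needed only by MPI code paths are referenced exclusively inside "#if defined( __parallel )" guards, while everything required by a serial build stays unguarded. The following stand-alone sketch (illustrative only, not PALM code; the program and variable names are hypothetical) shows the guard idiom and how it compiles in both modes:

! guard_example.f90 -- minimal sketch of the serial/parallel guard idiom
! serial build:    gfortran -cpp guard_example.f90
! parallel build:  mpif90 -cpp -D__parallel guard_example.f90
 PROGRAM guard_example

#if defined( __parallel )
    USE MPI                            ! referenced only when compiled with -D__parallel
#endif

    IMPLICIT NONE

    INTEGER ::  nprocs = 1             ! serial default: exactly one PE
#if defined( __parallel )
    INTEGER ::  ierr                   ! MPI error code, only needed in parallel mode
#endif

#if defined( __parallel )
    CALL MPI_INIT( ierr )
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, nprocs, ierr )
#endif

    PRINT *, 'running on', nprocs, 'PE(s)'

#if defined( __parallel )
    CALL MPI_FINALIZE( ierr )
#endif

 END PROGRAM guard_example

In the changeset itself the same idiom is applied to ONLY lists (serial-safe names stay in the first USE of a module, MPI-only names move into a guarded second USE of the same module) and to local declarations such as sender, ind(:) or the MPI_GATHERV displacement arrays.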

  • palm/trunk/SOURCE/check_open.f90

    r4400 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4400 2020-02-10 20:32:41Z suehring
    2730! Remove binary output for virtual measurements
    2831!
     
    6669 
    6770
     71    USE control_parameters,                                                                        &
     72        ONLY:  coupling_char, data_output_2d_on_each_pe, max_masks, message_string, openfile,      &
     73               run_description_header
     74
     75#if defined( __parallel )
    6876    USE control_parameters,                                                    &
    69         ONLY:  coupling_char, data_output_2d_on_each_pe,                       &
    70                max_masks, message_string, nz_do3d, openfile,              &
    71                run_description_header
     77        ONLY:  nz_do3d
     78#endif
    7279
    7380    USE indices,                                                               &
  • palm/trunk/SOURCE/check_parameters.f90

    r4392 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4392 2020-01-31 16:14:57Z pavelkrc
    2730! Some error numbers revised to prevent double usage
    2831!
     
    169172    USE transpose_indices
    170173
     174#if defined( __parallel )
    171175    USE vertical_nesting_mod,                                                  &
    172176        ONLY:  vnested,                                                        &
    173177               vnest_check_parameters
     178#endif
    174179
    175180
     
    196201    LOGICAL     ::  found                            !< flag, true if output variable is already marked for averaging
    197202
     203    REAL(wp)    ::  gradient                         !< local gradient
     204#if defined( __parallel )
    198205    REAL(wp)    ::  dt_spinup_max                    !< maximum spinup timestep in nested domains
    199     REAL(wp)    ::  gradient                         !< local gradient
    200206    REAL(wp)    ::  remote = 0.0_wp                  !< MPI id of remote processor
    201207    REAL(wp)    ::  spinup_time_max                  !< maximum spinup time in nested domains
    202208    REAL(wp)    ::  time_to_be_simulated_from_reference_point  !< time to be simulated from reference point
     209#endif
    203210
    204211
     
    31753182    ENDIF
    31763183
     3184#if defined( __parallel )
    31773185!
    31783186!-- Vertical nesting: check fine and coarse grid compatibility for data exchange
    31793187    IF ( vnested )  CALL vnest_check_parameters
     3188#endif
    31803189
    31813190!
  • palm/trunk/SOURCE/data_output_2d.f90

    r4442 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4442 2020-03-04 19:21:13Z suehring
    2730! Change order of dimension in surface array %frac to allow for better
    2831! vectorization.
     
    152155    INTEGER(iwp) ::  flag_nr   !< number of masking flag
    153156    INTEGER(iwp) ::  i         !< loop index
    154     INTEGER(iwp) ::  iis       !< vertical index of a xy slice in array 'local_2d_sections'
    155157    INTEGER(iwp) ::  is        !< slice index
    156158    INTEGER(iwp) ::  ivar      !< variable index
     
    166168    INTEGER(iwp) ::  nzt_do    !< upper limit of the data field (usually nzt+1)
    167169    INTEGER(iwp) ::  s_ind     !< index of slice types (xy=1, xz=2, yz=3)
     170#if defined( __parallel )
     171    INTEGER(iwp) ::  iis       !< vertical index of a xy slice in array 'local_2d_sections'
    168172    INTEGER(iwp) ::  sender    !< PE id of sending PE
    169173    INTEGER(iwp) ::  ind(4)    !< index limits (lower/upper bounds) of array 'local_2d'
     174#endif
    170175
    171176    LOGICAL ::  found          !< true if output variable was found
  • palm/trunk/SOURCE/data_output_3d.f90

    r4360 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4360 2020-01-07 11:25:50Z suehring
    2730! Introduction of wall_flags_total_0, which currently sets bits based on static
    2831! topography information used in wall_flags_static_0
     
    9093
    9194    USE control_parameters,                                                    &
    92         ONLY:  debug_output_timestep,                                          &
    93                do3d, do3d_no, do3d_time_count, io_blocks, io_group,            &
     95        ONLY:  debug_output_timestep, do3d, do3d_no, do3d_time_count,          &
    9496               land_surface, message_string, ntdim_3d, nz_do3d, plant_canopy,  &
    9597               psolver, time_since_reference_point, urban_surface,             &
    9698               varnamelength
    9799
     100#if defined( __parallel )
     101    USE control_parameters,                                                    &
     102        ONLY:  io_blocks, io_group
     103#endif
     104
    98105    USE cpulog,                                                                &
    99106        ONLY:  log_point, cpu_log
    100107
    101 #if defined( __parallel )
    102108    USE indices,                                                               &
    103109        ONLY:  nbgp, nxl, nxlg, nxr, nxrg, nyn, nyng, nys, nysg, nzb, nzt,     &
    104110               wall_flags_total_0
    105 #else
     111
     112#if ! defined( __parallel )
    106113    USE indices,                                                               &
    107         ONLY:  nbgp, nx, nxl, nxlg, nxr, nxrg, ny, nyn, nyng, nys, nysg, nzb,  &
    108                nzt, wall_flags_total_0
     114        ONLY:  nx, ny
    109115#endif
    110116
  • palm/trunk/SOURCE/data_output_mask.f90

    r4377 r4444  
    2020! Current revisions:
    2121! -----------------
    22 !
    23 !
     22! 
     23! 
    2424! Former revisions:
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4377 2020-01-15 11:10:51Z gronemeier
    2730! bugfix: set fill value for output according to wall_flags_total_0 for
    2831!         non-terrain following output
     
    9396    USE control_parameters,                                                    &
    9497        ONLY:  air_chemistry, domask, domask_no, domask_time_count, mask_i,    &
    95                mask_j, mask_k, mask_size, mask_size_l, mask_start_l,           &
    96                mask_surface,                                                   &
     98               mask_j, mask_k, mask_size_l, mask_surface,                                                   &
    9799               max_masks, message_string, nz_do3d, salsa,                      &
    98100               time_since_reference_point
     101
     102#if defined( __parallel )
     103    USE control_parameters,                                                    &
     104        ONLY:  mask_size, mask_start_l
     105#endif
    99106
    100107    USE diagnostic_output_quantities_mod,                                      &
     
    134141
    135142    INTEGER(iwp) ::  av                      !< flag for (non-)average output
    136     INTEGER(iwp) ::  ngp                     !< number of grid points of an output slice
    137143    INTEGER(iwp) ::  flag_nr                 !< number of masking flag
    138144    INTEGER(iwp) ::  i                       !< loop index
     
    146152    INTEGER(iwp) ::  n                       !< loop index
    147153    INTEGER(iwp) ::  netcdf_data_format_save !< value of netcdf_data_format
     154    INTEGER(iwp) ::  ktt                     !< k index of highest terrain surface
     155#if defined( __parallel )
     156    INTEGER(iwp) ::  ngp                     !< number of grid points of an output slice
    148157    INTEGER(iwp) ::  sender                  !< PE id of sending PE
    149     INTEGER(iwp) ::  ktt                     !< k index of highest terrain surface
    150158    INTEGER(iwp) ::  ind(6)                  !< index limits (lower/upper bounds) of array 'local_2d'
     159#endif
    151160
    152161    LOGICAL ::  found      !< true if output variable was found
  • palm/trunk/SOURCE/exchange_horiz_2d.f90

    r4360 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4360 2020-01-07 11:25:50Z suehring
    2730! Corrected "Former revisions" section
    2831!
     
    332335        ONLY:  bc_dirichlet_l, bc_dirichlet_n, bc_dirichlet_r, bc_dirichlet_s, &
    333336               bc_radiation_l, bc_radiation_n, bc_radiation_r, bc_radiation_s, &
    334                bc_radiation_l, bc_radiation_n, bc_radiation_r, bc_radiation_s, &
    335                grid_level
    336        
     337               bc_radiation_l, bc_radiation_n, bc_radiation_r, bc_radiation_s
     338
     339#if defined( __parallel )
     340    USE control_parameters,                                                    &
     341        ONLY:  grid_level
     342#endif
     343
    337344    USE cpulog,                                                                &
    338345        ONLY:  cpu_log, log_point_s
  • palm/trunk/SOURCE/flow_statistics.f90

    r4442 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4442 2020-03-04 19:21:13Z suehring
    2730! Change order of dimension in surface array %frac to allow for better
    2831! vectorization.
     
    101104       
    102105    USE indices,                                                               &
    103         ONLY:   ngp_2dh, ngp_2dh_s_inner, ngp_3d, ngp_3d_inner, ngp_sums,      &
    104                 ngp_sums_ls, nxl, nxr, nyn, nys, nzb, nzt, topo_min_level,     &
    105                 wall_flags_total_0
     106        ONLY:   ngp_2dh, ngp_2dh_s_inner, ngp_3d, ngp_3d_inner, nxl, nxr, nyn, &
     107                nys, nzb, nzt, topo_min_level, wall_flags_total_0
     108
     109#if defined( __parallel )
     110    USE indices,                                                               &
     111        ONLY:  ngp_sums, ngp_sums_ls
     112#endif
    106113       
    107114    USE kinds
  • palm/trunk/SOURCE/header.f90

    r4360 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4360 2020-01-07 11:25:50Z suehring
    2730! Bugfix, character length too short, caused crash on NEC.
    2831!
     
    123126    USE pmc_handle_communicator,                                               &
    124127        ONLY:  pmc_get_model_info
    125 #endif
    126128
    127129    USE pmc_interface,                                                         &
    128130        ONLY:  nested_run, nesting_datatransfer_mode, nesting_mode
     131#endif
    129132
    130133    USE surface_mod,                                                           &
     
    148151    CHARACTER (LEN=26) ::  ver_rev             !< string for run identification
    149152
     153#if defined( __parallel )
    150154    CHARACTER (LEN=32) ::  cpl_name            !< name of child domain (nesting mode only)
     155#endif
    151156   
    152157    CHARACTER (LEN=40) ::  output_format       !< netcdf format
     
    185190    INTEGER(iwp) ::  ch             !< canyon depth in generic street-canyon setup
    186191    INTEGER(iwp) ::  count          !< number of masked output locations
    187     INTEGER(iwp) ::  cpl_parent_id  !< parent ID for the respective child model
    188192    INTEGER(iwp) ::  cwx            !< canyon width along x in generic street-canyon setup
    189193    INTEGER(iwp) ::  cwy            !< canyon width along y in generic street-canyon setup
     
    198202    INTEGER(iwp) ::  ll             !< substring length
    199203    INTEGER(iwp) ::  mid            !< masked output running index
     204#if defined( __parallel )
     205    INTEGER(iwp) ::  cpl_parent_id  !< parent ID for the respective child model
    200206    INTEGER(iwp) ::  my_cpl_id      !< run id in a nested model setup
    201207    INTEGER(iwp) ::  n              !< running index over number of couplers in a nested model setup
    202208    INTEGER(iwp) ::  ncpl           !< number of coupler in a nested model setup
    203209    INTEGER(iwp) ::  npe_total      !< number of total PEs in a coupler (parent + child)
     210#endif
    204211   
    205212
    206213    REAL(wp) ::  cpuseconds_per_simulated_second  !< CPU time (in s) per simulated second
     214#if defined( __parallel )
    207215    REAL(wp) ::  lower_left_coord_x               !< x-coordinate of nest domain
    208216    REAL(wp) ::  lower_left_coord_y               !< y-coordinate of nest domain
     217#endif
    209218
    210219!
     
    246255       CALL message( 'header', 'PA0191', 0, 0, 0, 6, 0 )
    247256    ENDIF
     257#if defined( __parallel )
    248258    IF ( nested_run )  run_classification = 'nested ' // run_classification(1:63)
     259#endif
    249260    IF ( ocean_mode )  THEN
    250261       run_classification = 'ocean - ' // run_classification(1:61)
     
    300311#endif
    301312
     313#if defined( __parallel )
    302314!
    303315!-- Nesting informations
    304316    IF ( nested_run )  THEN
    305317
    306 #if defined( __parallel )
    307318       WRITE ( io, 600 )  TRIM( nesting_mode ),                                &
    308319                          TRIM( nesting_datatransfer_mode )
     
    324335                             TRIM( cpl_name )
    325336       ENDDO
     337
     338    ENDIF
    326339#endif
    327340
    328     ENDIF
    329341    WRITE ( io, 99 )
    330342
     
    19191931            ' Time:                 ',A8,6X,'Run-No.:   ',I2.2/     &
    19201932            ' Run on host:        ',A10,6X,'En-No.:    ',I2.2)
     1933#if defined( __parallel )
    19211934600 FORMAT (/' Nesting informations:'/ &
    19221935            ' --------------------'/ &
     
    19261939            ' (*=me)     id    of PEs      x (m)     y (m)' )
    19271940601 FORMAT (2X,A1,1X,I2.2,6X,I2.2,5X,I5,5X,F8.2,2X,F8.2,5X,A)
     1941#endif
    19281942
    19291943 END SUBROUTINE header
  • palm/trunk/SOURCE/init_coupling.f90

    r4360 r4444  
    2525! ------------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4360 2020-01-07 11:25:50Z suehring
    2730! Corrected "Former revisions" section
    2831!
     
    5457!
    5558!-- Local variables
     59#if defined( __parallel )
    5660    INTEGER(iwp) ::  i            !<
    5761    INTEGER(iwp) ::  inter_color  !<
     62#endif
    5863   
    5964    INTEGER(iwp), DIMENSION(:) ::  bc_data(0:3) = 0  !<
  • palm/trunk/SOURCE/init_grid.f90

    r4414 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4414 2020-02-19 20:16:04Z suehring
    2730! - Remove deprecated topography arrays nzb_s_inner, nzb_u_inner, etc.
    2831! - Move initialization of boundary conditions and multigrid into an extra
     
    172175    USE pegrid
    173176
     177#if defined( __parallel )
    174178    USE vertical_nesting_mod,                                                  &
    175179        ONLY:  vnested, vnest_init_grid
     180#endif
    176181
    177182    IMPLICIT NONE
     
    751756       ENDDO
    752757    ENDIF
     758
     759#if defined( __parallel )
    753760!
    754761!-- Vertical nesting: communicate vertical grid level arrays between fine and
    755762!-- coarse grid
    756763    IF ( vnested )  CALL vnest_init_grid
     764#endif
    757765
    758766 END SUBROUTINE init_grid
     
    948956    INTEGER(iwp) ::  topo_top_index   !< orography top index, used to map 3D buildings onto terrain
    949957
     958#if defined( __parallel )
    950959    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  displace_dum        !< displacements of start addresses, used for MPI_ALLGATHERV
     960#endif
    951961    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  build_ids           !< building IDs on entire model domain
    952962    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  build_ids_final     !< building IDs on entire model domain, multiple occurences are sorted out
  • palm/trunk/SOURCE/init_masks.f90

    r4360 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4360 2020-01-07 11:25:50Z suehring
    2730! Corrected "Former revisions" section
    2831!
     
    100103    INTEGER(iwp) ::  i            !< loop index
    101104    INTEGER(iwp) ::  ilen         !< length of string saved in 'do_mask'
    102     INTEGER(iwp) ::  ind(6)       !< index limits (lower/upper bounds) of output array
    103105    INTEGER(iwp) ::  ind_array(1) !< array index
    104106    INTEGER(iwp) ::  j            !< loop index
     
    106108    INTEGER(iwp) ::  m            !< mask index
    107109    INTEGER(iwp) ::  mid            !< masked output running index
     110#if defined( __parallel )
     111    INTEGER(iwp) ::  ind(6)       !< index limits (lower/upper bounds) of output array
    108112    INTEGER(iwp) ::  n            !< loop index
    109113    INTEGER(iwp) ::  sender       !< PE id of sending PE
     114#endif
    110115   
    111116    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  tmp_array !< temporary 1D array
  • palm/trunk/SOURCE/init_pegrid.f90

    r4360 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4360 2020-01-07 11:25:50Z suehring
    2730! changed message PA0467
    2831!
     
    8285        ONLY:  bc_dirichlet_l, bc_dirichlet_n, bc_dirichlet_r, bc_dirichlet_s, &
    8386               bc_lr, bc_ns, bc_radiation_l, bc_radiation_n, bc_radiation_r,   &
    84                bc_radiation_s, coupling_mode, coupling_topology, gathered_size,&
     87               bc_radiation_s, &
    8588               grid_level, grid_level_count, maximum_grid_level,               &
    86                message_string, mg_switch_to_pe0_level, momentum_advec,         &
    87                psolver, outflow_source_plane, recycling_width, scalar_advec,   &
    88                subdomain_size, turbulent_inflow, turbulent_outflow, y_shift
     89               message_string, mg_switch_to_pe0_level,         &
     90               psolver
     91
     92
     93#if defined( __parallel )
     94    USE control_parameters,                                                    &
     95        ONLY:  coupling_mode, coupling_topology, gathered_size, momentum_advec, &
     96               outflow_source_plane, recycling_width, scalar_advec, subdomain_size, &
     97               turbulent_inflow, turbulent_outflow, y_shift
    8998
    9099    USE grid_variables,                                                        &
    91100        ONLY:  dx
     101#endif
    92102       
    93103    USE indices,                                                               &
    94         ONLY:  mg_loc_ind, nbgp, nnx, nny, nnz, nx, nx_a, nx_o, nxl, nxl_mg,   &
    95                nxlu, nxr, nxr_mg, ny, ny_a, ny_o, nyn, nyn_mg, nys, nys_mg,    &
     104        ONLY:  nnx, nny, nnz, nx, nxl, nxl_mg,   &
     105               nxlu, nxr, nxr_mg, ny, nyn, nyn_mg, nys, nys_mg,    &
    96106               nysv, nz, nzb, nzt, nzt_mg, wall_flags_1, wall_flags_2,         &
    97107               wall_flags_3, wall_flags_4, wall_flags_5, wall_flags_6,         &
    98108               wall_flags_7, wall_flags_8, wall_flags_9, wall_flags_10
     109
     110#if defined( __parallel )
     111    USE indices,                                                               &
     112        ONLY:  mg_loc_ind, nbgp, nx_a, nx_o, ny_a, ny_o
     113#endif
    99114
    100115    USE kinds
     
    102117    USE pegrid
    103118   
     119#if defined( __parallel )
    104120    USE pmc_interface,                                                         &
    105121        ONLY:  nested_run
    106      
     122
    107123    USE spectra_mod,                                                           &
    108         ONLY:  calculate_spectra, dt_dosp
     124        ONLY:  calculate_spectra
    109125
    110126    USE synthetic_turbulence_generator_mod,                                    &
    111127        ONLY:  id_stg_left, id_stg_north, id_stg_right, id_stg_south,          &
    112128               use_syn_turb_gen
     129#endif
    113130
    114131    USE transpose_indices,                                                     &
    115         ONLY:  nxl_y, nxl_yd, nxl_z, nxr_y, nxr_yd, nxr_z, nyn_x, nyn_z, nys_x,&
    116                nys_z, nzb_x, nzb_y, nzb_yd, nzt_x, nzt_yd, nzt_y
     132        ONLY:  nxl_y, nxl_z, nxr_y, nxr_z, nyn_x, nyn_z, nys_x,&
     133               nys_z, nzb_x, nzb_y, nzt_x, nzt_y
     134
     135#if defined( __parallel )
     136    USE transpose_indices,                                                     &
     137        ONLY:  nxl_yd, nxr_yd, nzb_yd, nzt_yd
    117138
    118139    USE vertical_nesting_mod,                                                  &
    119140        ONLY:  vnested, vnest_init_pegrid_domain, vnest_init_pegrid_rank
     141#endif
    120142
    121143    IMPLICIT NONE
    122144
    123145    INTEGER(iwp) ::  i                        !< running index over number of processors or number of multigrid level
     146#if defined( __parallel )
    124147    INTEGER(iwp) ::  id_inflow_l              !< ID indicating processors located at the left inflow boundary
    125148    INTEGER(iwp) ::  id_outflow_l             !< local value of id_outflow
     
    131154    INTEGER(iwp) ::  id_stg_south_l           !< south lateral boundary local core id in case of turbulence generator 
    132155    INTEGER(iwp) ::  ind(5)                   !< array containing the subdomain bounds
     156#endif
    133157    INTEGER(iwp) ::  j                        !< running index, used for various loops
    134158    INTEGER(iwp) ::  k                        !< number of vertical grid points in different multigrid level
     
    138162    INTEGER(iwp) ::  mg_levels_z              !< maximum number of grid level allowed along z-direction
    139163    INTEGER(iwp) ::  mg_switch_to_pe0_level_l !< maximum number of grid level with switching to PE 0
     164#if defined( __parallel )
    140165    INTEGER(iwp) ::  nnx_y                    !< quotient of number of grid points along x-direction and number of PEs used along y-direction
    141166    INTEGER(iwp) ::  nny_x                    !< quotient of number of grid points along y-direction and number of PEs used along x-direction
     
    144169    INTEGER(iwp) ::  nnz_y                    !< quotient of number of grid points along z-direction and number of PEs used along x-direction
    145170    INTEGER(iwp) ::  numproc_sqr              !< square root of the number of processors
     171#endif
    146172    INTEGER(iwp) ::  nxl_l                    !< lower index bound along x-direction on subdomain and different multigrid level
    147173    INTEGER(iwp) ::  nxr_l                    !< upper index bound along x-direction on subdomain and different multigrid level
    148174    INTEGER(iwp) ::  nyn_l                    !< lower index bound along y-direction on subdomain and different multigrid level
    149175    INTEGER(iwp) ::  nys_l                    !< upper index bound along y-direction on subdomain and different multigrid level
     176#if defined( __parallel )
    150177    INTEGER(iwp) ::  nzb_l                    !< lower index bound along z-direction on subdomain and different multigrid level
     178#endif
    151179    INTEGER(iwp) ::  nzt_l                    !< upper index bound along z-direction on subdomain and different multigrid level
    152180!$  INTEGER(iwp) ::  omp_get_num_threads      !< number of OpenMP threads
    153181
     182#if defined( __parallel )
    154183    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  ind_all !< dummy array containing index bounds on subdomain, used for gathering
    155184    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  nxlf    !< lower index bound allong x-direction for every PE
     
    161190    INTEGER(iwp)               ::  lcoord(2)            !< PE coordinates of left neighbor along x and y
    162191    INTEGER(iwp)               ::  rcoord(2)            !< PE coordinates of right neighbor along x and y
     192#endif
    163193
    164194!
  • palm/trunk/SOURCE/lagrangian_particle_model_mod.f90

    r4430 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4430 2020-02-27 18:02:20Z suehring
    2730! - Bugfix in logarithmic interpolation of near-ground particle speed (density
    2831!   was not considered).
     
    175178    USE particle_attributes
    176179
     180#if defined( __parallel )
    177181    USE pmc_particle_interface,                                                &
    178182        ONLY: pmcp_c_get_particle_from_parent, pmcp_p_fill_particle_win,       &
     
    180184              pmcp_p_delete_particles_in_fine_grid_area, pmcp_g_init,          &
    181185              pmcp_g_print_number_of_particles
     186#endif
    182187
    183188    USE pmc_interface,                                                         &
     
    307312
    308313    INTEGER(iwp), PARAMETER ::  NR_2_direction_move = 10000 !<
     314
     315#if defined( __parallel )
    309316    INTEGER(iwp)            ::  nr_move_north               !<
    310317    INTEGER(iwp)            ::  nr_move_south               !<
     
    312319    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  move_also_north
    313320    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  move_also_south
     321#endif
    314322
    315323    REAL(wp) ::  epsilon_collision !<
     
    12341242    ENDIF
    12351243
     1244#if defined( __parallel )
    12361245    IF ( nested_run )  CALL pmcp_g_init
     1246#endif
     1247
    12371248!
    12381249!-- To avoid programm abort, assign particles array to the local version of
     
    22632274                first_loop_stride = .FALSE.
    22642275             ENDDO   ! timestep loop
     2276
     2277#if defined( __parallel )
    22652278!
    22662279!--          in case of nested runs do the transfer of particles after every full model time step
     
    22742287                deleted_particles = 0
    22752288             ENDIF
     2289#endif
    22762290
    22772291!
     
    23412355 END SUBROUTINE lpm_actions
    23422356 
     2357
     2358#if defined( __parallel )
     2359!------------------------------------------------------------------------------!
     2360! Description:
     2361! ------------
     2362!
     2363!------------------------------------------------------------------------------!
     2364 SUBROUTINE particles_from_parent_to_child
     2365
     2366    CALL pmcp_c_get_particle_from_parent                         ! Child actions
     2367    CALL pmcp_p_fill_particle_win                                ! Parent actions
     2368
     2369    RETURN
     2370
     2371 END SUBROUTINE particles_from_parent_to_child
     2372
    23432373 
    23442374!------------------------------------------------------------------------------!
     
    23472377!
    23482378!------------------------------------------------------------------------------!
    2349  SUBROUTINE particles_from_parent_to_child
    2350 
    2351     CALL pmcp_c_get_particle_from_parent                         ! Child actions
    2352     CALL pmcp_p_fill_particle_win                                ! Parent actions
    2353 
    2354     RETURN
    2355 
    2356  END SUBROUTINE particles_from_parent_to_child
    2357 
    2358  
    2359 !------------------------------------------------------------------------------!
    2360 ! Description:
    2361 ! ------------
    2362 !
    2363 !------------------------------------------------------------------------------!
    23642379 SUBROUTINE particles_from_child_to_parent
    23652380
     
    23702385
    23712386 END SUBROUTINE particles_from_child_to_parent
     2387#endif
    23722388 
    23732389!------------------------------------------------------------------------------!
     
    24212437#endif
    24222438
     2439#if defined( __parallel )
    24232440    IF ( nested_run )  THEN
    24242441       CALL pmcp_g_print_number_of_particles( simulated_time+dt_3d,            &
    24252442                                              tot_number_of_particles)
    24262443    ENDIF
     2444#endif
    24272445
    24282446!
     
    69736991 SUBROUTINE lpm_exchange_horiz
    69746992
    6975     INTEGER(iwp) ::  i                 !< grid index (x) of particle positition
    69766993    INTEGER(iwp) ::  ip                !< index variable along x
    6977     INTEGER(iwp) ::  j                 !< grid index (y) of particle positition
    69786994    INTEGER(iwp) ::  jp                !< index variable along y
    69796995    INTEGER(iwp) ::  kp                !< index variable along z
    69806996    INTEGER(iwp) ::  n                 !< particle index variable
     6997
     6998#if defined( __parallel )
     6999    INTEGER(iwp) ::  i                 !< grid index (x) of particle positition
     7000    INTEGER(iwp) ::  j                 !< grid index (y) of particle positition
    69817001    INTEGER(iwp) ::  par_size          !< Particle size in bytes
    69827002    INTEGER(iwp) ::  trlp_count        !< number of particles send to left PE
     
    69977017    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  trrp  !< particles send to right PE
    69987018    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  trsp  !< particles send to south PE
     7019#endif
    69997020
    70007021    CALL cpu_log( log_point_s(23), 'lpm_exchange_horiz', 'start' )
     
    76147635 END SUBROUTINE lpm_exchange_horiz
    76157636
     7637#if defined( __parallel )
    76167638!------------------------------------------------------------------------------!
    76177639! Description:
     
    77737795    ENDDO
    77747796
    7775     RETURN
    7776 
    77777797 END SUBROUTINE lpm_add_particles_to_gridcell
     7798#endif
    77787799 
    77797800 
  • palm/trunk/SOURCE/land_surface_model_mod.f90

    r4442 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directive moved
     28!
     29! 4442 2020-03-04 19:21:13Z suehring
    2730! Change order of dimension in surface arrays %frac, %emissivity and %albedo
    2831! to allow for better vectorization in the radiation interactions.
     
    23762379       REAL(wp), DIMENSION(:), ALLOCATABLE ::  bound          !< temporary arrays for storing index bounds
    23772380       REAL(wp), DIMENSION(:), ALLOCATABLE ::  bound_root_fr  !< temporary arrays for storing index bounds
     2381#if defined( __parallel )
    23782382       REAL(wp), DIMENSION(:), ALLOCATABLE ::  pr_soil_init   !< temporary array used for averaging soil profiles
    2379 #if defined( __parallel )
    23802383       REAL(wp), DIMENSION(:), ALLOCATABLE ::  z_soil_root    !< vertical dimension of soil grid in root domain
    23812384#endif
  • palm/trunk/SOURCE/local_stop.f90

    r4360 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: misplaced cpp-directive moved
     28!
     29! 4360 2020-01-07 11:25:50Z suehring
    2730! Corrected "Former revisions" section
    2831!
     
    4043 SUBROUTINE local_stop
    4144 
     45#if defined( __parallel )
    4246
    4347    USE control_parameters,                                                    &
     
    5256
    5357
    54 #if defined( __parallel )
    5558    IF ( coupling_mode == 'uncoupled' )  THEN
    5659       IF ( nested_run )  THEN
  • palm/trunk/SOURCE/multi_agent_system_mod.f90

    r4346 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4346 2019-12-18 11:55:56Z motisi
    2730! Removed wall_flags_static_0 from USE statements as it's not used within
    2831! the module
     
    115118    INTEGER(iwp) ::  iran_agent = -1234567             !< number for random generator
    116119    INTEGER(iwp) ::  min_nr_agent = 2                  !< namelist parameter (see documentation)
     120#if defined( __parallel )
    117121    INTEGER(iwp) ::  ghla_count_recv                   !< number of agents in left ghost layer
    118122    INTEGER(iwp) ::  ghna_count_recv                   !< number of agents in north ghost layer
    119123    INTEGER(iwp) ::  ghra_count_recv                   !< number of agents in right ghost layer
    120124    INTEGER(iwp) ::  ghsa_count_recv                   !< number of agents in south ghost layer
    121     INTEGER(iwp) ::  maximum_number_of_agents = 0      !< maximum number of agents during run
    122125    INTEGER(iwp) ::  nr_move_north                     !< number of agts to move north during exchange_horiz
    123126    INTEGER(iwp) ::  nr_move_south                     !< number of agts to move south during exchange_horiz
     127#endif
     128    INTEGER(iwp) ::  maximum_number_of_agents = 0      !< maximum number of agents during run
    124129    INTEGER(iwp) ::  number_of_agents = 0              !< number of agents for each grid box (3d array is saved on agt_count)
    125130    INTEGER(iwp) ::  number_of_agent_groups = 1        !< namelist parameter (see documentation)
     
    239244    TYPE(agent_type), DIMENSION(:), POINTER ::  agents               !< Agent array for this grid cell
    240245    TYPE(agent_type)                        ::  zero_agent           !< zero agent to avoid weird thing
     246#if defined( __parallel )
    241247    TYPE(agent_type), DIMENSION(:), ALLOCATABLE ::  move_also_north  !< for agent exchange between PEs
    242248    TYPE(agent_type), DIMENSION(:), ALLOCATABLE ::  move_also_south  !< for agent exchange between PEs
     
    245251    TYPE(agent_type), DIMENSION(:), ALLOCATABLE ::  agt_gh_r         !< ghost layer right of pe domain
    246252    TYPE(agent_type), DIMENSION(:), ALLOCATABLE ::  agt_gh_s         !< ghost layer south of pe domain
     253#endif
    247254!
    248255!-- Type for 2D grid on which agents are stored
     
    12161223       IMPLICIT NONE
    12171224
     1225#if defined( __parallel )
    12181226       INTEGER(iwp) ::  agt_size !< Agent size in bytes
     1227       INTEGER(iwp) ::  n        !< counter (number of PEs)
     1228       INTEGER(iwp) ::  noa_rcv  !< received number of agents
     1229#endif
    12191230       INTEGER(iwp) ::  dummy    !< dummy
    12201231       INTEGER(iwp) ::  ii       !< counter (x)
    12211232       INTEGER(iwp) ::  ip       !< counter (x)
    12221233       INTEGER(iwp) ::  jp       !< counter (y)
    1223        INTEGER(iwp) ::  n        !< counter (number of PEs)
    12241234       INTEGER(iwp) ::  noa      !< number of agents
    1225        INTEGER(iwp) ::  noa_rcv  !< received number of agents
    12261235       INTEGER(iwp) ::  out_noa  !< number of agents for output
    12271236
     1237#if defined( __parallel )
    12281238       INTEGER(iwp), DIMENSION(0:numprocs-1) ::  noa_arr !< number of agents on each PE
     1239#endif
    12291240!
    12301241!--    SAVE attribute required to avoid compiler warning about pointer outlive the pointer target
    12311242       TYPE(agent_type), DIMENSION(:), ALLOCATABLE, TARGET, SAVE ::  trf_agents !< all agents on current PE
     1243#if defined( __parallel )
    12321244       TYPE(agent_type), DIMENSION(:), ALLOCATABLE, TARGET, SAVE ::  out_agents !< all agents in entire domain
     1245#endif
    12331246
    12341247       LOGICAL, INTENT (INOUT) :: ftest
     
    14541467    END SUBROUTINE mas_data_output_agents
    14551468
     1469#if defined( __parallel )
    14561470!------------------------------------------------------------------------------!
    14571471! Description:
     
    15961610
    15971611    END SUBROUTINE mas_eh_add_agents_to_gridcell
    1598 
     1612#endif
     1613
     1614
     1615#if defined( __parallel )
    15991616!------------------------------------------------------------------------------!
    16001617! Description:
     
    16481665       ENDDO
    16491666    END SUBROUTINE mas_eh_add_ghost_agents_to_gridcell
     1667#endif
    16501668
    16511669!------------------------------------------------------------------------------!
     
    17421760       IMPLICIT NONE
    17431761
     1762       INTEGER(iwp) ::  ip               !< index variable along x
     1763       INTEGER(iwp) ::  jp               !< index variable along y
     1764       INTEGER(iwp) ::  n                !< agent index variable
     1765
     1766#if defined( __parallel )
     1767
    17441768       INTEGER(iwp) ::  i                !< grid index (x) of agent positition
    1745        INTEGER(iwp) ::  ip               !< index variable along x
    17461769       INTEGER(iwp) ::  j                !< grid index (y) of agent positition
    1747        INTEGER(iwp) ::  jp               !< index variable along y
    1748        INTEGER(iwp) ::  n                !< agent index variable
    17491770       INTEGER(iwp) ::  par_size         !< Agent size in bytes
     1771
    17501772       INTEGER(iwp) ::  trla_count       !< number of agents send to left PE
    17511773       INTEGER(iwp) ::  trla_count_recv  !< number of agents receive from right PE
     
    17651787       TYPE(agent_type), DIMENSION(:), ALLOCATABLE ::  trra  !< agents send to right PE
    17661788       TYPE(agent_type), DIMENSION(:), ALLOCATABLE ::  trsa  !< agents send to south PE
    1767 
    1768 #if defined( __parallel )
    17691789
    17701790!
     
    22282248       DEALLOCATE( move_also_south )
    22292249
     2250!
     2251!--    Accumulate the number of agents transferred between the subdomains)
     2252       CALL mas_eh_ghost_exchange
     2253
    22302254#else
    22312255
     
    23162340#endif
    23172341
    2318 !
    2319 !--    Accumulate the number of agents transferred between the subdomains)
    2320        CALL mas_eh_ghost_exchange
    2321 
    23222342    END SUBROUTINE mas_eh_exchange_horiz
    23232343
     2344
     2345#if defined( __parallel )
    23242346!------------------------------------------------------------------------------!
    23252347! Description:
     
    23322354
    23332355       IMPLICIT NONE
    2334 
    2335 #if defined( __parallel )
    23362356
    23372357       INTEGER(iwp) ::  ip          !< index variable along x
     
    26172637       ENDIF
    26182638
     2639    END SUBROUTINE mas_eh_ghost_exchange
    26192640#endif
    2620 
    2621     END SUBROUTINE mas_eh_ghost_exchange
    26222641
    26232642!------------------------------------------------------------------------------!
     
    39223941    END SUBROUTINE mas_ps_sort_in_subboxes
    39233942
     3943#if defined( __parallel )
    39243944!------------------------------------------------------------------------------!
    39253945! Description:
     
    39623982       number_of_agents = nn
    39633983
    3964     END SUBROUTINE mas_ps_pack
     3984    END SUBROUTINE mas_ps_pack
     3985#endif
    39653986
    39663987!------------------------------------------------------------------------------!
  • palm/trunk/SOURCE/palm.f90

    r4414 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4414 2020-02-19 20:16:04Z suehring
    2730! Call to module_interface_init_numerics
    2831!
     
    7376    USE arrays_3d
    7477
     78#if defined( __parallel )
    7579    USE bulk_cloud_model_mod,                                                  &
    7680        ONLY: bulk_cloud_model, microphysics_morrison, microphysics_seifert
     81#endif
    7782
    7883    USE control_parameters,                                                    &
    79         ONLY:  constant_diffusion, child_domain,                               &
    80                coupling_char, do2d_at_begin, do3d_at_begin, humidity,          &
    81                initializing_actions, io_blocks, io_group, message_string,      &
    82                neutral, passive_scalar, runnr, simulated_time_chr, spinup,     &
     84        ONLY:  coupling_char, do2d_at_begin, do3d_at_begin, io_blocks,         &
     85               io_group, message_string, runnr, simulated_time_chr, spinup,    &
    8386               time_since_reference_point, user_interface_current_revision,    &
    8487               user_interface_required_revision, version, write_binary
    8588
     89#if defined( __parallel )
     90    USE control_parameters,                                                    &
     91        ONLY:  child_domain, constant_diffusion, humidity,                     &
     92               initializing_actions, neutral, passive_scalar
     93#endif
     94
    8695    USE cpulog,                                                                &
    87         ONLY:  cpu_log, log_point, log_point_s, cpu_statistics
     96        ONLY:  cpu_log, log_point, cpu_statistics
     97
     98#if defined( __parallel )
     99    USE cpulog,                                                                &
     100        ONLY:  log_point_s
     101#endif
    88102
    89103    USE diagnostic_output_quantities_mod,                                      &
    90104        ONLY:  doq_calculate
    91105
     106#if defined( __parallel )
    92107    USE indices,                                                               &
    93108        ONLY:  nbgp
     109#endif
    94110
    95111    USE kinds
     
    110126    USE pegrid
    111127
     128#if defined( __parallel )
    112129    USE pmc_particle_interface,                                                &
    113130        ONLY: pmcp_g_alloc_win
     
    116133        ONLY:  nested_run, pmci_child_initialize, pmci_init,                   &
    117134               pmci_modelconfiguration, pmci_parent_initialize
     135#endif
    118136
    119137    USE surface_data_output_mod,                                               &
     
    123141        ONLY:  wrd_global, wrd_local
    124142
    125 #if defined( __parallel) && defined( _OPENACC )
     143#if defined( __parallel )  && defined( _OPENACC )
    126144    USE openacc
    127145#endif
     
    285303    CALL module_interface_init_output
    286304
     305#if defined( __parallel )
    287306!
    288307!-- Coupling protocol setup for nested-domain runs
     
    325344       CALL pmcp_g_alloc_win                    ! Must be called after pmci_child_initialize and pmci_parent_initialize
    326345    ENDIF
     346#endif
    327347
    328348!
  • palm/trunk/SOURCE/pmc_interface_mod.f90

    r4413 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives and variable declarations for serial mode added
     28!
     29! 4413 2020-02-19 15:52:19Z hellstea
    2730! All the USE-statements within subroutines moved up to the module declaration section.
    2831!
     
    183186 MODULE pmc_interface
    184187
     188#if ! defined( __parallel )
     189!
     190!-- Serial mode does not allow nesting, but requires the following variables as steering
     191!-- quantities
     192    USE kinds
     193
     194    IMPLICIT NONE
     195
     196    PUBLIC
     197
     198    CHARACTER(LEN=8), SAVE ::  nesting_mode = 'none'   !< steering parameter for 1- or 2-way nesting
     199
     200    INTEGER(iwp), SAVE     ::  comm_world_nesting    !< Global nesting communicator
     201    INTEGER(iwp), SAVE     ::  cpl_id  = 1           !<
     202
     203    LOGICAL, SAVE ::  nested_run = .FALSE.        !< general switch
     204    LOGICAL, SAVE ::  rans_mode_parent = .FALSE.  !< parent model mode (.F.-LES mode, .T.-RANS mode)
     205
     206#else
     207
    185208    USE ISO_C_BINDING
    186209
     
    306329   
    307330    LOGICAL, SAVE ::  nested_run = .FALSE.  !< general switch
    308     LOGICAL      ::  rans_mode_parent = .FALSE. !< mode of parent model (.F. - LES mode, .T. - RANS mode)
     331    LOGICAL, SAVE ::  rans_mode_parent = .FALSE. !< mode of parent model (.F. - LES mode, .T. - RANS mode)
    309332!
    310333!-- Geometry
     
    51835206 END SUBROUTINE pmci_ensure_nest_mass_conservation_vertical
    51845207
    5185  
     5208#endif
    51865209END MODULE pmc_interface
  • palm/trunk/SOURCE/pmc_particle_interface.f90

    r4360 r4444  
    2626! -----------------!
    2727! $Id$
     28! bugfix: preprocessor directives for serial mode added
     29!
     30! 4360 2020-01-07 11:25:50Z suehring
    2831! Corrected "Former revisions" section
    2932!
     
    5053! child model.
    5154!------------------------------------------------------------------------------!
     55#if defined( __parallel )
    5256
    5357   USE, INTRINSIC ::  ISO_C_BINDING
     
    11991203 END SUBROUTINE pmc_realloc_particles_array
    12001204
     1205#endif
    12011206END MODULE pmc_particle_interface
  • palm/trunk/SOURCE/surface_data_output_mod.f90

    r4360 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4360 2020-01-07 11:25:50Z suehring
    2730! Fix wrongly declared nc_stat variable in surface_data_output_mod
    2831!
     
    14101413          ONLY:  io_blocks, io_group, time_since_reference_point
    14111414
     1415#if defined( __parallel )
    14121416      USE pegrid,                                                              &
    14131417          ONLY:  comm2d, ierr
     1418#endif
    14141419
    14151420
     
    44694474          ONLY:  io_blocks, io_group
    44704475
     4476#if defined( __parallel )
    44714477      USE pegrid,                                                              &
    44724478          ONLY:  comm2d, ierr
     4479#endif
    44734480
    44744481      IMPLICIT NONE
  • palm/trunk/SOURCE/synthetic_turbulence_generator_mod.f90

    r4442 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added, dummy statements to prevent compile errors added
     28!
     29! 4442 2020-03-04 19:21:13Z suehring
    2730! Set back turbulent length scale to 8 x grid spacing in the parametrized mode
    2831! (was accidantly changed).
     
    286289    INTEGER(iwp) ::  nzb_y_stg          !< lower bound of z coordinate (required for transposing z on PEs along y)
    287290    INTEGER(iwp) ::  nzt_y_stg          !< upper bound of z coordinate (required for transposing z on PEs along y)
     291#if defined( __parallel )
    288292    INTEGER(iwp) ::  stg_type_xz        !< MPI type for full z range
    289293    INTEGER(iwp) ::  stg_type_xz_small  !< MPI type for small z range
    290294    INTEGER(iwp) ::  stg_type_yz        !< MPI type for full z range
    291295    INTEGER(iwp) ::  stg_type_yz_small  !< MPI type for small z range
     296#endif
    292297
    293298    INTEGER(iwp), DIMENSION(3) ::  nr_non_topo_xz = 0 !< number of non-topography grid points at xz cross-sections,
     
    296301                                                      !< required for bias correction of imposed perturbations
    297302   
     303#if defined( __parallel )
    298304    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  displs_xz      !< displacement for MPI_GATHERV
    299305    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  recv_count_xz  !< receive count for MPI_GATHERV
    300306    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  displs_yz      !< displacement for MPI_GATHERV
    301307    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  recv_count_yz  !< receive count for MPI_GATHERV
     308#endif
    302309    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  nux            !< length scale of u in x direction (in gp)
    303310    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  nuy            !< length scale of u in y direction (in gp)
     
    595602    INTEGER(iwp) :: j                        !> loop index
    596603    INTEGER(iwp) :: k                        !< index
     604#if defined( __parallel )
    597605    INTEGER(iwp) :: newtype                  !< dummy MPI type
    598606    INTEGER(iwp) :: realsize                 !< size of REAL variables
     607#endif
    599608
    600609    INTEGER(iwp), DIMENSION(3) ::  nr_non_topo_xz_l = 0 !< number of non-topography grid points at xz-cross-section on subdomain
     
    612621    REAL(wp) :: lwy     !< length scale for w in y direction
    613622    REAL(wp) :: lwz     !< length scale for w in z direction
     623#if defined( __parallel )
    614624    REAL(wp) :: nnz     !< increment used to determine processor decomposition of z-axis along x and y direction
     625#endif
    615626    REAL(wp) :: zz      !< height
    616627
     
    622633!-- Create mpi-datatypes for exchange in case of non-local but distributed
    623634!-- computation of the velocity seeds. This option is useful in
    624 !-- case large turbulent length scales are presentm, where the computational
    625 !-- effort becomes large and need to be parallelized. For parametrized
     635!-- case large turbulent length scales are present, where the computational
     636!-- effort becomes large and need to be parallelized. For parameterized
    626637!-- turbulence the length scales are small and computing the velocity seeds
    627638!-- locally is faster (no overhead by communication).
     
    18921903#else
    18931904       f_n(nzb+1:nzt+1,nys:nyn) = f_n_l(nzb_x_stg:nzt_x_stg+1,nys:nyn)
     1905!
     1906!--    Next line required to avoid compile errors because of unused dummy arguments
     1907       IF ( id_left == 0 )  id_right = 0
    18941908#endif
    18951909
     
    20732087#else
    20742088       f_n(nzb+1:nzt+1,nxl:nxr) = f_n_l(nzb_y_stg:nzt_y_stg+1,nxl:nxr)
     2089!
     2090!--    Next line required to avoid compile errors because of unused dummy arguments
     2091       IF ( id_north == 0 )  id_south = 0
    20752092#endif
    20762093
  • palm/trunk/SOURCE/time_integration.f90

    r4420 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4420 2020-02-24 14:13:56Z maronga
    2730! Added output control for wind turbine model
    2831!
     
    198201
    199202    USE arrays_3d,                                                                                 &
    200         ONLY:  diss, diss_p, dzu, e, e_p, nc, nc_p, nr, nr_p, prho, pt, pt_p, pt_init, q_init, q,  &
    201                qc, qc_p, qr, qr_p, q_p, ref_state, rho_ocean, s, s_p, sa_p, &
    202                tend, u, u_p, v, vpt, v_p, w, w_p
     203        ONLY:  diss, diss_p, dzu, e_p, nc_p, nr_p, prho, pt, pt_p, pt_init, q, qc_p, qr_p, q_init, &
     204               q_p, ref_state, rho_ocean, sa_p, s_p, tend, u, u_p, v, vpt, v_p, w_p
     205
     206#if defined( __parallel )  &&  ! defined( _OPENACC )
     207    USE arrays_3d,                                                                                 &
     208        ONLY:  e, nc, nr, qc, qr, s, w
     209#endif
    203210
    204211    USE biometeorology_mod,                                                                        &
     
    220227
    221228    USE chem_modules,                                                                              &
    222         ONLY:  bc_cs_t_val, chem_species, cs_name,                                                 &
    223                emissions_anthropogenic, emiss_read_legacy_mode,                                    &
     229        ONLY:  bc_cs_t_val, chem_species, emissions_anthropogenic, emiss_read_legacy_mode,         &
    224230               n_matched_vars
     231
     232#if defined( __parallel )
     233    USE chem_modules,                                                                              &
     234        ONLY:  cs_name
     235#endif
    225236
    226237    USE chemistry_model_mod,                                                                       &
     
    242253               multi_agent_system_end, multi_agent_system_start, nesting_offline, neutral,         &
    243254               nr_timesteps_this_run, nudging, ocean_mode, passive_scalar, pt_reference,           &
    244                pt_slope_offset, random_heatflux, rans_mode, rans_tke_e, run_coupled, salsa,        &
     255               pt_slope_offset, random_heatflux, rans_tke_e, run_coupled, salsa,                   &
    245256               simulated_time, simulated_time_chr, skip_time_do2d_xy, skip_time_do2d_xz,           &
    246257               skip_time_do2d_yz, skip_time_do3d, skip_time_domask, skip_time_dopr,                &
     
    254265               virtual_flight, virtual_measurement, ws_scheme_mom, ws_scheme_sca, timestep_count
    255266
     267#if defined( __parallel )
     268    USE control_parameters,                                                                        &
     269        ONLY:  rans_mode
     270#endif
     271
    256272    USE cpulog,                                                                                    &
    257273        ONLY:  cpu_log, log_point, log_point_s
     
    311327    USE pegrid
    312328
     329#if defined( __parallel )
    313330    USE pmc_interface,                                                                             &
    314331        ONLY:  nested_run, nesting_mode, pmci_boundary_conds, pmci_datatrans, pmci_synchronize,    &
    315332        pmci_ensure_nest_mass_conservation, pmci_ensure_nest_mass_conservation_vertical,           &
    316333        pmci_set_swaplevel
     334#endif
    317335
    318336    USE progress_bar,                                                                              &
     
    361379
    362380    USE vertical_nesting_mod,                                                                      &
    363         ONLY:  vnested, vnest_anterpolate, vnest_anterpolate_e, vnest_boundary_conds,              &
    364                vnest_boundary_conds_khkm, vnest_deallocate, vnest_init, vnest_init_fine,           &
    365                vnest_start_time
     381        ONLY:  vnested, vnest_init
     382
     383#if defined( __parallel )
     384    USE vertical_nesting_mod,                                                                      &
     385        ONLY:  vnest_anterpolate, vnest_anterpolate_e, vnest_boundary_conds,                       &
     386               vnest_boundary_conds_khkm, vnest_deallocate, vnest_init_fine, vnest_start_time
     387#endif
    366388
    367389    USE virtual_measurement_mod,                                                                   &
     
    377399
    378400#if defined( _OPENACC )
    379     USE arrays_3d,                                                             &
    380         ONLY:  d, dd2zu, ddzu, ddzw, drho_air, drho_air_zw, dzw, heatflux_output_conversion, kh,   &
    381                km, momentumflux_output_conversion, p, ptdf_x, ptdf_y, rdf, rdf_sc, rho_air,        &
    382                rho_air_zw, tdiss_m, te_m, tpt_m, tu_m, tv_m, tw_m, ug, u_init, u_stokes_zu, vg,    &
    383                v_init, v_stokes_zu, zu
     401    USE arrays_3d,                                                                                 &
     402        ONLY:  d, dd2zu, ddzu, ddzw, drho_air, drho_air_zw, dzw, e, heatflux_output_conversion,    &
     403               kh, km, momentumflux_output_conversion, nc, nr, p, ptdf_x, ptdf_y, qc, qr, rdf,     &
     404               rdf_sc, rho_air, rho_air_zw, s, tdiss_m, te_m, tpt_m, tu_m, tv_m, tw_m, ug, u_init, &
     405               u_stokes_zu, vg, v_init, v_stokes_zu, w, zu
    384406
    385407    USE control_parameters,                                                                        &
     
    411433    INTEGER(iwp) ::  ig                  !< index for salsa gases
    412434    INTEGER(iwp) ::  lsp                 !<
     435    INTEGER(iwp) ::  mid                 !< masked output running index
     436#if defined( __parallel )
    413437    INTEGER(iwp) ::  lsp_usr             !<
    414     INTEGER(iwp) ::  mid                 !< masked output running index
    415438    INTEGER(iwp) ::  n                   !< loop counter for chemistry species
     439#endif
    416440
    417441    REAL(wp) ::  dt_3d_old  !< temporary storage of timestep to be used for
     
    520544!-- At the beginning, determine the first time step
    521545    CALL timestep
     546
     547#if defined( __parallel )
    522548!
    523549!-- Synchronize the timestep in case of nested run.
     
    528554       CALL pmci_synchronize
    529555    ENDIF
     556#endif
    530557
    531558!
     
    558585
    559586       CALL cpu_log( log_point_s(10), 'timesteps', 'start' )
     587
     588#if defined( __parallel )
    560589!
    561590!--    Vertical nesting: initialize fine grid
     
    568597          ENDIF
    569598       ENDIF
     599#endif
     600
    570601!
    571602!--    Determine ug, vg and w_subs depending on data from the external file
     
    764795!--       Set the swap level for all modules
    765796          CALL module_interface_swap_timelevel( MOD( timestep_count, 2) )
     797
     798#if defined( __parallel )
    766799!
    767800!--       Set the swap level for steering the pmc data transfer
    768801          IF ( nested_run )  CALL pmci_set_swaplevel( MOD( timestep_count, 2) + 1 )  !> @todo: why the +1 ?
     802#endif
    769803
    770804          CALL cpu_log( log_point(28), 'swap_timelevel', 'stop' )
    771805
     806#if defined( __parallel )
    772807!
    773808!--       Vertical nesting: Interpolate fine grid data to the coarse grid
     
    886921
    887922          ENDIF
     923#endif
    888924
    889925!
     
    962998
    963999             IF (  vnest_init ) THEN
     1000#if defined( __parallel )
    9641001!
    9651002!--             Compute pressure in the CG, interpolate top boundary conditions
     
    9771014                CALL vnest_anterpolate_e
    9781015                CALL cpu_log( log_point_s(28), 'vnest_anter_e', 'stop' )
     1016#else
     1017                CONTINUE
     1018#endif
    9791019
    9801020             ELSE
     1021#if defined( __parallel )
    9811022!
    9821023!--             Mass (volume) flux correction to ensure global mass conservation for child domains.
     
    9881029                   ENDIF
    9891030                ENDIF
    990 
     1031#endif
    9911032                CALL pres
    9921033
     
    11071148             ENDIF
    11081149             CALL cpu_log( log_point(17), 'diffusivities', 'stop' )
     1150
     1151#if defined( __parallel )
    11091152!
    11101153!--          Vertical nesting: set fine grid eddy viscosity top boundary condition
    11111154             IF ( vnest_init )  CALL vnest_boundary_conds_khkm
     1155#endif
    11121156
    11131157          ENDIF
     
    16121656       CALL timestep
    16131657
     1658#if defined( __parallel )
    16141659!
    16151660!--    Synchronize the timestep in case of nested run.
     
    16201665          CALL pmci_synchronize
    16211666       ENDIF
     1667#endif
    16221668
    16231669!
     
    16531699!$ACC END DATA
    16541700
     1701#if defined( __parallel )
    16551702!
    16561703!-- Vertical nesting: Deallocate variables initialized for vertical nesting   
    16571704    IF ( vnest_init )  CALL vnest_deallocate
     1705#endif
    16581706
    16591707    IF ( myid == 0 )  CALL finish_progress_bar
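
    The time_integration hunks above all apply the same guard pattern: symbols that are needed in
    every build stay in the plain USE statement, while symbols that only exist or are only
    referenced in MPI-parallel runs are imported inside a #if defined( __parallel ) block. A
    minimal, self-contained sketch of that pattern follows; the module, program, and variable
    names are hypothetical and not taken from the PALM sources, and the file has to be run through
    the C preprocessor (e.g. a .F90 suffix or the compiler's -cpp flag) for the directives to take
    effect.

     MODULE demo_guard_mod
    !-- Hypothetical flag that is needed in serial and parallel builds alike
        LOGICAL ::  demo_nested_run = .FALSE.
    #if defined( __parallel )
    !-- Hypothetical handle that is only meaningful when MPI is available
        INTEGER ::  demo_comm = 0
    #endif
     END MODULE demo_guard_mod

     PROGRAM demo_guard
    !-- Serial-safe symbols stay in the unguarded USE statement ...
        USE demo_guard_mod,                                                        &
            ONLY:  demo_nested_run
    #if defined( __parallel )
    !-- ... parallel-only symbols are imported behind the cpp guard
        USE demo_guard_mod,                                                        &
            ONLY:  demo_comm
    #endif
        IMPLICIT NONE

        IF ( demo_nested_run )  PRINT*, 'nesting requested'
    #if defined( __parallel )
        PRINT*, 'communicator handle: ', demo_comm
    #endif
     END PROGRAM demo_guard
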
  • palm/trunk/SOURCE/time_integration_spinup.f90

    r4360 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4360 2020-01-07 11:25:50Z suehring
    2730! Enable output of diagnostic quantities, e.g. 2-m temperature
    2831!
     
    9699    USE pegrid
    97100
     101#if defined( __parallel )
    98102    USE pmc_interface,                                                         &
    99103        ONLY:  nested_run
     104#endif
    100105
    101106    USE kinds
  • palm/trunk/SOURCE/timestep.f90

    r4360 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4360 2020-01-07 11:25:50Z suehring
    2730! Added missing OpenMP directives
    2831!
     
    5861
    5962    USE control_parameters,                                                    &
    60         ONLY:  cfl_factor, coupling_mode, dt_3d, dt_fixed, dt_max,             &
    61                galilei_transformation, message_string, rans_mode,              &
    62                stop_dt, terminate_coupled, terminate_coupled_remote,           &
    63                timestep_reason, u_gtrans, use_ug_for_galilei_tr, v_gtrans
     63        ONLY:  cfl_factor, dt_3d, dt_fixed, dt_max, galilei_transformation,    &
     64               message_string, rans_mode, stop_dt, timestep_reason, u_gtrans,  &
     65               use_ug_for_galilei_tr, v_gtrans
     66
     67#if defined( __parallel )
     68    USE control_parameters,                                                    &
     69        ONLY:  coupling_mode, terminate_coupled, terminate_coupled_remote
     70#endif
    6471
    6572    USE cpulog,                                                                &
     
    8895               w_max, w_max_ijk
    8996
     97#if defined( __parallel )
    9098    USE vertical_nesting_mod,                                                  &
    9199        ONLY:  vnested, vnest_timestep_sync
     100#endif
    92101
    93102    IMPLICIT NONE
     
    115124    REAL(wp) ::  v_gtrans_l        !<
    116125 
     126    REAL(wp), DIMENSION(2)         ::  uv_gtrans_l !<
     127#if defined( __parallel )
    117128    REAL(wp), DIMENSION(2)         ::  uv_gtrans   !<
    118     REAL(wp), DIMENSION(2)         ::  uv_gtrans_l !<
    119129    REAL(wp), DIMENSION(3)         ::  reduce      !<
    120     REAL(wp), DIMENSION(3)         ::  reduce_l    !<
     130    REAL(wp), DIMENSION(3)         ::  reduce_l    !<
     131#endif
    121132    REAL(wp), DIMENSION(nzb+1:nzt) ::  dxyz2_min   !< 
    122133    !$ACC DECLARE CREATE(dxyz2_min)
     
    382393    ENDIF
    383394
     395#if defined( __parallel )
    384396!
    385397!-- Vertical nesting: coarse and fine grid timesteps have to be identical
    386398    IF ( vnested )  CALL vnest_timestep_sync
     399#endif
    387400
    388401    CALL cpu_log( log_point(12), 'calculate_timestep', 'stop' )
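
    The timestep.f90 hunk above moves the reduction buffers (uv_gtrans, reduce, reduce_l) behind
    the guard because they are only filled by MPI reductions. A minimal sketch of that
    declare-and-reduce pattern, with a hypothetical routine name and default REAL instead of
    PALM's wp kind, might look like this:

     SUBROUTINE demo_global_min( local_value, global_value )
    #if defined( __parallel )
        USE MPI
    #endif
        IMPLICIT NONE

        REAL, INTENT(IN)  ::  local_value   !< value computed on this PE
        REAL, INTENT(OUT) ::  global_value  !< minimum over all PEs
    #if defined( __parallel )
        INTEGER ::  ierr                    !< declared only where it is referenced

        CALL MPI_ALLREDUCE( local_value, global_value, 1, MPI_REAL, MPI_MIN,      &
                            MPI_COMM_WORLD, ierr )
    #else
    !-- Serial build: there is only one PE, so the local value is already global
        global_value = local_value
    #endif
     END SUBROUTINE demo_global_min
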
  • palm/trunk/SOURCE/vertical_nesting_mod.f90

    r4360 r4444  
    2626! -----------------
    2727! $Id$
     28! bugfix: cpp-directives for serial mode added
     29!
     30! 4360 2020-01-07 11:25:50Z suehring
    2831! Corrected "Former revisions" section
    2932!
     
    8588                                                                               !> identical in PARIN & PARIN_N
    8689
    87 
     90#if defined( __parallel )
    8891
    8992    INTEGER(iwp),DIMENSION(3,2)               :: bdims = 0        !> sub-domain grid topology of current PE
     
    39683971       END SUBROUTINE vnest_deallocate
    39693972
     3973#endif
    39703974 END MODULE vertical_nesting_mod
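
    In vertical_nesting_mod.f90 the guard wraps almost the entire module body: only declarations
    that serial-safe code still references, such as vnested and vnest_init, remain outside, while
    all MPI-dependent declarations and the contained subroutines sit inside a single
    #if defined( __parallel ) block that ends just before END MODULE. A minimal sketch of that
    whole-body guard, again with hypothetical names, might look like this:

     MODULE demo_vnest_mod
    !-- Flags that other modules reference in serial builds stay outside the guard
        LOGICAL ::  demo_vnested    = .FALSE.
        LOGICAL ::  demo_vnest_init = .FALSE.

    #if defined( __parallel )
    !-- Everything that requires MPI, including the contained procedures, lives
    !-- inside one block; a serial build sees only the two flags above.
     CONTAINS

        SUBROUTINE demo_vnest_timestep_sync
           PRINT*, 'compiled in parallel builds only'
        END SUBROUTINE demo_vnest_timestep_sync

    #endif
     END MODULE demo_vnest_mod
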
  • palm/trunk/SOURCE/virtual_measurement_mod.f90

    r4438 r4444  
    2525! -----------------
    2626! $Id$
     27! bugfix: cpp-directives for serial mode added
     28!
     29! 4438 2020-03-03 20:49:28Z suehring
    2730! Add cpu-log points
    2831!
     
    868871    INTEGER(iwp), DIMENSION(:), ALLOCATABLE     ::  ns_all !< dummy array used to sum-up the number of observation coordinates
    869872
     873#if defined( __parallel )
    870874    INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE   ::  ns_atmos !< number of observation points for each station on each mpi rank
    871875    INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE   ::  ns_soil  !< number of observation points for each station on each mpi rank
     876#endif
    872877
    873878    INTEGER(iwp), DIMENSION(:,:,:), ALLOCATABLE ::  meas_flag !< mask array indicating measurement positions