Changeset 4444
Timestamp: Mar 5, 2020 3:59:50 PM
Location: palm/trunk/SOURCE
Files: 26 edited

Legend: in the diffs below, lines prefixed with "+" were added in r4444 and lines prefixed with "-" were removed; unprefixed lines are unchanged context. A line containing only "…" marks omitted unchanged lines.
palm/trunk/SOURCE/check_open.f90
r4400 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4400 2020-02-10 20:32:41Z suehring
 ! Remove binary output for virtual measurements
 !
…
 
 
+    USE control_parameters, &
+        ONLY:  coupling_char, data_output_2d_on_each_pe, max_masks, message_string, openfile, &
+               run_description_header
+
+#if defined( __parallel )
     USE control_parameters, &
-        ONLY:  coupling_char, data_output_2d_on_each_pe, &
-               max_masks, message_string, nz_do3d, openfile, &
-               run_description_header
+        ONLY:  nz_do3d
+#endif
 
     USE indices, &
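The edit above is the template most files in this changeset follow: symbols needed in every build keep an unguarded USE, ONLY list, while symbols referenced only in MPI code paths move to a second, guarded USE of the same module (data_output_3d.f90 below also shows the complementary "#if ! defined( __parallel )" branch). A minimal, self-contained sketch of the idiom — module and variable names here are invented for illustration, not taken from PALM:

MODULE demo_parameters                  ! stand-in for e.g. control_parameters
   IMPLICIT NONE
   INTEGER ::  always_needed = 1        ! referenced in serial and parallel builds
   INTEGER ::  mpi_only      = 2        ! referenced only under __parallel
END MODULE demo_parameters

PROGRAM demo
!
!-- Unguarded USE: symbols that every build requires
   USE demo_parameters, ONLY:  always_needed
!
!-- Guarded USE: a serial build never imports these symbols, so the serial
!-- code path stays free of unused (and possibly undefined) imports
#if defined( __parallel )
   USE demo_parameters, ONLY:  mpi_only
#endif

   IMPLICIT NONE

   PRINT*, 'always needed = ', always_needed
#if defined( __parallel )
   PRINT*, 'mpi only      = ', mpi_only
#endif
END PROGRAM demo

Built with a preprocessing compiler flag, e.g. "gfortran -cpp -D__parallel demo.f90" for the parallel variant and plain "gfortran -cpp demo.f90" for the serial one.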
palm/trunk/SOURCE/check_parameters.f90
r4392 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4392 2020-01-31 16:14:57Z pavelkrc
 ! Some error numbers revised to prevent double usage
 !
…
    USE transpose_indices
 
+#if defined( __parallel )
    USE vertical_nesting_mod, &
        ONLY:  vnested, &
               vnest_check_parameters
+#endif
 
 
…
    LOGICAL ::  found  !< flag, true if output variable is already marked for averaging
 
+   REAL(wp) ::  gradient  !< local gradient
+#if defined( __parallel )
    REAL(wp) ::  dt_spinup_max  !< maximum spinup timestep in nested domains
-   REAL(wp) ::  gradient  !< local gradient
    REAL(wp) ::  remote = 0.0_wp  !< MPI id of remote processor
    REAL(wp) ::  spinup_time_max  !< maximum spinup time in nested domains
    REAL(wp) ::  time_to_be_simulated_from_reference_point  !< time to be simulated from reference point
+#endif
 
 
…
    ENDIF
 
+#if defined( __parallel )
 !
 !-- Vertical nesting: check fine and coarse grid compatibility for data exchange
    IF ( vnested )  CALL vnest_check_parameters
+#endif
 
 !
palm/trunk/SOURCE/data_output_2d.f90
r4442 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4442 2020-03-04 19:21:13Z suehring
 ! Change order of dimension in surface array %frac to allow for better
 ! vectorization.
…
    INTEGER(iwp) ::  flag_nr  !< number of masking flag
    INTEGER(iwp) ::  i        !< loop index
-   INTEGER(iwp) ::  iis      !< vertical index of a xy slice in array 'local_2d_sections'
    INTEGER(iwp) ::  is       !< slice index
    INTEGER(iwp) ::  ivar     !< variable index
…
    INTEGER(iwp) ::  nzt_do   !< upper limit of the data field (usually nzt+1)
    INTEGER(iwp) ::  s_ind    !< index of slice types (xy=1, xz=2, yz=3)
+#if defined( __parallel )
+   INTEGER(iwp) ::  iis      !< vertical index of a xy slice in array 'local_2d_sections'
    INTEGER(iwp) ::  sender   !< PE id of sending PE
    INTEGER(iwp) ::  ind(4)   !< index limits (lower/upper bounds) of array 'local_2d'
+#endif
 
    LOGICAL ::  found  !< true if output variable was found
palm/trunk/SOURCE/data_output_3d.f90
r4360 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4360 2020-01-07 11:25:50Z suehring
 ! Introduction of wall_flags_total_0, which currently sets bits based on static
 ! topography information used in wall_flags_static_0
…
 
    USE control_parameters, &
-       ONLY:  debug_output_timestep, &
-              do3d, do3d_no, do3d_time_count, io_blocks, io_group, &
+       ONLY:  debug_output_timestep, do3d, do3d_no, do3d_time_count, &
               land_surface, message_string, ntdim_3d, nz_do3d, plant_canopy, &
               psolver, time_since_reference_point, urban_surface, &
               varnamelength
 
+#if defined( __parallel )
+   USE control_parameters, &
+       ONLY:  io_blocks, io_group
+#endif
+
    USE cpulog, &
        ONLY:  log_point, cpu_log
 
-#if defined( __parallel )
    USE indices, &
        ONLY:  nbgp, nxl, nxlg, nxr, nxrg, nyn, nyng, nys, nysg, nzb, nzt, &
               wall_flags_total_0
-#else
+
+#if ! defined( __parallel )
    USE indices, &
-       ONLY:  nbgp, nx, nxl, nxlg, nxr, nxrg, ny, nyn, nyng, nys, nysg, nzb, &
-              nzt, wall_flags_total_0
+       ONLY:  nx, ny
 #endif
 
palm/trunk/SOURCE/data_output_mask.f90
r4377 → r4444

 ! Current revisions:
 ! -----------------
 !
 !
 ! Former revisions:
 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4377 2020-01-15 11:10:51Z gronemeier
 ! bugfix: set fill value for output according to wall_flags_total_0 for
 ! non-terrain following output
…
    USE control_parameters, &
        ONLY:  air_chemistry, domask, domask_no, domask_time_count, mask_i, &
-              mask_j, mask_k, mask_size, mask_size_l, mask_start_l, &
-              mask_surface, &
+              mask_j, mask_k, mask_size_l, mask_surface, &
               max_masks, message_string, nz_do3d, salsa, &
               time_since_reference_point
+
+#if defined( __parallel )
+   USE control_parameters, &
+       ONLY:  mask_size, mask_start_l
+#endif
 
    USE diagnostic_output_quantities_mod, &
…
 
    INTEGER(iwp) ::  av                       !< flag for (non-)average output
-   INTEGER(iwp) ::  ngp                      !< number of grid points of an output slice
    INTEGER(iwp) ::  flag_nr                  !< number of masking flag
    INTEGER(iwp) ::  i                        !< loop index
…
    INTEGER(iwp) ::  n                        !< loop index
    INTEGER(iwp) ::  netcdf_data_format_save  !< value of netcdf_data_format
+   INTEGER(iwp) ::  ktt                      !< k index of highest terrain surface
+#if defined( __parallel )
+   INTEGER(iwp) ::  ngp                      !< number of grid points of an output slice
    INTEGER(iwp) ::  sender                   !< PE id of sending PE
-   INTEGER(iwp) ::  ktt                      !< k index of highest terrain surface
    INTEGER(iwp) ::  ind(6)                   !< index limits (lower/upper bounds) of array 'local_2d'
+#endif
 
    LOGICAL ::  found  !< true if output variable was found
palm/trunk/SOURCE/exchange_horiz_2d.f90
r4360 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4360 2020-01-07 11:25:50Z suehring
 ! Corrected "Former revisions" section
 !
…
        ONLY:  bc_dirichlet_l, bc_dirichlet_n, bc_dirichlet_r, bc_dirichlet_s, &
               bc_radiation_l, bc_radiation_n, bc_radiation_r, bc_radiation_s, &
-              bc_radiation_l, bc_radiation_n, bc_radiation_r, bc_radiation_s, &
-              grid_level
-
+              bc_radiation_l, bc_radiation_n, bc_radiation_r, bc_radiation_s
+
+#if defined( __parallel )
+   USE control_parameters, &
+       ONLY:  grid_level
+#endif
+
    USE cpulog, &
        ONLY:  cpu_log, log_point_s
palm/trunk/SOURCE/flow_statistics.f90
r4442 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4442 2020-03-04 19:21:13Z suehring
 ! Change order of dimension in surface array %frac to allow for better
 ! vectorization.
…
 
    USE indices, &
-       ONLY:  ngp_2dh, ngp_2dh_s_inner, ngp_3d, ngp_3d_inner, ngp_sums, &
-              ngp_sums_ls, nxl, nxr, nyn, nys, nzb, nzt, topo_min_level, &
-              wall_flags_total_0
+       ONLY:  ngp_2dh, ngp_2dh_s_inner, ngp_3d, ngp_3d_inner, nxl, nxr, nyn, &
+              nys, nzb, nzt, topo_min_level, wall_flags_total_0
+
+#if defined( __parallel )
+   USE indices, &
+       ONLY:  ngp_sums, ngp_sums_ls
+#endif
 
    USE kinds
palm/trunk/SOURCE/header.f90
r4360 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4360 2020-01-07 11:25:50Z suehring
 ! Bugfix, character length too short, caused crash on NEC.
 !
…
    USE pmc_handle_communicator, &
        ONLY:  pmc_get_model_info
-#endif
 
    USE pmc_interface, &
        ONLY:  nested_run, nesting_datatransfer_mode, nesting_mode
+#endif
 
    USE surface_mod, &
…
    CHARACTER (LEN=26) ::  ver_rev  !< string for run identification
 
+#if defined( __parallel )
    CHARACTER (LEN=32) ::  cpl_name  !< name of child domain (nesting mode only)
+#endif
 
    CHARACTER (LEN=40) ::  output_format  !< netcdf format
…
    INTEGER(iwp) ::  ch     !< canyon depth in generic street-canyon setup
    INTEGER(iwp) ::  count  !< number of masked output locations
-   INTEGER(iwp) ::  cpl_parent_id  !< parent ID for the respective child model
    INTEGER(iwp) ::  cwx    !< canyon width along x in generic street-canyon setup
    INTEGER(iwp) ::  cwy    !< canyon width along y in generic street-canyon setup
…
    INTEGER(iwp) ::  ll     !< substring length
    INTEGER(iwp) ::  mid    !< masked output running index
+#if defined( __parallel )
+   INTEGER(iwp) ::  cpl_parent_id  !< parent ID for the respective child model
    INTEGER(iwp) ::  my_cpl_id  !< run id in a nested model setup
    INTEGER(iwp) ::  n          !< running index over number of couplers in a nested model setup
    INTEGER(iwp) ::  ncpl       !< number of coupler in a nested model setup
    INTEGER(iwp) ::  npe_total  !< number of total PEs in a coupler (parent + child)
+#endif
 
 
    REAL(wp) ::  cpuseconds_per_simulated_second  !< CPU time (in s) per simulated second
+#if defined( __parallel )
    REAL(wp) ::  lower_left_coord_x  !< x-coordinate of nest domain
    REAL(wp) ::  lower_left_coord_y  !< y-coordinate of nest domain
+#endif
 
 !
…
       CALL message( 'header', 'PA0191', 0, 0, 0, 6, 0 )
    ENDIF
+#if defined( __parallel )
    IF ( nested_run )  run_classification = 'nested ' // run_classification(1:63)
+#endif
    IF ( ocean_mode )  THEN
       run_classification = 'ocean - ' // run_classification(1:61)
…
 #endif
 
+#if defined( __parallel )
 !
 !-- Nesting informations
    IF ( nested_run )  THEN
 
-#if defined( __parallel )
       WRITE ( io, 600 )  TRIM( nesting_mode ), &
                          TRIM( nesting_datatransfer_mode )
…
                          TRIM( cpl_name )
       ENDDO
+
+   ENDIF
 #endif
 
-   ENDIF
    WRITE ( io, 99 )
…
         ' Time: ',A8,6X,'Run-No.: ',I2.2/ &
         ' Run on host: ',A10,6X,'En-No.: ',I2.2)
+#if defined( __parallel )
 600 FORMAT (/' Nesting informations:'/ &
             ' --------------------'/ &
…
             ' (*=me) id of PEs x (m) y (m)' )
 601 FORMAT (2X,A1,1X,I2.2,6X,I2.2,5X,I5,5X,F8.2,2X,F8.2,5X,A)
+#endif
 
 END SUBROUTINE header
palm/trunk/SOURCE/init_coupling.f90
r4360 → r4444

 ! ------------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4360 2020-01-07 11:25:50Z suehring
 ! Corrected "Former revisions" section
 !
…
 !
 !-- Local variables
+#if defined( __parallel )
    INTEGER(iwp) ::  i            !<
    INTEGER(iwp) ::  inter_color  !<
+#endif
 
    INTEGER(iwp), DIMENSION(:) ::  bc_data(0:3) = 0  !<
palm/trunk/SOURCE/init_grid.f90
r4414 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4414 2020-02-19 20:16:04Z suehring
 ! - Remove deprecated topography arrays nzb_s_inner, nzb_u_inner, etc.
 ! - Move initialization of boundary conditions and multigrid into an extra
…
    USE pegrid
 
+#if defined( __parallel )
    USE vertical_nesting_mod, &
        ONLY:  vnested, vnest_init_grid
+#endif
 
    IMPLICIT NONE
…
    ENDDO
    ENDIF
+
+#if defined( __parallel )
 !
 !-- Vertical nesting: communicate vertical grid level arrays between fine and
 !-- coarse grid
    IF ( vnested )  CALL vnest_init_grid
+#endif
 
 END SUBROUTINE init_grid
…
    INTEGER(iwp) ::  topo_top_index  !< orography top index, used to map 3D buildings onto terrain
 
+#if defined( __parallel )
    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  displace_dum  !< displacements of start addresses, used for MPI_ALLGATHERV
+#endif
    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  build_ids        !< building IDs on entire model domain
    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  build_ids_final  !< building IDs on entire model domain, multiple occurences are sorted out
palm/trunk/SOURCE/init_masks.f90
r4360 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4360 2020-01-07 11:25:50Z suehring
 ! Corrected "Former revisions" section
 !
…
    INTEGER(iwp) ::  i             !< loop index
    INTEGER(iwp) ::  ilen          !< length of string saved in 'do_mask'
-   INTEGER(iwp) ::  ind(6)        !< index limits (lower/upper bounds) of output array
    INTEGER(iwp) ::  ind_array(1)  !< array index
    INTEGER(iwp) ::  j             !< loop index
    INTEGER(iwp) ::  m             !< mask index
    INTEGER(iwp) ::  mid           !< masked output running index
+#if defined( __parallel )
+   INTEGER(iwp) ::  ind(6)        !< index limits (lower/upper bounds) of output array
    INTEGER(iwp) ::  n             !< loop index
    INTEGER(iwp) ::  sender        !< PE id of sending PE
+#endif
 
    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  tmp_array  !< temporary 1D array
palm/trunk/SOURCE/init_pegrid.f90
r4360 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4360 2020-01-07 11:25:50Z suehring
 ! changed message PA0467
 !
…
        ONLY:  bc_dirichlet_l, bc_dirichlet_n, bc_dirichlet_r, bc_dirichlet_s, &
               bc_lr, bc_ns, bc_radiation_l, bc_radiation_n, bc_radiation_r, &
-              bc_radiation_s, coupling_mode, coupling_topology, gathered_size, &
+              bc_radiation_s, &
               grid_level, grid_level_count, maximum_grid_level, &
-              message_string, mg_switch_to_pe0_level, momentum_advec, &
-              psolver, outflow_source_plane, recycling_width, scalar_advec, &
-              subdomain_size, turbulent_inflow, turbulent_outflow, y_shift
+              message_string, mg_switch_to_pe0_level, &
+              psolver
+
+
+#if defined( __parallel )
+   USE control_parameters, &
+       ONLY:  coupling_mode, coupling_topology, gathered_size, momentum_advec, &
+              outflow_source_plane, recycling_width, scalar_advec, subdomain_size, &
+              turbulent_inflow, turbulent_outflow, y_shift
 
    USE grid_variables, &
        ONLY:  dx
+#endif
 
    USE indices, &
-       ONLY:  mg_loc_ind, nbgp, nnx, nny, nnz, nx, nx_a, nx_o, nxl, nxl_mg, &
-              nxlu, nxr, nxr_mg, ny, ny_a, ny_o, nyn, nyn_mg, nys, nys_mg, &
+       ONLY:  nnx, nny, nnz, nx, nxl, nxl_mg, &
+              nxlu, nxr, nxr_mg, ny, nyn, nyn_mg, nys, nys_mg, &
               nysv, nz, nzb, nzt, nzt_mg, wall_flags_1, wall_flags_2, &
               wall_flags_3, wall_flags_4, wall_flags_5, wall_flags_6, &
               wall_flags_7, wall_flags_8, wall_flags_9, wall_flags_10
+
+#if defined( __parallel )
+   USE indices, &
+       ONLY:  mg_loc_ind, nbgp, nx_a, nx_o, ny_a, ny_o
+#endif
 
    USE kinds
…
    USE pegrid
 
+#if defined( __parallel )
    USE pmc_interface, &
        ONLY:  nested_run
 
    USE spectra_mod, &
-       ONLY:  calculate_spectra, dt_dosp
+       ONLY:  calculate_spectra
 
    USE synthetic_turbulence_generator_mod, &
        ONLY:  id_stg_left, id_stg_north, id_stg_right, id_stg_south, &
               use_syn_turb_gen
+#endif
 
    USE transpose_indices, &
-       ONLY:  nxl_y, nxl_yd, nxl_z, nxr_y, nxr_yd, nxr_z, nyn_x, nyn_z, nys_x, &
-              nys_z, nzb_x, nzb_y, nzb_yd, nzt_x, nzt_yd, nzt_y
+       ONLY:  nxl_y, nxl_z, nxr_y, nxr_z, nyn_x, nyn_z, nys_x, &
+              nys_z, nzb_x, nzb_y, nzt_x, nzt_y
+
+#if defined( __parallel )
+   USE transpose_indices, &
+       ONLY:  nxl_yd, nxr_yd, nzb_yd, nzt_yd
 
    USE vertical_nesting_mod, &
        ONLY:  vnested, vnest_init_pegrid_domain, vnest_init_pegrid_rank
+#endif
 
    IMPLICIT NONE
 
    INTEGER(iwp) ::  i  !< running index over number of processors or number of multigrid level
+#if defined( __parallel )
    INTEGER(iwp) ::  id_inflow_l     !< ID indicating processors located at the left inflow boundary
    INTEGER(iwp) ::  id_outflow_l    !< local value of id_outflow
…
    INTEGER(iwp) ::  id_stg_south_l  !< south lateral boundary local core id in case of turbulence generator
    INTEGER(iwp) ::  ind(5)          !< array containing the subdomain bounds
+#endif
    INTEGER(iwp) ::  j               !< running index, used for various loops
    INTEGER(iwp) ::  k               !< number of vertical grid points in different multigrid level
…
    INTEGER(iwp) ::  mg_levels_z              !< maximum number of grid level allowed along z-direction
    INTEGER(iwp) ::  mg_switch_to_pe0_level_l !< maximum number of grid level with switching to PE 0
+#if defined( __parallel )
    INTEGER(iwp) ::  nnx_y        !< quotient of number of grid points along x-direction and number of PEs used along y-direction
    INTEGER(iwp) ::  nny_x        !< quotient of number of grid points along y-direction and number of PEs used along x-direction
…
    INTEGER(iwp) ::  nnz_y        !< quotient of number of grid points along z-direction and number of PEs used along x-direction
    INTEGER(iwp) ::  numproc_sqr  !< square root of the number of processors
+#endif
    INTEGER(iwp) ::  nxl_l        !< lower index bound along x-direction on subdomain and different multigrid level
    INTEGER(iwp) ::  nxr_l        !< upper index bound along x-direction on subdomain and different multigrid level
    INTEGER(iwp) ::  nyn_l        !< lower index bound along y-direction on subdomain and different multigrid level
    INTEGER(iwp) ::  nys_l        !< upper index bound along y-direction on subdomain and different multigrid level
+#if defined( __parallel )
    INTEGER(iwp) ::  nzb_l        !< lower index bound along z-direction on subdomain and different multigrid level
+#endif
    INTEGER(iwp) ::  nzt_l        !< upper index bound along z-direction on subdomain and different multigrid level
 !$ INTEGER(iwp) ::  omp_get_num_threads  !< number of OpenMP threads
 
+#if defined( __parallel )
    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  ind_all  !< dummy array containing index bounds on subdomain, used for gathering
    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  nxlf     !< lower index bound allong x-direction for every PE
…
    INTEGER(iwp) ::  lcoord(2)  !< PE coordinates of left neighbor along x and y
    INTEGER(iwp) ::  rcoord(2)  !< PE coordinates of right neighbor along x and y
+#endif
 
 !
palm/trunk/SOURCE/lagrangian_particle_model_mod.f90
r4430 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4430 2020-02-27 18:02:20Z suehring
 ! - Bugfix in logarithmic interpolation of near-ground particle speed (density
 !   was not considered).
…
    USE particle_attributes
 
+#if defined( __parallel )
    USE pmc_particle_interface, &
        ONLY:  pmcp_c_get_particle_from_parent, pmcp_p_fill_particle_win, &
               pmcp_p_delete_particles_in_fine_grid_area, pmcp_g_init, &
               pmcp_g_print_number_of_particles
+#endif
 
    USE pmc_interface, &
…
 
    INTEGER(iwp), PARAMETER ::  NR_2_direction_move = 10000  !<
+
+#if defined( __parallel )
    INTEGER(iwp) ::  nr_move_north  !<
    INTEGER(iwp) ::  nr_move_south  !<
 
    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  move_also_north
    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  move_also_south
+#endif
 
    REAL(wp) ::  epsilon_collision  !<
…
    ENDIF
 
+#if defined( __parallel )
    IF ( nested_run )  CALL pmcp_g_init
+#endif
+
 !
 !-- To avoid programm abort, assign particles array to the local version of
…
       first_loop_stride = .FALSE.
    ENDDO   ! timestep loop
+
+#if defined( __parallel )
 !
 !-- in case of nested runs do the transfer of particles after every full model time step
…
       deleted_particles = 0
    ENDIF
+#endif
 
 !
…
 END SUBROUTINE lpm_actions
 
+
+#if defined( __parallel )
+!------------------------------------------------------------------------------!
+! Description:
+! ------------
+!
+!------------------------------------------------------------------------------!
+SUBROUTINE particles_from_parent_to_child
+
+   CALL pmcp_c_get_particle_from_parent   ! Child actions
+   CALL pmcp_p_fill_particle_win          ! Parent actions
+
+   RETURN
+
+END SUBROUTINE particles_from_parent_to_child
+
 
 !------------------------------------------------------------------------------!
 ! Description:
 ! ------------
 !
 !------------------------------------------------------------------------------!
-SUBROUTINE particles_from_parent_to_child
-
-   CALL pmcp_c_get_particle_from_parent   ! Child actions
-   CALL pmcp_p_fill_particle_win          ! Parent actions
-
-   RETURN
-
-END SUBROUTINE particles_from_parent_to_child
-
-
-!------------------------------------------------------------------------------!
-! Description:
-! ------------
-!
-!------------------------------------------------------------------------------!
 SUBROUTINE particles_from_child_to_parent
…
 
 END SUBROUTINE particles_from_child_to_parent
+#endif
 
 !------------------------------------------------------------------------------!
…
 #endif
 
+#if defined( __parallel )
    IF ( nested_run )  THEN
       CALL pmcp_g_print_number_of_particles( simulated_time+dt_3d, &
                                              tot_number_of_particles)
    ENDIF
+#endif
 
 !
…
 SUBROUTINE lpm_exchange_horiz
 
-   INTEGER(iwp) ::  i   !< grid index (x) of particle positition
    INTEGER(iwp) ::  ip  !< index variable along x
-   INTEGER(iwp) ::  j   !< grid index (y) of particle positition
    INTEGER(iwp) ::  jp  !< index variable along y
    INTEGER(iwp) ::  kp  !< index variable along z
    INTEGER(iwp) ::  n   !< particle index variable
+
+#if defined( __parallel )
+   INTEGER(iwp) ::  i         !< grid index (x) of particle positition
+   INTEGER(iwp) ::  j         !< grid index (y) of particle positition
    INTEGER(iwp) ::  par_size  !< Particle size in bytes
    INTEGER(iwp) ::  trlp_count  !< number of particles send to left PE
…
    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  trrp  !< particles send to right PE
    TYPE(particle_type), DIMENSION(:), ALLOCATABLE ::  trsp  !< particles send to south PE
+#endif
 
    CALL cpu_log( log_point_s(23), 'lpm_exchange_horiz', 'start' )
…
 END SUBROUTINE lpm_exchange_horiz
 
+#if defined( __parallel )
 !------------------------------------------------------------------------------!
 ! Description:
…
    ENDDO
 
-   RETURN
-
 END SUBROUTINE lpm_add_particles_to_gridcell
+#endif
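The particle-transfer routines above show the second recurring pattern in this changeset: when a procedure's whole body is meaningful only in an MPI build (here the parent/child particle exchange of a nested run), the entire subroutine, header comment included, moves inside the guard, and every call site is guarded the same way. A compilable toy version of that structure — all names are illustrative, not PALM's:

MODULE transfer_demo
   IMPLICIT NONE
#if defined( __parallel )
CONTAINS
!
!-- This subroutine only exists in parallel builds; a serial build compiles
!-- an empty module and never references it
   SUBROUTINE particles_from_parent_to_child_demo
      PRINT*, 'parent -> child transfer'
   END SUBROUTINE particles_from_parent_to_child_demo
#endif
END MODULE transfer_demo

PROGRAM nested_demo
   USE transfer_demo
   IMPLICIT NONE
#if defined( __parallel )
!
!-- Call sites carry the same guard as the definition
   CALL particles_from_parent_to_child_demo
#endif
   PRINT*, 'done'
END PROGRAM nested_demo

Guarding the definition and all call sites with the identical condition is what keeps the serial link step free of unresolved symbols.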
palm/trunk/SOURCE/land_surface_model_mod.f90
r4442 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directive moved
+!
+! 4442 2020-03-04 19:21:13Z suehring
 ! Change order of dimension in surface arrays %frac, %emissivity and %albedo
 ! to allow for better vectorization in the radiation interactions.
…
    REAL(wp), DIMENSION(:), ALLOCATABLE ::  bound          !< temporary arrays for storing index bounds
    REAL(wp), DIMENSION(:), ALLOCATABLE ::  bound_root_fr  !< temporary arrays for storing index bounds
+#if defined( __parallel )
    REAL(wp), DIMENSION(:), ALLOCATABLE ::  pr_soil_init   !< temporary array used for averaging soil profiles
-#if defined( __parallel )
    REAL(wp), DIMENSION(:), ALLOCATABLE ::  z_soil_root    !< vertical dimension of soil grid in root domain
 #endif
palm/trunk/SOURCE/local_stop.f90
r4360 → r4444

 ! -----------------
 ! $Id$
+! bugfix: misplaced cpp-directive moved
+!
+! 4360 2020-01-07 11:25:50Z suehring
 ! Corrected "Former revisions" section
 !
…
 SUBROUTINE local_stop
 
+#if defined( __parallel )
 
    USE control_parameters, &
…
 
 
-#if defined( __parallel )
    IF ( coupling_mode == 'uncoupled' )  THEN
       IF ( nested_run )  THEN
palm/trunk/SOURCE/multi_agent_system_mod.f90
r4346 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4346 2019-12-18 11:55:56Z motisi
 ! Removed wall_flags_static_0 from USE statements as it's not used within
 ! the module
…
    INTEGER(iwp) ::  iran_agent = -1234567  !< number for random generator
    INTEGER(iwp) ::  min_nr_agent = 2       !< namelist parameter (see documentation)
+#if defined( __parallel )
    INTEGER(iwp) ::  ghla_count_recv  !< number of agents in left ghost layer
    INTEGER(iwp) ::  ghna_count_recv  !< number of agents in north ghost layer
    INTEGER(iwp) ::  ghra_count_recv  !< number of agents in right ghost layer
    INTEGER(iwp) ::  ghsa_count_recv  !< number of agents in south ghost layer
-   INTEGER(iwp) ::  maximum_number_of_agents = 0  !< maximum number of agents during run
    INTEGER(iwp) ::  nr_move_north    !< number of agts to move north during exchange_horiz
    INTEGER(iwp) ::  nr_move_south    !< number of agts to move south during exchange_horiz
+#endif
+   INTEGER(iwp) ::  maximum_number_of_agents = 0  !< maximum number of agents during run
    INTEGER(iwp) ::  number_of_agents = 0        !< number of agents for each grid box (3d array is saved on agt_count)
    INTEGER(iwp) ::  number_of_agent_groups = 1  !< namelist parameter (see documentation)
…
    TYPE(agent_type), DIMENSION(:), POINTER ::  agents  !< Agent array for this grid cell
    TYPE(agent_type) ::  zero_agent  !< zero agent to avoid weird thing
+#if defined( __parallel )
    TYPE(agent_type), DIMENSION(:), ALLOCATABLE ::  move_also_north  !< for agent exchange between PEs
    TYPE(agent_type), DIMENSION(:), ALLOCATABLE ::  move_also_south  !< for agent exchange between PEs
…
    TYPE(agent_type), DIMENSION(:), ALLOCATABLE ::  agt_gh_r  !< ghost layer right of pe domain
    TYPE(agent_type), DIMENSION(:), ALLOCATABLE ::  agt_gh_s  !< ghost layer south of pe domain
+#endif
 !
 !-- Type for 2D grid on which agents are stored
…
    IMPLICIT NONE
 
+#if defined( __parallel )
    INTEGER(iwp) ::  agt_size  !< Agent size in bytes
+   INTEGER(iwp) ::  n         !< counter (number of PEs)
+   INTEGER(iwp) ::  noa_rcv   !< received number of agents
+#endif
    INTEGER(iwp) ::  dummy     !< dummy
    INTEGER(iwp) ::  ii        !< counter (x)
    INTEGER(iwp) ::  ip        !< counter (x)
    INTEGER(iwp) ::  jp        !< counter (y)
-   INTEGER(iwp) ::  n         !< counter (number of PEs)
    INTEGER(iwp) ::  noa       !< number of agents
-   INTEGER(iwp) ::  noa_rcv   !< received number of agents
    INTEGER(iwp) ::  out_noa   !< number of agents for output
 
+#if defined( __parallel )
    INTEGER(iwp), DIMENSION(0:numprocs-1) ::  noa_arr  !< number of agents on each PE
+#endif
 !
 !-- SAVE attribute required to avoid compiler warning about pointer outlive the pointer target
    TYPE(agent_type), DIMENSION(:), ALLOCATABLE, TARGET, SAVE ::  trf_agents  !< all agents on current PE
+#if defined( __parallel )
    TYPE(agent_type), DIMENSION(:), ALLOCATABLE, TARGET, SAVE ::  out_agents  !< all agents in entire domain
+#endif
 
    LOGICAL, INTENT (INOUT) ::  ftest
…
 END SUBROUTINE mas_data_output_agents
 
+#if defined( __parallel )
 !------------------------------------------------------------------------------!
 ! Description:
…
 
 END SUBROUTINE mas_eh_add_agents_to_gridcell
-
+#endif
+
+
+#if defined( __parallel )
 !------------------------------------------------------------------------------!
 ! Description:
…
    ENDDO
 END SUBROUTINE mas_eh_add_ghost_agents_to_gridcell
+#endif
 
 !------------------------------------------------------------------------------!
…
    IMPLICIT NONE
 
+   INTEGER(iwp) ::  ip  !< index variable along x
+   INTEGER(iwp) ::  jp  !< index variable along y
+   INTEGER(iwp) ::  n   !< agent index variable
+
+#if defined( __parallel )
+
    INTEGER(iwp) ::  i   !< grid index (x) of agent positition
-   INTEGER(iwp) ::  ip  !< index variable along x
+   INTEGER(iwp) ::  j   !< grid index (y) of agent positition
-   INTEGER(iwp) ::  jp  !< index variable along y
-   INTEGER(iwp) ::  n   !< agent index variable
+   INTEGER(iwp) ::  par_size  !< Agent size in bytes
 
    INTEGER(iwp) ::  trla_count       !< number of agents send to left PE
    INTEGER(iwp) ::  trla_count_recv  !< number of agents receive from right PE
…
    TYPE(agent_type), DIMENSION(:), ALLOCATABLE ::  trra  !< agents send to right PE
    TYPE(agent_type), DIMENSION(:), ALLOCATABLE ::  trsa  !< agents send to south PE
-
-
-#if defined( __parallel )
 
…
       DEALLOCATE( move_also_south )
 
+!
+!-- Accumulate the number of agents transferred between the subdomains)
+   CALL mas_eh_ghost_exchange
+
 #else
 
…
 #endif
 
-!
-!-- Accumulate the number of agents transferred between the subdomains)
-   CALL mas_eh_ghost_exchange
-
 END SUBROUTINE mas_eh_exchange_horiz
 
+
+#if defined( __parallel )
 !------------------------------------------------------------------------------!
 ! Description:
…
 
    IMPLICIT NONE
-
-
-#if defined( __parallel )
 
    INTEGER(iwp) ::  ip  !< index variable along x
…
       ENDIF
 
-#endif
-
 END SUBROUTINE mas_eh_ghost_exchange
+#endif
 
 !------------------------------------------------------------------------------!
…
 END SUBROUTINE mas_ps_sort_in_subboxes
 
+#if defined( __parallel )
 !------------------------------------------------------------------------------!
 ! Description:
…
    number_of_agents = nn
 
 END SUBROUTINE mas_ps_pack
+#endif
 
 !------------------------------------------------------------------------------!
palm/trunk/SOURCE/palm.f90
r4414 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4414 2020-02-19 20:16:04Z suehring
 ! Call to module_interface_init_numerics
 !
…
    USE arrays_3d
 
+#if defined( __parallel )
    USE bulk_cloud_model_mod, &
        ONLY:  bulk_cloud_model, microphysics_morrison, microphysics_seifert
+#endif
 
    USE control_parameters, &
-       ONLY:  constant_diffusion, child_domain, &
-              coupling_char, do2d_at_begin, do3d_at_begin, humidity, &
-              initializing_actions, io_blocks, io_group, message_string, &
-              neutral, passive_scalar, runnr, simulated_time_chr, spinup, &
+       ONLY:  coupling_char, do2d_at_begin, do3d_at_begin, io_blocks, &
+              io_group, message_string, runnr, simulated_time_chr, spinup, &
               time_since_reference_point, user_interface_current_revision, &
               user_interface_required_revision, version, write_binary
+
+#if defined( __parallel )
+   USE control_parameters, &
+       ONLY:  child_domain, constant_diffusion, humidity, &
+              initializing_actions, neutral, passive_scalar
+#endif
 
    USE cpulog, &
-       ONLY:  cpu_log, log_point, log_point_s, cpu_statistics
+       ONLY:  cpu_log, log_point, cpu_statistics
+
+#if defined( __parallel )
+   USE cpulog, &
+       ONLY:  log_point_s
+#endif
 
    USE diagnostic_output_quantities_mod, &
        ONLY:  doq_calculate
 
+#if defined( __parallel )
    USE indices, &
        ONLY:  nbgp
+#endif
 
    USE kinds
…
    USE pegrid
 
+#if defined( __parallel )
    USE pmc_particle_interface, &
        ONLY:  pmcp_g_alloc_win
…
        ONLY:  nested_run, pmci_child_initialize, pmci_init, &
               pmci_modelconfiguration, pmci_parent_initialize
+#endif
 
    USE surface_data_output_mod, &
…
        ONLY:  wrd_global, wrd_local
 
-#if defined( __parallel ) &&defined( _OPENACC )
+#if defined( __parallel ) && defined( _OPENACC )
    USE openacc
 #endif
…
    CALL module_interface_init_output
 
+#if defined( __parallel )
 !
 !-- Coupling protocol setup for nested-domain runs
…
       CALL pmcp_g_alloc_win    ! Must be called after pmci_child_initialize and pmci_parent_initialize
    ENDIF
+#endif
 
 !
palm/trunk/SOURCE/pmc_interface_mod.f90
r4413 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives and variable declarations for serial mode added
+!
+! 4413 2020-02-19 15:52:19Z hellstea
 ! All the USE-statements within subroutines moved up to the module declaration section.
 !
…
 MODULE pmc_interface
 
+#if ! defined( __parallel )
+!
+!-- Serial mode does not allow nesting, but requires the following variables as steering
+!-- quantities
+   USE kinds
+
+   IMPLICIT NONE
+
+   PUBLIC
+
+   CHARACTER(LEN=8), SAVE ::  nesting_mode = 'none'  !< steering parameter for 1- or 2-way nesting
+
+   INTEGER(iwp), SAVE ::  comm_world_nesting  !< Global nesting communicator
+   INTEGER(iwp), SAVE ::  cpl_id = 1          !<
+
+   LOGICAL, SAVE ::  nested_run = .FALSE.        !< general switch
+   LOGICAL, SAVE ::  rans_mode_parent = .FALSE.  !< parent model mode (.F.-LES mode, .T.-RANS mode)
+
+#else
+
    USE ISO_C_BINDING
…
 
    LOGICAL, SAVE ::  nested_run = .FALSE.  !< general switch
-   LOGICAL
+   LOGICAL, SAVE ::  rans_mode_parent = .FALSE.  !< mode of parent model (.F. - LES mode, .T. - RANS mode)
 !
 !-- Geometry
…
 END SUBROUTINE pmci_ensure_nest_mass_conservation_vertical
 
+#endif
 END MODULE pmc_interface
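pmc_interface_mod takes a different approach from the other files: rather than compiling to an empty module in serial mode, it keeps a small "#if ! defined( __parallel )" branch that defines only the steering variables (nesting_mode, nested_run, ...) that other files read unconditionally, and puts the full implementation behind "#else". A reduced sketch of that module shape, with invented names and heavily abridged contents:

MODULE nesting_stub_demo

#if ! defined( __parallel )
!
!-- Serial branch: nesting is unavailable, but the steering quantities that
!-- the rest of the code reads unconditionally are still provided
   IMPLICIT NONE
   PUBLIC
   CHARACTER(LEN=8), SAVE ::  nesting_mode = 'none'
   LOGICAL,          SAVE ::  nested_run   = .FALSE.
#else
!
!-- Parallel branch: the full nesting implementation lives here
   IMPLICIT NONE
   PUBLIC
   CHARACTER(LEN=8), SAVE ::  nesting_mode = 'two-way'
   LOGICAL,          SAVE ::  nested_run   = .FALSE.
CONTAINS
   SUBROUTINE synchronize_demo
      PRINT*, 'synchronizing nested timestep'
   END SUBROUTINE synchronize_demo
#endif

END MODULE nesting_stub_demo

The benefit of the stub branch is that callers such as time_integration.f90 can keep unguarded references to the steering variables while only the procedure calls need guards.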
palm/trunk/SOURCE/pmc_particle_interface.f90
r4360 → r4444

 ! -----------------!
 ! $Id$
+! bugfix: preprocessor directives for serial mode added
+!
+! 4360 2020-01-07 11:25:50Z suehring
 ! Corrected "Former revisions" section
 !
…
 ! child model.
 !------------------------------------------------------------------------------!
+#if defined( __parallel )
 
 USE, INTRINSIC ::  ISO_C_BINDING
…
 END SUBROUTINE pmc_realloc_particles_array
 
+#endif
 END MODULE pmc_particle_interface
palm/trunk/SOURCE/surface_data_output_mod.f90
r4360 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4360 2020-01-07 11:25:50Z suehring
 ! Fix wrongly declared nc_stat variable in surface_data_output_mod
 !
…
        ONLY:  io_blocks, io_group, time_since_reference_point
 
+#if defined( __parallel )
    USE pegrid, &
        ONLY:  comm2d, ierr
+#endif
 
…
        ONLY:  io_blocks, io_group
 
+#if defined( __parallel )
    USE pegrid, &
        ONLY:  comm2d, ierr
+#endif
 
    IMPLICIT NONE
palm/trunk/SOURCE/synthetic_turbulence_generator_mod.f90
r4442 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added, dummy statements to prevent compile errors added
+!
+! 4442 2020-03-04 19:21:13Z suehring
 ! Set back turbulent length scale to 8 x grid spacing in the parametrized mode
 ! (was accidantly changed).
…
    INTEGER(iwp) ::  nzb_y_stg  !< lower bound of z coordinate (required for transposing z on PEs along y)
    INTEGER(iwp) ::  nzt_y_stg  !< upper bound of z coordinate (required for transposing z on PEs along y)
+#if defined( __parallel )
    INTEGER(iwp) ::  stg_type_xz        !< MPI type for full z range
    INTEGER(iwp) ::  stg_type_xz_small  !< MPI type for small z range
    INTEGER(iwp) ::  stg_type_yz        !< MPI type for full z range
    INTEGER(iwp) ::  stg_type_yz_small  !< MPI type for small z range
+#endif
 
    INTEGER(iwp), DIMENSION(3) ::  nr_non_topo_xz = 0  !< number of non-topography grid points at xz cross-sections,
…
                                                       !< required for bias correction of imposed perturbations
 
+#if defined( __parallel )
    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  displs_xz      !< displacement for MPI_GATHERV
    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  recv_count_xz  !< receive count for MPI_GATHERV
    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  displs_yz      !< displacement for MPI_GATHERV
    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  recv_count_yz  !< receive count for MPI_GATHERV
+#endif
    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  nux  !< length scale of u in x direction (in gp)
    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  nuy  !< length scale of u in y direction (in gp)
…
    INTEGER(iwp) ::  j  !> loop index
    INTEGER(iwp) ::  k  !< index
+#if defined( __parallel )
    INTEGER(iwp) ::  newtype   !< dummy MPI type
    INTEGER(iwp) ::  realsize  !< size of REAL variables
+#endif
 
    INTEGER(iwp), DIMENSION(3) ::  nr_non_topo_xz_l = 0  !< number of non-topography grid points at xz-cross-section on subdomain
…
    REAL(wp) ::  lwy  !< length scale for w in y direction
    REAL(wp) ::  lwz  !< length scale for w in z direction
+#if defined( __parallel )
    REAL(wp) ::  nnz  !< increment used to determine processor decomposition of z-axis along x and y direction
+#endif
    REAL(wp) ::  zz   !< height
 
…
 !-- Create mpi-datatypes for exchange in case of non-local but distributed
 !-- computation of the velocity seeds. This option is useful in
-!-- case large turbulent length scales are present m, where the computational
-!-- effort becomes large and need to be parallelized. For parametrized
+!-- case large turbulent length scales are present, where the computational
+!-- effort becomes large and need to be parallelized. For parameterized
 !-- turbulence the length scales are small and computing the velocity seeds
 !-- locally is faster (no overhead by communication).
…
 #else
    f_n(nzb+1:nzt+1,nys:nyn) = f_n_l(nzb_x_stg:nzt_x_stg+1,nys:nyn)
+!
+!-- Next line required to avoid compile errors because of unused dummy arguments
+   IF ( id_left == 0 )  id_right = 0
 #endif
…
 #else
    f_n(nzb+1:nzt+1,nxl:nxr) = f_n_l(nzb_y_stg:nzt_y_stg+1,nxl:nxr)
+!
+!-- Next line required to avoid compile errors because of unused dummy arguments
+   IF ( id_north == 0 )  id_south = 0
 #endif
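The two "IF ( id_left == 0 ) id_right = 0" additions are a workaround rather than logic: in the serial branch the routine's MPI-rank dummy arguments would otherwise be unused, which strict compiler settings flag. Touching them once in a cheap conditional silences that without affecting results, since the serial build never uses those ids meaningfully. A reduced illustration of the idiom — subroutine name and interface are hypothetical, not the actual STG routine signature:

MODULE stg_demo
   IMPLICIT NONE
CONTAINS
   SUBROUTINE merge_seed_demo( id_left, id_right )
      INTEGER, INTENT(IN)    ::  id_left   !< referenced only in the MPI branch
      INTEGER, INTENT(INOUT) ::  id_right  !< referenced only in the MPI branch
#if defined( __parallel )
!
!--   The MPI branch would pass id_left / id_right to communication calls
      PRINT*, id_left, id_right
#else
!
!--   Serial branch: reference the dummies once so that compilers running
!--   with strict warning settings do not flag them as unused
      IF ( id_left == 0 )  id_right = 0
#endif
   END SUBROUTINE merge_seed_demo
END MODULE stg_demo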
palm/trunk/SOURCE/time_integration.f90
r4420 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4420 2020-02-24 14:13:56Z maronga
 ! Added output control for wind turbine model
 !
…
 
    USE arrays_3d, &
-       ONLY:  diss, diss_p, dzu, e, e_p, nc, nc_p, nr, nr_p, prho, pt, pt_p, pt_init, q_init, q, &
-              qc, qc_p, qr, qr_p, q_p, ref_state, rho_ocean, s, s_p, sa_p, &
-              tend, u, u_p, v, vpt, v_p, w, w_p
+       ONLY:  diss, diss_p, dzu, e_p, nc_p, nr_p, prho, pt, pt_p, pt_init, q, qc_p, qr_p, q_init, &
+              q_p, ref_state, rho_ocean, sa_p, s_p, tend, u, u_p, v, vpt, v_p, w_p
+
+#if defined( __parallel ) && ! defined( _OPENACC )
+   USE arrays_3d, &
+       ONLY:  e, nc, nr, qc, qr, s, w
+#endif
 
    USE biometeorology_mod, &
…
    USE chem_modules, &
-       ONLY:  bc_cs_t_val, chem_species, cs_name, &
-              emissions_anthropogenic, emiss_read_legacy_mode, &
+       ONLY:  bc_cs_t_val, chem_species, emissions_anthropogenic, emiss_read_legacy_mode, &
               n_matched_vars
+
+#if defined( __parallel )
+   USE chem_modules, &
+       ONLY:  cs_name
+#endif
 
    USE chemistry_model_mod, &
…
               multi_agent_system_end, multi_agent_system_start, nesting_offline, neutral, &
               nr_timesteps_this_run, nudging, ocean_mode, passive_scalar, pt_reference, &
-              pt_slope_offset, random_heatflux, rans_mode, rans_tke_e, run_coupled, salsa, &
+              pt_slope_offset, random_heatflux, rans_tke_e, run_coupled, salsa, &
               simulated_time, simulated_time_chr, skip_time_do2d_xy, skip_time_do2d_xz, &
               skip_time_do2d_yz, skip_time_do3d, skip_time_domask, skip_time_dopr, &
…
               virtual_flight, virtual_measurement, ws_scheme_mom, ws_scheme_sca, timestep_count
 
+#if defined( __parallel )
+   USE control_parameters, &
+       ONLY:  rans_mode
+#endif
+
    USE cpulog, &
        ONLY:  cpu_log, log_point, log_point_s
…
    USE pegrid
 
+#if defined( __parallel )
    USE pmc_interface, &
        ONLY:  nested_run, nesting_mode, pmci_boundary_conds, pmci_datatrans, pmci_synchronize, &
               pmci_ensure_nest_mass_conservation, pmci_ensure_nest_mass_conservation_vertical, &
               pmci_set_swaplevel
+#endif
 
    USE progress_bar, &
…
 
    USE vertical_nesting_mod, &
-       ONLY:  vnested, vnest_anterpolate, vnest_anterpolate_e, vnest_boundary_conds, &
-              vnest_boundary_conds_khkm, vnest_deallocate, vnest_init, vnest_init_fine, &
-              vnest_start_time
+       ONLY:  vnested, vnest_init
+
+#if defined( __parallel )
+   USE vertical_nesting_mod, &
+       ONLY:  vnest_anterpolate, vnest_anterpolate_e, vnest_boundary_conds, &
+              vnest_boundary_conds_khkm, vnest_deallocate, vnest_init_fine, vnest_start_time
+#endif
 
    USE virtual_measurement_mod, &
…
 
 #if defined( _OPENACC )
    USE arrays_3d, &
-       ONLY:  d, dd2zu, ddzu, ddzw, drho_air, drho_air_zw, dzw, heatflux_output_conversion, kh, &
-              km, momentumflux_output_conversion, p, ptdf_x, ptdf_y, rdf, rdf_sc, rho_air, &
-              rho_air_zw, tdiss_m, te_m, tpt_m, tu_m, tv_m, tw_m, ug, u_init, u_stokes_zu, vg, &
-              v_init, v_stokes_zu, zu
+       ONLY:  d, dd2zu, ddzu, ddzw, drho_air, drho_air_zw, dzw, e, heatflux_output_conversion, &
+              kh, km, momentumflux_output_conversion, nc, nr, p, ptdf_x, ptdf_y, qc, qr, rdf, &
+              rdf_sc, rho_air, rho_air_zw, s, tdiss_m, te_m, tpt_m, tu_m, tv_m, tw_m, ug, u_init, &
+              u_stokes_zu, vg, v_init, v_stokes_zu, w, zu
 
    USE control_parameters, &
…
    INTEGER(iwp) ::  ig   !< index for salsa gases
    INTEGER(iwp) ::  lsp  !<
+   INTEGER(iwp) ::  mid  !< masked output running index
+#if defined( __parallel )
    INTEGER(iwp) ::  lsp_usr  !<
-   INTEGER(iwp) ::  mid      !< masked output running index
    INTEGER(iwp) ::  n        !< loop counter for chemistry species
+#endif
 
    REAL(wp) ::  dt_3d_old  !< temporary storage of timestep to be used for
…
 !-- At beginning determine the first time step
    CALL timestep
+
+#if defined( __parallel )
 !
 !-- Synchronize the timestep in case of nested run.
…
       CALL pmci_synchronize
    ENDIF
+#endif
 
 !
…
 
    CALL cpu_log( log_point_s(10), 'timesteps', 'start' )
+
+#if defined( __parallel )
 !
 !-- Vertical nesting: initialize fine grid
…
       ENDIF
    ENDIF
+#endif
 
 !
 !-- Determine ug, vg and w_subs in dependence on data from external file
…
 !-- Set the swap level for all modules
       CALL module_interface_swap_timelevel( MOD( timestep_count, 2) )
+
+#if defined( __parallel )
 !
 !-- Set the swap level for steering the pmc data transfer
       IF ( nested_run )  CALL pmci_set_swaplevel( MOD( timestep_count, 2) + 1 )  !> @todo: why the +1 ?
+#endif
 
       CALL cpu_log( log_point(28), 'swap_timelevel', 'stop' )
 
+#if defined( __parallel )
 !
 !-- Vertical nesting: Interpolate fine grid data to the coarse grid
…
 
       ENDIF
+#endif
 
 !
…
 
       IF ( vnest_init )  THEN
+#if defined( __parallel )
 !
 !-- Compute pressure in the CG, interpolate top boundary conditions
…
          CALL vnest_anterpolate_e
          CALL cpu_log( log_point_s(28), 'vnest_anter_e', 'stop' )
+#else
+         CONTINUE
+#endif
 
       ELSE
+#if defined( __parallel )
 !
 !-- Mass (volume) flux correction to ensure global mass conservation for child domains.
…
          ENDIF
       ENDIF
-
+#endif
       CALL pres
 
…
       ENDIF
       CALL cpu_log( log_point(17), 'diffusivities', 'stop' )
+
+#if defined( __parallel )
 !
 !-- Vertical nesting: set fine grid eddy viscosity top boundary condition
       IF ( vnest_init )  CALL vnest_boundary_conds_khkm
+#endif
 
    ENDIF
…
       CALL timestep
 
+#if defined( __parallel )
 !
 !-- Synchronize the timestep in case of nested run.
…
       CALL pmci_synchronize
    ENDIF
+#endif
 
 !
…
 !$ACC END DATA
 
+#if defined( __parallel )
 !
 !-- Vertical nesting: Deallocate variables initialized for vertical nesting
    IF ( vnest_init )  CALL vnest_deallocate
+#endif
 
    IF ( myid == 0 )  CALL finish_progress_bar
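time_integration.f90 adds a variant of the guard seen elsewhere: e, nc, nr, qc, qr, s and w are imported under "#if defined( __parallel ) && ! defined( _OPENACC )". The combined test keeps the import out of serial builds (where those references are not compiled) and out of OpenACC builds, whose own "#if defined( _OPENACC )" USE block now lists the same names, so each configuration ends up with exactly one active import. A trivial sketch of combining macros in one cpp test:

PROGRAM guard_combination_demo
   IMPLICIT NONE
!
!-- Preprocessor conditions may combine several macros; this mirrors the
!-- "#if defined( __parallel ) && ! defined( _OPENACC )" guard used above
#if defined( __parallel ) && ! defined( _OPENACC )
   PRINT*, 'plain MPI build: extra USE list is compiled'
#else
   PRINT*, 'serial or OpenACC build: extra USE list is skipped'
#endif
END PROGRAM guard_combination_demo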
palm/trunk/SOURCE/time_integration_spinup.f90
r4360 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4360 2020-01-07 11:25:50Z suehring
 ! Enable output of diagnostic quantities, e.g. 2-m temperature
 !
…
    USE pegrid
 
+#if defined( __parallel )
    USE pmc_interface, &
        ONLY:  nested_run
+#endif
 
    USE kinds
palm/trunk/SOURCE/timestep.f90
r4360 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4360 2020-01-07 11:25:50Z suehring
 ! Added missing OpenMP directives
 !
…
 
    USE control_parameters, &
-       ONLY:  cfl_factor, coupling_mode, dt_3d, dt_fixed, dt_max, &
-              galilei_transformation, message_string, rans_mode, &
-              stop_dt, terminate_coupled, terminate_coupled_remote, &
-              timestep_reason, u_gtrans, use_ug_for_galilei_tr, v_gtrans
+       ONLY:  cfl_factor, dt_3d, dt_fixed, dt_max, galilei_transformation, &
+              message_string, rans_mode, stop_dt, timestep_reason, u_gtrans, &
+              use_ug_for_galilei_tr, v_gtrans
+
+#if defined( __parallel )
+   USE control_parameters, &
+       ONLY:  coupling_mode, terminate_coupled, terminate_coupled_remote
+#endif
 
    USE cpulog, &
…
               w_max, w_max_ijk
 
+#if defined( __parallel )
    USE vertical_nesting_mod, &
        ONLY:  vnested, vnest_timestep_sync
+#endif
 
    IMPLICIT NONE
…
    REAL(wp) ::  v_gtrans_l  !<
 
+   REAL(wp), DIMENSION(2) ::  uv_gtrans_l  !<
+#if defined( __parallel )
    REAL(wp), DIMENSION(2) ::  uv_gtrans    !<
-   REAL(wp), DIMENSION(2) ::  uv_gtrans_l  !<
    REAL(wp), DIMENSION(3) ::  reduce       !<
    REAL(wp), DIMENSION(3) ::  reduce_l     !<
+#endif
    REAL(wp), DIMENSION(nzb+1:nzt) ::  dxyz2_min  !<
    !$ACC DECLARE CREATE(dxyz2_min)
…
    ENDIF
 
+#if defined( __parallel )
 !
 !-- Vertical nesting: coarse and fine grid timestep has to be identical
    IF ( vnested )  CALL vnest_timestep_sync
+#endif
 
    CALL cpu_log( log_point(12), 'calculate_timestep', 'stop' )
palm/trunk/SOURCE/vertical_nesting_mod.f90
r4360 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4360 2020-01-07 11:25:50Z suehring
 ! Corrected "Former revisions" section
 !
…
 !> identical in PARIN & PARIN_N
 
-
+#if defined( __parallel )
 
    INTEGER(iwp),DIMENSION(3,2) ::  bdims = 0  !> sub-domain grid topology of current PE
…
 END SUBROUTINE vnest_deallocate
 
+#endif
 END MODULE vertical_nesting_mod
palm/trunk/SOURCE/virtual_measurement_mod.f90
r4438 → r4444

 ! -----------------
 ! $Id$
+! bugfix: cpp-directives for serial mode added
+!
+! 4438 2020-03-03 20:49:28Z suehring
 ! Add cpu-log points
 !
…
    INTEGER(iwp), DIMENSION(:), ALLOCATABLE ::  ns_all  !< dummy array used to sum-up the number of observation coordinates
 
+#if defined( __parallel )
    INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE ::  ns_atmos  !< number of observation points for each station on each mpi rank
    INTEGER(iwp), DIMENSION(:,:), ALLOCATABLE ::  ns_soil   !< number of observation points for each station on each mpi rank
+#endif
 
    INTEGER(iwp), DIMENSION(:,:,:), ALLOCATABLE ::  meas_flag  !< mask array indicating measurement positions